From 70394ab774f9e7d40c71d159aeeb04094c839773 Mon Sep 17 00:00:00 2001
From: R4PaSs
Date: Thu, 26 Jun 2014 14:59:28 -0400
Subject: Added Nit Lexer to Pygments

---
 pygments/lexers/compiled.py | 43 ++++++++++++++++++++++++++++++++++++++++++-
 1 file changed, 42 insertions(+), 1 deletion(-)

diff --git a/pygments/lexers/compiled.py b/pygments/lexers/compiled.py
index 25c7a4d8..480e4a8f 100644
--- a/pygments/lexers/compiled.py
+++ b/pygments/lexers/compiled.py
@@ -32,7 +32,7 @@
 __all__ = ['CLexer', 'CppLexer', 'DLexer', 'DelphiLexer', 'ECLexer',
            'DylanLidLexer', 'DylanConsoleLexer', 'CobolLexer',
            'CobolFreeformatLexer', 'LogosLexer', 'ClayLexer', 'PikeLexer',
            'ChapelLexer', 'EiffelLexer', 'Inform6Lexer', 'Inform7Lexer',
-           'Inform6TemplateLexer', 'MqlLexer', 'SwiftLexer']
+           'Inform6TemplateLexer', 'MqlLexer', 'SwiftLexer', 'NitLexer']

 class CFamilyLexer(RegexLexer):
@@ -5190,3 +5190,44 @@ class SwiftLexer(ObjectiveCLexer):
                 elif value in self.operators:
                     token = Operator
             yield index, token, value
+
+class NitLexer(RegexLexer):
+    """
+    For `nit <http://nitlanguage.org>`_ source.
+    """
+
+    name = 'Nit'
+    aliases = ['nit']
+    filenames = ['*.nit']
+    tokens = {
+        'root': [
+            (r'#.*?$', Comment.Single),
+            (r'(package|module|import|class|abstract|interface|'
+             'universal|enum|end|fun|type|init|redef|isa|do|'
+             'readable|writable|var|intern|extern|public|protected|'
+             'private|intrude|if|then|else|while|loop|for|in|and|'
+             'or|not|implies|return|continue|break|abort|assert|'
+             'new|is|once|super|self|true|false|nullable|null|as|'
+             'isset|label|__debug__)(?=( |\n|\t|\r|\())', Keyword),
+            (r'[A-Z][A-Za-z0-9_]*', Name.Class),
+            (r'"""(([^\'\\]|\\.)|\\r|\\n)*(({{?)?(""?{{?)*""""*)', String),  # Simple long string
+            (r'\'\'\'(((\\.|[^\'\\])|\\r|\\n)|\'((\\.|[^\'\\])|\\r|\\n)|\'\'((\\.|[^\'\\])|\\r|\\n))*\'\'\'', String),  # Simple long string alt
+            (r'"""(([^\'\\]|\\.)|\\r|\\n)*((""?)?({{?""?)*{{{{*)', String),  # Start long string
+            (r'}}}(((\\.|[^\'\\])|\\r|\\n))*(""?)?({{?""?)*{{{{*', String),  # Mid long string
+            (r'}}}(((\\.|[^\'\\])|\\r|\\n))*({{?)?(""?{{?)*""""*', String),  # End long string
+            (r'"(\\.|([^"}{\\]))*"', String),  # Simple String
+            (r'"(\\.|([^"}{\\]))*{', String),  # Start string
+            (r'}(\\.|([^"}{\\]))*{', String),  # Mid String
+            (r'}(\\.|([^"}{\\]))*"', String),  # End String
+            (r'(\'[^\'\\]\')|(\'\\.\')', String.Char),
+            (r'[0-9]*\.[0-9]+', Number.Float),
+            (r'[0-9]+', Number.Integer),
+            (r'0(x|X)[0-9A-Fa-f]+', Number.Hex),
+            (r'[a-z][A-Za-z0-9_]*', Name),
+            (r'_[A-Za-z0-9_]+', Name.Variable.Instance),
+            (r'==|!=|<==>|>=|>>|>|<=|<<|<|\+|-|=|/|\*|%|\+=|-=|!|@', Operator),
+            (r'\(|\)|\[|\]|,|\.\.\.|\.\.|\.|::|:', Punctuation),
+            (r'`{[^`]*`}', Text),  # Extern blocks won't be lexed by Nit
+            (r'(\r|\n| |\t)+', Text)]
+    }
+
-- 
cgit v1.2.1


From 318728914ec120c42bd892905f98414d01589d3b Mon Sep 17 00:00:00 2001
From: R4PaSs
Date: Thu, 26 Jun 2014 15:01:39 -0400
Subject: Added tests for Nit

---
 tests/examplefiles/calculator.nit               | 272 ++++++++++++++++++++++++
 tests/examplefiles/callback_chimpanze.nit       |  45 ++++
 tests/examplefiles/callback_monkey.nit          |  92 ++++++++
 tests/examplefiles/circular_list.nit            | 167 +++++++++++++++
 tests/examplefiles/clock.nit                    |  78 +++++++
 tests/examplefiles/clock_more.nit               |  60 ++++++
 tests/examplefiles/curl_http.nit                | 113 ++++++++++
 tests/examplefiles/curl_mail.nit                |  59 +++++
 tests/examplefiles/draw_operation.nit           | 243 +++++++++++++++++++++
 tests/examplefiles/drop_privileges.nit          |  46 ++++
 tests/examplefiles/extern_methods.nit           |  69 ++++++
 tests/examplefiles/fibonacci.nit                |  43 ++++
 tests/examplefiles/hello_world.nit              |   1 +
 tests/examplefiles/html_page.nit                | 105 +++++++++
 tests/examplefiles/int_stack.nit                | 100 +++++++++
 tests/examplefiles/opengles2_hello_triangle.nit | 193 +++++++++++++++++
 tests/examplefiles/print_arguments.nit          |  22 ++
 tests/examplefiles/procedural_array.nit         |  48 +++++
 tests/examplefiles/socket_client.nit            |  38 ++++
 tests/examplefiles/socket_server.nit            |  52 +++++
 tests/examplefiles/tmpl_composer.nit            |  94 ++++++++
 tests/examplefiles/websocket_server.nit         |  46 ++++
 22 files changed, 1986 insertions(+)
 create mode 100644 tests/examplefiles/calculator.nit
 create mode 100644 tests/examplefiles/callback_chimpanze.nit
 create mode 100644 tests/examplefiles/callback_monkey.nit
 create mode 100644 tests/examplefiles/circular_list.nit
 create mode 100644 tests/examplefiles/clock.nit
 create mode 100644 tests/examplefiles/clock_more.nit
 create mode 100644 tests/examplefiles/curl_http.nit
 create mode 100644 tests/examplefiles/curl_mail.nit
 create mode 100644 tests/examplefiles/draw_operation.nit
 create mode 100644 tests/examplefiles/drop_privileges.nit
 create mode 100644 tests/examplefiles/extern_methods.nit
 create mode 100644 tests/examplefiles/fibonacci.nit
 create mode 100644 tests/examplefiles/hello_world.nit
 create mode 100644 tests/examplefiles/html_page.nit
 create mode 100644 tests/examplefiles/int_stack.nit
 create mode 100644 tests/examplefiles/opengles2_hello_triangle.nit
 create mode 100644 tests/examplefiles/print_arguments.nit
 create mode 100644 tests/examplefiles/procedural_array.nit
 create mode 100644 tests/examplefiles/socket_client.nit
 create mode 100644 tests/examplefiles/socket_server.nit
 create mode 100644 tests/examplefiles/tmpl_composer.nit
 create mode 100644 tests/examplefiles/websocket_server.nit

diff --git a/tests/examplefiles/calculator.nit b/tests/examplefiles/calculator.nit
new file mode 100644
index 00000000..541f4d28
--- /dev/null
+++ b/tests/examplefiles/calculator.nit
@@ -0,0 +1,272 @@
+# This file is part of NIT ( http://www.nitlanguage.org ).
+#
+# Copyright 2013 Alexis Laferrière
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
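
A quick way to try the new lexer by hand before reading the example files below. This is only a sketch, not part of the patch: it assumes the patched Pygments checkout is importable, and it imports NitLexer directly from pygments.lexers.compiled because the patch does not register a 'nit' entry in pygments.lexers._mapping.

    # Minimal sketch: highlight a Nit snippet with the lexer added above.
    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers.compiled import NitLexer

    code = 'print "hello world"\n'
    print(highlight(code, NitLexer(), TerminalFormatter()))
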
+ +import gtk + +class CalculatorContext + var result : nullable Float = null + + var last_op : nullable Char = null + + var current : nullable Float = null + var after_point : nullable Int = null + + fun push_op( op : Char ) + do + apply_last_op_if_any + if op == 'C' then + self.result = 0.0 + last_op = null + else + last_op = op # store for next push_op + end + + # prepare next current + after_point = null + current = null + end + + fun push_digit( digit : Int ) + do + var current = current + if current == null then current = 0.0 + + var after_point = after_point + if after_point == null then + current = current * 10.0 + digit.to_f + else + current = current + digit.to_f * 10.0.pow(after_point.to_f) + self.after_point -= 1 + end + + self.current = current + end + + fun switch_to_decimals + do + if self.current == null then current = 0.0 + if after_point != null then return + + after_point = -1 + end + + fun apply_last_op_if_any + do + var op = last_op + + var result = result + if result == null then result = 0.0 + + var current = current + if current == null then current = 0.0 + + if op == null then + result = current + else if op == '+' then + result = result + current + else if op == '-' then + result = result - current + else if op == '/' then + result = result / current + else if op == '*' then + result = result * current + end + self.result = result + self.current = null + end +end + +class CalculatorGui + super GtkCallable + + var win : GtkWindow + var container : GtkGrid + + var lbl_disp : GtkLabel + var but_eq : GtkButton + var but_dot : GtkButton + + var context = new CalculatorContext + + redef fun signal( sender, user_data ) + do + var after_point = context.after_point + if after_point == null then + after_point = 0 + else + after_point = (after_point.abs) + end + + if user_data isa Char then # is an operation + var c = user_data + if c == '.' then + but_dot.sensitive= false + context.switch_to_decimals + lbl_disp.text = "{context.current.to_i}." + else + but_dot.sensitive= true + context.push_op( c ) + + var s = context.result.to_precision_native(6) + var index : nullable Int = null + for i in s.length.times do + var chiffre = s.chars[i] + if chiffre == '0' and index == null then + index = i + else if chiffre != '0' then + index = null + end + end + if index != null then + s = s.substring(0, index) + if s.chars[s.length-1] == ',' then s = s.substring(0, s.length-1) + end + lbl_disp.text = s + end + else if user_data isa Int then # is a number + var n = user_data + context.push_digit( n ) + lbl_disp.text = context.current.to_precision_native(after_point) + end + end + + init + do + init_gtk + + win = new GtkWindow( 0 ) + + container = new GtkGrid(5,5,true) + win.add( container ) + + lbl_disp = new GtkLabel( "_" ) + container.attach( lbl_disp, 0, 0, 5, 1 ) + + # digits + for n in [0..9] do + var but = new GtkButton.with_label( n.to_s ) + but.request_size( 64, 64 ) + but.signal_connect( "clicked", self, n ) + if n == 0 then + container.attach( but, 0, 4, 1, 1 ) + else container.attach( but, (n-1)%3, 3-(n-1)/3, 1, 1 ) + end + + # operators + var r = 1 + for op in ['+', '-', '*', '/' ] do + var but = new GtkButton.with_label( op.to_s ) + but.request_size( 64, 64 ) + but.signal_connect( "clicked", self, op ) + container.attach( but, 3, r, 1, 1 ) + r+=1 + end + + # = + but_eq = new GtkButton.with_label( "=" ) + but_eq.request_size( 64, 64 ) + but_eq.signal_connect( "clicked", self, '=' ) + container.attach( but_eq, 4, 3, 1, 2 ) + + # . + but_dot = new GtkButton.with_label( "." 
) + but_dot.request_size( 64, 64 ) + but_dot.signal_connect( "clicked", self, '.' ) + container.attach( but_dot, 1, 4, 1, 1 ) + + #C + var but_c = new GtkButton.with_label( "C" ) + but_c.request_size( 64, 64 ) + but_c.signal_connect("clicked", self, 'C') + container.attach( but_c, 2, 4, 1, 1 ) + + win.show_all + end +end + +# context tests +var context = new CalculatorContext +context.push_digit( 1 ) +context.push_digit( 2 ) +context.push_op( '+' ) +context.push_digit( 3 ) +context.push_op( '*' ) +context.push_digit( 2 ) +context.push_op( '=' ) +var r = context.result.to_precision( 2 ) +assert r == "30.00" else print r + +context = new CalculatorContext +context.push_digit( 1 ) +context.push_digit( 4 ) +context.switch_to_decimals +context.push_digit( 1 ) +context.push_op( '*' ) +context.push_digit( 3 ) +context.push_op( '=' ) +r = context.result.to_precision( 2 ) +assert r == "42.30" else print r + +context.push_op( '+' ) +context.push_digit( 1 ) +context.push_digit( 1 ) +context.push_op( '=' ) +r = context.result.to_precision( 2 ) +assert r == "53.30" else print r + +context = new CalculatorContext +context.push_digit( 4 ) +context.push_digit( 2 ) +context.switch_to_decimals +context.push_digit( 3 ) +context.push_op( '/' ) +context.push_digit( 3 ) +context.push_op( '=' ) +r = context.result.to_precision( 2 ) +assert r == "14.10" else print r + +#test multiple decimals +context = new CalculatorContext +context.push_digit( 5 ) +context.push_digit( 0 ) +context.switch_to_decimals +context.push_digit( 1 ) +context.push_digit( 2 ) +context.push_digit( 3 ) +context.push_op( '+' ) +context.push_digit( 1 ) +context.push_op( '=' ) +r = context.result.to_precision( 3 ) +assert r == "51.123" else print r + +#test 'C' button +context = new CalculatorContext +context.push_digit( 1 ) +context.push_digit( 0 ) +context.push_op( '+' ) +context.push_digit( 1 ) +context.push_digit( 0 ) +context.push_op( '=' ) +context.push_op( 'C' ) +r = context.result.to_precision( 1 ) +assert r == "0.0" else print r + +# graphical application + +if "NIT_TESTING".environ != "true" then + var app = new CalculatorGui + run_gtk +end diff --git a/tests/examplefiles/callback_chimpanze.nit b/tests/examplefiles/callback_chimpanze.nit new file mode 100644 index 00000000..2ca8dc3a --- /dev/null +++ b/tests/examplefiles/callback_chimpanze.nit @@ -0,0 +1,45 @@ +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Copyright 2013 Matthieu Lucas +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This sample has been implemented to show you how simple is it to play +# with native callbacks (C) through an high level with NIT program. + +module callback_chimpanze +import callback_monkey + +class Chimpanze + super MonkeyActionCallable + + fun create + do + var monkey = new Monkey + print "Hum, I'm sleeping ..." + # Invoking method which will take some time to compute, and + # will be back in wokeUp method with information. 
+ # - Callback method defined in MonkeyActionCallable Interface + monkey.wokeUpAction(self, "Hey, I'm awake.") + end + + # Inherit callback method, defined by MonkeyActionCallable interface + # - Back of wokeUpAction method + redef fun wokeUp( sender:Monkey, message:Object ) + do + print message + end +end + +var m = new Chimpanze +m.create diff --git a/tests/examplefiles/callback_monkey.nit b/tests/examplefiles/callback_monkey.nit new file mode 100644 index 00000000..6e1ed262 --- /dev/null +++ b/tests/examplefiles/callback_monkey.nit @@ -0,0 +1,92 @@ +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Copyright 2013 Matthieu Lucas +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This sample has been implemented to show you how simple is it to play +# with native callbacks (C) through an high level with NIT program. + +module callback_monkey + +in "C header" `{ + #include + #include + + typedef struct { + int id; + int age; + } CMonkey; + + typedef struct { + MonkeyActionCallable toCall; + Object message; + } MonkeyAction; +`} + +in "C body" `{ + // Method which reproduce a callback answer + // Please note that a function pointer is only used to reproduce the callback + void cbMonkey(CMonkey *mkey, void callbackFunc(CMonkey*, MonkeyAction*), MonkeyAction *data) + { + sleep(2); + callbackFunc( mkey, data ); + } + + // Back of background treatment, will be redirected to callback function + void nit_monkey_callback_func( CMonkey *mkey, MonkeyAction *data ) + { + // To call a your method, the signature must be written like this : + // _... 
+ MonkeyActionCallable_wokeUp( data->toCall, mkey, data->message ); + } +`} + +# Implementable interface to get callback in defined methods +interface MonkeyActionCallable + fun wokeUp( sender:Monkey, message: Object) is abstract +end + +# Defining my object type Monkey, which is, in a low level, a pointer to a C struct (CMonkey) +extern class Monkey `{ CMonkey * `} + + new `{ + CMonkey *monkey = malloc( sizeof(CMonkey) ); + monkey->age = 10; + monkey->id = 1; + return monkey; + `} + + # Object method which will get a callback in wokeUp method, defined in MonkeyActionCallable interface + # Must be defined as Nit/C method because of C call inside + fun wokeUpAction( toCall: MonkeyActionCallable, message: Object ) is extern import MonkeyActionCallable.wokeUp `{ + + // Allocating memory to keep reference of received parameters : + // - Object receiver + // - Message + MonkeyAction *data = malloc( sizeof(MonkeyAction) ); + + // Incrementing reference counter to prevent from releasing + MonkeyActionCallable_incr_ref( toCall ); + Object_incr_ref( message ); + + data->toCall = toCall; + data->message = message; + + // Calling method which reproduce a callback by passing : + // - Receiver + // - Function pointer to object return method + // - Datas + cbMonkey( recv, &nit_monkey_callback_func, data ); + `} +end diff --git a/tests/examplefiles/circular_list.nit b/tests/examplefiles/circular_list.nit new file mode 100644 index 00000000..c3ba1edb --- /dev/null +++ b/tests/examplefiles/circular_list.nit @@ -0,0 +1,167 @@ +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Implementation of circular lists +# This example shows the usage of generics and somewhat a specialisation of collections. +module circular_list + +# Sequences of elements implemented with a double-linked circular list +class CircularList[E] + # Like standard Array or LinkedList, CircularList is a Sequence. + super Sequence[E] + + # The first node of the list if any + # The special case of an empty list is handled by a null node + private var node: nullable CLNode[E] = null + + redef fun iterator do return new CircularListIterator[E](self) + + redef fun first do return self.node.item + + redef fun push(e) + do + var new_node = new CLNode[E](e) + var n = self.node + if n == null then + # the first node + self.node = new_node + else + # not the first one, so attach nodes correctly. + var old_last_node = n.prev + new_node.next = n + new_node.prev = old_last_node + old_last_node.next = new_node + n.prev = new_node + end + end + + redef fun pop + do + var n = self.node + assert n != null + var prev = n.prev + if prev == n then + # the only node + self.node = null + return n.item + end + # not the only one do detach nodes correctly. + var prev_prev = prev.prev + n.prev = prev_prev + prev_prev.next = n + return prev.item + end + + redef fun unshift(e) + do + # Circularity has benefits. + push(e) + self.node = self.node.prev + end + + redef fun shift + do + # Circularity has benefits. 
+ self.node = self.node.next + return self.pop + end + + # Move the first at the last position, the second at the first, etc. + fun rotate + do + var n = self.node + if n == null then return + self.node = n.next + end + + # Sort the list using the Josephus algorithm. + fun josephus(step: Int) + do + var res = new CircularList[E] + while not self.is_empty do + # count 'step' + for i in [1..step[ do self.rotate + # kill + var x = self.shift + res.add(x) + end + self.node = res.node + end +end + +# Nodes of a CircularList +private class CLNode[E] + # The current item + var item: E + + # The next item in the circular list. + # Because of circularity, there is always a next; + # so by default let it be self + var next: CLNode[E] = self + + # The previous item in the circular list. + # Coherence between next and previous nodes has to be maintained by the + # circular list. + var prev: CLNode[E] = self +end + +# An iterator of a CircularList. +private class CircularListIterator[E] + super IndexedIterator[E] + + redef var index: Int + + # The current node pointed. + # Is null if the list is empty. + var node: nullable CLNode[E] + + # The list iterated. + var list: CircularList[E] + + redef fun is_ok + do + # Empty lists are not OK. + # Pointing again the first node is not OK. + return self.node != null and (self.index == 0 or self.node != self.list.node) + end + + redef fun next + do + self.node = self.node.next + self.index += 1 + end + + redef fun item do return self.node.item + + init(list: CircularList[E]) + do + self.node = list.node + self.list = list + self.index = 0 + end +end + +var i = new CircularList[Int] +i.add_all([1, 2, 3, 4, 5, 6, 7]) +print i.first +print i.join(":") + +i.push(8) +print i.shift +print i.pop +i.unshift(0) +print i.join(":") + +i.josephus(3) +print i.join(":") diff --git a/tests/examplefiles/clock.nit b/tests/examplefiles/clock.nit new file mode 100644 index 00000000..8fdb9abd --- /dev/null +++ b/tests/examplefiles/clock.nit @@ -0,0 +1,78 @@ +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This module provide a simple wall clock. +# It is an example of getters and setters. +# A beefed-up module is available in clock_more +module clock + +# A simple wall clock with 60 minutes and 12 hours. +class Clock + # total number of minutes from 0 to 719 + var total_minutes: Int + # Note: only the read acces is public, the write access is private. + + # number of minutes in the current hour (from 0 to 59) + fun minutes: Int do return self.total_minutes % 60 + + # set the number of minutes in the current hour. 
+ # if m < 0 or m >= 60, the hour will be changed accordinlgy + fun minutes=(m: Int) do self.total_minutes = self.hours * 60 + m + + # number of hours (from 0 to 11) + fun hours: Int do return self.total_minutes / 60 + + # set the number of hours + # the minutes will not be updated + fun hours=(h: Int) do self.total_minutes = h * 60 + minutes + + # the position of the hour arrow in the [0..60[ interval + fun hour_pos: Int do return total_minutes / 12 + + # replace the arrow of hours (from 0 to 59). + # the hours and the minutes will be updated. + fun hour_pos=(h: Int) do self.total_minutes = h * 12 + + redef fun to_s do return "{hours}:{minutes}" + + fun reset(hours, minutes: Int) do self.total_minutes = hours*60 + minutes + + init(hours, minutes: Int) do self.reset(hours, minutes) + + redef fun ==(o) + do + # Note: o is a nullable Object, a type test is required + # Thanks to adaptive typing, there is no downcast + # i.e. the code is safe! + return o isa Clock and self.total_minutes == o.total_minutes + end +end + +var c = new Clock(10,50) +print "It's {c} o'clock." + +c.minutes += 22 +print "Now it's {c} o'clock." + +print "The short arrow in on the {c.hour_pos/5} and the long arrow in on the {c.minutes/5}." + +c.hours -= 2 +print "Now it's {c} o'clock." + +var c2 = new Clock(9, 11) +print "It's {c2} on the second clock." +print "The two clocks are synchronized: {c == c2}." +c2.minutes += 1 +print "It's now {c2} on the second clock." +print "The two clocks are synchronized: {c == c2}." diff --git a/tests/examplefiles/clock_more.nit b/tests/examplefiles/clock_more.nit new file mode 100644 index 00000000..d2ef89e2 --- /dev/null +++ b/tests/examplefiles/clock_more.nit @@ -0,0 +1,60 @@ +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This module beef up the clock module by allowing a clock to be comparable. +# It show the usage of class refinement +module clock_more + +import clock + +redef class Clock + # Clock are now comparable + super Comparable + + # Comparaison of a clock make only sense with an other clock + redef type OTHER: Clock + + redef fun <(o) + do + # Note: < is the only abstract method of Comparable. + # All other operators and methods rely on < and ==. + return self.total_minutes < o.total_minutes + end +end + +var c1 = new Clock(8, 12) +var c2 = new Clock(8, 13) +var c3 = new Clock(9, 13) + +print "{c1}<{c2}? {c1{c2}? {c1>c2}" +print "{c1}>={c2}? {c1>=c2}" +print "{c1}<=>{c2}? {c1<=>c2}" +print "{c1},{c2}? max={c1.max(c2)} min={c1.min(c2)}" +print "{c1}.is_between({c2}, {c3})? {c1.is_between(c2, c3)}" +print "{c2}.is_between({c1}, {c3})? {c2.is_between(c1, c3)}" + +print "-" + +c1.minutes += 1 + +print "{c1}<{c2}? {c1{c2}? {c1>c2}" +print "{c1}>={c2}? {c1>=c2}" +print "{c1}<=>{c2}? {c1<=>c2}" +print "{c1},{c2}? max={c1.max(c2)} min={c1.min(c2)}" +print "{c1}.is_between({c2}, {c3})? {c1.is_between(c2, c3)}" +print "{c2}.is_between({c1}, {c3})? 
{c2.is_between(c1, c3)}" diff --git a/tests/examplefiles/curl_http.nit b/tests/examplefiles/curl_http.nit new file mode 100644 index 00000000..079f12c8 --- /dev/null +++ b/tests/examplefiles/curl_http.nit @@ -0,0 +1,113 @@ +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Copyright 2013 Matthieu Lucas +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Sample of the Curl module. +module curl_http + +import curl + +# Small class to represent an Http Fetcher +class MyHttpFetcher + super CurlCallbacks + + var curl: Curl + var our_body: String = "" + + init(curl: Curl) do self.curl = curl + + # Release curl object + fun destroy do self.curl.destroy + + # Header callback + redef fun header_callback(line: String) do + # We keep this callback silent for testing purposes + #if not line.has_prefix("Date:") then print "Header_callback : {line}" + end + + # Body callback + redef fun body_callback(line: String) do self.our_body = "{self.our_body}{line}" + + # Stream callback - Cf : No one is registered + redef fun stream_callback(buffer: String, size: Int, count: Int) do print "Stream_callback : {buffer} - {size} - {count}" +end + + +# Program +if args.length < 2 then + print "Usage: curl_http " +else + var curl = new Curl + var url = args[1] + var request = new CurlHTTPRequest(url, curl) + + # HTTP Get Request + if args[0] == "GET" then + request.verbose = false + var getResponse = request.execute + + if getResponse isa CurlResponseSuccess then + print "Status code : {getResponse.status_code}" + print "Body : {getResponse.body_str}" + else if getResponse isa CurlResponseFailed then + print "Error code : {getResponse.error_code}" + print "Error msg : {getResponse.error_msg}" + end + + # HTTP Post Request + else if args[0] == "POST" then + var myHttpFetcher = new MyHttpFetcher(curl) + request.delegate = myHttpFetcher + + var postDatas = new HeaderMap + postDatas["Bugs Bunny"] = "Daffy Duck" + postDatas["Batman"] = "Robin likes special characters @#ùà!è§'(\"é&://,;<>∞~*" + postDatas["Batman"] = "Yes you can set multiple identical keys, but APACHE will consider only once, the last one" + request.datas = postDatas + request.verbose = false + var postResponse = request.execute + + print "Our body from the callback : {myHttpFetcher.our_body}" + + if postResponse isa CurlResponseSuccess then + print "*** Answer ***" + print "Status code : {postResponse.status_code}" + print "Body should be empty, because we decided to manage callbacks : {postResponse.body_str.length}" + else if postResponse isa CurlResponseFailed then + print "Error code : {postResponse.error_code}" + print "Error msg : {postResponse.error_msg}" + end + + # HTTP Get to file Request + else if args[0] == "GET_FILE" then + var headers = new HeaderMap + headers["Accept"] = "Moo" + request.headers = headers + request.verbose = false + var downloadResponse = request.download_to_file(null) + + if downloadResponse isa CurlFileResponseSuccess then + print "*** Answer ***" + print "Status code : {downloadResponse.status_code}" + print 
"Size downloaded : {downloadResponse.size_download}" + else if downloadResponse isa CurlResponseFailed then + print "Error code : {downloadResponse.error_code}" + print "Error msg : {downloadResponse.error_msg}" + end + # Program logic + else + print "Usage : Method[POST, GET, GET_FILE]" + end +end diff --git a/tests/examplefiles/curl_mail.nit b/tests/examplefiles/curl_mail.nit new file mode 100644 index 00000000..b28f5a4c --- /dev/null +++ b/tests/examplefiles/curl_mail.nit @@ -0,0 +1,59 @@ +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Copyright 2013 Matthieu Lucas +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Mail sender sample using the Curl module +module curl_mail + +import curl + +var curl = new Curl +var mail_request = new CurlMailRequest(curl) + +# Networks +var response = mail_request.set_outgoing_server("smtps://smtp.example.org:465", "user@example.org", "mypassword") +if response isa CurlResponseFailed then + print "Error code : {response.error_code}" + print "Error msg : {response.error_msg}" +end + +# Headers +mail_request.from = "Billy Bob" +mail_request.to = ["user@example.org"] +mail_request.cc = ["bob@example.org"] +mail_request.bcc = null + +var headers_body = new HeaderMap +headers_body["Content-Type:"] = "text/html; charset=\"UTF-8\"" +headers_body["Content-Transfer-Encoding:"] = "quoted-printable" +mail_request.headers_body = headers_body + +# Content +mail_request.body = "

Here you can write HTML stuff.

" +mail_request.subject = "Hello From My Nit Program" + +# Others +mail_request.verbose = false + +# Send mail +response = mail_request.execute +if response isa CurlResponseFailed then + print "Error code : {response.error_code}" + print "Error msg : {response.error_msg}" +else if response isa CurlMailResponseSuccess then + print "Mail Sent" +else + print "Unknown Curl Response type" +end diff --git a/tests/examplefiles/draw_operation.nit b/tests/examplefiles/draw_operation.nit new file mode 100644 index 00000000..cada8318 --- /dev/null +++ b/tests/examplefiles/draw_operation.nit @@ -0,0 +1,243 @@ +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Copyright 2012-2013 Alexis Laferrière +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Draws an arithmetic operation to the terminal +module draw_operation + +redef enum Int + fun n_chars: Int `{ + int c; + if ( abs(recv) >= 10 ) + c = 1+(int)log10f( (float)abs(recv) ); + else + c = 1; + if ( recv < 0 ) c ++; + return c; + `} +end + +redef enum Char + fun as_operator(a, b: Int): Int + do + if self == '+' then return a + b + if self == '-' then return a - b + if self == '*' then return a * b + if self == '/' then return a / b + if self == '%' then return a % b + abort + end + + fun override_dispc: Bool + do + return self == '+' or self == '-' or self == '*' or self == '/' or self == '%' + end + + fun lines(s: Int): Array[Line] + do + if self == '+' then + return [new Line(new P(0,s/2),1,0,s), new Line(new P(s/2,1),0,1,s-2)] + else if self == '-' then + return [new Line(new P(0,s/2),1,0,s)] + else if self == '*' then + var lines = new Array[Line] + for y in [1..s-1[ do + lines.add( new Line(new P(1,y), 1,0,s-2) ) + end + return lines + else if self == '/' then + return [new Line(new P(s-1,0), -1,1, s )] + else if self == '%' then + var q4 = s/4 + var lines = [new Line(new P(s-1,0),-1,1,s)] + for l in [0..q4[ do + lines.append([ new Line( new P(0,l), 1,0,q4), new Line( new P(s-1,s-1-l), -1,0,q4) ]) + end + return lines + else if self == '1' then + return [new Line(new P(s/2,0), 0,1,s),new Line(new P(0,s-1),1,0,s), + new Line( new P(s/2,0),-1,1,s/2)] + else if self == '2' then + return [new Line(new P(0,0), 1,0,s),new Line(new P(s-1,0),0,1,s/2), + new Line( new P(0,s-1),1,0,s), new Line( new P(0,s/2), 0,1,s/2), + new Line( new P(0,s/2), 1,0,s)] + else if self == '3' then + return [new Line(new P(0,0), 1,0,s),new Line(new P(s-1,0),0,1,s), + new Line( new P(0,s-1),1,0,s), new Line( new P(0,s/2), 1,0,s)] + else if self == '4' then + return [new Line(new P(s-1,0),0,1,s), new Line( new P(0,0), 0,1,s/2), + new Line( new P(0,s/2), 1,0,s)] + else if self == '5' then + return [new Line(new P(0,0), 1,0,s),new Line(new P(s-1,s/2),0,1,s/2), + new Line( new P(0,s-1),1,0,s), new Line( new P(0,0), 0,1,s/2), + new Line( new P(0,s/2), 1,0,s)] + else if self == '6' then + return [new Line(new P(0,0), 1,0,s),new Line(new P(s-1,s/2),0,1,s/2), + new Line( new P(0,s-1),1,0,s), new Line( new P(0,0), 0,1,s), + new Line( new P(0,s/2), 1,0,s)] + else if self == 
'7' then + var tl = new P(0,0) + var tr = new P(s-1,0) + return [new Line(tl, 1,0,s), new Line(tr,-1,1,s)] + else if self == '8' then + return [new Line(new P(0,0), 1,0,s),new Line(new P(s-1,0),0,1,s), + new Line( new P(0,s-1),1,0,s), new Line( new P(0,0), 0,1,s), + new Line( new P(0,s/2), 1,0,s)] + else if self == '9' then + return [new Line(new P(0,0), 1,0,s),new Line(new P(s-1,0),0,1,s), + new Line( new P(0,s-1),1,0,s), new Line( new P(0,0), 0,1,s/2), + new Line( new P(0,s/2), 1,0,s)] + else if self == '0' then + return [new Line(new P(0,0), 1,0,s),new Line(new P(s-1,0),0,1,s), + new Line( new P(0,s-1),1,0,s), new Line( new P(0,0), 0,1,s)] + end + return new Array[Line] + end +end + +class P + var x : Int + var y : Int +end + +redef class String + # hack is to support a bug in the evaluation software + fun draw(dispc: Char, size, gap: Int, hack: Bool) + do + var w = size * length +(length-1)*gap + var h = size + var map = new Array[Array[Char]] + for x in [0..w[ do + map[x] = new Array[Char].filled_with( ' ', h ) + end + + var ci = 0 + for c in self.chars do + var local_dispc + if c.override_dispc then + local_dispc = c + else + local_dispc = dispc + end + + var lines = c.lines( size ) + for line in lines do + var x = line.o.x+ci*size + x += ci*gap + var y = line.o.y + for s in [0..line.len[ do + assert map.length > x and map[x].length > y else print "setting {x},{y} as {local_dispc}" + map[x][y] = local_dispc + x += line.step_x + y += line.step_y + end + end + + ci += 1 + end + + if hack then + for c in [0..size[ do + map[c][0] = map[map.length-size+c][0] + map[map.length-size+c][0] = ' ' + end + end + + for y in [0..h[ do + for x in [0..w[ do + printn map[x][y] + end + print "" + end + end +end + +class Line + var o : P + var step_x : Int + var step_y : Int + var len : Int +end + +var a +var b +var op_char +var disp_char +var disp_size +var disp_gap + +if "NIT_TESTING".environ == "true" then + a = 567 + b = 13 + op_char = '*' + disp_char = 'O' + disp_size = 8 + disp_gap = 1 +else + printn "Left operand: " + a = gets.to_i + + printn "Right operand: " + b = gets.to_i + + printn "Operator (+, -, *, /, %): " + op_char = gets.chars[0] + + printn "Char to display: " + disp_char = gets.chars[0] + + printn "Size of text: " + disp_size = gets.to_i + + printn "Space between digits: " + disp_gap = gets.to_i +end + +var result = op_char.as_operator( a, b ) + +var len_a = a.n_chars +var len_b = b.n_chars +var len_res = result.n_chars +var max_len = len_a.max( len_b.max( len_res ) ) + 1 + +# draw first line +var d = max_len - len_a +var line_a = "" +for i in [0..d[ do line_a += " " +line_a += a.to_s +line_a.draw( disp_char, disp_size, disp_gap, false ) + +print "" +# draw second line +d = max_len - len_b-1 +var line_b = op_char.to_s +for i in [0..d[ do line_b += " " +line_b += b.to_s +line_b.draw( disp_char, disp_size, disp_gap, false ) + +# draw ----- +print "" +for i in [0..disp_size*max_len+(max_len-1)*disp_gap] do + printn "_" +end +print "" +print "" + +# draw result +d = max_len - len_res +var line_res = "" +for i in [0..d[ do line_res += " " +line_res += result.to_s +line_res.draw( disp_char, disp_size, disp_gap, false ) diff --git a/tests/examplefiles/drop_privileges.nit b/tests/examplefiles/drop_privileges.nit new file mode 100644 index 00000000..932a87be --- /dev/null +++ b/tests/examplefiles/drop_privileges.nit @@ -0,0 +1,46 @@ +# This file is part of NIT ( http://www.nitlanguage.org ). 
+# +# Copyright 2013 Alexis Laferrière +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Example using the privileges module to drop privileges from root +module drop_privileges + +import privileges + +# basic command line options +var opts = new OptionContext +var opt_ug = new OptionUserAndGroup.for_dropping_privileges +opt_ug.mandatory = true +opts.add_option(opt_ug) + +# parse and check command line options +opts.parse(args) +if not opts.errors.is_empty then + print opts.errors + print "Usage: drop_privileges [options]" + opts.usage + exit 1 +end + +# original user +print "before {sys.uid}:{sys.gid}" + +# make the switch +var user_group = opt_ug.value +assert user_group != null +user_group.drop_privileges + +# final user +print "after {sys.uid}:{sys.egid}" diff --git a/tests/examplefiles/extern_methods.nit b/tests/examplefiles/extern_methods.nit new file mode 100644 index 00000000..00c6b684 --- /dev/null +++ b/tests/examplefiles/extern_methods.nit @@ -0,0 +1,69 @@ +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Copyright 2012-2013 Alexis Laferrière +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This module illustrates some uses of the FFI, specifically +# how to use extern methods. Which means to implement a Nit method in C. +module extern_methods + +redef enum Int + # Returns self'th fibonnaci number + # implemented here in C for optimization purposes + fun fib : Int import fib `{ + if ( recv < 2 ) + return recv; + else + return Int_fib( recv-1 ) + Int_fib( recv-2 ); + `} + + # System call to sleep for "self" seconds + fun sleep `{ + sleep( recv ); + `} + + # Return atan2l( self, x ) from libmath + fun atan_with( x : Int ) : Float `{ + return atan2( recv, x ); + `} + + # This method callback to Nit methods from C code + # It will use from C code: + # * the local fib method + # * the + operator, a method of Int + # * to_s, a method of all objects + # * String.to_cstring, a method of String to return an equivalent char* + fun foo import fib, +, to_s, String.to_cstring `{ + long recv_fib = Int_fib( recv ); + long recv_plus_fib = Int__plus( recv, recv_fib ); + + String nit_string = Int_to_s( recv_plus_fib ); + char *c_string = String_to_cstring( nit_string ); + + printf( "from C: self + fib(self) = %s\n", c_string ); + `} + + # Equivalent to foo but written in pure Nit + fun bar do print "from Nit: self + fib(self) = {self+self.fib}" +end + +print 12.fib + +print "sleeping 1 second..." 
+1.sleep + +print 100.atan_with( 200 ) +8.foo +8.bar + diff --git a/tests/examplefiles/fibonacci.nit b/tests/examplefiles/fibonacci.nit new file mode 100644 index 00000000..e1a72c9e --- /dev/null +++ b/tests/examplefiles/fibonacci.nit @@ -0,0 +1,43 @@ +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Copyright 2004-2008 Jean Privat +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# A simple exemple of refinement where a method is added to the integer class. +module fibonacci + +redef class Int + # Calculate the self-th element of the fibonacci sequence. + fun fibonacci: Int + do + if self < 2 then + return 1 + else + return (self-2).fibonacci + (self-1).fibonacci + end + end +end + +# Print usage and exit. +fun usage +do + print "Usage: fibonnaci " + exit 0 +end + +# Main part +if args.length != 1 then + usage +end +print args.first.to_i.fibonacci diff --git a/tests/examplefiles/hello_world.nit b/tests/examplefiles/hello_world.nit new file mode 100644 index 00000000..da6849ae --- /dev/null +++ b/tests/examplefiles/hello_world.nit @@ -0,0 +1 @@ +print "hello world" diff --git a/tests/examplefiles/html_page.nit b/tests/examplefiles/html_page.nit new file mode 100644 index 00000000..cf76665d --- /dev/null +++ b/tests/examplefiles/html_page.nit @@ -0,0 +1,105 @@ +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import html + +class NitHomepage + super HTMLPage + + redef fun head do + add("meta").attr("charset", "utf-8") + add("title").text("Nit") + add("link").attr("rel", "icon").attr("href", "http://nitlanguage.org/favicon.ico").attr("type", "image/x-icon") + add("link").attr("rel", "stylesheet").attr("href", "http://nitlanguage.org/style.css").attr("type", "text/css") + add("link").attr("rel", "stylesheet").attr("href", "http://nitlanguage.org/local.css").attr("type", "text/css") + end + + redef fun body do + open("article").add_class("page") + open("section").add_class("pageheader") + add_html("theNitProgramming Language") + open("header").add_class("header") + open("div").add_class("topsubtitle") + add("p").text("A Fun Language for Serious Programming") + close("div") + close("header") + close("section") + + open("div").attr("id", "pagebody") + open("section").attr("id", "content") + add("h1").text("# What is Nit?") + add("p").text("Nit is an object-oriented programming language. 
The goal of Nit is to propose a robust statically typed programming language where structure is not a pain.") + add("p").text("So, what does the famous hello world program look like, in Nit?") + add_html("
print 'Hello, World!'
") + + add("h1").text("# Feature Highlights") + add("h2").text("Usability") + add("p").text("Nit's goal is to be usable by real programmers for real projects") + + open("ul") + open("li") + add("a").attr("href", "http://en.wikipedia.org/wiki/KISS_principle").text("KISS principle") + close("li") + add("li").text("Script-like language without verbosity nor cryptic statements") + add("li").text("Painless static types: static typing should help programmers") + add("li").text("Efficient development, efficient execution, efficient evolution.") + close("ul") + + add("h2").text("Robustness") + add("p").text("Nit will help you to write bug-free programs") + + open("ul") + add("li").text("Strong static typing") + add("li").text("No more NullPointerException") + close("ul") + + add("h2").text("Object-Oriented") + add("p").text("Nit's guideline is to follow the most powerful OO principles") + + open("ul") + open("li") + add("a").attr("href", "./everything_is_an_object/").text("Everything is an object") + close("li") + open("li") + add("a").attr("href", "./multiple_inheritance/").text("Multiple inheritance") + close("li") + open("li") + add("a").attr("href", "./refinement/").text("Open classes") + close("li") + open("li") + add("a").attr("href", "./virtual_types/").text("Virtual types") + close("li") + close("ul") + + + add("h1").text("# Getting Started") + add("p").text("Get Nit from its Git repository:") + + add_html("
$ git clone http://nitlanguage.org/nit.git
") + add("p").text("Build the compiler (may be long):") + add_html("
$ cd nit\n")
+					add_html("$ make
") + add("p").text("Compile a program:") + add_html("
$ bin/nitc examples/hello_world.nit
") + add("p").text("Execute the program:") + add_html("
$ ./hello_world
") + close("section") + close("div") + close("article") + end +end + +var page = new NitHomepage +page.write_to stdout +page.write_to_file("nit.html") diff --git a/tests/examplefiles/int_stack.nit b/tests/examplefiles/int_stack.nit new file mode 100644 index 00000000..1109bbbc --- /dev/null +++ b/tests/examplefiles/int_stack.nit @@ -0,0 +1,100 @@ +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# An example that defines and uses stacks of integers. +# The implementation is done with a simple linked list. +# It features: free constructors, nullable types and some adaptive typing. +module int_stack + +# A stack of integer implemented by a simple linked list. +# Note that this is only a toy class since a real linked list will gain to use +# generics and extends interfaces, like Collection, from the standard library. +class IntStack + # The head node of the list. + # Null means that the stack is empty. + private var head: nullable ISNode = null + + # Add a new integer in the stack. + fun push(val: Int) + do + self.head = new ISNode(val, self.head) + end + + # Remove and return the last pushed integer. + # Return null if the stack is empty. + fun pop: nullable Int + do + var head = self.head + if head == null then return null + # Note: the followings are statically safe because of the + # previous 'if'. + var val = head.val + self.head = head.next + return val + end + + # Return the sum of all integers of the stack. + # Return 0 if the stack is empty. + fun sumall: Int + do + var sum = 0 + var cur = self.head + while cur != null do + # Note: the followings are statically safe because of + # the condition of the 'while'. + sum += cur.val + cur = cur.next + end + return sum + end + + # Note: Because all attributes have a default value, a free constructor + # "init()" is implicitly defined. +end + +# A node of a IntStack +private class ISNode + # The integer value stored in the node. + var val: Int + + # The next node, if any. + var next: nullable ISNode + + # Note: A free constructor "init(val: Int, next: nullable ISNode)" is + # implicitly defined. +end + +var l = new IntStack +l.push(1) +l.push(2) +l.push(3) + +print l.sumall + +# Note: the 'for' control structure cannot be used on IntStack in its current state. +# It requires a more advanced topic. +# However, why not using the 'loop' control structure? +loop + var i = l.pop + if i == null then break + # The following is statically safe because of the previous 'if'. + print i * 10 +end + +# Note: 'or else' is used to give an alternative of a null expression. 
+l.push(5) +print l.pop or else 0 # l.pop gives 5, so print 5 +print l.pop or else 0 # l.pop gives null, so print the alternative: 0 + + diff --git a/tests/examplefiles/opengles2_hello_triangle.nit b/tests/examplefiles/opengles2_hello_triangle.nit new file mode 100644 index 00000000..2b39b1ba --- /dev/null +++ b/tests/examplefiles/opengles2_hello_triangle.nit @@ -0,0 +1,193 @@ +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Copyright 2014 Alexis Laferrière +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Basic example of OpenGL ES 2.0 usage from the book OpenGL ES 2.0 Programming Guide. +# +# Code reference: +# https://code.google.com/p/opengles-book-samples/source/browse/trunk/LinuxX11/Chapter_2/Hello_Triangle/Hello_Triangle.c +module opengles2_hello_triangle + +import glesv2 +import egl +import mnit_linux # for sdl +import x11 + +if "NIT_TESTING".environ == "true" then exit(0) + +var window_width = 800 +var window_height = 600 + +# +## SDL +# +var sdl_display = new SDLDisplay(window_width, window_height) +var sdl_wm_info = new SDLSystemWindowManagerInfo +var x11_window_handle = sdl_wm_info.x11_window_handle + +# +## X11 +# +var x_display = x_open_default_display +assert x_display != 0 else print "x11 fail" + +# +## EGL +# +var egl_display = new EGLDisplay(x_display) +assert egl_display.is_valid else print "EGL display is not valid" +egl_display.initialize + +print "EGL version: {egl_display.version}" +print "EGL vendor: {egl_display.vendor}" +print "EGL extensions: {egl_display.extensions.join(", ")}" +print "EGL client APIs: {egl_display.client_apis.join(", ")}" + +assert egl_display.is_valid else print egl_display.error + +var config_chooser = new EGLConfigChooser +#config_chooser.surface_type_egl +config_chooser.blue_size = 8 +config_chooser.green_size = 8 +config_chooser.red_size = 8 +#config_chooser.alpha_size = 8 +#config_chooser.depth_size = 8 +#config_chooser.stencil_size = 8 +#config_chooser.sample_buffers = 1 +config_chooser.close + +var configs = config_chooser.choose(egl_display) +assert configs != null else print "choosing config failed: {egl_display.error}" +assert not configs.is_empty else print "no EGL config" + +print "{configs.length} EGL configs available" +for config in configs do + var attribs = config.attribs(egl_display) + print "* caveats: {attribs.caveat}" + print " conformant to: {attribs.conformant}" + print " size of RGBA: {attribs.red_size} {attribs.green_size} {attribs.blue_size} {attribs.alpha_size}" + print " buffer, depth, stencil: {attribs.buffer_size} {attribs.depth_size} {attribs.stencil_size}" +end + +var config = configs.first + +var format = config.attribs(egl_display).native_visual_id + +# TODO android part +# Opengles1Display_midway_init(recv, format); + +var surface = egl_display.create_window_surface(config, x11_window_handle, [0]) +assert surface.is_ok else print egl_display.error + +var context = egl_display.create_context(config) +assert context.is_ok else print egl_display.error + +var make_current_res = 
egl_display.make_current(surface, surface, context) +assert make_current_res + +var width = surface.attribs(egl_display).width +var height = surface.attribs(egl_display).height +print "Width: {width}" +print "Height: {height}" + +assert egl_bind_opengl_es_api else print "eglBingAPI failed: {egl_display.error}" + +# +## GLESv2 +# + +print "Can compile shaders? {gl_shader_compiler}" +assert_no_gl_error + +assert gl_shader_compiler else print "Cannot compile shaders" + +# gl program +print gl_error.to_s +var program = new GLProgram +if not program.is_ok then + print "Program is not ok: {gl_error.to_s}\nLog:" + print program.info_log + abort +end +assert_no_gl_error + +# vertex shader +var vertex_shader = new GLVertexShader +assert vertex_shader.is_ok else print "Vertex shader is not ok: {gl_error}" +vertex_shader.source = """ +attribute vec4 vPosition; +void main() +{ + gl_Position = vPosition; +} """ +vertex_shader.compile +assert vertex_shader.is_compiled else print "Vertex shader compilation failed with: {vertex_shader.info_log} {program.info_log}" +assert_no_gl_error + +# fragment shader +var fragment_shader = new GLFragmentShader +assert fragment_shader.is_ok else print "Fragment shader is not ok: {gl_error}" +fragment_shader.source = """ +precision mediump float; +void main() +{ + gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0); +} +""" +fragment_shader.compile +assert fragment_shader.is_compiled else print "Fragment shader compilation failed with: {fragment_shader.info_log}" +assert_no_gl_error + +program.attach_shader vertex_shader +program.attach_shader fragment_shader +program.bind_attrib_location(0, "vPosition") +program.link +assert program.is_linked else print "Linking failed: {program.info_log}" +assert_no_gl_error + +# draw! +var vertices = [0.0, 0.5, 0.0, -0.5, -0.5, 0.0, 0.5, -0.5, 0.0] +var vertex_array = new VertexArray(0, 3, vertices) +vertex_array.attrib_pointer +gl_clear_color(0.5, 0.0, 0.5, 1.0) +for i in [0..10000[ do + printn "." + assert_no_gl_error + gl_viewport(0, 0, width, height) + gl_clear_color_buffer + program.use + vertex_array.enable + vertex_array.draw_arrays_triangles + egl_display.swap_buffers(surface) +end + +# delete +program.delete +vertex_shader.delete +fragment_shader.delete + +# +## EGL +# +# close +egl_display.make_current(new EGLSurface.none, new EGLSurface.none, new EGLContext.none) +egl_display.destroy_context(context) +egl_display.destroy_surface(surface) + +# +## SDL +# +# close +sdl_display.destroy diff --git a/tests/examplefiles/print_arguments.nit b/tests/examplefiles/print_arguments.nit new file mode 100644 index 00000000..3bdddc62 --- /dev/null +++ b/tests/examplefiles/print_arguments.nit @@ -0,0 +1,22 @@ +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Copyright 2004-2008 Jean Privat +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# How to print arguments of the command line. 
+module print_arguments + +for a in args do + print a +end diff --git a/tests/examplefiles/procedural_array.nit b/tests/examplefiles/procedural_array.nit new file mode 100644 index 00000000..838bda02 --- /dev/null +++ b/tests/examplefiles/procedural_array.nit @@ -0,0 +1,48 @@ +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Copyright 2004-2008 Jean Privat +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# A procedural program (without explicit class definition). +# This program manipulates arrays of integers. +module procedural_array + +# The sum of the elements of `a'. +# Uses a 'for' control structure. +fun array_sum(a: Array[Int]): Int +do + var sum = 0 + for i in a do + sum = sum + i + end + return sum +end + +# The sum of the elements of `a' (alternative version). +# Uses a 'while' control structure. +fun array_sum_alt(a: Array[Int]): Int +do + var sum = 0 + var i = 0 + while i < a.length do + sum = sum + a[i] + i = i + 1 + end + return sum +end + +# The main part of the program. +var a = [10, 5, 8, 9] +print(array_sum(a)) +print(array_sum_alt(a)) diff --git a/tests/examplefiles/socket_client.nit b/tests/examplefiles/socket_client.nit new file mode 100644 index 00000000..0ba19132 --- /dev/null +++ b/tests/examplefiles/socket_client.nit @@ -0,0 +1,38 @@ +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Copyright 2013 Matthieu Lucas +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Client sample using the Socket module which connect to the server sample. +module socket_client + +import socket + +if args.length < 2 then + print "Usage : socket_client " + return +end + +var s = new Socket.client(args[0], args[1].to_i) +print "[HOST ADDRESS] : {s.address}" +print "[HOST] : {s.host}" +print "[PORT] : {s.port}" +print "Connecting ... {s.connected}" +if s.connected then + print "Writing ... Hello server !" + s.write("Hello server !") + print "[Response from server] : {s.read(100)}" + print "Closing ..." + s.close +end diff --git a/tests/examplefiles/socket_server.nit b/tests/examplefiles/socket_server.nit new file mode 100644 index 00000000..aa77a759 --- /dev/null +++ b/tests/examplefiles/socket_server.nit @@ -0,0 +1,52 @@ +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Copyright 2013 Matthieu Lucas +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Server sample using the Socket module which allow client to connect +module socket_server + +import socket + +if args.is_empty then + print "Usage : socket_server " + return +end + +var socket = new Socket.server(args[0].to_i, 1) +print "[PORT] : {socket.port.to_s}" + +var clients = new Array[Socket] +var max = socket +loop + var fs = new SocketObserver(true, true, true) + fs.readset.set(socket) + + for c in clients do fs.readset.set(c) + + if fs.select(max, 4, 0) == 0 then + print "Error occured in select {sys.errno.strerror}" + break + end + + if fs.readset.is_set(socket) then + var ns = socket.accept + print "Accepting {ns.address} ... " + print "[Message from {ns.address}] : {ns.read(100)}" + ns.write("Goodbye client.") + print "Closing {ns.address} ..." + ns.close + end +end + diff --git a/tests/examplefiles/tmpl_composer.nit b/tests/examplefiles/tmpl_composer.nit new file mode 100644 index 00000000..6160b1a8 --- /dev/null +++ b/tests/examplefiles/tmpl_composer.nit @@ -0,0 +1,94 @@ +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import template + +### Here, definition of the specific templates + +# The root template for composers +class TmplComposers + super Template + + # Short list of composers + var composers = new Array[TmplComposer] + + # Detailled list of composers + var composer_details = new Array[TmplComposerDetail] + + # Add a composer in both lists + fun add_composer(firstname, lastname: String, birth, death: Int) + do + composers.add(new TmplComposer(lastname)) + composer_details.add(new TmplComposerDetail(firstname, lastname, birth, death)) + end + + redef fun rendering do + add """ +COMPOSERS +========= +""" + add_all composers + add """ + +DETAILS +======= +""" + add_all composer_details + end +end + +# A composer in the short list of composers +class TmplComposer + super Template + + # Short name + var name: String + + init(name: String) do self.name = name + + redef fun rendering do add "- {name}\n" +end + +# A composer in the detailled list of composers +class TmplComposerDetail + super Template + + var firstname: String + var lastname: String + var birth: Int + var death: Int + + init(firstname, lastname: String, birth, death: Int) do + self.firstname = firstname + self.lastname = lastname + self.birth = birth + self.death = death + end + + redef fun rendering do add """ + +COMPOSER: {{{firstname}}} {{{lastname}}} +BIRTH...: {{{birth}}} +DEATH...: {{{death}}} +""" + +end + +### Here a simple usage of the templates + +var f = new TmplComposers +f.add_composer("Johann Sebastian", "Bach", 1685, 1750) +f.add_composer("George Frideric", "Handel", 1685, 1759) +f.add_composer("Wolfgang Amadeus", "Mozart", 1756, 1791) +f.write_to(stdout) diff --git a/tests/examplefiles/websocket_server.nit b/tests/examplefiles/websocket_server.nit new file mode 100644 index 00000000..38029c37 --- /dev/null +++ b/tests/examplefiles/websocket_server.nit @@ -0,0 +1,46 @@ +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Copyright 2014 Lucas Bajolet +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# Sample module for a minimal chat server using Websockets on port 8088 +module websocket_server + +import websocket + +var sock = new WebSocket(8088, 1) + +var msg: String + +if sock.listener.eof then + print sys.errno.strerror +end + +sock.accept + +while not sock.listener.eof do + if not sock.connected then sock.accept + if sys.stdin.poll_in then + msg = gets + printn "Received message : {msg}" + if msg == "exit" then sock.close + if msg == "disconnect" then sock.disconnect_client + sock.write(msg) + end + if sock.can_read(10) then + msg = sock.read_line + if msg != "" then print msg + end +end + -- cgit v1.2.1 From 318f724a204eacd8b3fef8300e23468c71dc5ed7 Mon Sep 17 00:00:00 2001 From: Alexei Sholik Date: Fri, 27 Jun 2014 02:40:40 +0300 Subject: Fix regular expressions for identifiers in ElixirLexer --- pygments/lexers/functional.py | 5 +++-- tests/examplefiles/example_elixir.ex | 7 ++++++- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/pygments/lexers/functional.py b/pygments/lexers/functional.py index a22c4f55..dd823fe3 100644 --- a/pygments/lexers/functional.py +++ b/pygments/lexers/functional.py @@ -3290,8 +3290,9 @@ class ElixirLexer(RegexLexer): op1_re = "|".join(re.escape(s) for s in OPERATORS1) ops_re = r'(?:%s|%s|%s)' % (op3_re, op2_re, op1_re) punctuation_re = "|".join(re.escape(s) for s in PUNCTUATION) - name_re = r'[a-z_][a-zA-Z_0-9]*[!\?]?' - modname_re = r'[A-Z][A-Za-z_]*(?:\.[A-Z][A-Za-z_]*)*' + alnum = '[A-Za-z_0-9]' + name_re = r'[a-z_]%s*[!\?]?' % alnum + modname_re = r'[A-Z]%(alnum)s*(?:\.[A-Z]%(alnum)s*)*' % {'alnum': alnum} complex_name_re = r'(?:%s|%s|%s)' % (name_re, modname_re, ops_re) special_atom_re = r'(?:\.\.\.|<<>>|%{}|%|{})' diff --git a/tests/examplefiles/example_elixir.ex b/tests/examplefiles/example_elixir.ex index 0912d099..09870443 100644 --- a/tests/examplefiles/example_elixir.ex +++ b/tests/examplefiles/example_elixir.ex @@ -82,6 +82,11 @@ end # String interpolation "String #{inspect "interpolation"} is quite #{1+4+7} difficult" +# Identifiers +abc_123 = 1 +_018OP = 2 +A__0 = 3 + # Modules defmodule Long.Module.Name do @moduledoc "Simple module docstring" @@ -182,7 +187,7 @@ end # Lexical scope modifiers import Kernel, except: [spawn: 1, +: 2, /: 2, Unless: 2] -alias Long.Module.Name, as: Namen +alias Long.Module.Name, as: N0men123_and4 use Bitwise 4 &&& 5 -- cgit v1.2.1 From 3e7a48d7395dd6f2cbac9340db1c5ca6f38d0241 Mon Sep 17 00:00:00 2001 From: "Jeffrey B. Arnold" Date: Mon, 30 Jun 2014 22:44:14 -0400 Subject: updated StanLexer to Stan version 2.3.0 --- pygments/lexers/_stan_builtins.py | 904 ++++++++++++++++++++------------------ pygments/lexers/math.py | 6 +- 2 files changed, 468 insertions(+), 442 deletions(-) diff --git a/pygments/lexers/_stan_builtins.py b/pygments/lexers/_stan_builtins.py index 583b44a8..212d5be2 100644 --- a/pygments/lexers/_stan_builtins.py +++ b/pygments/lexers/_stan_builtins.py @@ -4,451 +4,477 @@ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ This file contains the names of functions for Stan used by - ``pygments.lexers.math.StanLexer``. These builtins are from Stan language v2.2.0. + ``pygments.lexers.math.StanLexer. This is for Stan language version 2.2.0. - :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :copyright: Copyright 2014 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" -KEYWORDS = ['else', 'for', 'if', 'in', 'lp__', 'print', 'while'] +KEYWORDS = [ u'else', + u'for', + u'if', + u'in', + u'lp__', + u'print', + u'return', + u'void', + u'while'] -TYPES = [ 'cholesky_factor_cov', - 'corr_matrix', - 'cov_matrix', - 'int', - 'matrix', - 'ordered', - 'positive_ordered', - 'real', - 'row_vector', - 'simplex', - 'unit_vector', - 'vector'] +TYPES = [ u'cholesky_factor_cov', + u'corr_matrix', + u'cov_matrix', + u'int', + u'matrix', + u'ordered', + u'positive_ordered', + u'real', + u'row_vector', + u'simplex', + u'unit_vector', + u'vector'] -FUNCTIONS = [ 'Phi', - 'Phi_approx', - 'abs', - 'acos', - 'acosh', - 'asin', - 'asinh', - 'atan', - 'atan2', - 'atanh', - 'bernoulli_ccdf_log', - 'bernoulli_cdf', - 'bernoulli_cdf_log', - 'bernoulli_log', - 'bernoulli_logit_log', - 'bernoulli_rng', - 'bessel_first_kind', - 'bessel_second_kind', - 'beta_binomial_ccdf_log', - 'beta_binomial_cdf', - 'beta_binomial_cdf_log', - 'beta_binomial_log', - 'beta_binomial_rng', - 'beta_ccdf_log', - 'beta_cdf', - 'beta_cdf_log', - 'beta_log', - 'beta_rng', - 'binary_log_loss', - 'binomial_ccdf_log', - 'binomial_cdf', - 'binomial_cdf_log', - 'binomial_coefficient_log', - 'binomial_log', - 'binomial_logit_log', - 'binomial_rng', - 'block', - 'categorical_log', - 'categorical_logit_log', - 'categorical_rng', - 'cauchy_ccdf_log', - 'cauchy_cdf', - 'cauchy_cdf_log', - 'cauchy_log', - 'cauchy_rng', - 'cbrt', - 'ceil', - 'chi_square_ccdf_log', - 'chi_square_cdf', - 'chi_square_cdf_log', - 'chi_square_log', - 'chi_square_rng', - 'cholesky_decompose', - 'col', - 'cols', - 'columns_dot_product', - 'columns_dot_self', - 'cos', - 'cosh', - 'crossprod', - 'cumulative_sum', - 'determinant', - 'diag_matrix', - 'diag_post_multiply', - 'diag_pre_multiply', - 'diagonal', - 'digamma', - 'dims', - 'dirichlet_log', - 'dirichlet_rng', - 'distance', - 'dot_product', - 'dot_self', - 'double_exponential_ccdf_log', - 'double_exponential_cdf', - 'double_exponential_cdf_log', - 'double_exponential_log', - 'double_exponential_rng', - 'e', - 'eigenvalues_sym', - 'eigenvectors_sym', - 'erf', - 'erfc', - 'exp', - 'exp2', - 'exp_mod_normal_ccdf_log', - 'exp_mod_normal_cdf', - 'exp_mod_normal_cdf_log', - 'exp_mod_normal_log', - 'exp_mod_normal_rng', - 'expm1', - 'exponential_ccdf_log', - 'exponential_cdf', - 'exponential_cdf_log', - 'exponential_log', - 'exponential_rng', - 'fabs', - 'falling_factorial', - 'fdim', - 'floor', - 'fma', - 'fmax', - 'fmin', - 'fmod', - 'gamma_ccdf_log', - 'gamma_cdf', - 'gamma_cdf_log', - 'gamma_log', - 'gamma_p', - 'gamma_q', - 'gamma_rng', - 'gaussian_dlm_obs_log', - 'gumbel_ccdf_log', - 'gumbel_cdf', - 'gumbel_cdf_log', - 'gumbel_log', - 'gumbel_rng', - 'head', - 'hypergeometric_log', - 'hypergeometric_rng', - 'hypot', - 'if_else', - 'increment_log_prob', - 'int_step', - 'inv', - 'inv_chi_square_ccdf_log', - 'inv_chi_square_cdf', - 'inv_chi_square_cdf_log', - 'inv_chi_square_log', - 'inv_chi_square_rng', - 'inv_cloglog', - 'inv_gamma_ccdf_log', - 'inv_gamma_cdf', - 'inv_gamma_cdf_log', - 'inv_gamma_log', - 'inv_gamma_rng', - 'inv_logit', - 'inv_sqrt', - 'inv_square', - 'inv_wishart_log', - 'inv_wishart_rng', - 'inverse', - 'inverse_spd', - 'lbeta', - 'lgamma', - 'lkj_corr_log', - 'lkj_corr_rng', - 'lmgamma', - 'log', - 'log10', - 'log1m', - 'log1m_exp', - 'log1m_inv_logit', - 'log1p', - 'log1p_exp', - 'log2', - 'log_determinant', - 'log_diff_exp', - 'log_falling_factorial', - 'log_inv_logit', - 'log_rising_factorial', - 'log_softmax', - 'log_sum_exp', - 'logistic_ccdf_log', - 
'logistic_cdf', - 'logistic_cdf_log', - 'logistic_log', - 'logistic_rng', - 'logit', - 'lognormal_ccdf_log', - 'lognormal_cdf', - 'lognormal_cdf_log', - 'lognormal_log', - 'lognormal_rng', - 'machine_precision', - 'max', - 'mdivide_left_tri_low', - 'mdivide_right_tri_low', - 'mean', - 'min', - 'modified_bessel_first_kind', - 'modified_bessel_second_kind', - 'multi_normal_cholesky_log', - 'multi_normal_log', - 'multi_normal_prec_log', - 'multi_normal_rng', - 'multi_student_t_log', - 'multi_student_t_rng', - 'multinomial_log', - 'multinomial_rng', - 'multiply_log', - 'multiply_lower_tri_self_transpose', - 'neg_binomial_ccdf_log', - 'neg_binomial_cdf', - 'neg_binomial_cdf_log', - 'neg_binomial_log', - 'neg_binomial_rng', - 'negative_infinity', - 'normal_ccdf_log', - 'normal_cdf', - 'normal_cdf_log', - 'normal_log', - 'normal_rng', - 'not_a_number', - 'ordered_logistic_log', - 'ordered_logistic_rng', - 'owens_t', - 'pareto_ccdf_log', - 'pareto_cdf', - 'pareto_cdf_log', - 'pareto_log', - 'pareto_rng', - 'pi', - 'poisson_ccdf_log', - 'poisson_cdf', - 'poisson_cdf_log', - 'poisson_log', - 'poisson_log_log', - 'poisson_rng', - 'positive_infinity', - 'pow', - 'prod', - 'quad_form', - 'rank', - 'rayleigh_ccdf_log', - 'rayleigh_cdf', - 'rayleigh_cdf_log', - 'rayleigh_log', - 'rayleigh_rng', - 'rep_array', - 'rep_matrix', - 'rep_row_vector', - 'rep_vector', - 'rising_factorial', - 'round', - 'row', - 'rows', - 'rows_dot_product', - 'rows_dot_self', - 'scaled_inv_chi_square_ccdf_log', - 'scaled_inv_chi_square_cdf', - 'scaled_inv_chi_square_cdf_log', - 'scaled_inv_chi_square_log', - 'scaled_inv_chi_square_rng', - 'sd', - 'segment', - 'sin', - 'singular_values', - 'sinh', - 'size', - 'skew_normal_ccdf_log', - 'skew_normal_cdf', - 'skew_normal_cdf_log', - 'skew_normal_log', - 'skew_normal_rng', - 'softmax', - 'sort_asc', - 'sort_desc', - 'sqrt', - 'sqrt2', - 'square', - 'squared_distance', - 'step', - 'student_t_ccdf_log', - 'student_t_cdf', - 'student_t_cdf_log', - 'student_t_log', - 'student_t_rng', - 'sub_col', - 'sub_row', - 'sum', - 'tail', - 'tan', - 'tanh', - 'tcrossprod', - 'tgamma', - 'to_vector', - 'trace', - 'trace_gen_quad_form', - 'trace_quad_form', - 'trigamma', - 'trunc', - 'uniform_ccdf_log', - 'uniform_cdf', - 'uniform_cdf_log', - 'uniform_log', - 'uniform_rng', - 'variance', - 'von_mises_log', - 'weibull_ccdf_log', - 'weibull_cdf', - 'weibull_cdf_log', - 'weibull_log', - 'weibull_rng', - 'wishart_log', - 'wishart_rng'] +FUNCTIONS = [ u'Phi', + u'Phi_approx', + u'abs', + u'acos', + u'acosh', + u'asin', + u'asinh', + u'atan', + u'atan2', + u'atanh', + u'bernoulli_ccdf_log', + u'bernoulli_cdf', + u'bernoulli_cdf_log', + u'bernoulli_log', + u'bernoulli_logit_log', + u'bernoulli_rng', + u'bessel_first_kind', + u'bessel_second_kind', + u'beta_binomial_ccdf_log', + u'beta_binomial_cdf', + u'beta_binomial_cdf_log', + u'beta_binomial_log', + u'beta_binomial_rng', + u'beta_ccdf_log', + u'beta_cdf', + u'beta_cdf_log', + u'beta_log', + u'beta_rng', + u'binary_log_loss', + u'binomial_ccdf_log', + u'binomial_cdf', + u'binomial_cdf_log', + u'binomial_coefficient_log', + u'binomial_log', + u'binomial_logit_log', + u'binomial_rng', + u'block', + u'categorical_log', + u'categorical_logit_log', + u'categorical_rng', + u'cauchy_ccdf_log', + u'cauchy_cdf', + u'cauchy_cdf_log', + u'cauchy_log', + u'cauchy_rng', + u'cbrt', + u'ceil', + u'chi_square_ccdf_log', + u'chi_square_cdf', + u'chi_square_cdf_log', + u'chi_square_log', + u'chi_square_rng', + u'cholesky_decompose', + u'col', + u'cols', + 
u'columns_dot_product', + u'columns_dot_self', + u'cos', + u'cosh', + u'crossprod', + u'cumulative_sum', + u'determinant', + u'diag_matrix', + u'diag_post_multiply', + u'diag_pre_multiply', + u'diagonal', + u'digamma', + u'dims', + u'dirichlet_log', + u'dirichlet_rng', + u'distance', + u'dot_product', + u'dot_self', + u'double_exponential_ccdf_log', + u'double_exponential_cdf', + u'double_exponential_cdf_log', + u'double_exponential_log', + u'double_exponential_rng', + u'e', + u'eigenvalues_sym', + u'eigenvectors_sym', + u'erf', + u'erfc', + u'exp', + u'exp2', + u'exp_mod_normal_ccdf_log', + u'exp_mod_normal_cdf', + u'exp_mod_normal_cdf_log', + u'exp_mod_normal_log', + u'exp_mod_normal_rng', + u'expm1', + u'exponential_ccdf_log', + u'exponential_cdf', + u'exponential_cdf_log', + u'exponential_log', + u'exponential_rng', + u'fabs', + u'falling_factorial', + u'fdim', + u'floor', + u'fma', + u'fmax', + u'fmin', + u'fmod', + u'gamma_ccdf_log', + u'gamma_cdf', + u'gamma_cdf_log', + u'gamma_log', + u'gamma_p', + u'gamma_q', + u'gamma_rng', + u'gaussian_dlm_obs_log', + u'gumbel_ccdf_log', + u'gumbel_cdf', + u'gumbel_cdf_log', + u'gumbel_log', + u'gumbel_rng', + u'head', + u'hypergeometric_log', + u'hypergeometric_rng', + u'hypot', + u'if_else', + u'increment_log_prob', + u'int_step', + u'inv', + u'inv_chi_square_ccdf_log', + u'inv_chi_square_cdf', + u'inv_chi_square_cdf_log', + u'inv_chi_square_log', + u'inv_chi_square_rng', + u'inv_cloglog', + u'inv_gamma_ccdf_log', + u'inv_gamma_cdf', + u'inv_gamma_cdf_log', + u'inv_gamma_log', + u'inv_gamma_rng', + u'inv_logit', + u'inv_sqrt', + u'inv_square', + u'inv_wishart_log', + u'inv_wishart_rng', + u'inverse', + u'inverse_spd', + u'lbeta', + u'lgamma', + u'lkj_corr_log', + u'lkj_corr_rng', + u'lmgamma', + u'log', + u'log10', + u'log1m', + u'log1m_exp', + u'log1m_inv_logit', + u'log1p', + u'log1p_exp', + u'log2', + u'log_determinant', + u'log_diff_exp', + u'log_falling_factorial', + u'log_inv_logit', + u'log_rising_factorial', + u'log_softmax', + u'log_sum_exp', + u'logistic_ccdf_log', + u'logistic_cdf', + u'logistic_cdf_log', + u'logistic_log', + u'logistic_rng', + u'logit', + u'lognormal_ccdf_log', + u'lognormal_cdf', + u'lognormal_cdf_log', + u'lognormal_log', + u'lognormal_rng', + u'machine_precision', + u'max', + u'mdivide_left_tri_low', + u'mdivide_right_tri_low', + u'mean', + u'min', + u'modified_bessel_first_kind', + u'modified_bessel_second_kind', + u'multi_gp_log', + u'multi_normal_cholesky_log', + u'multi_normal_log', + u'multi_normal_prec_log', + u'multi_normal_rng', + u'multi_student_t_log', + u'multi_student_t_rng', + u'multinomial_log', + u'multinomial_rng', + u'multiply_log', + u'multiply_lower_tri_self_transpose', + u'neg_binomial_2_log', + u'neg_binomial_2_log_log', + u'neg_binomial_2_log_rng', + u'neg_binomial_2_rng', + u'neg_binomial_ccdf_log', + u'neg_binomial_cdf', + u'neg_binomial_cdf_log', + u'neg_binomial_log', + u'neg_binomial_rng', + u'negative_infinity', + u'normal_ccdf_log', + u'normal_cdf', + u'normal_cdf_log', + u'normal_log', + u'normal_rng', + u'not_a_number', + u'ordered_logistic_log', + u'ordered_logistic_rng', + u'owens_t', + u'pareto_ccdf_log', + u'pareto_cdf', + u'pareto_cdf_log', + u'pareto_log', + u'pareto_rng', + u'pi', + u'poisson_ccdf_log', + u'poisson_cdf', + u'poisson_cdf_log', + u'poisson_log', + u'poisson_log_log', + u'poisson_rng', + u'positive_infinity', + u'pow', + u'prod', + u'qr_Q', + u'qr_R', + u'quad_form', + u'quad_form_diag', + u'quad_form_sym', + u'rank', + u'rayleigh_ccdf_log', + u'rayleigh_cdf', 
+ u'rayleigh_cdf_log', + u'rayleigh_log', + u'rayleigh_rng', + u'rep_array', + u'rep_matrix', + u'rep_row_vector', + u'rep_vector', + u'rising_factorial', + u'round', + u'row', + u'rows', + u'rows_dot_product', + u'rows_dot_self', + u'scaled_inv_chi_square_ccdf_log', + u'scaled_inv_chi_square_cdf', + u'scaled_inv_chi_square_cdf_log', + u'scaled_inv_chi_square_log', + u'scaled_inv_chi_square_rng', + u'sd', + u'segment', + u'sin', + u'singular_values', + u'sinh', + u'size', + u'skew_normal_ccdf_log', + u'skew_normal_cdf', + u'skew_normal_cdf_log', + u'skew_normal_log', + u'skew_normal_rng', + u'softmax', + u'sort_asc', + u'sort_desc', + u'sort_indices_asc', + u'sort_indices_desc', + u'sqrt', + u'sqrt2', + u'square', + u'squared_distance', + u'step', + u'student_t_ccdf_log', + u'student_t_cdf', + u'student_t_cdf_log', + u'student_t_log', + u'student_t_rng', + u'sub_col', + u'sub_row', + u'sum', + u'tail', + u'tan', + u'tanh', + u'tcrossprod', + u'tgamma', + u'to_array_1d', + u'to_array_2d', + u'to_matrix', + u'to_row_vector', + u'to_vector', + u'trace', + u'trace_gen_quad_form', + u'trace_quad_form', + u'trigamma', + u'trunc', + u'uniform_ccdf_log', + u'uniform_cdf', + u'uniform_cdf_log', + u'uniform_log', + u'uniform_rng', + u'variance', + u'von_mises_log', + u'weibull_ccdf_log', + u'weibull_cdf', + u'weibull_cdf_log', + u'weibull_log', + u'weibull_rng', + u'wishart_log', + u'wishart_rng'] -DISTRIBUTIONS = [ 'bernoulli', - 'bernoulli_logit', - 'beta', - 'beta_binomial', - 'binomial', - 'binomial_logit', - 'categorical', - 'categorical_logit', - 'cauchy', - 'chi_square', - 'dirichlet', - 'double_exponential', - 'exp_mod_normal', - 'exponential', - 'gamma', - 'gaussian_dlm_obs', - 'gumbel', - 'hypergeometric', - 'inv_chi_square', - 'inv_gamma', - 'inv_wishart', - 'lkj_corr', - 'logistic', - 'lognormal', - 'multi_normal', - 'multi_normal_cholesky', - 'multi_normal_prec', - 'multi_student_t', - 'multinomial', - 'neg_binomial', - 'normal', - 'ordered_logistic', - 'pareto', - 'poisson', - 'poisson_log', - 'rayleigh', - 'scaled_inv_chi_square', - 'skew_normal', - 'student_t', - 'uniform', - 'von_mises', - 'weibull', - 'wishart'] +DISTRIBUTIONS = [ u'bernoulli', + u'bernoulli_logit', + u'beta', + u'beta_binomial', + u'binomial', + u'binomial_logit', + u'categorical', + u'categorical_logit', + u'cauchy', + u'chi_square', + u'dirichlet', + u'double_exponential', + u'exp_mod_normal', + u'exponential', + u'gamma', + u'gaussian_dlm_obs', + u'gumbel', + u'hypergeometric', + u'inv_chi_square', + u'inv_gamma', + u'inv_wishart', + u'lkj_corr', + u'logistic', + u'lognormal', + u'multi_gp', + u'multi_normal', + u'multi_normal_cholesky', + u'multi_normal_prec', + u'multi_student_t', + u'multinomial', + u'neg_binomial', + u'neg_binomial_2', + u'neg_binomial_2_log', + u'normal', + u'ordered_logistic', + u'pareto', + u'poisson', + u'poisson_log', + u'rayleigh', + u'scaled_inv_chi_square', + u'skew_normal', + u'student_t', + u'uniform', + u'von_mises', + u'weibull', + u'wishart'] -RESERVED = [ 'alignas', - 'alignof', - 'and', - 'and_eq', - 'asm', - 'auto', - 'bitand', - 'bitor', - 'bool', - 'break', - 'case', - 'catch', - 'char', - 'char16_t', - 'char32_t', - 'class', - 'compl', - 'const', - 'const_cast', - 'constexpr', - 'continue', - 'decltype', - 'default', - 'delete', - 'do', - 'double', - 'dynamic_cast', - 'enum', - 'explicit', - 'export', - 'extern', - 'false', - 'false', - 'float', - 'friend', - 'goto', - 'inline', - 'int', - 'long', - 'mutable', - 'namespace', - 'new', - 'noexcept', - 'not', - 'not_eq', - 
'nullptr', - 'operator', - 'or', - 'or_eq', - 'private', - 'protected', - 'public', - 'register', - 'reinterpret_cast', - 'repeat', - 'return', - 'short', - 'signed', - 'sizeof', - 'static', - 'static_assert', - 'static_cast', - 'struct', - 'switch', - 'template', - 'then', - 'this', - 'thread_local', - 'throw', - 'true', - 'true', - 'try', - 'typedef', - 'typeid', - 'typename', - 'union', - 'unsigned', - 'until', - 'using', - 'virtual', - 'void', - 'volatile', - 'wchar_t', - 'xor', - 'xor_eq'] +RESERVED = [ u'alignas', + u'alignof', + u'and', + u'and_eq', + u'asm', + u'auto', + u'bitand', + u'bitor', + u'bool', + u'break', + u'case', + u'catch', + u'char', + u'char16_t', + u'char32_t', + u'class', + u'compl', + u'const', + u'const_cast', + u'constexpr', + u'continue', + u'decltype', + u'default', + u'delete', + u'do', + u'double', + u'dynamic_cast', + u'enum', + u'explicit', + u'export', + u'extern', + u'false', + u'false', + u'float', + u'friend', + u'fvar', + u'goto', + u'inline', + u'int', + u'long', + u'mutable', + u'namespace', + u'new', + u'noexcept', + u'not', + u'not_eq', + u'nullptr', + u'operator', + u'or', + u'or_eq', + u'private', + u'protected', + u'public', + u'register', + u'reinterpret_cast', + u'repeat', + u'short', + u'signed', + u'sizeof', + u'static', + u'static_assert', + u'static_cast', + u'struct', + u'switch', + u'template', + u'then', + u'this', + u'thread_local', + u'throw', + u'true', + u'true', + u'try', + u'typedef', + u'typeid', + u'typename', + u'union', + u'unsigned', + u'until', + u'using', + u'var', + u'virtual', + u'volatile', + u'wchar_t', + u'xor', + u'xor_eq'] diff --git a/pygments/lexers/math.py b/pygments/lexers/math.py index b324c614..d005c0e8 100644 --- a/pygments/lexers/math.py +++ b/pygments/lexers/math.py @@ -1572,8 +1572,8 @@ class JagsLexer(RegexLexer): class StanLexer(RegexLexer): """Pygments Lexer for Stan models. - The Stan modeling language is specified in the *Stan Modeling Language User's Guide and Reference Manual, v2.2.0*, - `pdf `__. + The Stan modeling language is specified in the *Stan Modeling Language User's Guide and Reference Manual, v2.3.0*, + `pdf `__. .. versionadded:: 1.6 """ @@ -1600,7 +1600,7 @@ class StanLexer(RegexLexer): include('whitespace'), # Block start (r'(%s)(\s*)({)' % - r'|'.join(('data', r'transformed\s+?data', + r'|'.join(('functions', 'data', r'transformed\s+?data', 'parameters', r'transformed\s+parameters', 'model', r'generated\s+quantities')), bygroups(Keyword.Namespace, Text, Punctuation)), -- cgit v1.2.1 From 76f1913368ca68af86dbdc235c5531ae209ea3a3 Mon Sep 17 00:00:00 2001 From: "Jeffrey B. Arnold" Date: Mon, 30 Jun 2014 23:17:26 -0400 Subject: moved void to type and added example file --- pygments/lexers/_stan_builtins.py | 8 ++++---- tests/examplefiles/example.stan | 5 +++++ 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/pygments/lexers/_stan_builtins.py b/pygments/lexers/_stan_builtins.py index 212d5be2..e148724a 100644 --- a/pygments/lexers/_stan_builtins.py +++ b/pygments/lexers/_stan_builtins.py @@ -4,9 +4,9 @@ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ This file contains the names of functions for Stan used by - ``pygments.lexers.math.StanLexer. This is for Stan language version 2.2.0. + ``pygments.lexers.math.StanLexer. This is for Stan language version 2.3.0. - :copyright: Copyright 2014 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" @@ -17,7 +17,6 @@ KEYWORDS = [ u'else', u'lp__', u'print', u'return', - u'void', u'while'] TYPES = [ u'cholesky_factor_cov', @@ -31,7 +30,8 @@ TYPES = [ u'cholesky_factor_cov', u'row_vector', u'simplex', u'unit_vector', - u'vector'] + u'vector', + u'void'] FUNCTIONS = [ u'Phi', u'Phi_approx', diff --git a/tests/examplefiles/example.stan b/tests/examplefiles/example.stan index 7eb6fdfc..341089ae 100644 --- a/tests/examplefiles/example.stan +++ b/tests/examplefiles/example.stan @@ -5,6 +5,11 @@ It is not a real model and will not compile */ # also a comment // also a comment +functions { + void func1(real a) { + return 1 / a; + } +} data { // valid name int abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_abc; -- cgit v1.2.1 From a0f35d17627456cdc8e608cc0dc2fa6a32dec6d0 Mon Sep 17 00:00:00 2001 From: Thomas Van Doren Date: Thu, 3 Jul 2014 15:00:21 -0700 Subject: Update ChapelLexer prior to Pygments 2.0 release. * Adds `align`, `noinit`, and `pragma` keywords. * Updates decimal integer definition to allow any number of leading zeros, as per spec. * Adds `<~>` operator, which is used for convenience when reading or writing input and output. --- pygments/lexers/compiled.py | 10 +++++----- tests/examplefiles/99_bottles_of_beer.chpl | 31 ++++++++++++++++++++++++++++++ 2 files changed, 36 insertions(+), 5 deletions(-) diff --git a/pygments/lexers/compiled.py b/pygments/lexers/compiled.py index 25c7a4d8..041d56e2 100644 --- a/pygments/lexers/compiled.py +++ b/pygments/lexers/compiled.py @@ -3920,10 +3920,10 @@ class ChapelLexer(RegexLexer): (r'(false|nil|true)\b', Keyword.Constant), (r'(bool|complex|imag|int|opaque|range|real|string|uint)\b', Keyword.Type), - (r'(atomic|begin|break|by|cobegin|coforall|continue|iter|' + (r'(align|atomic|begin|break|by|cobegin|coforall|continue|' r'delete|dmapped|do|domain|else|enum|export|extern|for|forall|' - r'if|index|inline|label|lambda|let|local|new|on|otherwise|' - r'reduce|return|scan|select|serial|single|sparse|' + r'if|index|inline|iter|label|lambda|let|local|new|noinit|on|' + r'otherwise|pragma|reduce|return|scan|select|serial|single|sparse|' r'subdomain|sync|then|use|when|where|while|yield|zip)\b', Keyword), (r'(proc)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'procname'), @@ -3947,14 +3947,14 @@ class ChapelLexer(RegexLexer): # -- hex (r'0[xX][0-9a-fA-F]+', Number.Hex), # -- decimal - (r'(0|[1-9][0-9]*)', Number.Integer), + (r'[0-9]+', Number.Integer), # strings (r'["\'](\\\\|\\"|[^"\'])*["\']', String), # tokens (r'(=|\+=|-=|\*=|/=|\*\*=|%=|&=|\|=|\^=|&&=|\|\|=|<<=|>>=|' - r'<=>|\.\.|by|#|\.\.\.|' + r'<=>|<~>|\.\.|by|#|\.\.\.|' r'&&|\|\||!|&|\||\^|~|<<|>>|' r'==|!=|<=|>=|<|>|' r'[+\-*/%]|\*\*)', Operator), diff --git a/tests/examplefiles/99_bottles_of_beer.chpl b/tests/examplefiles/99_bottles_of_beer.chpl index f73be7b1..10c3e48a 100644 --- a/tests/examplefiles/99_bottles_of_beer.chpl +++ b/tests/examplefiles/99_bottles_of_beer.chpl @@ -112,7 +112,38 @@ c = nil; c = new Oval(r=1.0, r2=2.0); writeln("Area of oval: " + c.area()); +// This is a valid decimal integer: +var x = 0000000000012; + union U { var i: int; var r: real; } + +// chapel ranges are awesome. 
+var r1 = 1..10, // 1 2 3 4 5 6 7 8 9 10 + r2 = 10..1, // no values in this range + r3 = 1..10 by -1, // 10 9 8 7 6 5 4 3 2 1 + r4 = 1..10 by 2, // 1 3 5 7 9 + r5 = 1..10 by 2 align 0, // 2 4 6 8 10 + r6 = 1..10 by 2 align 2, // 2 4 6 8 10 + r7 = 1..10 # 3, // 1 2 3 + r8 = 1..10 # -2, // 9 10 + r9 = 1..100 # 10 by 2, // 1 3 5 7 9 + ra = 1..100 by 2 # 10, // 1 3 5 7 9 11 13 15 17 19 + rb = 1.. # 100 by 10; // 1 11 21 31 41 51 61 71 81 91 + +// create a variable with default initialization +var myVarWithoutInit: real = noinit; +myVarWithoutInit = 1.0; + +// Chapel has <~> operator for read and write I/O operations. +class IntPair { + var x: int; + var y: int; + proc readWriteThis(f) { + f <~> x <~> new ioLiteral(",") <~> y <~> new ioNewline(); + } +} +var ip = new IntPair(17,2); +write(ip); -- cgit v1.2.1 -- cgit v1.2.1 From 53733f9430a6f2bcc27f24b11ffec739d465c295 Mon Sep 17 00:00:00 2001 From: Evan Jenkins Date: Sat, 12 Jul 2014 05:18:34 +0000 Subject: `exaxt` => `exact` in Idris lexer --- pygments/lexers/functional.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pygments/lexers/functional.py b/pygments/lexers/functional.py index a22c4f55..a28345a3 100644 --- a/pygments/lexers/functional.py +++ b/pygments/lexers/functional.py @@ -1530,7 +1530,7 @@ class IdrisLexer(RegexLexer): 'let','proof','of','then','static','where','_','with', 'pattern', 'term', 'syntax','prefix', 'postulate','parameters','record','dsl','impossible','implicit', - 'tactics','intros','intro','compute','refine','exaxt','trivial'] + 'tactics','intros','intro','compute','refine','exact','trivial'] ascii = ['NUL','SOH','[SE]TX','EOT','ENQ','ACK', 'BEL','BS','HT','LF','VT','FF','CR','S[OI]','DLE', -- cgit v1.2.1 From 78afd7ec55755e82dcfe4c9fc198d2fbb88a3a1e Mon Sep 17 00:00:00 2001 From: Adam Hirst Date: Fri, 25 Jul 2014 11:37:26 +0000 Subject: Added missing Fortran 2003/8 keywords/intrinsics. Modelled after https://github.com/geany/geany/commit/05dda6b001ac71d0fc8592f480e30b8a52e2def5 which I submitted to Geany earlier this year. 
Source: "Modern Fortran Explained" by Metcalf --- pygments/lexers/compiled.py | 81 +++++++++++++++++++++++++-------------------- 1 file changed, 46 insertions(+), 35 deletions(-) diff --git a/pygments/lexers/compiled.py b/pygments/lexers/compiled.py index 25c7a4d8..1d1ed6ee 100644 --- a/pygments/lexers/compiled.py +++ b/pygments/lexers/compiled.py @@ -1582,17 +1582,20 @@ class FortranLexer(RegexLexer): ], 'core': [ # Statements - (r'\b(ABSTRACT|ACCEPT|ALLOCATABLE|ALLOCATE|ARRAY|ASSIGN|ASYNCHRONOUS|' - r'BACKSPACE|BIND|BLOCK( DATA)?|BYTE|CALL|CASE|CLASS|CLOSE|COMMON|CONTAINS|' - r'CONTINUE|CYCLE|DATA|DEALLOCATE|DECODE|DEFERRED|DIMENSION|DO|' - r'ELEMENTAL|ELSE|ENCODE|END( FILE)?|ENDIF|ENTRY|ENUMERATOR|EQUIVALENCE|' - r'EXIT|EXTERNAL|EXTRINSIC|FINAL|FORALL|FORMAT|FUNCTION|GENERIC|' - r'GOTO|IF|IMPLICIT|IMPORT|INCLUDE|INQUIRE|INTENT|INTERFACE|' - r'INTRINSIC|MODULE|NAMELIST|NULLIFY|NONE|NON_INTRINSIC|' - r'NON_OVERRIDABLE|NOPASS|OPEN|OPTIONAL|OPTIONS|PARAMETER|PASS|' - r'PAUSE|POINTER|PRINT|PRIVATE|PROGRAM|PROTECTED|PUBLIC|PURE|READ|' - r'RECURSIVE|RESULT|RETURN|REWIND|SAVE|SELECT|SEQUENCE|STOP|SUBROUTINE|' - r'TARGET|THEN|TYPE|USE|VALUE|VOLATILE|WHERE|WRITE|WHILE)\s*\b', + (r'\b(ALL|ALLSTOP|ABSTRACT|ACCEPT|ALLOCATABLE|ALLOCATE|ARRAY|ASSIGN|' + r'ASYNCHRONOUS|BACKSPACE|BIND|BLOCK( DATA)?|BYTE|CALL|CASE|CLASS|CLOSE|' + r'CODIMENSION|COMMON|CONCURRRENT|CONTIGUOUS|CONTAINS|CONTINUE|CRITICAL|' + r'CYCLE|DATA|DEALLOCATE|DECODE|DEFERRED|DIMENSION|DO|ELEMENTAL|ELSE|' + r'ENCODE|END( FILE)?|ENDCRITICAL|ENDIF|ENDPROCEDURE|ENTRY|ENUM|' + r'ENUMERATOR|EQUIVALENCE|EXIT|EXTERNAL|EXTRINSIC|FINAL|FORALL|FORMAT|' + r'FUNCTION|GENERIC|GOTO|IF|IMAGES|IMPLICIT|IMPORT|IMPUREINCLUDE|' + r'INQUIRE|INTENT|INTERFACE|INTRINSIC|IS|LOCK|MEMORY|MODULE|NAMELIST|' + r'NULLIFY|NONE|NON_INTRINSIC|NON_OVERRIDABLE|NOPASS|OPEN|OPTIONAL|' + r'OPTIONS|PARAMETER|PASS|PAUSE|POINTER|PRINT|PRIVATE|PROGRAM|' + r'PROTECTED|PUBLIC|PURE|READ|RECURSIVE|RESULT|RETURN|REWIND|SAVE|' + r'SELECT|SEQUENCE|STOP|SUBMODULE|SUBROUTINE|SYNC|SYNCALL|SYNCIMAGES|' + r'SYNCMEMORY|TARGET|THEN|TYPE|UNLOCK|USE|VALUE|VOLATILE|WHERE|WRITE|' + r'WHILE)\s*\b', Keyword), # Data Types @@ -1614,37 +1617,45 @@ class FortranLexer(RegexLexer): (r'[()\[\],:&%;]', Punctuation), # Intrinsics - (r'\b(Abort|Abs|Access|AChar|ACos|AdjustL|AdjustR|AImag|AInt|Alarm|' - r'All|Allocated|ALog|AMax|AMin|AMod|And|ANInt|Any|ASin|Associated|' - r'ATan|BesJ|BesJN|BesY|BesYN|Bit_Size|BTest|CAbs|CCos|Ceiling|' + (r'\b(Abort|Abs|Access|AChar|ACos|ACosH|AdjustL|AdjustR|AImag|AInt|' + r'Alarm|All|Allocated|ALog|AMax|AMin|AMod|And|ANInt|Any|ASin|ASinH|' + r'Associated|ATan|ATanH|Atomic_Define|Atomic_Ref|BesJ|BesJN|' + r'Bessel_J0|Bessel_J1|Bessel_JN|Bessel_Y0|Bessel_Y1|Bessel_YN|' + r'BesY|BesYN|BGE|BGT|BLE|BLT|Bit_Size|BTest|CAbs|CCos|Ceiling|' r'CExp|Char|ChDir|ChMod|CLog|Cmplx|Command_Argument_Count|Complex|' r'Conjg|Cos|CosH|Count|CPU_Time|CShift|CSin|CSqRt|CTime|C_Funloc|' - r'C_Loc|C_Associated|C_Null_Ptr|C_Null_Funptr|C_F_Pointer|' - r'C_Null_Char|C_Alert|C_Backspace|C_Form_Feed|C_New_Line|' + r'C_Loc|C_Associated|C_Null_Ptr|C_Null_Funptr|C_F_Pointer| + r'C_F_ProcPointer|C_Null_Char|C_Alert|C_Backspace|C_Form_Feed|' + r'C_FunLoc|C_Loc|C_SizeofC_New_Line|' r'C_Carriage_Return|C_Horizontal_Tab|C_Vertical_Tab|' r'DAbs|DACos|DASin|DATan|Date_and_Time|DbesJ|' r'DbesJ|DbesJN|DbesY|DbesY|DbesYN|Dble|DCos|DCosH|DDiM|DErF|DErFC|' r'DExp|Digits|DiM|DInt|DLog|DLog|DMax|DMin|DMod|DNInt|Dot_Product|' - r'DProd|DSign|DSinH|DSin|DSqRt|DTanH|DTan|DTime|EOShift|Epsilon|' - 
r'ErF|ErFC|ETime|Exit|Exp|Exponent|Extends_Type_Of|FDate|FGet|' - r'FGetC|Float|Floor|Flush|FNum|FPutC|FPut|Fraction|FSeek|FStat|' - r'FTell|GError|GetArg|Get_Command|Get_Command_Argument|' + r'DProd|DSign|DSinH|DShiftL|DShiftR|DSin|DSqRt|DTanH|DTan|DTime| + r'EOShift|Epsilon|ErF|ErFC|ErFC_Scaled|ETime|Execute_Command_Line|' + r'Exit|Exp|Exponent|Extends_Type_Of|FDate|FGet|FGetC|' + r'FindLoc|Float|Floor|Flush|FNum|FPutC|FPut|Fraction|FSeek|FStat|' + r'FTell|Gamma|GError|GetArg|Get_Command|Get_Command_Argument|' r'Get_Environment_Variable|GetCWD|GetEnv|GetGId|GetLog|GetPId|' - r'GetUId|GMTime|HostNm|Huge|IAbs|IAChar|IAnd|IArgC|IBClr|IBits|' - r'IBSet|IChar|IDate|IDiM|IDInt|IDNInt|IEOr|IErrNo|IFix|Imag|' - r'ImagPart|Index|Int|IOr|IRand|IsaTty|IShft|IShftC|ISign|' - r'Iso_C_Binding|Is_Iostat_End|Is_Iostat_Eor|ITime|Kill|Kind|' - r'LBound|Len|Len_Trim|LGe|LGt|Link|LLe|LLt|LnBlnk|Loc|Log|' - r'Logical|Long|LShift|LStat|LTime|MatMul|Max|MaxExponent|MaxLoc|' - r'MaxVal|MClock|Merge|Move_Alloc|Min|MinExponent|MinLoc|MinVal|' - r'Mod|Modulo|MvBits|Nearest|New_Line|NInt|Not|Or|Pack|PError|' - r'Precision|Present|Product|Radix|Rand|Random_Number|Random_Seed|' - r'Range|Real|RealPart|Rename|Repeat|Reshape|RRSpacing|RShift|' - r'Same_Type_As|Scale|Scan|Second|Selected_Int_Kind|' - r'Selected_Real_Kind|Set_Exponent|Shape|Short|Sign|Signal|SinH|' - r'Sin|Sleep|Sngl|Spacing|Spread|SqRt|SRand|Stat|Sum|SymLnk|' - r'System|System_Clock|Tan|TanH|Time|Tiny|Transfer|Transpose|Trim|' - r'TtyNam|UBound|UMask|Unlink|Unpack|Verify|XOr|ZAbs|ZCos|ZExp|' + r'GetUId|GMTime|HostNm|Huge|Hypot|IAbs|IAChar|IAll|IAnd|IAny|' + r'IArgC|IBClr|IBits|IBSet|IChar|IDate|IDiM|IDInt|IDNInt|IEOr|' + r'IErrNo|IFix|Imag|ImagPart|Image_Index|Index|Int|IOr|IParity|' + r'IRand|IsaTty|IShft|IShftC|ISign|Iso_C_Binding|Is_Contiguous|' + r'Is_Iostat_End|Is_Iostat_Eor|ITime|Kill|Kind|LBound|LCoBound|' + r'Len|Len_Trim|LGe|LGt|Link|LLe|LLt|LnBlnk|Loc|Log|Log_Gamma|' + r'Logical|Long|LShift|LStat|LTime|MaskL|MaskR|MatMul|Max|' + r'MaxExponent|MaxLoc|MaxVal|MClock|Merge|Merge_Bits|Move_Alloc|' + r'Min|MinExponent|MinLoc|MinVal|Mod|Modulo|MvBits|Nearest|' + r'New_Line|NInt|Norm2|Not|Null|Num_Images|Or|Pack|Parity|PError|' + r'Precision|Present|Product|Radix|Rand|Random_Number|' + r'Random_Seed|Range|Real|RealPart|Rename|Repeat|Reshape|' + r'RRSpacing|RShift|Same_Type_As|Scale|Scan|Second|' + r'Selected_Char_Kind|Selected_Int_Kind|Selected_Real_Kind|' + r'Set_Exponent|Shape|ShiftA|ShiftL|ShiftR|Short|Sign|Signal|SinH|' + r'Sin|Sleep|Sngl|Spacing|Spread|SqRt|SRand|Stat|Storage_Size|Sum|' + r'SymLnk|System|System_Clock|Tan|TanH|Time|This_Image|Tiny|' + r'TrailZ|Transfer|Transpose|Trim|TtyNam|UBound|UCoBound|UMask|' + r'Unlink|Unpack|Verify|XOr|ZAbs|ZCos|ZExp|' r'ZLog|ZSin|ZSqRt)\s*\b', Name.Builtin), -- cgit v1.2.1 From 475ede62dcb880f5ad08afde4afbcb304dfabd60 Mon Sep 17 00:00:00 2001 From: Adam Hirst Date: Fri, 25 Jul 2014 13:58:16 +0000 Subject: Corrected typographical error. 
--- pygments/lexers/compiled.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pygments/lexers/compiled.py b/pygments/lexers/compiled.py index 1d1ed6ee..1421b0e1 100644 --- a/pygments/lexers/compiled.py +++ b/pygments/lexers/compiled.py @@ -1582,13 +1582,13 @@ class FortranLexer(RegexLexer): ], 'core': [ # Statements - (r'\b(ALL|ALLSTOP|ABSTRACT|ACCEPT|ALLOCATABLE|ALLOCATE|ARRAY|ASSIGN|' + (r'\b(ABSTRACT|ACCEPT|ALL|ALLSTOP|ALLOCATABLE|ALLOCATE|ARRAY|ASSIGN|' r'ASYNCHRONOUS|BACKSPACE|BIND|BLOCK( DATA)?|BYTE|CALL|CASE|CLASS|CLOSE|' r'CODIMENSION|COMMON|CONCURRRENT|CONTIGUOUS|CONTAINS|CONTINUE|CRITICAL|' r'CYCLE|DATA|DEALLOCATE|DECODE|DEFERRED|DIMENSION|DO|ELEMENTAL|ELSE|' r'ENCODE|END( FILE)?|ENDCRITICAL|ENDIF|ENDPROCEDURE|ENTRY|ENUM|' r'ENUMERATOR|EQUIVALENCE|EXIT|EXTERNAL|EXTRINSIC|FINAL|FORALL|FORMAT|' - r'FUNCTION|GENERIC|GOTO|IF|IMAGES|IMPLICIT|IMPORT|IMPUREINCLUDE|' + r'FUNCTION|GENERIC|GOTO|IF|IMAGES|IMPLICIT|IMPORT|IMPURE|INCLUDE|' r'INQUIRE|INTENT|INTERFACE|INTRINSIC|IS|LOCK|MEMORY|MODULE|NAMELIST|' r'NULLIFY|NONE|NON_INTRINSIC|NON_OVERRIDABLE|NOPASS|OPEN|OPTIONAL|' r'OPTIONS|PARAMETER|PASS|PAUSE|POINTER|PRINT|PRIVATE|PROGRAM|' -- cgit v1.2.1 From 04682453bfada53ed746b306f7a8c0553c8610ab Mon Sep 17 00:00:00 2001 From: "Jeffrey B. Arnold" Date: Sun, 27 Jul 2014 22:04:39 -0400 Subject: Update StanLexer to support Stan v2.4.0 --- pygments/lexers/_stan_builtins.py | 15 ++++++++++++--- pygments/lexers/math.py | 8 ++++---- tests/examplefiles/example.stan | 8 ++++++-- 3 files changed, 22 insertions(+), 9 deletions(-) diff --git a/pygments/lexers/_stan_builtins.py b/pygments/lexers/_stan_builtins.py index e148724a..4c4a27c1 100644 --- a/pygments/lexers/_stan_builtins.py +++ b/pygments/lexers/_stan_builtins.py @@ -4,7 +4,7 @@ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ This file contains the names of functions for Stan used by - ``pygments.lexers.math.StanLexer. This is for Stan language version 2.3.0. + ``pygments.lexers.math.StanLexer. This is for Stan language version 2.4.0. :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
@@ -14,12 +14,14 @@ KEYWORDS = [ u'else', u'for', u'if', u'in', + u'increment_log_prob', u'lp__', u'print', u'return', u'while'] -TYPES = [ u'cholesky_factor_cov', +TYPES = [ u'cholesky_factor_corr', + u'cholesky_factor_cov', u'corr_matrix', u'cov_matrix', u'int', @@ -155,7 +157,6 @@ FUNCTIONS = [ u'Phi', u'hypergeometric_rng', u'hypot', u'if_else', - u'increment_log_prob', u'int_step', u'inv', u'inv_chi_square_ccdf_log', @@ -178,8 +179,11 @@ FUNCTIONS = [ u'Phi', u'inverse_spd', u'lbeta', u'lgamma', + u'lkj_corr_cholesky_log', + u'lkj_corr_cholesky_rng', u'lkj_corr_log', u'lkj_corr_rng', + u'lkj_cov_log', u'lmgamma', u'log', u'log10', @@ -217,6 +221,7 @@ FUNCTIONS = [ u'Phi', u'modified_bessel_second_kind', u'multi_gp_log', u'multi_normal_cholesky_log', + u'multi_normal_cholesky_rng', u'multi_normal_log', u'multi_normal_prec_log', u'multi_normal_rng', @@ -337,6 +342,7 @@ FUNCTIONS = [ u'Phi', u'uniform_rng', u'variance', u'von_mises_log', + u'von_mises_rng', u'weibull_ccdf_log', u'weibull_cdf', u'weibull_cdf_log', @@ -367,6 +373,8 @@ DISTRIBUTIONS = [ u'bernoulli', u'inv_gamma', u'inv_wishart', u'lkj_corr', + u'lkj_corr_cholesky', + u'lkj_cov', u'logistic', u'lognormal', u'multi_gp', @@ -473,6 +481,7 @@ RESERVED = [ u'alignas', u'using', u'var', u'virtual', + u'void', u'volatile', u'wchar_t', u'xor', diff --git a/pygments/lexers/math.py b/pygments/lexers/math.py index d005c0e8..c51403e2 100644 --- a/pygments/lexers/math.py +++ b/pygments/lexers/math.py @@ -1572,8 +1572,8 @@ class JagsLexer(RegexLexer): class StanLexer(RegexLexer): """Pygments Lexer for Stan models. - The Stan modeling language is specified in the *Stan Modeling Language User's Guide and Reference Manual, v2.3.0*, - `pdf `__. + The Stan modeling language is specified in the *Stan Modeling Language User's Guide and Reference Manual, v2.4.0*, + `pdf `__. .. versionadded:: 1.6 """ @@ -1630,8 +1630,8 @@ class StanLexer(RegexLexer): # Assignment operators # SLexer makes these tokens Operators. (r'<-|~', Operator), - # Infix and prefix operators (and = ) - (r"\+|-|\.?\*|\.?/|\\|'|==?|!=?|<=?|>=?|\|\||&&", Operator), + # Infix, prefix and postfix operators (and = ) + (r"\+|-|\.?\*|\.?/|\\|'|\^|==?|!=?|<=?|>=?|\|\||&&", Operator), # Block delimiters (r'[{}]', Punctuation), ] diff --git a/tests/examplefiles/example.stan b/tests/examplefiles/example.stan index 341089ae..716b4d12 100644 --- a/tests/examplefiles/example.stan +++ b/tests/examplefiles/example.stan @@ -6,9 +6,12 @@ It is not a real model and will not compile # also a comment // also a comment functions { - void func1(real a) { + void f1(void a, real b) { return 1 / a; } + real f2(int a, vector b, real c) { + return a + b + c; + } } data { // valid name @@ -25,6 +28,7 @@ data { corr_matrix[3] grault; cov_matrix[3] garply; cholesky_factor_cov[3] waldo; + cholesky_factor_corr[3] waldo2; real foo1; real foo2; @@ -92,6 +96,7 @@ model { tmp / tmp; tmp .* tmp; tmp ./ tmp; + tmp ^ tmp; ! 
tmp; - tmp; + tmp; @@ -112,4 +117,3 @@ generated quantities { real bar1; bar1 <- foo + 1; } - -- cgit v1.2.1 From 2362ca7d7bb5a27f6c4a90b791b8adb32a78e27e Mon Sep 17 00:00:00 2001 From: Adam Hirst Date: Tue, 29 Jul 2014 18:49:41 +0000 Subject: Added ASSOCIATE, ENDASSOCIATE, and ENDBLOCK --- pygments/lexers/compiled.py | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/pygments/lexers/compiled.py b/pygments/lexers/compiled.py index 1421b0e1..a519af5c 100644 --- a/pygments/lexers/compiled.py +++ b/pygments/lexers/compiled.py @@ -1583,19 +1583,19 @@ class FortranLexer(RegexLexer): 'core': [ # Statements (r'\b(ABSTRACT|ACCEPT|ALL|ALLSTOP|ALLOCATABLE|ALLOCATE|ARRAY|ASSIGN|' - r'ASYNCHRONOUS|BACKSPACE|BIND|BLOCK( DATA)?|BYTE|CALL|CASE|CLASS|CLOSE|' - r'CODIMENSION|COMMON|CONCURRRENT|CONTIGUOUS|CONTAINS|CONTINUE|CRITICAL|' - r'CYCLE|DATA|DEALLOCATE|DECODE|DEFERRED|DIMENSION|DO|ELEMENTAL|ELSE|' - r'ENCODE|END( FILE)?|ENDCRITICAL|ENDIF|ENDPROCEDURE|ENTRY|ENUM|' - r'ENUMERATOR|EQUIVALENCE|EXIT|EXTERNAL|EXTRINSIC|FINAL|FORALL|FORMAT|' - r'FUNCTION|GENERIC|GOTO|IF|IMAGES|IMPLICIT|IMPORT|IMPURE|INCLUDE|' - r'INQUIRE|INTENT|INTERFACE|INTRINSIC|IS|LOCK|MEMORY|MODULE|NAMELIST|' - r'NULLIFY|NONE|NON_INTRINSIC|NON_OVERRIDABLE|NOPASS|OPEN|OPTIONAL|' - r'OPTIONS|PARAMETER|PASS|PAUSE|POINTER|PRINT|PRIVATE|PROGRAM|' - r'PROTECTED|PUBLIC|PURE|READ|RECURSIVE|RESULT|RETURN|REWIND|SAVE|' - r'SELECT|SEQUENCE|STOP|SUBMODULE|SUBROUTINE|SYNC|SYNCALL|SYNCIMAGES|' - r'SYNCMEMORY|TARGET|THEN|TYPE|UNLOCK|USE|VALUE|VOLATILE|WHERE|WRITE|' - r'WHILE)\s*\b', + r'ASSOCIATE|ASYNCHRONOUS|BACKSPACE|BIND|BLOCK( DATA)?|BYTE|CALL|CASE|' + r'CLASS|CLOSE|CODIMENSION|COMMON|CONCURRRENT|CONTIGUOUS|CONTAINS|' + r'CONTINUE|CRITICAL|CYCLE|DATA|DEALLOCATE|DECODE|DEFERRED|DIMENSION|' + r'DO|ELEMENTAL|ELSE|ENCODE|END( FILE)?|ENDASSOCIATE|ENDBLOCK|' + r'ENDCRITICAL|ENDIF|ENDPROCEDURE|ENTRY|ENUM|ENUMERATOR|EQUIVALENCE|' + r'EXIT|EXTERNAL|EXTRINSIC|FINAL|FORALL|FORMAT|FUNCTION|GENERIC|GOTO|' + r'IF|IMAGES|IMPLICIT|IMPORT|IMPURE|INCLUDE|INQUIRE|INTENT|INTERFACE|' + r'INTRINSIC|IS|LOCK|MEMORY|MODULE|NAMELIST|NULLIFY|NONE|NON_INTRINSIC|' + r'NON_OVERRIDABLE|NOPASS|OPEN|OPTIONAL|OPTIONS|PARAMETER|PASS|PAUSE|' + r'POINTER|PRINT|PRIVATE|PROGRAM|PROTECTED|PUBLIC|PURE|READ|RECURSIVE|' + r'RESULT|RETURN|REWIND|SAVE|SELECT|SEQUENCE|STOP|SUBMODULE|SUBROUTINE|' + r'SYNC|SYNCALL|SYNCIMAGES|SYNCMEMORY|TARGET|THEN|TYPE|UNLOCK|USE|' + r'VALUE|VOLATILE|WHERE|WRITE|WHILE)\s*\b', Keyword), # Data Types -- cgit v1.2.1 From 2e4de38f5cce9f53dbe6a4984be23b401c774447 Mon Sep 17 00:00:00 2001 From: Adam Hirst Date: Wed, 30 Jul 2014 12:09:59 +0000 Subject: Further increased the Fortran KEYWORD list, specifically the various END* variants which do not include the insignificant blank. 
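
A rough illustration of the spellings this change is about (the Fortran snippet and the driver below are illustrative assumptions, not taken from the patch). Fortran treats the blank in "END DO" or "END IF" as insignificant, so ENDDO, ENDIF, ENDFUNCTION and the other squashed END* forms are equally valid and need their own entries in the keyword list. Assuming a Pygments checkout with this change applied, a sketch like the following should report the squashed forms as keyword tokens:

    from pygments.lexers import get_lexer_by_name
    from pygments.token import Keyword

    lexer = get_lexer_by_name("fortran")

    # Valid free-form Fortran using the blank-less END variants.
    code = """
    program demo
      integer :: i
      do i = 1, 3
        if (i == 2) then
          print *, i
        endif
      enddo
    end program demo
    """

    # Collect everything the lexer classifies under the Keyword token type.
    print([value for token, value in lexer.get_tokens(code) if token in Keyword])
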
--- pygments/lexers/compiled.py | 29 ++++++++++++++++------------- 1 file changed, 16 insertions(+), 13 deletions(-) diff --git a/pygments/lexers/compiled.py b/pygments/lexers/compiled.py index a519af5c..beb48bfc 100644 --- a/pygments/lexers/compiled.py +++ b/pygments/lexers/compiled.py @@ -1583,19 +1583,22 @@ class FortranLexer(RegexLexer): 'core': [ # Statements (r'\b(ABSTRACT|ACCEPT|ALL|ALLSTOP|ALLOCATABLE|ALLOCATE|ARRAY|ASSIGN|' - r'ASSOCIATE|ASYNCHRONOUS|BACKSPACE|BIND|BLOCK( DATA)?|BYTE|CALL|CASE|' - r'CLASS|CLOSE|CODIMENSION|COMMON|CONCURRRENT|CONTIGUOUS|CONTAINS|' - r'CONTINUE|CRITICAL|CYCLE|DATA|DEALLOCATE|DECODE|DEFERRED|DIMENSION|' - r'DO|ELEMENTAL|ELSE|ENCODE|END( FILE)?|ENDASSOCIATE|ENDBLOCK|' - r'ENDCRITICAL|ENDIF|ENDPROCEDURE|ENTRY|ENUM|ENUMERATOR|EQUIVALENCE|' - r'EXIT|EXTERNAL|EXTRINSIC|FINAL|FORALL|FORMAT|FUNCTION|GENERIC|GOTO|' - r'IF|IMAGES|IMPLICIT|IMPORT|IMPURE|INCLUDE|INQUIRE|INTENT|INTERFACE|' - r'INTRINSIC|IS|LOCK|MEMORY|MODULE|NAMELIST|NULLIFY|NONE|NON_INTRINSIC|' - r'NON_OVERRIDABLE|NOPASS|OPEN|OPTIONAL|OPTIONS|PARAMETER|PASS|PAUSE|' - r'POINTER|PRINT|PRIVATE|PROGRAM|PROTECTED|PUBLIC|PURE|READ|RECURSIVE|' - r'RESULT|RETURN|REWIND|SAVE|SELECT|SEQUENCE|STOP|SUBMODULE|SUBROUTINE|' - r'SYNC|SYNCALL|SYNCIMAGES|SYNCMEMORY|TARGET|THEN|TYPE|UNLOCK|USE|' - r'VALUE|VOLATILE|WHERE|WRITE|WHILE)\s*\b', + r'ASSOCIATE|ASYNCHRONOUS|BACKSPACE|BIND|BLOCK|BLOCKDATA|BYTE|CALL|' + r'CASE|CLASS|CLOSE|CODIMENSION|COMMON|CONCURRRENT|CONTIGUOUS|' + r'CONTAINS|CONTINUE|CRITICAL|CYCLE|DATA|DEALLOCATE|DECODE|DEFERRED|' + r'DIMENSION|DO|ELEMENTAL|ELSE|ENCODE|ENDASSOCIATE|ENDBLOCK|' + r'ENDBLOCKDATA|ENDCRITICAL|ENDDO|ENDFILE|ENDFORALL|ENDFUNCTION|ENDIF|' + r'ENDINTERFACE|ENDMODULE|ENDPROCEDURE|ENDPROGRAM|ENDSELECT|' + r'ENDSUBMODULE|ENDSUBROUTINE|ENDTYPE|ENDWHERE|ENTRY|ENUM|ENUMERATOR|' + r'EQUIVALENCE|EXIT|EXTENDS|EXTERNAL|EXTRINSIC|FINAL|FORALL|FORMAT|' + r'FUNCTION|GENERIC|GOTO|IF|IMAGES|IMPLICIT|IMPORT|IMPURE|INCLUDE|' + r'INQUIRE|INTENT|INTERFACE|INTRINSIC|IS|LOCK|MEMORY|MODULE|NAMELIST|' + r'NULLIFY|NONE|NON_INTRINSIC|NON_OVERRIDABLE|NOPASS|OPEN|OPTIONAL|' + r'OPTIONS|PARAMETER|PASS|PAUSE|POINTER|PRINT|PRIVATE|PROGRAM|' + r'PROTECTED|PUBLIC|PURE|READ|RECURSIVE|RESULT|RETURN|REWIND|SAVE|' + r'SELECT|SEQUENCE|STOP|SUBMODULE|SUBROUTINE|SYNC|SYNCALL|SYNCIMAGES|' + r'SYNCMEMORY|TARGET|THEN|TYPE|UNLOCK|USE|VALUE|VOLATILE|WHERE|WRITE|' + r'WHILE)\s*\b', Keyword), # Data Types -- cgit v1.2.1 From ab3fc81f632662a604e7e963457a199a28be9e2a Mon Sep 17 00:00:00 2001 From: Masanori Ogino Date: Sun, 3 Aug 2014 01:12:59 +0900 Subject: Fix RustLexer to handle nested block comments. Since Rust 0.9, block comments (e.g. /* ... */) may be nested. 
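
For context, a small sketch of the kind of input this change targets (the example program and the driver below are illustrative assumptions, not part of the patch). Rust allows block comments to nest, so the outer comment below only ends at the final "*/"; with the dedicated 'comment' state pushing on every "/*" and popping on every "*/", the whole span should come out as a run of Comment.Multiline tokens instead of being cut off at the first "*/":

    from pygments.lexers import get_lexer_by_name

    lexer = get_lexer_by_name("rust")

    # A nested block comment followed by ordinary code; the whole comment
    # should be emitted as Comment.Multiline pieces, and "fn main() {}"
    # should be lexed normally afterwards.
    code = "/* outer /* inner */ still inside the outer comment */ fn main() {}\n"

    for token, value in lexer.get_tokens(code):
        print(token, repr(value))

A single regular expression cannot track arbitrary nesting depth, which is why the fix uses a separate lexer state with '#push' and '#pop' rather than one multiline-comment pattern.
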
--- pygments/lexers/compiled.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/pygments/lexers/compiled.py b/pygments/lexers/compiled.py index 25c7a4d8..8bfa6058 100644 --- a/pygments/lexers/compiled.py +++ b/pygments/lexers/compiled.py @@ -3321,7 +3321,7 @@ class RustLexer(RegexLexer): (r'\s+', Text), (r'//[/!](.*?)\n', Comment.Doc), (r'//(.*?)\n', Comment.Single), - (r'/[*](.|\n)*?[*]/', Comment.Multiline), + (r'/\*', Comment.Multiline, 'comment'), # Keywords (r'(as|box|break|continue' @@ -3403,6 +3403,12 @@ class RustLexer(RegexLexer): (r'([A-Za-z_]\w*)!\s*([A-Za-z_]\w*)?\(', bygroups(Comment.Preproc, Name), 'macro('), ], + 'comment': [ + (r'[^*/]+', Comment.Multiline), + (r'/\*', Comment.Multiline, '#push'), + (r'\*/', Comment.Multiline, '#pop'), + (r'[*/]', Comment.Multiline), + ], 'number_lit': [ (r'(([ui](8|16|32|64)?)|(f(32|64)?))?', Keyword, '#pop'), ], -- cgit v1.2.1 From 750ca02ad15f5c6a8090e090f08c659748cfc592 Mon Sep 17 00:00:00 2001 From: David Corbett Date: Fri, 8 Aug 2014 17:07:45 -0400 Subject: Add forgotten commas between strings --- pygments/lexers/_mapping.py | 2 +- pygments/lexers/agile.py | 6 +++--- pygments/lexers/compiled.py | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index f8454357..b028aee6 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -20,7 +20,7 @@ LEXERS = { 'APLLexer': ('pygments.lexers.other', 'APL', ('apl',), ('*.apl',), ()), 'ActionScript3Lexer': ('pygments.lexers.web', 'ActionScript 3', ('as3', 'actionscript3'), ('*.as',), ('application/x-actionscript3', 'text/x-actionscript3', 'text/actionscript3')), 'ActionScriptLexer': ('pygments.lexers.web', 'ActionScript', ('as', 'actionscript'), ('*.as',), ('application/x-actionscript', 'text/x-actionscript', 'text/actionscript')), - 'AdaLexer': ('pygments.lexers.compiled', 'Ada', ('ada', 'ada95ada2005'), ('*.adb', '*.ads', '*.ada'), ('text/x-ada',)), + 'AdaLexer': ('pygments.lexers.compiled', 'Ada', ('ada', 'ada95', 'ada2005'), ('*.adb', '*.ads', '*.ada'), ('text/x-ada',)), 'AgdaLexer': ('pygments.lexers.functional', 'Agda', ('agda',), ('*.agda',), ('text/x-agda',)), 'AlloyLexer': ('pygments.lexers.other', 'Alloy', ('alloy',), ('*.als',), ('text/x-alloy',)), 'AmbientTalkLexer': ('pygments.lexers.other', 'AmbientTalk', ('at', 'ambienttalk', 'ambienttalk/2'), ('*.at',), ('text/x-ambienttalk',)), diff --git a/pygments/lexers/agile.py b/pygments/lexers/agile.py index 576f44ed..1ae369b9 100644 --- a/pygments/lexers/agile.py +++ b/pygments/lexers/agile.py @@ -2010,8 +2010,8 @@ class Perl6Lexer(ExtendedRegexLexer): PERL6_BUILTINS = ( 'ACCEPTS', 'HOW', 'REJECTS', 'VAR', 'WHAT', 'WHENCE', 'WHERE', 'WHICH', 'WHO', 'abs', 'acos', 'acosec', 'acosech', 'acosh', 'acotan', 'acotanh', - 'all', 'any', 'approx', 'arity', 'asec', 'asech', 'asin', 'asinh' - 'assuming', 'atan', 'atan2', 'atanh', 'attr', 'bless', 'body', 'by' + 'all', 'any', 'approx', 'arity', 'asec', 'asech', 'asin', 'asinh', + 'assuming', 'atan', 'atan2', 'atanh', 'attr', 'bless', 'body', 'by', 'bytes', 'caller', 'callsame', 'callwith', 'can', 'capitalize', 'cat', 'ceiling', 'chars', 'chmod', 'chomp', 'chop', 'chr', 'chroot', 'circumfix', 'cis', 'classify', 'clone', 'close', 'cmp_ok', 'codes', @@ -2396,7 +2396,7 @@ class HyLexer(RegexLexer): ] declarations = [ - 'def' 'defn', 'defun', 'defmacro', 'defclass', 'lambda', 'fn', 'setv' + 'def', 'defn', 'defun', 'defmacro', 'defclass', 'lambda', 'fn', 'setv' ] hy_builtins = [] diff 
--git a/pygments/lexers/compiled.py b/pygments/lexers/compiled.py index 25c7a4d8..38bd901f 100644 --- a/pygments/lexers/compiled.py +++ b/pygments/lexers/compiled.py @@ -2437,7 +2437,7 @@ class AdaLexer(RegexLexer): """ name = 'Ada' - aliases = ['ada', 'ada95' 'ada2005'] + aliases = ['ada', 'ada95', 'ada2005'] filenames = ['*.adb', '*.ads', '*.ada'] mimetypes = ['text/x-ada'] -- cgit v1.2.1 From 423b7668dce62f50b29cc38f2faf31619f021cee Mon Sep 17 00:00:00 2001 From: Alexei Sholik Date: Fri, 15 Aug 2014 03:19:44 +0300 Subject: [Elixir] No longer balance sigil terminators --- pygments/lexers/functional.py | 44 ++++++++++-------------------------- tests/examplefiles/example_elixir.ex | 2 ++ 2 files changed, 14 insertions(+), 32 deletions(-) diff --git a/pygments/lexers/functional.py b/pygments/lexers/functional.py index dd823fe3..5a796433 100644 --- a/pygments/lexers/functional.py +++ b/pygments/lexers/functional.py @@ -3218,20 +3218,19 @@ class ElixirLexer(RegexLexer): yield index, token, value def gen_elixir_sigil_rules(): - # these braces are balanced inside the sigil string - braces = [ + # all valid sigil terminators (excluding heredocs) + terminators = [ (r'\{', r'\}', 'cb'), (r'\[', r'\]', 'sb'), (r'\(', r'\)', 'pa'), (r'\<', r'\>', 'ab'), + (r'/', r'/', 'slas'), + (r'\|', r'\|', 'pipe'), + ('"', '"', 'quot'), + ("'", "'", 'apos'), ] - # these are also valid sigil terminators, they are not balanced - terms = [ - (r'/', 'slas'), (r'\|', 'pipe'), ('"', 'quot'), ("'", 'apos'), - ] - - # heredocs have slightly different rules, they are not balanced + # heredocs have slightly different rules triquotes = [(r'"""', 'triquot'), (r"'''", 'triapos')] token = String.Other @@ -3255,33 +3254,14 @@ class ElixirLexer(RegexLexer): include('heredoc_no_interpol'), ] - for term, name in terms: + for lterm, rterm, name in terminators: states['sigils'] += [ - (r'~[a-z]' + term, token, name + '-intp'), - (r'~[A-Z]' + term, token, name + '-no-intp'), + (r'~[a-z]' + lterm, token, name + '-intp'), + (r'~[A-Z]' + lterm, token, name + '-no-intp'), ] - - # Similar states to the braced sigils, but no balancing of - # terminators - states[name +'-intp'] = gen_elixir_sigstr_rules(term, token) + states[name +'-intp'] = gen_elixir_sigstr_rules(rterm, token) states[name +'-no-intp'] = \ - gen_elixir_sigstr_rules(term, token, interpol=False) - - for lbrace, rbrace, name in braces: - states['sigils'] += [ - (r'~[a-z]' + lbrace, token, name + '-intp'), - (r'~[A-Z]' + lbrace, token, name + '-no-intp') - ] - - states[name +'-intp'] = [ - (r'\\.', token), - (lbrace, token, '#push'), - ] + gen_elixir_sigstr_rules(rbrace, token) - - states[name +'-no-intp'] = [ - (r'\\.', token), - (lbrace, token, '#push'), - ] + gen_elixir_sigstr_rules(rbrace, token, interpol=False) + gen_elixir_sigstr_rules(rterm, token, interpol=False) return states diff --git a/tests/examplefiles/example_elixir.ex b/tests/examplefiles/example_elixir.ex index 09870443..775b30cc 100644 --- a/tests/examplefiles/example_elixir.ex +++ b/tests/examplefiles/example_elixir.ex @@ -45,6 +45,8 @@ atom" ~w(hello #{ ["has" <> "123", '\c\d', "\123 interpol" | []] } world)s ~W(hello #{no "123" \c\d \123 interpol} world)s +~s{Escapes terminators \{ and \}, but no {balancing} # outside of sigil here } + ~S"No escapes \s\t\n and no #{interpolation}" :"atoms work #{"to" <> "o"}" -- cgit v1.2.1 From 5bd7fce68011149a7850aa011889292bb5277dd2 Mon Sep 17 00:00:00 2001 From: Alexei Sholik Date: Fri, 15 Aug 2014 03:29:25 +0300 Subject: [Elixir] Remove "xor" from keywords and update 
regex patterns for numbers --- pygments/lexers/functional.py | 8 ++++---- tests/examplefiles/example_elixir.ex | 6 +++++- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/pygments/lexers/functional.py b/pygments/lexers/functional.py index 5a796433..83c5d04e 100644 --- a/pygments/lexers/functional.py +++ b/pygments/lexers/functional.py @@ -3169,7 +3169,7 @@ class ElixirLexer(RegexLexer): mimetypes = ['text/x-elixir'] KEYWORD = ['fn', 'do', 'end', 'after', 'else', 'rescue', 'catch'] - KEYWORD_OPERATOR = ['not', 'and', 'or', 'xor', 'when', 'in'] + KEYWORD_OPERATOR = ['not', 'and', 'or', 'when', 'in'] BUILTIN = [ 'case', 'cond', 'for', 'if', 'unless', 'try', 'receive', 'raise', 'quote', 'unquote', 'unquote_splicing', 'throw', 'super' @@ -3318,9 +3318,9 @@ class ElixirLexer(RegexLexer): (modname_re, Name.Class), # numbers - (r'0[bB][01]+', Number.Bin), - (r'0[0-7]+', Number.Oct), - (r'(?i)0x[\da-f]+', Number.Hex), + (r'0b[01]+', Number.Bin), + (r'0o[0-7]+', Number.Oct), + (r'0x[\da-fA-F]+', Number.Hex), (r'\d(_?\d)*\.\d(_?\d)*([eE][-+]?\d(_?\d)*)?', Number.Float), (r'\d(_?\d)*', Number.Integer), diff --git a/tests/examplefiles/example_elixir.ex b/tests/examplefiles/example_elixir.ex index 775b30cc..b2b4dc28 100644 --- a/tests/examplefiles/example_elixir.ex +++ b/tests/examplefiles/example_elixir.ex @@ -1,9 +1,13 @@ # Numbers 0b0101011 -1234 ; 0x1A ; 0xbeef ; 0763 +1234 ; 0x1A ; 0xbeef ; 0763 ; 0o123 3.14 ; 5.0e21 ; 0.5e-12 100_000_000 +# these are not valid numbers +0b012 ; 0xboar ; 0o888 +0B01 ; 0XAF ; 0O123 + # Characters ?a ; ?1 ; ?\n ; ?\s ; ?\c ; ? ; ?, ?\x{12} ; ?\x{abcd} -- cgit v1.2.1 From eeaff14517a7779c28d1e3eaacdc9ad501e862e8 Mon Sep 17 00:00:00 2001 From: Alex Date: Fri, 15 Aug 2014 12:11:02 +0300 Subject: [Elixir] Update character escape rules --- pygments/lexers/functional.py | 19 ++++++++++--------- tests/examplefiles/example_elixir.ex | 8 +++++--- 2 files changed, 15 insertions(+), 12 deletions(-) diff --git a/pygments/lexers/functional.py b/pygments/lexers/functional.py index 83c5d04e..9c010b1e 100644 --- a/pygments/lexers/functional.py +++ b/pygments/lexers/functional.py @@ -3276,20 +3276,22 @@ class ElixirLexer(RegexLexer): complex_name_re = r'(?:%s|%s|%s)' % (name_re, modname_re, ops_re) special_atom_re = r'(?:\.\.\.|<<>>|%{}|%|{})' + long_hex_char_re = r'(\\x{)([\da-fA-F]+)(})' + hex_char_re = r'(\\x[\da-fA-F]{1,2})' + escape_char_re = r'(\\[abdefnrstv])' + tokens = { 'root': [ (r'\s+', Text), (r'#.*$', Comment.Single), # Various kinds of characters - (r'(?i)(\?)(\\x{)([\da-f]+)(})', + (r'(\?)' + long_hex_char_re, bygroups(String.Char, String.Escape, Number.Hex, String.Escape)), - (r'(?i)(\?)(\\x[\da-f]{1,2})', - bygroups(String.Char, String.Escape)), - (r'(\?)(\\[0-7]{1,3})', + (r'(\?)' + hex_char_re, bygroups(String.Char, String.Escape)), - (r'(\?)(\\[abdefnrstv])', + (r'(\?)' + escape_char_re, bygroups(String.Char, String.Escape)), (r'\?\\?.', String.Char), @@ -3353,11 +3355,10 @@ class ElixirLexer(RegexLexer): (r'\n+', String.Heredoc), ], 'escapes': [ - (r'(?i)(\\x{)([\da-f]+)(})', + (long_hex_char_re, bygroups(String.Escape, Number.Hex, String.Escape)), - (r'(?i)\\x[\da-f]{1,2}', String.Escape), - (r'\\[0-7]{1,3}', String.Escape), - (r'\\[abdefnrstv]', String.Escape), + (hex_char_re, String.Escape), + (escape_char_re, String.Escape), ], 'interpol': [ (r'#{', String.Interpol, 'interpol_string'), diff --git a/tests/examplefiles/example_elixir.ex b/tests/examplefiles/example_elixir.ex index b2b4dc28..6a525ace 100644 --- a/tests/examplefiles/example_elixir.ex 
+++ b/tests/examplefiles/example_elixir.ex @@ -11,7 +11,9 @@ # Characters ?a ; ?1 ; ?\n ; ?\s ; ?\c ; ? ; ?, ?\x{12} ; ?\x{abcd} -?\x34 ; ?\xf +?\x34 ; ?\xF + +# these show that only the first digit is part of the character ?\123 ; ?\12 ; ?\7 # Atoms @@ -26,8 +28,8 @@ atom" # Strings "Hello world" -"Interspersed \x{ff} codes \7 \8 \65 \016 and \t\s\z\+ \\ escapes" -"Quotes ' inside \" \123 the \"\" \xF string \\\" end" +"Interspersed \x{ff} codes \7 \8 \65 \016 and \t\s\\s\z\+ \\ escapes" +"Quotes ' inside \" \123 the \"\" \xF \\xF string \\\" end" "Multiline string" -- cgit v1.2.1 From b5bc6c6d441b87f1b2cce478cd4de3c3143eac3a Mon Sep 17 00:00:00 2001 From: Alexei Sholik Date: Fri, 15 Aug 2014 12:14:00 +0300 Subject: [Elixir] Resolve issues with ':' and '::' punctionation --- pygments/lexers/functional.py | 12 ++++++++---- tests/examplefiles/example_elixir.ex | 5 +++-- 2 files changed, 11 insertions(+), 6 deletions(-) diff --git a/pygments/lexers/functional.py b/pygments/lexers/functional.py index 9c010b1e..b307a7d7 100644 --- a/pygments/lexers/functional.py +++ b/pygments/lexers/functional.py @@ -3135,7 +3135,7 @@ def gen_elixir_string_rules(name, symbol, token): (r'[^#%s\\]+' % (symbol,), token), include('escapes'), (r'\\.', token), - (r'(%s)(:?)' % (symbol,), bygroups(token, Punctuation), "#pop"), + (r'(%s)' % (symbol,), bygroups(token), "#pop"), include('interpol') ] return states @@ -3186,13 +3186,13 @@ class ElixirLexer(RegexLexer): OPERATORS3 = ['<<<', '>>>', '|||', '&&&', '^^^', '~~~', '===', '!=='] OPERATORS2 = [ - '==', '!=', '<=', '>=', '&&', '||', '<>', '++', '--', '|>', '=~' + '==', '!=', '<=', '>=', '&&', '||', '<>', '++', '--', '|>', '=~', + '->', '<-', '|', '.', '%', '=' ] OPERATORS1 = ['<', '>', '+', '-', '*', '/', '!', '^', '&'] PUNCTUATION = [ - '\\\\', '<<', '>>', '::', '->', '<-', '=>', '|', '(', ')', - '{', '}', ';', ',', '.', '[', ']', '%', '=' + '\\\\', '<<', '>>', '=>', '(', ')', '{', '}', ':', ';', ',', '[', ']' ] def get_tokens_unprocessed(self, text): @@ -3295,6 +3295,10 @@ class ElixirLexer(RegexLexer): bygroups(String.Char, String.Escape)), (r'\?\\?.', String.Char), + # '::' has to go before atoms + (r':::', String.Symbol), + (r'::', Operator), + # atoms (r':' + special_atom_re, String.Symbol), (r':' + complex_name_re, String.Symbol), diff --git a/tests/examplefiles/example_elixir.ex b/tests/examplefiles/example_elixir.ex index 6a525ace..41de83e5 100644 --- a/tests/examplefiles/example_elixir.ex +++ b/tests/examplefiles/example_elixir.ex @@ -24,7 +24,8 @@ line ' \s \123 \xff atom" :... ; :<<>> ; :%{} ; :% ; :{} -:++; :--; :*; :~~~ +:++; :--; :*; :~~~; ::: +:% ; :. ; :<- # Strings "Hello world" @@ -83,7 +84,7 @@ map = %{shortcut: "syntax"} # Comprehensions for x <- 1..10, x < 5, do: {x, x} pixels = "12345678" -for << <> <- pixels >> do +for << <> <- pixels >> do [r, {g, %{"b" => a}}] end -- cgit v1.2.1 From 37368d865858e12278cc31f8bb6b46d27d63fb2c Mon Sep 17 00:00:00 2001 From: Alex Date: Fri, 15 Aug 2014 12:15:11 +0300 Subject: [Elixir] Update the example file --- tests/examplefiles/example_elixir.ex | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/tests/examplefiles/example_elixir.ex b/tests/examplefiles/example_elixir.ex index 41de83e5..3c3d72f3 100644 --- a/tests/examplefiles/example_elixir.ex +++ b/tests/examplefiles/example_elixir.ex @@ -60,7 +60,7 @@ atom" # Operators x = 1 + 2.0 * 3 -y = true and false; z = false xor true +y = true and false; z = false or true ... = 144 ... 
== !x && y || z "hello" |> String.upcase |> String.downcase() @@ -94,7 +94,7 @@ end # Identifiers abc_123 = 1 _018OP = 2 -A__0 = 3 +A__0 == 3 # Modules defmodule Long.Module.Name do @@ -103,7 +103,12 @@ defmodule Long.Module.Name do @doc """ Multiline docstring "with quotes" - and #{ %{"interpolation" => "in" <> "action"} } + and #{ inspect %{"interpolation" => "in" <> "action"} } + now with #{ {:a, 'tuple'} } + and #{ inspect { + :tuple, + %{ with: "nested #{ inspect %{ :interpolation => %{} } }" } + } } """ defstruct [:a, :name, :height] -- cgit v1.2.1 From 04e47f0f5ebd530eb9bc4380848d31279f01fb0b Mon Sep 17 00:00:00 2001 From: Alexei Sholik Date: Fri, 15 Aug 2014 13:26:02 +0300 Subject: [Elixir] Parse maps and tuples to make non-trivial and nested string interpolation work --- pygments/lexers/functional.py | 24 +++++++++++++++++++++--- tests/examplefiles/example_elixir.ex | 3 ++- 2 files changed, 23 insertions(+), 4 deletions(-) diff --git a/pygments/lexers/functional.py b/pygments/lexers/functional.py index b307a7d7..ed63be44 100644 --- a/pygments/lexers/functional.py +++ b/pygments/lexers/functional.py @@ -3187,12 +3187,12 @@ class ElixirLexer(RegexLexer): OPERATORS3 = ['<<<', '>>>', '|||', '&&&', '^^^', '~~~', '===', '!=='] OPERATORS2 = [ '==', '!=', '<=', '>=', '&&', '||', '<>', '++', '--', '|>', '=~', - '->', '<-', '|', '.', '%', '=' + '->', '<-', '|', '.', '=' ] OPERATORS1 = ['<', '>', '+', '-', '*', '/', '!', '^', '&'] PUNCTUATION = [ - '\\\\', '<<', '>>', '=>', '(', ')', '{', '}', ':', ';', ',', '[', ']' + '\\\\', '<<', '>>', '=>', '(', ')', ':', ';', ',', '[', ']' ] def get_tokens_unprocessed(self, text): @@ -3321,7 +3321,7 @@ class ElixirLexer(RegexLexer): # identifiers (name_re, Name), - (modname_re, Name.Class), + (r'(%%?)(%s)' % (modname_re,), bygroups(Punctuation, Name.Class)), # numbers (r'0b[01]+', Number.Bin), @@ -3337,6 +3337,9 @@ class ElixirLexer(RegexLexer): (r"'", String.Single, 'string_single'), include('sigils'), + + (r'%{', Punctuation, 'map_key'), + (r'{', Punctuation, 'tuple'), ], 'heredoc_double': [ (r'^\s*"""', String.Heredoc, '#pop'), @@ -3371,6 +3374,21 @@ class ElixirLexer(RegexLexer): (r'}', String.Interpol, "#pop"), include('root') ], + 'map_key': [ + include('root'), + (r':', Punctuation, 'map_val'), + (r'=>', Punctuation, 'map_val'), + (r'}', Punctuation, '#pop'), + ], + 'map_val': [ + include('root'), + (r',', Punctuation, '#pop'), + (r'(?=})', Punctuation, '#pop'), + ], + 'tuple': [ + include('root'), + (r'}', Punctuation, '#pop'), + ], } tokens.update(gen_elixir_string_rules('double', '"', String.Double)) tokens.update(gen_elixir_string_rules('single', "'", String.Single)) diff --git a/tests/examplefiles/example_elixir.ex b/tests/examplefiles/example_elixir.ex index 3c3d72f3..27462268 100644 --- a/tests/examplefiles/example_elixir.ex +++ b/tests/examplefiles/example_elixir.ex @@ -129,7 +129,8 @@ end # Structs defmodule Second.Module do s = %Long.Module.Name{name: "Silly"} - %{s | height: {192, :cm}} + %Long.Module.Name{s | height: {192, :cm}} + ".. #{%Long.Module.Name{s | height: {192, :cm}}} .." end # Types, pseudo-vars, attributes -- cgit v1.2.1 From 0d9f70215c44a27ea10e4759300464873b618a10 Mon Sep 17 00:00:00 2001 From: Andy Li Date: Sun, 17 Aug 2014 17:01:29 +0800 Subject: HaxeLexer: macro class reification could have no class name. 
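A quick, ad-hoc way to eyeball the effect from Python (illustrative only; this snippet is not part of the patch or the test suite, and the Haxe sample in it is made up):

    from pygments import lex
    from pygments.lexers import get_lexer_by_name

    # A reified class with no name: the new 'macro-class' state added below
    # lets the class body start right after 'macro class'.
    sample = "var def = macro class { var x : Int; };"
    for token, value in lex(sample, get_lexer_by_name('haxe')):
        print(token, repr(value))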
--- pygments/lexers/web.py | 7 ++++++- tests/examplefiles/example.hx | 7 +++++++ 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/pygments/lexers/web.py b/pygments/lexers/web.py index cd619596..0188508f 100644 --- a/pygments/lexers/web.py +++ b/pygments/lexers/web.py @@ -1611,13 +1611,18 @@ class HaxeLexer(ExtendedRegexLexer): (r'(?:extern|private)\b', Keyword.Declaration), (r'(?:abstract)\b', Keyword.Declaration, ('#pop', 'optional-semicolon', 'abstract')), - (r'(?:class|interface)\b', Keyword.Declaration, ('#pop', 'optional-semicolon', 'class')), + (r'(?:class|interface)\b', Keyword.Declaration, ('#pop', 'optional-semicolon', 'macro-class')), (r'(?:enum)\b', Keyword.Declaration, ('#pop', 'optional-semicolon', 'enum')), (r'(?:typedef)\b', Keyword.Declaration, ('#pop', 'optional-semicolon', 'typedef')), default(('#pop', 'expr')), ], + 'macro-class': [ + (r'\{', Punctuation, ('#pop', 'class-body')), + include('class') + ], + # cast can be written as "cast expr" or "cast(expr, type)" 'cast': [ include('spaces'), diff --git a/tests/examplefiles/example.hx b/tests/examplefiles/example.hx index 381cf825..7584fc81 100644 --- a/tests/examplefiles/example.hx +++ b/tests/examplefiles/example.hx @@ -181,5 +181,12 @@ var c = macro class MyClass { var c = macro interface IClass {}; +//macro class could have no name... +var def = macro class { + private inline function new(loader) this = loader; + private var loader(get,never) : $loaderType; + inline private function get_loader() : $loaderType return this; +}; + //ECheckType var f = (123:Float); \ No newline at end of file -- cgit v1.2.1 From 1987be8c8f814b4fb5ec3240e91a7ce2a399ed26 Mon Sep 17 00:00:00 2001 From: bgirarde Date: Thu, 21 Aug 2014 16:49:40 -0700 Subject: Fix parsing of escaped quotes in strings in MySqlLexer --- pygments/lexers/sql.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/pygments/lexers/sql.py b/pygments/lexers/sql.py index 7540f079..5070d487 100644 --- a/pygments/lexers/sql.py +++ b/pygments/lexers/sql.py @@ -464,10 +464,9 @@ class MySqlLexer(RegexLexer): (r'/\*', Comment.Multiline, 'multiline-comments'), (r'[0-9]+', Number.Integer), (r'[0-9]*\.[0-9]+(e[+-][0-9]+)', Number.Float), - # TODO: add backslash escapes - (r"'(''|[^'])*'", String.Single), - (r'"(""|[^"])*"', String.Double), - (r"`(``|[^`])*`", String.Symbol), + (r"'(\\\\|\\'|''|[^'])*'", String.Single), + (r'"(\\\\|\\"|""|[^"])*"', String.Double), + (r"`(\\\\|\\`|``|[^`])*`", String.Symbol), (r'[+*/<>=~!@#%^&|`?-]', Operator), (r'\b(tinyint|smallint|mediumint|int|integer|bigint|date|' r'datetime|time|bit|bool|tinytext|mediumtext|longtext|text|' -- cgit v1.2.1 From a64f6bd6bf6090a71d048ba426e21a19a906704d Mon Sep 17 00:00:00 2001 From: Matt Montag Date: Sun, 24 Aug 2014 02:35:45 +0000 Subject: Sentence fix --- doc/docs/lexerdevelopment.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/docs/lexerdevelopment.rst b/doc/docs/lexerdevelopment.rst index eab1306a..23f7cdc6 100644 --- a/doc/docs/lexerdevelopment.rst +++ b/doc/docs/lexerdevelopment.rst @@ -190,7 +190,7 @@ nevertheless need a group, use a non-capturing group defined using this syntax: If you find yourself needing a capturing group inside the regex which shouldn't be part of the output but is used in the regular expressions for backreferencing (eg: ``r'(<(foo|bar)>)(.*?)()'``), you can pass `None` -to the bygroups function and it will skip that group will be skipped in the +to the bygroups function and that group will be skipped in the output. 
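To make the paragraph above concrete, here is a minimal, self-contained sketch; the lexer name, token choices and sample input are invented for illustration, only the use of ``None`` inside ``bygroups`` mirrors the documented behaviour::

    from pygments.lexer import RegexLexer, bygroups
    from pygments.token import Name, Text

    class TagSketchLexer(RegexLexer):
        # Toy lexer: the inner (foo|bar) group exists only so that \2 can
        # backreference it, so it is mapped to None and emits no token.
        name = 'TagSketch'
        tokens = {
            'root': [
                (r'(<(foo|bar)>)(.*?)(</\2>)',
                 bygroups(Name.Tag, None, Text, Name.Tag)),
                (r'[^<]+', Text),
                (r'<', Text),
            ],
        }

Running ``TagSketchLexer().get_tokens('<foo>hi</foo>')`` yields Name.Tag for ``<foo>``, Text for ``hi`` and Name.Tag for ``</foo>``; the inner ``foo`` group produces no token of its own.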
-- cgit v1.2.1 From a3e9a7e8c2f7b1da9338d38ecf2366c6e5ef8fff Mon Sep 17 00:00:00 2001 From: Yannick Loiseau Date: Tue, 26 Aug 2014 14:16:30 +0200 Subject: add a almost complete example file --- tests/examplefiles/example.golo | 113 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 113 insertions(+) create mode 100644 tests/examplefiles/example.golo diff --git a/tests/examplefiles/example.golo b/tests/examplefiles/example.golo new file mode 100644 index 00000000..92ff78b5 --- /dev/null +++ b/tests/examplefiles/example.golo @@ -0,0 +1,113 @@ +# +# Comments +# + +module pygments.Example + +import some.Module + +local function foo = |a, b| -> a + b + +---- +golodoc string +---- +augment java.util.Collection { + + ---- + sub doc + ---- + function plop = |this, v| { + return this: length() + v + } +} + +function bar = |a, b| { + let msg = "a string" + var tmp = "" + tmp = tmp + a: toString() + println(tmp + b) +} + +function baz = { + foreach i in range(0, 5) { + if i % 2 == 0 and true or false { + print("e") + } else { + print("o") + } + } +} + +function userMatch = |v| -> + match { + when v % 2 == 0 then "e" + otherwise "o" + } +} + +function add = |x| -> |y| -> x + y + +let aChar = 'a' + +let multiline = +""" +foo +bar +baz +""" + +local function myObj = -> DynamicObject(): + name("foo"): + age(25): + define("meth", |this| -> this: name() + this: age() + +---- +Golo doc string +---- +function nullTest = { + let m = map[ + ["a", 1], + ["b", 2] + ] + + println(map: get("a") orIfNull 0) + println(map: get("b")?: toString() orIfNull "0") + +} + +struct Point = { x, y } + +function deco1 = |fun| { + return |args...| { + return "deco1 + " + fun: invokeWithArguments(args) + } +} + +@deco1 +function decofoo = |a| { + return "foo: " + a +} + +@deco1 +function decobar = |a| -> "bar: " + a + +function deco2 = |fun| { + return |args...| { + return "deco2 + " + fun: invokeWithArguments(args) + } +} + +@deco2 +@deco1 +function decobaz = |a| -> "baz: " + a + +let deco3 = ^deco1: andThen(^deco2) + +@deco3 +function decospam = |a| -> "spam: " + a + +@another.Module.deco +function ping = -> "pong" + +@deco("with", params) +function gnop = -> "gnip" -- cgit v1.2.1 From fdd26dca6a9004419c516ab9d8db82a846ed3f87 Mon Sep 17 00:00:00 2001 From: Yannick Loiseau Date: Tue, 26 Aug 2014 14:17:00 +0200 Subject: add the new upcomming decorator syntax for Golo --- pygments/lexers/jvm.py | 1 + 1 file changed, 1 insertion(+) diff --git a/pygments/lexers/jvm.py b/pygments/lexers/jvm.py index fa05b7ed..0d16e2fd 100644 --- a/pygments/lexers/jvm.py +++ b/pygments/lexers/jvm.py @@ -1219,6 +1219,7 @@ class GoloLexer(RegexLexer): (r'-?\d[\d_]*', Number.Integer), ('`?[a-zA-Z_][\w$]*', Name), + (r'@[a-zA-Z_][\w$._]*', Name.Decorator), (r'"""', String, combined('stringescape', 'triplestring')), (r'"', String, combined('stringescape', 'doublestring')), -- cgit v1.2.1 From ce7ab4225df16d761e62ff2d7ab6fc5d88d57eb1 Mon Sep 17 00:00:00 2001 From: Tim Baumann Date: Sun, 31 Aug 2014 17:31:47 +0200 Subject: Idris lexer: parse commented out type declarations as comments --- pygments/lexers/functional.py | 10 +++++----- tests/examplefiles/test.idr | 7 +++++++ 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/pygments/lexers/functional.py b/pygments/lexers/functional.py index a22c4f55..85f07a31 100644 --- a/pygments/lexers/functional.py +++ b/pygments/lexers/functional.py @@ -1542,14 +1542,14 @@ class IdrisLexer(RegexLexer): tokens = { 'root': [ - # Declaration - (r'^(\s*)([^\s\(\)\{\}]+)(\s*)(:)(\s*)', - bygroups(Text, 
Name.Function, Text, Operator.Word, Text)), # Comments (r'^(\s*)(%%%s)' % '|'.join(annotations), bygroups(Text, Keyword.Reserved)), - (r'--(?![!#$%&*+./<=>?@\^|_~:\\]).*?$', Comment.Single), - (r'{-', Comment.Multiline, 'comment'), + (r'(\s*)(--(?![!#$%&*+./<=>?@\^|_~:\\]).*?)$', bygroups(Text, Comment.Single)), + (r'(\s*)({-)', bygroups(Text, Comment.Multiline), 'comment'), + # Declaration + (r'^(\s*)([^\s\(\)\{\}]+)(\s*)(:)(\s*)', + bygroups(Text, Name.Function, Text, Operator.Word, Text)), # Identifiers (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved), (r'(import|module)(\s+)', bygroups(Keyword.Reserved, Text), 'module'), diff --git a/tests/examplefiles/test.idr b/tests/examplefiles/test.idr index f0e96d88..c31232b5 100644 --- a/tests/examplefiles/test.idr +++ b/tests/examplefiles/test.idr @@ -87,6 +87,13 @@ using (G : Vect n Ty) testFac : Int testFac = interp [] eFac 4 +--testFacTooBig : Int +--testFacTooBig = interp [] eFac 100000 + + {-testFacTooBig2 : Int +testFacTooBig2 = interp [] eFac 1000 +-} + main : IO () main = print testFac -- cgit v1.2.1 From fb36a5cea3738b96036b9d536e54cd3d2b4261d6 Mon Sep 17 00:00:00 2001 From: Tim Baumann Date: Sun, 31 Aug 2014 17:40:23 +0200 Subject: Idris lexer: add support for doc comments --- pygments/lexers/functional.py | 1 + tests/examplefiles/test.idr | 1 + 2 files changed, 2 insertions(+) diff --git a/pygments/lexers/functional.py b/pygments/lexers/functional.py index 85f07a31..2c7e03ed 100644 --- a/pygments/lexers/functional.py +++ b/pygments/lexers/functional.py @@ -1546,6 +1546,7 @@ class IdrisLexer(RegexLexer): (r'^(\s*)(%%%s)' % '|'.join(annotations), bygroups(Text, Keyword.Reserved)), (r'(\s*)(--(?![!#$%&*+./<=>?@\^|_~:\\]).*?)$', bygroups(Text, Comment.Single)), + (r'(\s*)(\|{3}.*?)$', bygroups(Text, Comment.Single)), (r'(\s*)({-)', bygroups(Text, Comment.Multiline), 'comment'), # Declaration (r'^(\s*)([^\s\(\)\{\}]+)(\s*)(:)(\s*)', diff --git a/tests/examplefiles/test.idr b/tests/examplefiles/test.idr index c31232b5..fd008d31 100644 --- a/tests/examplefiles/test.idr +++ b/tests/examplefiles/test.idr @@ -60,6 +60,7 @@ using (G : Vect n Ty) fromInteger = Val . fromInteger + ||| Evaluates an expression in the given context. 
interp : Env G -> {static} Expr G t -> interpTy t interp env (Var i) = lookup i env interp env (Val x) = x -- cgit v1.2.1 From eba3e1877925003f42ef352404a6f5c8fbe768ab Mon Sep 17 00:00:00 2001 From: Tim Baumann Date: Sun, 31 Aug 2014 19:06:15 +0200 Subject: Idris lexer: add new directives --- pygments/lexers/functional.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pygments/lexers/functional.py b/pygments/lexers/functional.py index 2c7e03ed..8f2697d5 100644 --- a/pygments/lexers/functional.py +++ b/pygments/lexers/functional.py @@ -1537,13 +1537,13 @@ class IdrisLexer(RegexLexer): 'DC[1-4]','NAK','SYN','ETB','CAN', 'EM','SUB','ESC','[FGRU]S','SP','DEL'] - annotations = ['assert_total','lib','link','include','provide','access', - 'default'] + directives = ['lib','link','flag','include','hide','freeze','access', + 'default','logging','dynamic','name','error_handlers','language'] tokens = { 'root': [ # Comments - (r'^(\s*)(%%%s)' % '|'.join(annotations), + (r'^(\s*)(%%%s)' % '|'.join(directives), bygroups(Text, Keyword.Reserved)), (r'(\s*)(--(?![!#$%&*+./<=>?@\^|_~:\\]).*?)$', bygroups(Text, Comment.Single)), (r'(\s*)(\|{3}.*?)$', bygroups(Text, Comment.Single)), -- cgit v1.2.1 From fb6c708a8b3706cf97ebfecb9ad46a92bdf16116 Mon Sep 17 00:00:00 2001 From: BJ Neilsen Date: Sun, 7 Sep 2014 23:17:57 -0600 Subject: Add "oneof" keyword to ProtoBufLexer [Protocol Buffers version 2.6][1] added a new union construct with the `oneof` keyword. [1]: https://groups.google.com/d/msg/protobuf/lvI8-sWZbUY/2_a8LVDMi5YJ --- pygments/lexers/other.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py index 608e499d..0075fc07 100644 --- a/pygments/lexers/other.py +++ b/pygments/lexers/other.py @@ -2473,7 +2473,7 @@ class ProtoBufLexer(RegexLexer): (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single), (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment.Multiline), (r'\b(import|option|optional|required|repeated|default|packed|' - r'ctype|extensions|to|max|rpc|returns)\b', Keyword), + r'ctype|extensions|to|max|rpc|returns|oneof)\b', Keyword), (r'(int32|int64|uint32|uint64|sint32|sint64|' r'fixed32|fixed64|sfixed32|sfixed64|' r'float|double|bool|string|bytes)\b', Keyword.Type), -- cgit v1.2.1 From 07c10db9002d28d1061e51a34ef1c000a4113b45 Mon Sep 17 00:00:00 2001 From: Thomas Van Doren Date: Tue, 9 Sep 2014 09:28:30 -0700 Subject: Add 'with' keyword and octal int. 'with' keyword is added to the language for the upcoming release to support task ref intents. Octal integers are also entering the language. 
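For anyone trying the change locally, a small snippet like the one below prints how the lexer classifies the new syntax (illustrative only; the Chapel sample is made up and no particular output is asserted):

    from pygments import lex
    from pygments.lexers import get_lexer_by_name

    chapel = get_lexer_by_name('chapel')
    sample = 'forall i in D with (ref target) do target[i] = 0o777;'
    for token, value in lex(sample, chapel):
        print(token, repr(value))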
--- pygments/lexers/compiled.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/pygments/lexers/compiled.py b/pygments/lexers/compiled.py index 53292dfa..4249da92 100644 --- a/pygments/lexers/compiled.py +++ b/pygments/lexers/compiled.py @@ -3924,7 +3924,7 @@ class ChapelLexer(RegexLexer): r'delete|dmapped|do|domain|else|enum|export|extern|for|forall|' r'if|index|inline|iter|label|lambda|let|local|new|noinit|on|' r'otherwise|pragma|reduce|return|scan|select|serial|single|sparse|' - r'subdomain|sync|then|use|when|where|while|yield|zip)\b', + r'subdomain|sync|then|use|when|where|while|with|yield|zip)\b', Keyword), (r'(proc)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'procname'), (r'(class|module|record|union)(\s+)', bygroups(Keyword, Text), @@ -3948,6 +3948,8 @@ class ChapelLexer(RegexLexer): (r'0[xX][0-9a-fA-F]+', Number.Hex), # -- decimal (r'[0-9]+', Number.Integer), + # -- octal + (r'0[oO][0-7]+', Number.Oct), # strings (r'["\'](\\\\|\\"|[^"\'])*["\']', String), -- cgit v1.2.1 From 3fcde32e95cc11c24015d3d33d2e4c691519a8ca Mon Sep 17 00:00:00 2001 From: Thomas Van Doren Date: Tue, 9 Sep 2014 10:34:58 -0700 Subject: Update chpl example file with 'with' keyword and octal ints. --- tests/examplefiles/99_bottles_of_beer.chpl | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/tests/examplefiles/99_bottles_of_beer.chpl b/tests/examplefiles/99_bottles_of_beer.chpl index 10c3e48a..47fcaaf6 100644 --- a/tests/examplefiles/99_bottles_of_beer.chpl +++ b/tests/examplefiles/99_bottles_of_beer.chpl @@ -147,3 +147,15 @@ class IntPair { } var ip = new IntPair(17,2); write(ip); + +var targetDom: {1..10}, + target: [targetDom] int; +coforall i in targetDom with (ref target) { + targetDom[i] = i ** 3; +} + +var wideOpen = 0o777, + mememe = 0o600, + clique_y = 0O660, + zeroOct = 0o0, + minPosOct = 0O1; -- cgit v1.2.1 From b966709650bba050ea9e173a531775af8c5f1c7e Mon Sep 17 00:00:00 2001 From: Thomas Van Doren Date: Tue, 9 Sep 2014 10:46:44 -0700 Subject: Parse octal ints before regular ints --- pygments/lexers/compiled.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pygments/lexers/compiled.py b/pygments/lexers/compiled.py index 4249da92..79790725 100644 --- a/pygments/lexers/compiled.py +++ b/pygments/lexers/compiled.py @@ -3946,10 +3946,10 @@ class ChapelLexer(RegexLexer): (r'0[bB][0-1]+', Number.Bin), # -- hex (r'0[xX][0-9a-fA-F]+', Number.Hex), - # -- decimal - (r'[0-9]+', Number.Integer), # -- octal (r'0[oO][0-7]+', Number.Oct), + # -- decimal + (r'[0-9]+', Number.Integer), # strings (r'["\'](\\\\|\\"|[^"\'])*["\']', String), -- cgit v1.2.1 From a5e8f686cea05a44f62423cfdb5c628f452bb55a Mon Sep 17 00:00:00 2001 From: Alexei Sholik Date: Wed, 10 Sep 2014 20:32:40 +0300 Subject: [Elixir] Add the last batch of new operators introduced just before the 1.0.0 release. 
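An easy way to see the new free operators come through the lexer (just an illustrative snippet, not a test):

    from pygments import lex
    from pygments.lexers import get_lexer_by_name

    elixir = get_lexer_by_name('elixir')
    sample = 'p ~>> f; p1 <~> p2; p |~> f; p1 <|> p2; p1 ~> p2; p1 <~ p2'
    for token, value in lex(sample, elixir):
        print(token, repr(value))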
--- pygments/lexers/functional.py | 8 ++++++-- tests/examplefiles/example_elixir.ex | 8 ++++++++ 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/pygments/lexers/functional.py b/pygments/lexers/functional.py index ed63be44..9b4d26c2 100644 --- a/pygments/lexers/functional.py +++ b/pygments/lexers/functional.py @@ -3184,10 +3184,14 @@ class ElixirLexer(RegexLexer): PSEUDO_VAR = ['_', '__MODULE__', '__DIR__', '__ENV__', '__CALLER__'] - OPERATORS3 = ['<<<', '>>>', '|||', '&&&', '^^^', '~~~', '===', '!=='] + + OPERATORS3 = [ + '<<<', '>>>', '|||', '&&&', '^^^', '~~~', '===', '!==', + '~>>', '<~>', '\|~>', '<\|>', + ] OPERATORS2 = [ '==', '!=', '<=', '>=', '&&', '||', '<>', '++', '--', '|>', '=~', - '->', '<-', '|', '.', '=' + '->', '<-', '|', '.', '=', '~>', '<~', ] OPERATORS1 = ['<', '>', '+', '-', '*', '/', '!', '^', '&'] diff --git a/tests/examplefiles/example_elixir.ex b/tests/examplefiles/example_elixir.ex index 27462268..ddca7f60 100644 --- a/tests/examplefiles/example_elixir.ex +++ b/tests/examplefiles/example_elixir.ex @@ -66,6 +66,14 @@ y = true and false; z = false or true "hello" |> String.upcase |> String.downcase() {^z, a} = {true, x} +# Free operators (added in 1.0.0) +p ~>> f = bind(p, f) +p1 ~> p2 = pair_right(p1, p2) +p1 <~ p2 = pair_left(p1, p2) +p1 <~> p2 = pair_both(p1, p2) +p |~> f = map(p, f) +p1 <|> p2 = either(p1, p2) + # Lists, tuples, maps, keywords [1, :a, 'hello'] ++ [2, 3] [:head | [?t, ?a, ?i, ?l]] -- cgit v1.2.1 From 6300669fd2009650cbe7b4d0d53a0884ebf0ae91 Mon Sep 17 00:00:00 2001 From: Paulo Moura Date: Thu, 11 Sep 2014 11:01:42 +0000 Subject: Updated Logtalk lexer --- pygments/lexers/other.py | 91 +++++++++++++++++++++++------------------------- 1 file changed, 44 insertions(+), 47 deletions(-) diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py index 0075fc07..66e70a29 100644 --- a/pygments/lexers/other.py +++ b/pygments/lexers/other.py @@ -494,7 +494,7 @@ class LogtalkLexer(RegexLexer): name = 'Logtalk' aliases = ['logtalk'] - filenames = ['*.lgt'] + filenames = ['*.lgt', '*.logtalk'] mimetypes = ['text/x-logtalk'] tokens = { @@ -509,24 +509,24 @@ class LogtalkLexer(RegexLexer): (r'\s+', Text), # Numbers (r"0'.", Number), - (r'0b[01]+', Number.Bin), - (r'0o[0-7]+', Number.Oct), - (r'0x[0-9a-fA-F]+', Number.Hex), - (r'\d+\.?\d*((e|E)(\+|-)?\d+)?', Number.Float), + (r'0b[01]+', Number), + (r'0o[0-7]+', Number), + (r'0x[0-9a-fA-F]+', Number), + (r'\d+\.?\d*((e|E)(\+|-)?\d+)?', Number), # Variables - (r'([A-Z_]\w*)', Name.Variable), + (r'([A-Z_][a-zA-Z0-9_]*)', Name.Variable), # Event handlers (r'(after|before)(?=[(])', Keyword), + # Message forwarding handler + (r'forward(?=[(])', Keyword), # Execution-context methods (r'(parameter|this|se(lf|nder))(?=[(])', Keyword), # Reflection (r'(current_predicate|predicate_property)(?=[(])', Keyword), # DCGs and term expansion - (r'(expand_(goal|term)|(goal|term)_expansion|phrase)(?=[(])', - Keyword), + (r'(expand_(goal|term)|(goal|term)_expansion|phrase)(?=[(])', Keyword), # Entity - (r'(abolish|c(reate|urrent))_(object|protocol|category)(?=[(])', - Keyword), + (r'(abolish|c(reate|urrent))_(object|protocol|category)(?=[(])', Keyword), (r'(object|protocol|category)_property(?=[(])', Keyword), # Entity relations (r'co(mplements_object|nforms_to_protocol)(?=[(])', Keyword), @@ -538,32 +538,30 @@ class LogtalkLexer(RegexLexer): # Flags (r'(current|set)_logtalk_flag(?=[(])', Keyword), # Compiling, loading, and library paths - (r'logtalk_(compile|l(ibrary_path|oad_context|oad))(?=[(])', - Keyword), + 
(r'logtalk_(compile|l(ibrary_path|oad|oad_context)|make)(?=[(])', Keyword), + (r'\blogtalk_make\b', Keyword), # Database (r'(clause|retract(all)?)(?=[(])', Keyword), (r'a(bolish|ssert(a|z))(?=[(])', Keyword), # Control constructs (r'(ca(ll|tch)|throw)(?=[(])', Keyword), - (r'(fail|true)\b', Keyword), + (r'(fa(il|lse)|true)\b', Keyword), # All solutions (r'((bag|set)of|f(ind|or)all)(?=[(])', Keyword), # Multi-threading meta-predicates - (r'threaded(_(call|once|ignore|exit|peek|wait|notify))?(?=[(])', - Keyword), + (r'threaded(_(call|once|ignore|exit|peek|wait|notify))?(?=[(])', Keyword), # Term unification - (r'unify_with_occurs_check(?=[(])', Keyword), + (r'(subsumes_term|unify_with_occurs_check)(?=[(])', Keyword), # Term creation and decomposition - (r'(functor|arg|copy_term|numbervars)(?=[(])', Keyword), + (r'(functor|arg|copy_term|numbervars|term_variables)(?=[(])', Keyword), # Evaluable functors - (r'(rem|mod|abs|sign)(?=[(])', Keyword), + (r'(rem|m(ax|in|od)|abs|sign)(?=[(])', Keyword), (r'float(_(integer|fractional)_part)?(?=[(])', Keyword), (r'(floor|truncate|round|ceiling)(?=[(])', Keyword), # Other arithmetic functors - (r'(cos|atan|exp|log|s(in|qrt))(?=[(])', Keyword), + (r'(cos|a(cos|sin|tan)|exp|log|s(in|qrt))(?=[(])', Keyword), # Term testing - (r'(var|atom(ic)?|integer|float|c(allable|ompound)|n(onvar|umber)|' - r'ground)(?=[(])', Keyword), + (r'(var|atom(ic)?|integer|float|c(allable|ompound)|n(onvar|umber)|ground|acyclic_term)(?=[(])', Keyword), # Term comparison (r'compare(?=[(])', Keyword), # Stream selection and control @@ -571,8 +569,7 @@ class LogtalkLexer(RegexLexer): (r'(open|close)(?=[(])', Keyword), (r'flush_output(?=[(])', Keyword), (r'(at_end_of_stream|flush_output)\b', Keyword), - (r'(stream_property|at_end_of_stream|set_stream_position)(?=[(])', - Keyword), + (r'(stream_property|at_end_of_stream|set_stream_position)(?=[(])', Keyword), # Character and byte input/output (r'(nl|(get|peek|put)_(byte|c(har|ode)))(?=[(])', Keyword), (r'\bnl\b', Keyword), @@ -594,12 +591,14 @@ class LogtalkLexer(RegexLexer): # External call (r'[{}]', Keyword), # Logic and control - (r'\b(ignore|once)(?=[(])', Keyword), + (r'(ignore|once)(?=[(])', Keyword), (r'\brepeat\b', Keyword), # Sorting (r'(key)?sort(?=[(])', Keyword), # Bitwise functors (r'(>>|<<|/\\|\\\\|\\)', Operator), + # Predicate aliases + (r'\bas\b', Operator), # Arithemtic evaluation (r'\bis\b', Keyword), # Arithemtic comparison @@ -630,20 +629,20 @@ class LogtalkLexer(RegexLexer): # Ponctuation (r'[()\[\],.|]', Text), # Atoms - (r"[a-z]\w*", Text), - (r"'", String, 'quoted_atom'), + (r"[a-z][a-zA-Z0-9_]*", Text), + (r"[']", String, 'quoted_atom'), ], 'quoted_atom': [ - (r"''", String), - (r"'", String, '#pop'), + (r"['][']", String), + (r"[']", String, '#pop'), (r'\\([\\abfnrtv"\']|(x[a-fA-F0-9]+|[0-7]+)\\)', String.Escape), (r"[^\\'\n]+", String), (r'\\', String), ], 'directive': [ - # Conditional compilation directives + # Conditional compilation directives (r'(el)?if(?=[(])', Keyword, 'root'), (r'(e(lse|ndif))[.]', Keyword, 'root'), # Entity directives @@ -653,32 +652,28 @@ class LogtalkLexer(RegexLexer): (r'(public|protected|private)(?=[(])', Keyword, 'root'), # Other directives (r'e(n(coding|sure_loaded)|xport)(?=[(])', Keyword, 'root'), - (r'in(fo|itialization)(?=[(])', Keyword, 'root'), - (r'(dynamic|synchronized|threaded)[.]', Keyword, 'root'), - (r'(alias|d(ynamic|iscontiguous)|m(eta_predicate|ode|ultifile)|' - r's(et_(logtalk|prolog)_flag|ynchronized))(?=[(])', - Keyword, 'root'), + 
(r'in(clude|itialization|fo)(?=[(])', Keyword, 'root'), + (r'(built_in|dynamic|synchronized|threaded)[.]', Keyword, 'root'), + (r'(alias|d(ynamic|iscontiguous)|m(eta_(non_terminal|predicate)|ode|ultifile)|s(et_(logtalk|prolog)_flag|ynchronized))(?=[(])', Keyword, 'root'), (r'op(?=[(])', Keyword, 'root'), - (r'(c(alls|oinductive)|reexport|use(s|_module))(?=[(])', - Keyword, 'root'), - (r'[a-z]\w*(?=[(])', Text, 'root'), - (r'[a-z]\w*[.]', Text, 'root'), + (r'(c(alls|oinductive)|module|reexport|use(s|_module))(?=[(])', Keyword, 'root'), + (r'[a-z][a-zA-Z0-9_]*(?=[(])', Text, 'root'), + (r'[a-z][a-zA-Z0-9_]*[.]', Text, 'root'), ], 'entityrelations': [ - (r'(complements|extends|i(nstantiates|mp(lements|orts))|specializes)' - r'(?=[(])', Keyword), + (r'(complements|extends|i(nstantiates|mp(lements|orts))|specializes)(?=[(])', Keyword), # Numbers (r"0'.", Number), - (r'0b[01]+', Number.Bin), - (r'0o[0-7]+', Number.Oct), - (r'0x[0-9a-fA-F]+', Number.Hex), - (r'\d+\.?\d*((e|E)(\+|-)?\d+)?', Number.Float), + (r'0b[01]+', Number), + (r'0o[0-7]+', Number), + (r'0x[0-9a-fA-F]+', Number), + (r'\d+\.?\d*((e|E)(\+|-)?\d+)?', Number), # Variables - (r'([A-Z_]\w*)', Name.Variable), + (r'([A-Z_][a-zA-Z0-9_]*)', Name.Variable), # Atoms - (r"[a-z]\w*", Text), - (r"'", String, 'quoted_atom'), + (r"[a-z][a-zA-Z0-9_]*", Text), + (r"[']", String, 'quoted_atom'), # Strings (r'"(\\\\|\\"|[^"])*"', String), # End of entity-opening directive @@ -703,6 +698,8 @@ class LogtalkLexer(RegexLexer): return True if ':- category(' in text: return True + if ':-' in text: + return True return False -- cgit v1.2.1 -- cgit v1.2.1 From 3c822143b6576d4a8b73e6a09ea3125755dcf24c Mon Sep 17 00:00:00 2001 From: Paulo Moura Date: Thu, 11 Sep 2014 11:12:35 +0000 Subject: Fix harmless comment layout --- pygments/lexers/other.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py index 66e70a29..e70f194b 100644 --- a/pygments/lexers/other.py +++ b/pygments/lexers/other.py @@ -642,7 +642,7 @@ class LogtalkLexer(RegexLexer): ], 'directive': [ - # Conditional compilation directives + # Conditional compilation directives (r'(el)?if(?=[(])', Keyword, 'root'), (r'(e(lse|ndif))[.]', Keyword, 'root'), # Entity directives -- cgit v1.2.1 -- cgit v1.2.1 From 110968fa96000d3167a79beeb84d1a4047771b3b Mon Sep 17 00:00:00 2001 From: Alexei Sholik Date: Thu, 11 Sep 2014 23:41:23 +0300 Subject: [Elixir] Fix some inconsistencies in operator parsing. --- pygments/lexers/functional.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/pygments/lexers/functional.py b/pygments/lexers/functional.py index 9b4d26c2..04594036 100644 --- a/pygments/lexers/functional.py +++ b/pygments/lexers/functional.py @@ -3187,7 +3187,7 @@ class ElixirLexer(RegexLexer): OPERATORS3 = [ '<<<', '>>>', '|||', '&&&', '^^^', '~~~', '===', '!==', - '~>>', '<~>', '\|~>', '<\|>', + '~>>', '<~>', '|~>', '<|>', ] OPERATORS2 = [ '==', '!=', '<=', '>=', '&&', '||', '<>', '++', '--', '|>', '=~', @@ -3275,7 +3275,7 @@ class ElixirLexer(RegexLexer): ops_re = r'(?:%s|%s|%s)' % (op3_re, op2_re, op1_re) punctuation_re = "|".join(re.escape(s) for s in PUNCTUATION) alnum = '[A-Za-z_0-9]' - name_re = r'[a-z_]%s*[!\?]?' 
% alnum + name_re = r'(?:\.\.\.|[a-z_]%s*[!\?]?)' % alnum modname_re = r'[A-Z]%(alnum)s*(?:\.[A-Z]%(alnum)s*)*' % {'alnum': alnum} complex_name_re = r'(?:%s|%s|%s)' % (name_re, modname_re, ops_re) special_atom_re = r'(?:\.\.\.|<<>>|%{}|%|{})' @@ -3316,6 +3316,10 @@ class ElixirLexer(RegexLexer): # @attributes (r'@' + name_re, Name.Attribute), + # identifiers + (name_re, Name), + (r'(%%?)(%s)' % (modname_re,), bygroups(Punctuation, Name.Class)), + # operators and punctuation (op3_re, Operator), (op2_re, Operator), @@ -3323,10 +3327,6 @@ class ElixirLexer(RegexLexer): (r'&\d', Name.Entity), # anon func arguments (op1_re, Operator), - # identifiers - (name_re, Name), - (r'(%%?)(%s)' % (modname_re,), bygroups(Punctuation, Name.Class)), - # numbers (r'0b[01]+', Number.Bin), (r'0o[0-7]+', Number.Oct), -- cgit v1.2.1 From 7b7726d637d561fd35cea67c23e70393cf85e86a Mon Sep 17 00:00:00 2001 From: Paulo Moura Date: Fri, 12 Sep 2014 18:11:17 +0000 Subject: Add more specific number types for binary, octal, and hexadecimal syntaxes. --- pygments/lexers/other.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py index e70f194b..9e000cc6 100644 --- a/pygments/lexers/other.py +++ b/pygments/lexers/other.py @@ -509,9 +509,9 @@ class LogtalkLexer(RegexLexer): (r'\s+', Text), # Numbers (r"0'.", Number), - (r'0b[01]+', Number), - (r'0o[0-7]+', Number), - (r'0x[0-9a-fA-F]+', Number), + (r'0b[01]+', Number.Bin), + (r'0o[0-7]+', Number.Oct), + (r'0x[0-9a-fA-F]+', Number.Hex), (r'\d+\.?\d*((e|E)(\+|-)?\d+)?', Number), # Variables (r'([A-Z_][a-zA-Z0-9_]*)', Name.Variable), -- cgit v1.2.1 From b0704914d698b7b7f801b9435250061694676b35 Mon Sep 17 00:00:00 2001 From: Paulo Moura Date: Fri, 12 Sep 2014 18:31:09 +0000 Subject: Added missing specific number tokens to the entity relation regular expressions. 
--- pygments/lexers/other.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py index 9e000cc6..4f11f35d 100644 --- a/pygments/lexers/other.py +++ b/pygments/lexers/other.py @@ -665,9 +665,9 @@ class LogtalkLexer(RegexLexer): (r'(complements|extends|i(nstantiates|mp(lements|orts))|specializes)(?=[(])', Keyword), # Numbers (r"0'.", Number), - (r'0b[01]+', Number), - (r'0o[0-7]+', Number), - (r'0x[0-9a-fA-F]+', Number), + (r'0b[01]+', Number.Bin), + (r'0o[0-7]+', Number.Oct), + (r'0x[0-9a-fA-F]+', Number.Hex), (r'\d+\.?\d*((e|E)(\+|-)?\d+)?', Number), # Variables (r'([A-Z_][a-zA-Z0-9_]*)', Name.Variable), -- cgit v1.2.1 From f3aaf003440cbde495e12c3ae5e53bc48b9b1ffd Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Tue, 16 Sep 2014 09:40:51 +0200 Subject: test nested comments in rust --- tests/examplefiles/rust_example.rs | 2 ++ 1 file changed, 2 insertions(+) diff --git a/tests/examplefiles/rust_example.rs b/tests/examplefiles/rust_example.rs index 1c0a70c3..8ffbaf6b 100644 --- a/tests/examplefiles/rust_example.rs +++ b/tests/examplefiles/rust_example.rs @@ -11,6 +11,8 @@ // based on: // http://shootout.alioth.debian.org/u32/benchmark.php?test=nbody&lang=java +/* nest some /* comments */ */ + extern mod std; use core::os; -- cgit v1.2.1 From 36b2888b4a17b28e6bcd7bc76ee57ed706de0f01 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Tue, 16 Sep 2014 09:50:29 +0200 Subject: changelog entries --- CHANGES | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/CHANGES b/CHANGES index 8459e05a..807319ef 100644 --- a/CHANGES +++ b/CHANGES @@ -105,7 +105,9 @@ Version 2.0 - C family lexers: fix parsing of indented preprocessor directives (#944). -- Rust lexer: update to 0.9 language version (PR#270). +- Rust lexer: update to 0.9 language version (PR#270, PR#388). + +- Elixir lexer: update to 0.15 language version (PR#392). 
Version 1.6 -- cgit v1.2.1 From 57a5e307fde4e78e1025c73f158e9f38709fd3df Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Tue, 16 Sep 2014 09:55:09 +0200 Subject: Nit lexer: add changelog entry, attribution, mapfile entry, cat all example files into one --- AUTHORS | 1 + CHANGES | 1 + pygments/lexers/_mapping.py | 1 + pygments/lexers/compiled.py | 6 +- tests/examplefiles/all.nit | 1986 +++++++++++++++++++++++ tests/examplefiles/calculator.nit | 272 ---- tests/examplefiles/callback_chimpanze.nit | 45 - tests/examplefiles/callback_monkey.nit | 92 -- tests/examplefiles/circular_list.nit | 167 -- tests/examplefiles/clock.nit | 78 - tests/examplefiles/clock_more.nit | 60 - tests/examplefiles/curl_http.nit | 113 -- tests/examplefiles/curl_mail.nit | 59 - tests/examplefiles/draw_operation.nit | 243 --- tests/examplefiles/drop_privileges.nit | 46 - tests/examplefiles/extern_methods.nit | 69 - tests/examplefiles/fibonacci.nit | 43 - tests/examplefiles/hello_world.nit | 1 - tests/examplefiles/html_page.nit | 105 -- tests/examplefiles/int_stack.nit | 100 -- tests/examplefiles/opengles2_hello_triangle.nit | 193 --- tests/examplefiles/print_arguments.nit | 22 - tests/examplefiles/procedural_array.nit | 48 - tests/examplefiles/socket_client.nit | 38 - tests/examplefiles/socket_server.nit | 52 - tests/examplefiles/tmpl_composer.nit | 94 -- tests/examplefiles/websocket_server.nit | 46 - 27 files changed, 1994 insertions(+), 1987 deletions(-) create mode 100644 tests/examplefiles/all.nit delete mode 100644 tests/examplefiles/calculator.nit delete mode 100644 tests/examplefiles/callback_chimpanze.nit delete mode 100644 tests/examplefiles/callback_monkey.nit delete mode 100644 tests/examplefiles/circular_list.nit delete mode 100644 tests/examplefiles/clock.nit delete mode 100644 tests/examplefiles/clock_more.nit delete mode 100644 tests/examplefiles/curl_http.nit delete mode 100644 tests/examplefiles/curl_mail.nit delete mode 100644 tests/examplefiles/draw_operation.nit delete mode 100644 tests/examplefiles/drop_privileges.nit delete mode 100644 tests/examplefiles/extern_methods.nit delete mode 100644 tests/examplefiles/fibonacci.nit delete mode 100644 tests/examplefiles/hello_world.nit delete mode 100644 tests/examplefiles/html_page.nit delete mode 100644 tests/examplefiles/int_stack.nit delete mode 100644 tests/examplefiles/opengles2_hello_triangle.nit delete mode 100644 tests/examplefiles/print_arguments.nit delete mode 100644 tests/examplefiles/procedural_array.nit delete mode 100644 tests/examplefiles/socket_client.nit delete mode 100644 tests/examplefiles/socket_server.nit delete mode 100644 tests/examplefiles/tmpl_composer.nit delete mode 100644 tests/examplefiles/websocket_server.nit diff --git a/AUTHORS b/AUTHORS index 1bedef4b..e9fb5301 100644 --- a/AUTHORS +++ b/AUTHORS @@ -14,6 +14,7 @@ Other contributors, listed alphabetically, are: * Jeffrey Arnold -- R/S, Rd, BUGS, Jags, and Stan lexers * Jeremy Ashkenas -- CoffeeScript lexer * Stefan Matthias Aust -- Smalltalk lexer +* Lucas Bajolet -- Nit lexer * Ben Bangert -- Mako lexers * Max Battcher -- Darcs patch lexer * Thomas Baruchel -- APL lexer diff --git a/CHANGES b/CHANGES index 807319ef..db3ffd9c 100644 --- a/CHANGES +++ b/CHANGES @@ -41,6 +41,7 @@ Version 2.0 * Inform 6/7 (PR#281) * MQL (PR#285) * APL (#969) + * Nit (PR#375) - New styles: "xcode" and "igor", similar to the default highlighting of the respective IDEs. 
diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index b028aee6..d505dcf5 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -239,6 +239,7 @@ LEXERS = { 'NewspeakLexer': ('pygments.lexers.other', 'Newspeak', ('newspeak',), ('*.ns2',), ('text/x-newspeak',)), 'NginxConfLexer': ('pygments.lexers.text', 'Nginx configuration file', ('nginx',), (), ('text/x-nginx-conf',)), 'NimrodLexer': ('pygments.lexers.compiled', 'Nimrod', ('nimrod', 'nim'), ('*.nim', '*.nimrod'), ('text/x-nimrod',)), + 'NitLexer': ('pygments.lexers.compiled', 'Nit', ('nit',), ('*.nit',), ()), 'NixLexer': ('pygments.lexers.functional', 'Nix', ('nixos', 'nix'), ('*.nix',), ('text/x-nix',)), 'NumPyLexer': ('pygments.lexers.math', 'NumPy', ('numpy',), (), ()), 'ObjdumpLexer': ('pygments.lexers.asm', 'objdump', ('objdump',), ('*.objdump',), ('text/x-objdump',)), diff --git a/pygments/lexers/compiled.py b/pygments/lexers/compiled.py index 1cf5fee7..6556b77e 100644 --- a/pygments/lexers/compiled.py +++ b/pygments/lexers/compiled.py @@ -5199,9 +5199,12 @@ class SwiftLexer(ObjectiveCLexer): token = Operator yield index, token, value + class NitLexer(RegexLexer): """ For `nit `_ source. + + .. versionadded:: 2.0 """ name = 'Nit' @@ -5219,7 +5222,8 @@ class NitLexer(RegexLexer): 'isset|label|__debug__)(?=( |\n|\t|\r|\())', Keyword), (r'[A-Z][A-Za-z0-9_]*', Name.Class), (r'"""(([^\'\\]|\\.)|\\r|\\n)*(({{?)?(""?{{?)*""""*)', String), #Simple long string - (r'\'\'\'(((\\.|[^\'\\])|\\r|\\n)|\'((\\.|[^\'\\])|\\r|\\n)|\'\'((\\.|[^\'\\])|\\r|\\n))*\'\'\'', String), #Simple long string alt + (r'\'\'\'(((\\.|[^\'\\])|\\r|\\n)|\'((\\.|[^\'\\])|\\r|\\n)|' + r'\'\'((\\.|[^\'\\])|\\r|\\n))*\'\'\'', String), #Simple long string alt (r'"""(([^\'\\]|\\.)|\\r|\\n)*((""?)?({{?""?)*{{{{*)', String), #Start long string (r'}}}(((\\.|[^\'\\])|\\r|\\n))*(""?)?({{?""?)*{{{{*', String), #Mid long string (r'}}}(((\\.|[^\'\\])|\\r|\\n))*({{?)?(""?{{?)*""""*', String), #End long string diff --git a/tests/examplefiles/all.nit b/tests/examplefiles/all.nit new file mode 100644 index 00000000..d4e1ddfa --- /dev/null +++ b/tests/examplefiles/all.nit @@ -0,0 +1,1986 @@ +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Copyright 2013 Alexis Laferrière +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import gtk + +class CalculatorContext + var result : nullable Float = null + + var last_op : nullable Char = null + + var current : nullable Float = null + var after_point : nullable Int = null + + fun push_op( op : Char ) + do + apply_last_op_if_any + if op == 'C' then + self.result = 0.0 + last_op = null + else + last_op = op # store for next push_op + end + + # prepare next current + after_point = null + current = null + end + + fun push_digit( digit : Int ) + do + var current = current + if current == null then current = 0.0 + + var after_point = after_point + if after_point == null then + current = current * 10.0 + digit.to_f + else + current = current + digit.to_f * 10.0.pow(after_point.to_f) + self.after_point -= 1 + end + + self.current = current + end + + fun switch_to_decimals + do + if self.current == null then current = 0.0 + if after_point != null then return + + after_point = -1 + end + + fun apply_last_op_if_any + do + var op = last_op + + var result = result + if result == null then result = 0.0 + + var current = current + if current == null then current = 0.0 + + if op == null then + result = current + else if op == '+' then + result = result + current + else if op == '-' then + result = result - current + else if op == '/' then + result = result / current + else if op == '*' then + result = result * current + end + self.result = result + self.current = null + end +end + +class CalculatorGui + super GtkCallable + + var win : GtkWindow + var container : GtkGrid + + var lbl_disp : GtkLabel + var but_eq : GtkButton + var but_dot : GtkButton + + var context = new CalculatorContext + + redef fun signal( sender, user_data ) + do + var after_point = context.after_point + if after_point == null then + after_point = 0 + else + after_point = (after_point.abs) + end + + if user_data isa Char then # is an operation + var c = user_data + if c == '.' then + but_dot.sensitive= false + context.switch_to_decimals + lbl_disp.text = "{context.current.to_i}." + else + but_dot.sensitive= true + context.push_op( c ) + + var s = context.result.to_precision_native(6) + var index : nullable Int = null + for i in s.length.times do + var chiffre = s.chars[i] + if chiffre == '0' and index == null then + index = i + else if chiffre != '0' then + index = null + end + end + if index != null then + s = s.substring(0, index) + if s.chars[s.length-1] == ',' then s = s.substring(0, s.length-1) + end + lbl_disp.text = s + end + else if user_data isa Int then # is a number + var n = user_data + context.push_digit( n ) + lbl_disp.text = context.current.to_precision_native(after_point) + end + end + + init + do + init_gtk + + win = new GtkWindow( 0 ) + + container = new GtkGrid(5,5,true) + win.add( container ) + + lbl_disp = new GtkLabel( "_" ) + container.attach( lbl_disp, 0, 0, 5, 1 ) + + # digits + for n in [0..9] do + var but = new GtkButton.with_label( n.to_s ) + but.request_size( 64, 64 ) + but.signal_connect( "clicked", self, n ) + if n == 0 then + container.attach( but, 0, 4, 1, 1 ) + else container.attach( but, (n-1)%3, 3-(n-1)/3, 1, 1 ) + end + + # operators + var r = 1 + for op in ['+', '-', '*', '/' ] do + var but = new GtkButton.with_label( op.to_s ) + but.request_size( 64, 64 ) + but.signal_connect( "clicked", self, op ) + container.attach( but, 3, r, 1, 1 ) + r+=1 + end + + # = + but_eq = new GtkButton.with_label( "=" ) + but_eq.request_size( 64, 64 ) + but_eq.signal_connect( "clicked", self, '=' ) + container.attach( but_eq, 4, 3, 1, 2 ) + + # . + but_dot = new GtkButton.with_label( "." 
) + but_dot.request_size( 64, 64 ) + but_dot.signal_connect( "clicked", self, '.' ) + container.attach( but_dot, 1, 4, 1, 1 ) + + #C + var but_c = new GtkButton.with_label( "C" ) + but_c.request_size( 64, 64 ) + but_c.signal_connect("clicked", self, 'C') + container.attach( but_c, 2, 4, 1, 1 ) + + win.show_all + end +end + +# context tests +var context = new CalculatorContext +context.push_digit( 1 ) +context.push_digit( 2 ) +context.push_op( '+' ) +context.push_digit( 3 ) +context.push_op( '*' ) +context.push_digit( 2 ) +context.push_op( '=' ) +var r = context.result.to_precision( 2 ) +assert r == "30.00" else print r + +context = new CalculatorContext +context.push_digit( 1 ) +context.push_digit( 4 ) +context.switch_to_decimals +context.push_digit( 1 ) +context.push_op( '*' ) +context.push_digit( 3 ) +context.push_op( '=' ) +r = context.result.to_precision( 2 ) +assert r == "42.30" else print r + +context.push_op( '+' ) +context.push_digit( 1 ) +context.push_digit( 1 ) +context.push_op( '=' ) +r = context.result.to_precision( 2 ) +assert r == "53.30" else print r + +context = new CalculatorContext +context.push_digit( 4 ) +context.push_digit( 2 ) +context.switch_to_decimals +context.push_digit( 3 ) +context.push_op( '/' ) +context.push_digit( 3 ) +context.push_op( '=' ) +r = context.result.to_precision( 2 ) +assert r == "14.10" else print r + +#test multiple decimals +context = new CalculatorContext +context.push_digit( 5 ) +context.push_digit( 0 ) +context.switch_to_decimals +context.push_digit( 1 ) +context.push_digit( 2 ) +context.push_digit( 3 ) +context.push_op( '+' ) +context.push_digit( 1 ) +context.push_op( '=' ) +r = context.result.to_precision( 3 ) +assert r == "51.123" else print r + +#test 'C' button +context = new CalculatorContext +context.push_digit( 1 ) +context.push_digit( 0 ) +context.push_op( '+' ) +context.push_digit( 1 ) +context.push_digit( 0 ) +context.push_op( '=' ) +context.push_op( 'C' ) +r = context.result.to_precision( 1 ) +assert r == "0.0" else print r + +# graphical application + +if "NIT_TESTING".environ != "true" then + var app = new CalculatorGui + run_gtk +end +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Copyright 2013 Matthieu Lucas +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This sample has been implemented to show you how simple is it to play +# with native callbacks (C) through an high level with NIT program. + +module callback_chimpanze +import callback_monkey + +class Chimpanze + super MonkeyActionCallable + + fun create + do + var monkey = new Monkey + print "Hum, I'm sleeping ..." + # Invoking method which will take some time to compute, and + # will be back in wokeUp method with information. 
+ # - Callback method defined in MonkeyActionCallable Interface + monkey.wokeUpAction(self, "Hey, I'm awake.") + end + + # Inherit callback method, defined by MonkeyActionCallable interface + # - Back of wokeUpAction method + redef fun wokeUp( sender:Monkey, message:Object ) + do + print message + end +end + +var m = new Chimpanze +m.create +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Copyright 2013 Matthieu Lucas +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This sample has been implemented to show you how simple is it to play +# with native callbacks (C) through an high level with NIT program. + +module callback_monkey + +in "C header" `{ + #include + #include + + typedef struct { + int id; + int age; + } CMonkey; + + typedef struct { + MonkeyActionCallable toCall; + Object message; + } MonkeyAction; +`} + +in "C body" `{ + // Method which reproduce a callback answer + // Please note that a function pointer is only used to reproduce the callback + void cbMonkey(CMonkey *mkey, void callbackFunc(CMonkey*, MonkeyAction*), MonkeyAction *data) + { + sleep(2); + callbackFunc( mkey, data ); + } + + // Back of background treatment, will be redirected to callback function + void nit_monkey_callback_func( CMonkey *mkey, MonkeyAction *data ) + { + // To call a your method, the signature must be written like this : + // _... + MonkeyActionCallable_wokeUp( data->toCall, mkey, data->message ); + } +`} + +# Implementable interface to get callback in defined methods +interface MonkeyActionCallable + fun wokeUp( sender:Monkey, message: Object) is abstract +end + +# Defining my object type Monkey, which is, in a low level, a pointer to a C struct (CMonkey) +extern class Monkey `{ CMonkey * `} + + new `{ + CMonkey *monkey = malloc( sizeof(CMonkey) ); + monkey->age = 10; + monkey->id = 1; + return monkey; + `} + + # Object method which will get a callback in wokeUp method, defined in MonkeyActionCallable interface + # Must be defined as Nit/C method because of C call inside + fun wokeUpAction( toCall: MonkeyActionCallable, message: Object ) is extern import MonkeyActionCallable.wokeUp `{ + + // Allocating memory to keep reference of received parameters : + // - Object receiver + // - Message + MonkeyAction *data = malloc( sizeof(MonkeyAction) ); + + // Incrementing reference counter to prevent from releasing + MonkeyActionCallable_incr_ref( toCall ); + Object_incr_ref( message ); + + data->toCall = toCall; + data->message = message; + + // Calling method which reproduce a callback by passing : + // - Receiver + // - Function pointer to object return method + // - Datas + cbMonkey( recv, &nit_monkey_callback_func, data ); + `} +end +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Implementation of circular lists +# This example shows the usage of generics and somewhat a specialisation of collections. +module circular_list + +# Sequences of elements implemented with a double-linked circular list +class CircularList[E] + # Like standard Array or LinkedList, CircularList is a Sequence. + super Sequence[E] + + # The first node of the list if any + # The special case of an empty list is handled by a null node + private var node: nullable CLNode[E] = null + + redef fun iterator do return new CircularListIterator[E](self) + + redef fun first do return self.node.item + + redef fun push(e) + do + var new_node = new CLNode[E](e) + var n = self.node + if n == null then + # the first node + self.node = new_node + else + # not the first one, so attach nodes correctly. + var old_last_node = n.prev + new_node.next = n + new_node.prev = old_last_node + old_last_node.next = new_node + n.prev = new_node + end + end + + redef fun pop + do + var n = self.node + assert n != null + var prev = n.prev + if prev == n then + # the only node + self.node = null + return n.item + end + # not the only one do detach nodes correctly. + var prev_prev = prev.prev + n.prev = prev_prev + prev_prev.next = n + return prev.item + end + + redef fun unshift(e) + do + # Circularity has benefits. + push(e) + self.node = self.node.prev + end + + redef fun shift + do + # Circularity has benefits. + self.node = self.node.next + return self.pop + end + + # Move the first at the last position, the second at the first, etc. + fun rotate + do + var n = self.node + if n == null then return + self.node = n.next + end + + # Sort the list using the Josephus algorithm. + fun josephus(step: Int) + do + var res = new CircularList[E] + while not self.is_empty do + # count 'step' + for i in [1..step[ do self.rotate + # kill + var x = self.shift + res.add(x) + end + self.node = res.node + end +end + +# Nodes of a CircularList +private class CLNode[E] + # The current item + var item: E + + # The next item in the circular list. + # Because of circularity, there is always a next; + # so by default let it be self + var next: CLNode[E] = self + + # The previous item in the circular list. + # Coherence between next and previous nodes has to be maintained by the + # circular list. + var prev: CLNode[E] = self +end + +# An iterator of a CircularList. +private class CircularListIterator[E] + super IndexedIterator[E] + + redef var index: Int + + # The current node pointed. + # Is null if the list is empty. + var node: nullable CLNode[E] + + # The list iterated. + var list: CircularList[E] + + redef fun is_ok + do + # Empty lists are not OK. + # Pointing again the first node is not OK. 
+ return self.node != null and (self.index == 0 or self.node != self.list.node) + end + + redef fun next + do + self.node = self.node.next + self.index += 1 + end + + redef fun item do return self.node.item + + init(list: CircularList[E]) + do + self.node = list.node + self.list = list + self.index = 0 + end +end + +var i = new CircularList[Int] +i.add_all([1, 2, 3, 4, 5, 6, 7]) +print i.first +print i.join(":") + +i.push(8) +print i.shift +print i.pop +i.unshift(0) +print i.join(":") + +i.josephus(3) +print i.join(":") +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This module beef up the clock module by allowing a clock to be comparable. +# It show the usage of class refinement +module clock_more + +import clock + +redef class Clock + # Clock are now comparable + super Comparable + + # Comparaison of a clock make only sense with an other clock + redef type OTHER: Clock + + redef fun <(o) + do + # Note: < is the only abstract method of Comparable. + # All other operators and methods rely on < and ==. + return self.total_minutes < o.total_minutes + end +end + +var c1 = new Clock(8, 12) +var c2 = new Clock(8, 13) +var c3 = new Clock(9, 13) + +print "{c1}<{c2}? {c1{c2}? {c1>c2}" +print "{c1}>={c2}? {c1>=c2}" +print "{c1}<=>{c2}? {c1<=>c2}" +print "{c1},{c2}? max={c1.max(c2)} min={c1.min(c2)}" +print "{c1}.is_between({c2}, {c3})? {c1.is_between(c2, c3)}" +print "{c2}.is_between({c1}, {c3})? {c2.is_between(c1, c3)}" + +print "-" + +c1.minutes += 1 + +print "{c1}<{c2}? {c1{c2}? {c1>c2}" +print "{c1}>={c2}? {c1>=c2}" +print "{c1}<=>{c2}? {c1<=>c2}" +print "{c1},{c2}? max={c1.max(c2)} min={c1.min(c2)}" +print "{c1}.is_between({c2}, {c3})? {c1.is_between(c2, c3)}" +print "{c2}.is_between({c1}, {c3})? {c2.is_between(c1, c3)}" +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This module provide a simple wall clock. +# It is an example of getters and setters. +# A beefed-up module is available in clock_more +module clock + +# A simple wall clock with 60 minutes and 12 hours. +class Clock + # total number of minutes from 0 to 719 + var total_minutes: Int + # Note: only the read acces is public, the write access is private. + + # number of minutes in the current hour (from 0 to 59) + fun minutes: Int do return self.total_minutes % 60 + + # set the number of minutes in the current hour. 
+ # if m < 0 or m >= 60, the hour will be changed accordinlgy + fun minutes=(m: Int) do self.total_minutes = self.hours * 60 + m + + # number of hours (from 0 to 11) + fun hours: Int do return self.total_minutes / 60 + + # set the number of hours + # the minutes will not be updated + fun hours=(h: Int) do self.total_minutes = h * 60 + minutes + + # the position of the hour arrow in the [0..60[ interval + fun hour_pos: Int do return total_minutes / 12 + + # replace the arrow of hours (from 0 to 59). + # the hours and the minutes will be updated. + fun hour_pos=(h: Int) do self.total_minutes = h * 12 + + redef fun to_s do return "{hours}:{minutes}" + + fun reset(hours, minutes: Int) do self.total_minutes = hours*60 + minutes + + init(hours, minutes: Int) do self.reset(hours, minutes) + + redef fun ==(o) + do + # Note: o is a nullable Object, a type test is required + # Thanks to adaptive typing, there is no downcast + # i.e. the code is safe! + return o isa Clock and self.total_minutes == o.total_minutes + end +end + +var c = new Clock(10,50) +print "It's {c} o'clock." + +c.minutes += 22 +print "Now it's {c} o'clock." + +print "The short arrow in on the {c.hour_pos/5} and the long arrow in on the {c.minutes/5}." + +c.hours -= 2 +print "Now it's {c} o'clock." + +var c2 = new Clock(9, 11) +print "It's {c2} on the second clock." +print "The two clocks are synchronized: {c == c2}." +c2.minutes += 1 +print "It's now {c2} on the second clock." +print "The two clocks are synchronized: {c == c2}." +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Copyright 2013 Matthieu Lucas +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Sample of the Curl module. 
+module curl_http + +import curl + +# Small class to represent an Http Fetcher +class MyHttpFetcher + super CurlCallbacks + + var curl: Curl + var our_body: String = "" + + init(curl: Curl) do self.curl = curl + + # Release curl object + fun destroy do self.curl.destroy + + # Header callback + redef fun header_callback(line: String) do + # We keep this callback silent for testing purposes + #if not line.has_prefix("Date:") then print "Header_callback : {line}" + end + + # Body callback + redef fun body_callback(line: String) do self.our_body = "{self.our_body}{line}" + + # Stream callback - Cf : No one is registered + redef fun stream_callback(buffer: String, size: Int, count: Int) do print "Stream_callback : {buffer} - {size} - {count}" +end + + +# Program +if args.length < 2 then + print "Usage: curl_http " +else + var curl = new Curl + var url = args[1] + var request = new CurlHTTPRequest(url, curl) + + # HTTP Get Request + if args[0] == "GET" then + request.verbose = false + var getResponse = request.execute + + if getResponse isa CurlResponseSuccess then + print "Status code : {getResponse.status_code}" + print "Body : {getResponse.body_str}" + else if getResponse isa CurlResponseFailed then + print "Error code : {getResponse.error_code}" + print "Error msg : {getResponse.error_msg}" + end + + # HTTP Post Request + else if args[0] == "POST" then + var myHttpFetcher = new MyHttpFetcher(curl) + request.delegate = myHttpFetcher + + var postDatas = new HeaderMap + postDatas["Bugs Bunny"] = "Daffy Duck" + postDatas["Batman"] = "Robin likes special characters @#ùà!è§'(\"é&://,;<>∞~*" + postDatas["Batman"] = "Yes you can set multiple identical keys, but APACHE will consider only once, the last one" + request.datas = postDatas + request.verbose = false + var postResponse = request.execute + + print "Our body from the callback : {myHttpFetcher.our_body}" + + if postResponse isa CurlResponseSuccess then + print "*** Answer ***" + print "Status code : {postResponse.status_code}" + print "Body should be empty, because we decided to manage callbacks : {postResponse.body_str.length}" + else if postResponse isa CurlResponseFailed then + print "Error code : {postResponse.error_code}" + print "Error msg : {postResponse.error_msg}" + end + + # HTTP Get to file Request + else if args[0] == "GET_FILE" then + var headers = new HeaderMap + headers["Accept"] = "Moo" + request.headers = headers + request.verbose = false + var downloadResponse = request.download_to_file(null) + + if downloadResponse isa CurlFileResponseSuccess then + print "*** Answer ***" + print "Status code : {downloadResponse.status_code}" + print "Size downloaded : {downloadResponse.size_download}" + else if downloadResponse isa CurlResponseFailed then + print "Error code : {downloadResponse.error_code}" + print "Error msg : {downloadResponse.error_msg}" + end + # Program logic + else + print "Usage : Method[POST, GET, GET_FILE]" + end +end +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Copyright 2013 Matthieu Lucas +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +# Mail sender sample using the Curl module +module curl_mail + +import curl + +var curl = new Curl +var mail_request = new CurlMailRequest(curl) + +# Networks +var response = mail_request.set_outgoing_server("smtps://smtp.example.org:465", "user@example.org", "mypassword") +if response isa CurlResponseFailed then + print "Error code : {response.error_code}" + print "Error msg : {response.error_msg}" +end + +# Headers +mail_request.from = "Billy Bob" +mail_request.to = ["user@example.org"] +mail_request.cc = ["bob@example.org"] +mail_request.bcc = null + +var headers_body = new HeaderMap +headers_body["Content-Type:"] = "text/html; charset=\"UTF-8\"" +headers_body["Content-Transfer-Encoding:"] = "quoted-printable" +mail_request.headers_body = headers_body + +# Content +mail_request.body = "
<h1>Here you can write HTML stuff.</h1>
" +mail_request.subject = "Hello From My Nit Program" + +# Others +mail_request.verbose = false + +# Send mail +response = mail_request.execute +if response isa CurlResponseFailed then + print "Error code : {response.error_code}" + print "Error msg : {response.error_msg}" +else if response isa CurlMailResponseSuccess then + print "Mail Sent" +else + print "Unknown Curl Response type" +end +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Copyright 2012-2013 Alexis Laferrière +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Draws an arithmetic operation to the terminal +module draw_operation + +redef enum Int + fun n_chars: Int `{ + int c; + if ( abs(recv) >= 10 ) + c = 1+(int)log10f( (float)abs(recv) ); + else + c = 1; + if ( recv < 0 ) c ++; + return c; + `} +end + +redef enum Char + fun as_operator(a, b: Int): Int + do + if self == '+' then return a + b + if self == '-' then return a - b + if self == '*' then return a * b + if self == '/' then return a / b + if self == '%' then return a % b + abort + end + + fun override_dispc: Bool + do + return self == '+' or self == '-' or self == '*' or self == '/' or self == '%' + end + + fun lines(s: Int): Array[Line] + do + if self == '+' then + return [new Line(new P(0,s/2),1,0,s), new Line(new P(s/2,1),0,1,s-2)] + else if self == '-' then + return [new Line(new P(0,s/2),1,0,s)] + else if self == '*' then + var lines = new Array[Line] + for y in [1..s-1[ do + lines.add( new Line(new P(1,y), 1,0,s-2) ) + end + return lines + else if self == '/' then + return [new Line(new P(s-1,0), -1,1, s )] + else if self == '%' then + var q4 = s/4 + var lines = [new Line(new P(s-1,0),-1,1,s)] + for l in [0..q4[ do + lines.append([ new Line( new P(0,l), 1,0,q4), new Line( new P(s-1,s-1-l), -1,0,q4) ]) + end + return lines + else if self == '1' then + return [new Line(new P(s/2,0), 0,1,s),new Line(new P(0,s-1),1,0,s), + new Line( new P(s/2,0),-1,1,s/2)] + else if self == '2' then + return [new Line(new P(0,0), 1,0,s),new Line(new P(s-1,0),0,1,s/2), + new Line( new P(0,s-1),1,0,s), new Line( new P(0,s/2), 0,1,s/2), + new Line( new P(0,s/2), 1,0,s)] + else if self == '3' then + return [new Line(new P(0,0), 1,0,s),new Line(new P(s-1,0),0,1,s), + new Line( new P(0,s-1),1,0,s), new Line( new P(0,s/2), 1,0,s)] + else if self == '4' then + return [new Line(new P(s-1,0),0,1,s), new Line( new P(0,0), 0,1,s/2), + new Line( new P(0,s/2), 1,0,s)] + else if self == '5' then + return [new Line(new P(0,0), 1,0,s),new Line(new P(s-1,s/2),0,1,s/2), + new Line( new P(0,s-1),1,0,s), new Line( new P(0,0), 0,1,s/2), + new Line( new P(0,s/2), 1,0,s)] + else if self == '6' then + return [new Line(new P(0,0), 1,0,s),new Line(new P(s-1,s/2),0,1,s/2), + new Line( new P(0,s-1),1,0,s), new Line( new P(0,0), 0,1,s), + new Line( new P(0,s/2), 1,0,s)] + else if self == '7' then + var tl = new P(0,0) + var tr = new P(s-1,0) + return [new Line(tl, 1,0,s), new Line(tr,-1,1,s)] + else if self == '8' then + return [new Line(new P(0,0), 1,0,s),new Line(new P(s-1,0),0,1,s), + new 
Line( new P(0,s-1),1,0,s), new Line( new P(0,0), 0,1,s), + new Line( new P(0,s/2), 1,0,s)] + else if self == '9' then + return [new Line(new P(0,0), 1,0,s),new Line(new P(s-1,0),0,1,s), + new Line( new P(0,s-1),1,0,s), new Line( new P(0,0), 0,1,s/2), + new Line( new P(0,s/2), 1,0,s)] + else if self == '0' then + return [new Line(new P(0,0), 1,0,s),new Line(new P(s-1,0),0,1,s), + new Line( new P(0,s-1),1,0,s), new Line( new P(0,0), 0,1,s)] + end + return new Array[Line] + end +end + +class P + var x : Int + var y : Int +end + +redef class String + # hack is to support a bug in the evaluation software + fun draw(dispc: Char, size, gap: Int, hack: Bool) + do + var w = size * length +(length-1)*gap + var h = size + var map = new Array[Array[Char]] + for x in [0..w[ do + map[x] = new Array[Char].filled_with( ' ', h ) + end + + var ci = 0 + for c in self.chars do + var local_dispc + if c.override_dispc then + local_dispc = c + else + local_dispc = dispc + end + + var lines = c.lines( size ) + for line in lines do + var x = line.o.x+ci*size + x += ci*gap + var y = line.o.y + for s in [0..line.len[ do + assert map.length > x and map[x].length > y else print "setting {x},{y} as {local_dispc}" + map[x][y] = local_dispc + x += line.step_x + y += line.step_y + end + end + + ci += 1 + end + + if hack then + for c in [0..size[ do + map[c][0] = map[map.length-size+c][0] + map[map.length-size+c][0] = ' ' + end + end + + for y in [0..h[ do + for x in [0..w[ do + printn map[x][y] + end + print "" + end + end +end + +class Line + var o : P + var step_x : Int + var step_y : Int + var len : Int +end + +var a +var b +var op_char +var disp_char +var disp_size +var disp_gap + +if "NIT_TESTING".environ == "true" then + a = 567 + b = 13 + op_char = '*' + disp_char = 'O' + disp_size = 8 + disp_gap = 1 +else + printn "Left operand: " + a = gets.to_i + + printn "Right operand: " + b = gets.to_i + + printn "Operator (+, -, *, /, %): " + op_char = gets.chars[0] + + printn "Char to display: " + disp_char = gets.chars[0] + + printn "Size of text: " + disp_size = gets.to_i + + printn "Space between digits: " + disp_gap = gets.to_i +end + +var result = op_char.as_operator( a, b ) + +var len_a = a.n_chars +var len_b = b.n_chars +var len_res = result.n_chars +var max_len = len_a.max( len_b.max( len_res ) ) + 1 + +# draw first line +var d = max_len - len_a +var line_a = "" +for i in [0..d[ do line_a += " " +line_a += a.to_s +line_a.draw( disp_char, disp_size, disp_gap, false ) + +print "" +# draw second line +d = max_len - len_b-1 +var line_b = op_char.to_s +for i in [0..d[ do line_b += " " +line_b += b.to_s +line_b.draw( disp_char, disp_size, disp_gap, false ) + +# draw ----- +print "" +for i in [0..disp_size*max_len+(max_len-1)*disp_gap] do + printn "_" +end +print "" +print "" + +# draw result +d = max_len - len_res +var line_res = "" +for i in [0..d[ do line_res += " " +line_res += result.to_s +line_res.draw( disp_char, disp_size, disp_gap, false ) +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Copyright 2013 Alexis Laferrière +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +# Example using the privileges module to drop privileges from root +module drop_privileges + +import privileges + +# basic command line options +var opts = new OptionContext +var opt_ug = new OptionUserAndGroup.for_dropping_privileges +opt_ug.mandatory = true +opts.add_option(opt_ug) + +# parse and check command line options +opts.parse(args) +if not opts.errors.is_empty then + print opts.errors + print "Usage: drop_privileges [options]" + opts.usage + exit 1 +end + +# original user +print "before {sys.uid}:{sys.gid}" + +# make the switch +var user_group = opt_ug.value +assert user_group != null +user_group.drop_privileges + +# final user +print "after {sys.uid}:{sys.egid}" +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Copyright 2012-2013 Alexis Laferrière +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# This module illustrates some uses of the FFI, specifically +# how to use extern methods. Which means to implement a Nit method in C. +module extern_methods + +redef enum Int + # Returns self'th fibonnaci number + # implemented here in C for optimization purposes + fun fib : Int import fib `{ + if ( recv < 2 ) + return recv; + else + return Int_fib( recv-1 ) + Int_fib( recv-2 ); + `} + + # System call to sleep for "self" seconds + fun sleep `{ + sleep( recv ); + `} + + # Return atan2l( self, x ) from libmath + fun atan_with( x : Int ) : Float `{ + return atan2( recv, x ); + `} + + # This method callback to Nit methods from C code + # It will use from C code: + # * the local fib method + # * the + operator, a method of Int + # * to_s, a method of all objects + # * String.to_cstring, a method of String to return an equivalent char* + fun foo import fib, +, to_s, String.to_cstring `{ + long recv_fib = Int_fib( recv ); + long recv_plus_fib = Int__plus( recv, recv_fib ); + + String nit_string = Int_to_s( recv_plus_fib ); + char *c_string = String_to_cstring( nit_string ); + + printf( "from C: self + fib(self) = %s\n", c_string ); + `} + + # Equivalent to foo but written in pure Nit + fun bar do print "from Nit: self + fib(self) = {self+self.fib}" +end + +print 12.fib + +print "sleeping 1 second..." +1.sleep + +print 100.atan_with( 200 ) +8.foo +8.bar + +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Copyright 2004-2008 Jean Privat +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# A simple exemple of refinement where a method is added to the integer class. +module fibonacci + +redef class Int + # Calculate the self-th element of the fibonacci sequence. + fun fibonacci: Int + do + if self < 2 then + return 1 + else + return (self-2).fibonacci + (self-1).fibonacci + end + end +end + +# Print usage and exit. +fun usage +do + print "Usage: fibonnaci " + exit 0 +end + +# Main part +if args.length != 1 then + usage +end +print args.first.to_i.fibonacci +print "hello world" +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import html + +class NitHomepage + super HTMLPage + + redef fun head do + add("meta").attr("charset", "utf-8") + add("title").text("Nit") + add("link").attr("rel", "icon").attr("href", "http://nitlanguage.org/favicon.ico").attr("type", "image/x-icon") + add("link").attr("rel", "stylesheet").attr("href", "http://nitlanguage.org/style.css").attr("type", "text/css") + add("link").attr("rel", "stylesheet").attr("href", "http://nitlanguage.org/local.css").attr("type", "text/css") + end + + redef fun body do + open("article").add_class("page") + open("section").add_class("pageheader") + add_html("theNitProgramming Language") + open("header").add_class("header") + open("div").add_class("topsubtitle") + add("p").text("A Fun Language for Serious Programming") + close("div") + close("header") + close("section") + + open("div").attr("id", "pagebody") + open("section").attr("id", "content") + add("h1").text("# What is Nit?") + add("p").text("Nit is an object-oriented programming language. The goal of Nit is to propose a robust statically typed programming language where structure is not a pain.") + add("p").text("So, what does the famous hello world program look like, in Nit?") + add_html("
print 'Hello, World!'
") + + add("h1").text("# Feature Highlights") + add("h2").text("Usability") + add("p").text("Nit's goal is to be usable by real programmers for real projects") + + open("ul") + open("li") + add("a").attr("href", "http://en.wikipedia.org/wiki/KISS_principle").text("KISS principle") + close("li") + add("li").text("Script-like language without verbosity nor cryptic statements") + add("li").text("Painless static types: static typing should help programmers") + add("li").text("Efficient development, efficient execution, efficient evolution.") + close("ul") + + add("h2").text("Robustness") + add("p").text("Nit will help you to write bug-free programs") + + open("ul") + add("li").text("Strong static typing") + add("li").text("No more NullPointerException") + close("ul") + + add("h2").text("Object-Oriented") + add("p").text("Nit's guideline is to follow the most powerful OO principles") + + open("ul") + open("li") + add("a").attr("href", "./everything_is_an_object/").text("Everything is an object") + close("li") + open("li") + add("a").attr("href", "./multiple_inheritance/").text("Multiple inheritance") + close("li") + open("li") + add("a").attr("href", "./refinement/").text("Open classes") + close("li") + open("li") + add("a").attr("href", "./virtual_types/").text("Virtual types") + close("li") + close("ul") + + + add("h1").text("# Getting Started") + add("p").text("Get Nit from its Git repository:") + + add_html("
$ git clone http://nitlanguage.org/nit.git
") + add("p").text("Build the compiler (may be long):") + add_html("
$ cd nit\n")
+					add_html("$ make
") + add("p").text("Compile a program:") + add_html("
$ bin/nitc examples/hello_world.nit
") + add("p").text("Execute the program:") + add_html("
$ ./hello_world
") + close("section") + close("div") + close("article") + end +end + +var page = new NitHomepage +page.write_to stdout +page.write_to_file("nit.html") +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# An example that defines and uses stacks of integers. +# The implementation is done with a simple linked list. +# It features: free constructors, nullable types and some adaptive typing. +module int_stack + +# A stack of integer implemented by a simple linked list. +# Note that this is only a toy class since a real linked list will gain to use +# generics and extends interfaces, like Collection, from the standard library. +class IntStack + # The head node of the list. + # Null means that the stack is empty. + private var head: nullable ISNode = null + + # Add a new integer in the stack. + fun push(val: Int) + do + self.head = new ISNode(val, self.head) + end + + # Remove and return the last pushed integer. + # Return null if the stack is empty. + fun pop: nullable Int + do + var head = self.head + if head == null then return null + # Note: the followings are statically safe because of the + # previous 'if'. + var val = head.val + self.head = head.next + return val + end + + # Return the sum of all integers of the stack. + # Return 0 if the stack is empty. + fun sumall: Int + do + var sum = 0 + var cur = self.head + while cur != null do + # Note: the followings are statically safe because of + # the condition of the 'while'. + sum += cur.val + cur = cur.next + end + return sum + end + + # Note: Because all attributes have a default value, a free constructor + # "init()" is implicitly defined. +end + +# A node of a IntStack +private class ISNode + # The integer value stored in the node. + var val: Int + + # The next node, if any. + var next: nullable ISNode + + # Note: A free constructor "init(val: Int, next: nullable ISNode)" is + # implicitly defined. +end + +var l = new IntStack +l.push(1) +l.push(2) +l.push(3) + +print l.sumall + +# Note: the 'for' control structure cannot be used on IntStack in its current state. +# It requires a more advanced topic. +# However, why not using the 'loop' control structure? +loop + var i = l.pop + if i == null then break + # The following is statically safe because of the previous 'if'. + print i * 10 +end + +# Note: 'or else' is used to give an alternative of a null expression. +l.push(5) +print l.pop or else 0 # l.pop gives 5, so print 5 +print l.pop or else 0 # l.pop gives null, so print the alternative: 0 + + +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Copyright 2014 Alexis Laferrière +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Basic example of OpenGL ES 2.0 usage from the book OpenGL ES 2.0 Programming Guide. +# +# Code reference: +# https://code.google.com/p/opengles-book-samples/source/browse/trunk/LinuxX11/Chapter_2/Hello_Triangle/Hello_Triangle.c +module opengles2_hello_triangle + +import glesv2 +import egl +import mnit_linux # for sdl +import x11 + +if "NIT_TESTING".environ == "true" then exit(0) + +var window_width = 800 +var window_height = 600 + +# +## SDL +# +var sdl_display = new SDLDisplay(window_width, window_height) +var sdl_wm_info = new SDLSystemWindowManagerInfo +var x11_window_handle = sdl_wm_info.x11_window_handle + +# +## X11 +# +var x_display = x_open_default_display +assert x_display != 0 else print "x11 fail" + +# +## EGL +# +var egl_display = new EGLDisplay(x_display) +assert egl_display.is_valid else print "EGL display is not valid" +egl_display.initialize + +print "EGL version: {egl_display.version}" +print "EGL vendor: {egl_display.vendor}" +print "EGL extensions: {egl_display.extensions.join(", ")}" +print "EGL client APIs: {egl_display.client_apis.join(", ")}" + +assert egl_display.is_valid else print egl_display.error + +var config_chooser = new EGLConfigChooser +#config_chooser.surface_type_egl +config_chooser.blue_size = 8 +config_chooser.green_size = 8 +config_chooser.red_size = 8 +#config_chooser.alpha_size = 8 +#config_chooser.depth_size = 8 +#config_chooser.stencil_size = 8 +#config_chooser.sample_buffers = 1 +config_chooser.close + +var configs = config_chooser.choose(egl_display) +assert configs != null else print "choosing config failed: {egl_display.error}" +assert not configs.is_empty else print "no EGL config" + +print "{configs.length} EGL configs available" +for config in configs do + var attribs = config.attribs(egl_display) + print "* caveats: {attribs.caveat}" + print " conformant to: {attribs.conformant}" + print " size of RGBA: {attribs.red_size} {attribs.green_size} {attribs.blue_size} {attribs.alpha_size}" + print " buffer, depth, stencil: {attribs.buffer_size} {attribs.depth_size} {attribs.stencil_size}" +end + +var config = configs.first + +var format = config.attribs(egl_display).native_visual_id + +# TODO android part +# Opengles1Display_midway_init(recv, format); + +var surface = egl_display.create_window_surface(config, x11_window_handle, [0]) +assert surface.is_ok else print egl_display.error + +var context = egl_display.create_context(config) +assert context.is_ok else print egl_display.error + +var make_current_res = egl_display.make_current(surface, surface, context) +assert make_current_res + +var width = surface.attribs(egl_display).width +var height = surface.attribs(egl_display).height +print "Width: {width}" +print "Height: {height}" + +assert egl_bind_opengl_es_api else print "eglBingAPI failed: {egl_display.error}" + +# +## GLESv2 +# + +print "Can compile shaders? 
{gl_shader_compiler}" +assert_no_gl_error + +assert gl_shader_compiler else print "Cannot compile shaders" + +# gl program +print gl_error.to_s +var program = new GLProgram +if not program.is_ok then + print "Program is not ok: {gl_error.to_s}\nLog:" + print program.info_log + abort +end +assert_no_gl_error + +# vertex shader +var vertex_shader = new GLVertexShader +assert vertex_shader.is_ok else print "Vertex shader is not ok: {gl_error}" +vertex_shader.source = """ +attribute vec4 vPosition; +void main() +{ + gl_Position = vPosition; +} """ +vertex_shader.compile +assert vertex_shader.is_compiled else print "Vertex shader compilation failed with: {vertex_shader.info_log} {program.info_log}" +assert_no_gl_error + +# fragment shader +var fragment_shader = new GLFragmentShader +assert fragment_shader.is_ok else print "Fragment shader is not ok: {gl_error}" +fragment_shader.source = """ +precision mediump float; +void main() +{ + gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0); +} +""" +fragment_shader.compile +assert fragment_shader.is_compiled else print "Fragment shader compilation failed with: {fragment_shader.info_log}" +assert_no_gl_error + +program.attach_shader vertex_shader +program.attach_shader fragment_shader +program.bind_attrib_location(0, "vPosition") +program.link +assert program.is_linked else print "Linking failed: {program.info_log}" +assert_no_gl_error + +# draw! +var vertices = [0.0, 0.5, 0.0, -0.5, -0.5, 0.0, 0.5, -0.5, 0.0] +var vertex_array = new VertexArray(0, 3, vertices) +vertex_array.attrib_pointer +gl_clear_color(0.5, 0.0, 0.5, 1.0) +for i in [0..10000[ do + printn "." + assert_no_gl_error + gl_viewport(0, 0, width, height) + gl_clear_color_buffer + program.use + vertex_array.enable + vertex_array.draw_arrays_triangles + egl_display.swap_buffers(surface) +end + +# delete +program.delete +vertex_shader.delete +fragment_shader.delete + +# +## EGL +# +# close +egl_display.make_current(new EGLSurface.none, new EGLSurface.none, new EGLContext.none) +egl_display.destroy_context(context) +egl_display.destroy_surface(surface) + +# +## SDL +# +# close +sdl_display.destroy +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Copyright 2004-2008 Jean Privat +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# How to print arguments of the command line. +module print_arguments + +for a in args do + print a +end +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Copyright 2004-2008 Jean Privat +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# A procedural program (without explicit class definition). +# This program manipulates arrays of integers. +module procedural_array + +# The sum of the elements of `a'. +# Uses a 'for' control structure. +fun array_sum(a: Array[Int]): Int +do + var sum = 0 + for i in a do + sum = sum + i + end + return sum +end + +# The sum of the elements of `a' (alternative version). +# Uses a 'while' control structure. +fun array_sum_alt(a: Array[Int]): Int +do + var sum = 0 + var i = 0 + while i < a.length do + sum = sum + a[i] + i = i + 1 + end + return sum +end + +# The main part of the program. +var a = [10, 5, 8, 9] +print(array_sum(a)) +print(array_sum_alt(a)) +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Copyright 2013 Matthieu Lucas +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Client sample using the Socket module which connect to the server sample. +module socket_client + +import socket + +if args.length < 2 then + print "Usage : socket_client " + return +end + +var s = new Socket.client(args[0], args[1].to_i) +print "[HOST ADDRESS] : {s.address}" +print "[HOST] : {s.host}" +print "[PORT] : {s.port}" +print "Connecting ... {s.connected}" +if s.connected then + print "Writing ... Hello server !" + s.write("Hello server !") + print "[Response from server] : {s.read(100)}" + print "Closing ..." + s.close +end +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Copyright 2013 Matthieu Lucas +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Server sample using the Socket module which allow client to connect +module socket_server + +import socket + +if args.is_empty then + print "Usage : socket_server " + return +end + +var socket = new Socket.server(args[0].to_i, 1) +print "[PORT] : {socket.port.to_s}" + +var clients = new Array[Socket] +var max = socket +loop + var fs = new SocketObserver(true, true, true) + fs.readset.set(socket) + + for c in clients do fs.readset.set(c) + + if fs.select(max, 4, 0) == 0 then + print "Error occured in select {sys.errno.strerror}" + break + end + + if fs.readset.is_set(socket) then + var ns = socket.accept + print "Accepting {ns.address} ... " + print "[Message from {ns.address}] : {ns.read(100)}" + ns.write("Goodbye client.") + print "Closing {ns.address} ..." + ns.close + end +end + +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import template + +### Here, definition of the specific templates + +# The root template for composers +class TmplComposers + super Template + + # Short list of composers + var composers = new Array[TmplComposer] + + # Detailled list of composers + var composer_details = new Array[TmplComposerDetail] + + # Add a composer in both lists + fun add_composer(firstname, lastname: String, birth, death: Int) + do + composers.add(new TmplComposer(lastname)) + composer_details.add(new TmplComposerDetail(firstname, lastname, birth, death)) + end + + redef fun rendering do + add """ +COMPOSERS +========= +""" + add_all composers + add """ + +DETAILS +======= +""" + add_all composer_details + end +end + +# A composer in the short list of composers +class TmplComposer + super Template + + # Short name + var name: String + + init(name: String) do self.name = name + + redef fun rendering do add "- {name}\n" +end + +# A composer in the detailled list of composers +class TmplComposerDetail + super Template + + var firstname: String + var lastname: String + var birth: Int + var death: Int + + init(firstname, lastname: String, birth, death: Int) do + self.firstname = firstname + self.lastname = lastname + self.birth = birth + self.death = death + end + + redef fun rendering do add """ + +COMPOSER: {{{firstname}}} {{{lastname}}} +BIRTH...: {{{birth}}} +DEATH...: {{{death}}} +""" + +end + +### Here a simple usage of the templates + +var f = new TmplComposers +f.add_composer("Johann Sebastian", "Bach", 1685, 1750) +f.add_composer("George Frideric", "Handel", 1685, 1759) +f.add_composer("Wolfgang Amadeus", "Mozart", 1756, 1791) +f.write_to(stdout) +# This file is part of NIT ( http://www.nitlanguage.org ). +# +# Copyright 2014 Lucas Bajolet +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +# Sample module for a minimal chat server using Websockets on port 8088 +module websocket_server + +import websocket + +var sock = new WebSocket(8088, 1) + +var msg: String + +if sock.listener.eof then + print sys.errno.strerror +end + +sock.accept + +while not sock.listener.eof do + if not sock.connected then sock.accept + if sys.stdin.poll_in then + msg = gets + printn "Received message : {msg}" + if msg == "exit" then sock.close + if msg == "disconnect" then sock.disconnect_client + sock.write(msg) + end + if sock.can_read(10) then + msg = sock.read_line + if msg != "" then print msg + end +end + diff --git a/tests/examplefiles/calculator.nit b/tests/examplefiles/calculator.nit deleted file mode 100644 index 541f4d28..00000000 --- a/tests/examplefiles/calculator.nit +++ /dev/null @@ -1,272 +0,0 @@ -# This file is part of NIT ( http://www.nitlanguage.org ). -# -# Copyright 2013 Alexis Laferrière -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import gtk - -class CalculatorContext - var result : nullable Float = null - - var last_op : nullable Char = null - - var current : nullable Float = null - var after_point : nullable Int = null - - fun push_op( op : Char ) - do - apply_last_op_if_any - if op == 'C' then - self.result = 0.0 - last_op = null - else - last_op = op # store for next push_op - end - - # prepare next current - after_point = null - current = null - end - - fun push_digit( digit : Int ) - do - var current = current - if current == null then current = 0.0 - - var after_point = after_point - if after_point == null then - current = current * 10.0 + digit.to_f - else - current = current + digit.to_f * 10.0.pow(after_point.to_f) - self.after_point -= 1 - end - - self.current = current - end - - fun switch_to_decimals - do - if self.current == null then current = 0.0 - if after_point != null then return - - after_point = -1 - end - - fun apply_last_op_if_any - do - var op = last_op - - var result = result - if result == null then result = 0.0 - - var current = current - if current == null then current = 0.0 - - if op == null then - result = current - else if op == '+' then - result = result + current - else if op == '-' then - result = result - current - else if op == '/' then - result = result / current - else if op == '*' then - result = result * current - end - self.result = result - self.current = null - end -end - -class CalculatorGui - super GtkCallable - - var win : GtkWindow - var container : GtkGrid - - var lbl_disp : GtkLabel - var but_eq : GtkButton - var but_dot : GtkButton - - var context = new CalculatorContext - - redef fun signal( sender, user_data ) - do - var after_point = context.after_point - if after_point == null then - after_point = 0 - else - after_point = (after_point.abs) - end - - if user_data isa Char then # is an operation - var c = user_data - if c == '.' then - but_dot.sensitive= false - context.switch_to_decimals - lbl_disp.text = "{context.current.to_i}." 
- else - but_dot.sensitive= true - context.push_op( c ) - - var s = context.result.to_precision_native(6) - var index : nullable Int = null - for i in s.length.times do - var chiffre = s.chars[i] - if chiffre == '0' and index == null then - index = i - else if chiffre != '0' then - index = null - end - end - if index != null then - s = s.substring(0, index) - if s.chars[s.length-1] == ',' then s = s.substring(0, s.length-1) - end - lbl_disp.text = s - end - else if user_data isa Int then # is a number - var n = user_data - context.push_digit( n ) - lbl_disp.text = context.current.to_precision_native(after_point) - end - end - - init - do - init_gtk - - win = new GtkWindow( 0 ) - - container = new GtkGrid(5,5,true) - win.add( container ) - - lbl_disp = new GtkLabel( "_" ) - container.attach( lbl_disp, 0, 0, 5, 1 ) - - # digits - for n in [0..9] do - var but = new GtkButton.with_label( n.to_s ) - but.request_size( 64, 64 ) - but.signal_connect( "clicked", self, n ) - if n == 0 then - container.attach( but, 0, 4, 1, 1 ) - else container.attach( but, (n-1)%3, 3-(n-1)/3, 1, 1 ) - end - - # operators - var r = 1 - for op in ['+', '-', '*', '/' ] do - var but = new GtkButton.with_label( op.to_s ) - but.request_size( 64, 64 ) - but.signal_connect( "clicked", self, op ) - container.attach( but, 3, r, 1, 1 ) - r+=1 - end - - # = - but_eq = new GtkButton.with_label( "=" ) - but_eq.request_size( 64, 64 ) - but_eq.signal_connect( "clicked", self, '=' ) - container.attach( but_eq, 4, 3, 1, 2 ) - - # . - but_dot = new GtkButton.with_label( "." ) - but_dot.request_size( 64, 64 ) - but_dot.signal_connect( "clicked", self, '.' ) - container.attach( but_dot, 1, 4, 1, 1 ) - - #C - var but_c = new GtkButton.with_label( "C" ) - but_c.request_size( 64, 64 ) - but_c.signal_connect("clicked", self, 'C') - container.attach( but_c, 2, 4, 1, 1 ) - - win.show_all - end -end - -# context tests -var context = new CalculatorContext -context.push_digit( 1 ) -context.push_digit( 2 ) -context.push_op( '+' ) -context.push_digit( 3 ) -context.push_op( '*' ) -context.push_digit( 2 ) -context.push_op( '=' ) -var r = context.result.to_precision( 2 ) -assert r == "30.00" else print r - -context = new CalculatorContext -context.push_digit( 1 ) -context.push_digit( 4 ) -context.switch_to_decimals -context.push_digit( 1 ) -context.push_op( '*' ) -context.push_digit( 3 ) -context.push_op( '=' ) -r = context.result.to_precision( 2 ) -assert r == "42.30" else print r - -context.push_op( '+' ) -context.push_digit( 1 ) -context.push_digit( 1 ) -context.push_op( '=' ) -r = context.result.to_precision( 2 ) -assert r == "53.30" else print r - -context = new CalculatorContext -context.push_digit( 4 ) -context.push_digit( 2 ) -context.switch_to_decimals -context.push_digit( 3 ) -context.push_op( '/' ) -context.push_digit( 3 ) -context.push_op( '=' ) -r = context.result.to_precision( 2 ) -assert r == "14.10" else print r - -#test multiple decimals -context = new CalculatorContext -context.push_digit( 5 ) -context.push_digit( 0 ) -context.switch_to_decimals -context.push_digit( 1 ) -context.push_digit( 2 ) -context.push_digit( 3 ) -context.push_op( '+' ) -context.push_digit( 1 ) -context.push_op( '=' ) -r = context.result.to_precision( 3 ) -assert r == "51.123" else print r - -#test 'C' button -context = new CalculatorContext -context.push_digit( 1 ) -context.push_digit( 0 ) -context.push_op( '+' ) -context.push_digit( 1 ) -context.push_digit( 0 ) -context.push_op( '=' ) -context.push_op( 'C' ) -r = context.result.to_precision( 1 ) -assert r == 
"0.0" else print r - -# graphical application - -if "NIT_TESTING".environ != "true" then - var app = new CalculatorGui - run_gtk -end diff --git a/tests/examplefiles/callback_chimpanze.nit b/tests/examplefiles/callback_chimpanze.nit deleted file mode 100644 index 2ca8dc3a..00000000 --- a/tests/examplefiles/callback_chimpanze.nit +++ /dev/null @@ -1,45 +0,0 @@ -# This file is part of NIT ( http://www.nitlanguage.org ). -# -# Copyright 2013 Matthieu Lucas -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# This sample has been implemented to show you how simple is it to play -# with native callbacks (C) through an high level with NIT program. - -module callback_chimpanze -import callback_monkey - -class Chimpanze - super MonkeyActionCallable - - fun create - do - var monkey = new Monkey - print "Hum, I'm sleeping ..." - # Invoking method which will take some time to compute, and - # will be back in wokeUp method with information. - # - Callback method defined in MonkeyActionCallable Interface - monkey.wokeUpAction(self, "Hey, I'm awake.") - end - - # Inherit callback method, defined by MonkeyActionCallable interface - # - Back of wokeUpAction method - redef fun wokeUp( sender:Monkey, message:Object ) - do - print message - end -end - -var m = new Chimpanze -m.create diff --git a/tests/examplefiles/callback_monkey.nit b/tests/examplefiles/callback_monkey.nit deleted file mode 100644 index 6e1ed262..00000000 --- a/tests/examplefiles/callback_monkey.nit +++ /dev/null @@ -1,92 +0,0 @@ -# This file is part of NIT ( http://www.nitlanguage.org ). -# -# Copyright 2013 Matthieu Lucas -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# This sample has been implemented to show you how simple is it to play -# with native callbacks (C) through an high level with NIT program. - -module callback_monkey - -in "C header" `{ - #include - #include - - typedef struct { - int id; - int age; - } CMonkey; - - typedef struct { - MonkeyActionCallable toCall; - Object message; - } MonkeyAction; -`} - -in "C body" `{ - // Method which reproduce a callback answer - // Please note that a function pointer is only used to reproduce the callback - void cbMonkey(CMonkey *mkey, void callbackFunc(CMonkey*, MonkeyAction*), MonkeyAction *data) - { - sleep(2); - callbackFunc( mkey, data ); - } - - // Back of background treatment, will be redirected to callback function - void nit_monkey_callback_func( CMonkey *mkey, MonkeyAction *data ) - { - // To call a your method, the signature must be written like this : - // _... 
- MonkeyActionCallable_wokeUp( data->toCall, mkey, data->message ); - } -`} - -# Implementable interface to get callback in defined methods -interface MonkeyActionCallable - fun wokeUp( sender:Monkey, message: Object) is abstract -end - -# Defining my object type Monkey, which is, in a low level, a pointer to a C struct (CMonkey) -extern class Monkey `{ CMonkey * `} - - new `{ - CMonkey *monkey = malloc( sizeof(CMonkey) ); - monkey->age = 10; - monkey->id = 1; - return monkey; - `} - - # Object method which will get a callback in wokeUp method, defined in MonkeyActionCallable interface - # Must be defined as Nit/C method because of C call inside - fun wokeUpAction( toCall: MonkeyActionCallable, message: Object ) is extern import MonkeyActionCallable.wokeUp `{ - - // Allocating memory to keep reference of received parameters : - // - Object receiver - // - Message - MonkeyAction *data = malloc( sizeof(MonkeyAction) ); - - // Incrementing reference counter to prevent from releasing - MonkeyActionCallable_incr_ref( toCall ); - Object_incr_ref( message ); - - data->toCall = toCall; - data->message = message; - - // Calling method which reproduce a callback by passing : - // - Receiver - // - Function pointer to object return method - // - Datas - cbMonkey( recv, &nit_monkey_callback_func, data ); - `} -end diff --git a/tests/examplefiles/circular_list.nit b/tests/examplefiles/circular_list.nit deleted file mode 100644 index c3ba1edb..00000000 --- a/tests/examplefiles/circular_list.nit +++ /dev/null @@ -1,167 +0,0 @@ -# This file is part of NIT ( http://www.nitlanguage.org ). -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Implementation of circular lists -# This example shows the usage of generics and somewhat a specialisation of collections. -module circular_list - -# Sequences of elements implemented with a double-linked circular list -class CircularList[E] - # Like standard Array or LinkedList, CircularList is a Sequence. - super Sequence[E] - - # The first node of the list if any - # The special case of an empty list is handled by a null node - private var node: nullable CLNode[E] = null - - redef fun iterator do return new CircularListIterator[E](self) - - redef fun first do return self.node.item - - redef fun push(e) - do - var new_node = new CLNode[E](e) - var n = self.node - if n == null then - # the first node - self.node = new_node - else - # not the first one, so attach nodes correctly. - var old_last_node = n.prev - new_node.next = n - new_node.prev = old_last_node - old_last_node.next = new_node - n.prev = new_node - end - end - - redef fun pop - do - var n = self.node - assert n != null - var prev = n.prev - if prev == n then - # the only node - self.node = null - return n.item - end - # not the only one do detach nodes correctly. - var prev_prev = prev.prev - n.prev = prev_prev - prev_prev.next = n - return prev.item - end - - redef fun unshift(e) - do - # Circularity has benefits. 
- push(e) - self.node = self.node.prev - end - - redef fun shift - do - # Circularity has benefits. - self.node = self.node.next - return self.pop - end - - # Move the first at the last position, the second at the first, etc. - fun rotate - do - var n = self.node - if n == null then return - self.node = n.next - end - - # Sort the list using the Josephus algorithm. - fun josephus(step: Int) - do - var res = new CircularList[E] - while not self.is_empty do - # count 'step' - for i in [1..step[ do self.rotate - # kill - var x = self.shift - res.add(x) - end - self.node = res.node - end -end - -# Nodes of a CircularList -private class CLNode[E] - # The current item - var item: E - - # The next item in the circular list. - # Because of circularity, there is always a next; - # so by default let it be self - var next: CLNode[E] = self - - # The previous item in the circular list. - # Coherence between next and previous nodes has to be maintained by the - # circular list. - var prev: CLNode[E] = self -end - -# An iterator of a CircularList. -private class CircularListIterator[E] - super IndexedIterator[E] - - redef var index: Int - - # The current node pointed. - # Is null if the list is empty. - var node: nullable CLNode[E] - - # The list iterated. - var list: CircularList[E] - - redef fun is_ok - do - # Empty lists are not OK. - # Pointing again the first node is not OK. - return self.node != null and (self.index == 0 or self.node != self.list.node) - end - - redef fun next - do - self.node = self.node.next - self.index += 1 - end - - redef fun item do return self.node.item - - init(list: CircularList[E]) - do - self.node = list.node - self.list = list - self.index = 0 - end -end - -var i = new CircularList[Int] -i.add_all([1, 2, 3, 4, 5, 6, 7]) -print i.first -print i.join(":") - -i.push(8) -print i.shift -print i.pop -i.unshift(0) -print i.join(":") - -i.josephus(3) -print i.join(":") diff --git a/tests/examplefiles/clock.nit b/tests/examplefiles/clock.nit deleted file mode 100644 index 8fdb9abd..00000000 --- a/tests/examplefiles/clock.nit +++ /dev/null @@ -1,78 +0,0 @@ -# This file is part of NIT ( http://www.nitlanguage.org ). -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# This module provide a simple wall clock. -# It is an example of getters and setters. -# A beefed-up module is available in clock_more -module clock - -# A simple wall clock with 60 minutes and 12 hours. -class Clock - # total number of minutes from 0 to 719 - var total_minutes: Int - # Note: only the read acces is public, the write access is private. - - # number of minutes in the current hour (from 0 to 59) - fun minutes: Int do return self.total_minutes % 60 - - # set the number of minutes in the current hour. 
- # if m < 0 or m >= 60, the hour will be changed accordinlgy - fun minutes=(m: Int) do self.total_minutes = self.hours * 60 + m - - # number of hours (from 0 to 11) - fun hours: Int do return self.total_minutes / 60 - - # set the number of hours - # the minutes will not be updated - fun hours=(h: Int) do self.total_minutes = h * 60 + minutes - - # the position of the hour arrow in the [0..60[ interval - fun hour_pos: Int do return total_minutes / 12 - - # replace the arrow of hours (from 0 to 59). - # the hours and the minutes will be updated. - fun hour_pos=(h: Int) do self.total_minutes = h * 12 - - redef fun to_s do return "{hours}:{minutes}" - - fun reset(hours, minutes: Int) do self.total_minutes = hours*60 + minutes - - init(hours, minutes: Int) do self.reset(hours, minutes) - - redef fun ==(o) - do - # Note: o is a nullable Object, a type test is required - # Thanks to adaptive typing, there is no downcast - # i.e. the code is safe! - return o isa Clock and self.total_minutes == o.total_minutes - end -end - -var c = new Clock(10,50) -print "It's {c} o'clock." - -c.minutes += 22 -print "Now it's {c} o'clock." - -print "The short arrow in on the {c.hour_pos/5} and the long arrow in on the {c.minutes/5}." - -c.hours -= 2 -print "Now it's {c} o'clock." - -var c2 = new Clock(9, 11) -print "It's {c2} on the second clock." -print "The two clocks are synchronized: {c == c2}." -c2.minutes += 1 -print "It's now {c2} on the second clock." -print "The two clocks are synchronized: {c == c2}." diff --git a/tests/examplefiles/clock_more.nit b/tests/examplefiles/clock_more.nit deleted file mode 100644 index d2ef89e2..00000000 --- a/tests/examplefiles/clock_more.nit +++ /dev/null @@ -1,60 +0,0 @@ -# This file is part of NIT ( http://www.nitlanguage.org ). -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# This module beef up the clock module by allowing a clock to be comparable. -# It show the usage of class refinement -module clock_more - -import clock - -redef class Clock - # Clock are now comparable - super Comparable - - # Comparaison of a clock make only sense with an other clock - redef type OTHER: Clock - - redef fun <(o) - do - # Note: < is the only abstract method of Comparable. - # All other operators and methods rely on < and ==. - return self.total_minutes < o.total_minutes - end -end - -var c1 = new Clock(8, 12) -var c2 = new Clock(8, 13) -var c3 = new Clock(9, 13) - -print "{c1}<{c2}? {c1{c2}? {c1>c2}" -print "{c1}>={c2}? {c1>=c2}" -print "{c1}<=>{c2}? {c1<=>c2}" -print "{c1},{c2}? max={c1.max(c2)} min={c1.min(c2)}" -print "{c1}.is_between({c2}, {c3})? {c1.is_between(c2, c3)}" -print "{c2}.is_between({c1}, {c3})? {c2.is_between(c1, c3)}" - -print "-" - -c1.minutes += 1 - -print "{c1}<{c2}? {c1{c2}? {c1>c2}" -print "{c1}>={c2}? {c1>=c2}" -print "{c1}<=>{c2}? {c1<=>c2}" -print "{c1},{c2}? max={c1.max(c2)} min={c1.min(c2)}" -print "{c1}.is_between({c2}, {c3})? {c1.is_between(c2, c3)}" -print "{c2}.is_between({c1}, {c3})? 
{c2.is_between(c1, c3)}" diff --git a/tests/examplefiles/curl_http.nit b/tests/examplefiles/curl_http.nit deleted file mode 100644 index 079f12c8..00000000 --- a/tests/examplefiles/curl_http.nit +++ /dev/null @@ -1,113 +0,0 @@ -# This file is part of NIT ( http://www.nitlanguage.org ). -# -# Copyright 2013 Matthieu Lucas -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Sample of the Curl module. -module curl_http - -import curl - -# Small class to represent an Http Fetcher -class MyHttpFetcher - super CurlCallbacks - - var curl: Curl - var our_body: String = "" - - init(curl: Curl) do self.curl = curl - - # Release curl object - fun destroy do self.curl.destroy - - # Header callback - redef fun header_callback(line: String) do - # We keep this callback silent for testing purposes - #if not line.has_prefix("Date:") then print "Header_callback : {line}" - end - - # Body callback - redef fun body_callback(line: String) do self.our_body = "{self.our_body}{line}" - - # Stream callback - Cf : No one is registered - redef fun stream_callback(buffer: String, size: Int, count: Int) do print "Stream_callback : {buffer} - {size} - {count}" -end - - -# Program -if args.length < 2 then - print "Usage: curl_http " -else - var curl = new Curl - var url = args[1] - var request = new CurlHTTPRequest(url, curl) - - # HTTP Get Request - if args[0] == "GET" then - request.verbose = false - var getResponse = request.execute - - if getResponse isa CurlResponseSuccess then - print "Status code : {getResponse.status_code}" - print "Body : {getResponse.body_str}" - else if getResponse isa CurlResponseFailed then - print "Error code : {getResponse.error_code}" - print "Error msg : {getResponse.error_msg}" - end - - # HTTP Post Request - else if args[0] == "POST" then - var myHttpFetcher = new MyHttpFetcher(curl) - request.delegate = myHttpFetcher - - var postDatas = new HeaderMap - postDatas["Bugs Bunny"] = "Daffy Duck" - postDatas["Batman"] = "Robin likes special characters @#ùà!è§'(\"é&://,;<>∞~*" - postDatas["Batman"] = "Yes you can set multiple identical keys, but APACHE will consider only once, the last one" - request.datas = postDatas - request.verbose = false - var postResponse = request.execute - - print "Our body from the callback : {myHttpFetcher.our_body}" - - if postResponse isa CurlResponseSuccess then - print "*** Answer ***" - print "Status code : {postResponse.status_code}" - print "Body should be empty, because we decided to manage callbacks : {postResponse.body_str.length}" - else if postResponse isa CurlResponseFailed then - print "Error code : {postResponse.error_code}" - print "Error msg : {postResponse.error_msg}" - end - - # HTTP Get to file Request - else if args[0] == "GET_FILE" then - var headers = new HeaderMap - headers["Accept"] = "Moo" - request.headers = headers - request.verbose = false - var downloadResponse = request.download_to_file(null) - - if downloadResponse isa CurlFileResponseSuccess then - print "*** Answer ***" - print "Status code : {downloadResponse.status_code}" - print 
"Size downloaded : {downloadResponse.size_download}" - else if downloadResponse isa CurlResponseFailed then - print "Error code : {downloadResponse.error_code}" - print "Error msg : {downloadResponse.error_msg}" - end - # Program logic - else - print "Usage : Method[POST, GET, GET_FILE]" - end -end diff --git a/tests/examplefiles/curl_mail.nit b/tests/examplefiles/curl_mail.nit deleted file mode 100644 index b28f5a4c..00000000 --- a/tests/examplefiles/curl_mail.nit +++ /dev/null @@ -1,59 +0,0 @@ -# This file is part of NIT ( http://www.nitlanguage.org ). -# -# Copyright 2013 Matthieu Lucas -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Mail sender sample using the Curl module -module curl_mail - -import curl - -var curl = new Curl -var mail_request = new CurlMailRequest(curl) - -# Networks -var response = mail_request.set_outgoing_server("smtps://smtp.example.org:465", "user@example.org", "mypassword") -if response isa CurlResponseFailed then - print "Error code : {response.error_code}" - print "Error msg : {response.error_msg}" -end - -# Headers -mail_request.from = "Billy Bob" -mail_request.to = ["user@example.org"] -mail_request.cc = ["bob@example.org"] -mail_request.bcc = null - -var headers_body = new HeaderMap -headers_body["Content-Type:"] = "text/html; charset=\"UTF-8\"" -headers_body["Content-Transfer-Encoding:"] = "quoted-printable" -mail_request.headers_body = headers_body - -# Content -mail_request.body = "

Here you can write HTML stuff.

" -mail_request.subject = "Hello From My Nit Program" - -# Others -mail_request.verbose = false - -# Send mail -response = mail_request.execute -if response isa CurlResponseFailed then - print "Error code : {response.error_code}" - print "Error msg : {response.error_msg}" -else if response isa CurlMailResponseSuccess then - print "Mail Sent" -else - print "Unknown Curl Response type" -end diff --git a/tests/examplefiles/draw_operation.nit b/tests/examplefiles/draw_operation.nit deleted file mode 100644 index cada8318..00000000 --- a/tests/examplefiles/draw_operation.nit +++ /dev/null @@ -1,243 +0,0 @@ -# This file is part of NIT ( http://www.nitlanguage.org ). -# -# Copyright 2012-2013 Alexis Laferrière -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Draws an arithmetic operation to the terminal -module draw_operation - -redef enum Int - fun n_chars: Int `{ - int c; - if ( abs(recv) >= 10 ) - c = 1+(int)log10f( (float)abs(recv) ); - else - c = 1; - if ( recv < 0 ) c ++; - return c; - `} -end - -redef enum Char - fun as_operator(a, b: Int): Int - do - if self == '+' then return a + b - if self == '-' then return a - b - if self == '*' then return a * b - if self == '/' then return a / b - if self == '%' then return a % b - abort - end - - fun override_dispc: Bool - do - return self == '+' or self == '-' or self == '*' or self == '/' or self == '%' - end - - fun lines(s: Int): Array[Line] - do - if self == '+' then - return [new Line(new P(0,s/2),1,0,s), new Line(new P(s/2,1),0,1,s-2)] - else if self == '-' then - return [new Line(new P(0,s/2),1,0,s)] - else if self == '*' then - var lines = new Array[Line] - for y in [1..s-1[ do - lines.add( new Line(new P(1,y), 1,0,s-2) ) - end - return lines - else if self == '/' then - return [new Line(new P(s-1,0), -1,1, s )] - else if self == '%' then - var q4 = s/4 - var lines = [new Line(new P(s-1,0),-1,1,s)] - for l in [0..q4[ do - lines.append([ new Line( new P(0,l), 1,0,q4), new Line( new P(s-1,s-1-l), -1,0,q4) ]) - end - return lines - else if self == '1' then - return [new Line(new P(s/2,0), 0,1,s),new Line(new P(0,s-1),1,0,s), - new Line( new P(s/2,0),-1,1,s/2)] - else if self == '2' then - return [new Line(new P(0,0), 1,0,s),new Line(new P(s-1,0),0,1,s/2), - new Line( new P(0,s-1),1,0,s), new Line( new P(0,s/2), 0,1,s/2), - new Line( new P(0,s/2), 1,0,s)] - else if self == '3' then - return [new Line(new P(0,0), 1,0,s),new Line(new P(s-1,0),0,1,s), - new Line( new P(0,s-1),1,0,s), new Line( new P(0,s/2), 1,0,s)] - else if self == '4' then - return [new Line(new P(s-1,0),0,1,s), new Line( new P(0,0), 0,1,s/2), - new Line( new P(0,s/2), 1,0,s)] - else if self == '5' then - return [new Line(new P(0,0), 1,0,s),new Line(new P(s-1,s/2),0,1,s/2), - new Line( new P(0,s-1),1,0,s), new Line( new P(0,0), 0,1,s/2), - new Line( new P(0,s/2), 1,0,s)] - else if self == '6' then - return [new Line(new P(0,0), 1,0,s),new Line(new P(s-1,s/2),0,1,s/2), - new Line( new P(0,s-1),1,0,s), new Line( new P(0,0), 0,1,s), - new Line( new P(0,s/2), 1,0,s)] - else if 
self == '7' then - var tl = new P(0,0) - var tr = new P(s-1,0) - return [new Line(tl, 1,0,s), new Line(tr,-1,1,s)] - else if self == '8' then - return [new Line(new P(0,0), 1,0,s),new Line(new P(s-1,0),0,1,s), - new Line( new P(0,s-1),1,0,s), new Line( new P(0,0), 0,1,s), - new Line( new P(0,s/2), 1,0,s)] - else if self == '9' then - return [new Line(new P(0,0), 1,0,s),new Line(new P(s-1,0),0,1,s), - new Line( new P(0,s-1),1,0,s), new Line( new P(0,0), 0,1,s/2), - new Line( new P(0,s/2), 1,0,s)] - else if self == '0' then - return [new Line(new P(0,0), 1,0,s),new Line(new P(s-1,0),0,1,s), - new Line( new P(0,s-1),1,0,s), new Line( new P(0,0), 0,1,s)] - end - return new Array[Line] - end -end - -class P - var x : Int - var y : Int -end - -redef class String - # hack is to support a bug in the evaluation software - fun draw(dispc: Char, size, gap: Int, hack: Bool) - do - var w = size * length +(length-1)*gap - var h = size - var map = new Array[Array[Char]] - for x in [0..w[ do - map[x] = new Array[Char].filled_with( ' ', h ) - end - - var ci = 0 - for c in self.chars do - var local_dispc - if c.override_dispc then - local_dispc = c - else - local_dispc = dispc - end - - var lines = c.lines( size ) - for line in lines do - var x = line.o.x+ci*size - x += ci*gap - var y = line.o.y - for s in [0..line.len[ do - assert map.length > x and map[x].length > y else print "setting {x},{y} as {local_dispc}" - map[x][y] = local_dispc - x += line.step_x - y += line.step_y - end - end - - ci += 1 - end - - if hack then - for c in [0..size[ do - map[c][0] = map[map.length-size+c][0] - map[map.length-size+c][0] = ' ' - end - end - - for y in [0..h[ do - for x in [0..w[ do - printn map[x][y] - end - print "" - end - end -end - -class Line - var o : P - var step_x : Int - var step_y : Int - var len : Int -end - -var a -var b -var op_char -var disp_char -var disp_size -var disp_gap - -if "NIT_TESTING".environ == "true" then - a = 567 - b = 13 - op_char = '*' - disp_char = 'O' - disp_size = 8 - disp_gap = 1 -else - printn "Left operand: " - a = gets.to_i - - printn "Right operand: " - b = gets.to_i - - printn "Operator (+, -, *, /, %): " - op_char = gets.chars[0] - - printn "Char to display: " - disp_char = gets.chars[0] - - printn "Size of text: " - disp_size = gets.to_i - - printn "Space between digits: " - disp_gap = gets.to_i -end - -var result = op_char.as_operator( a, b ) - -var len_a = a.n_chars -var len_b = b.n_chars -var len_res = result.n_chars -var max_len = len_a.max( len_b.max( len_res ) ) + 1 - -# draw first line -var d = max_len - len_a -var line_a = "" -for i in [0..d[ do line_a += " " -line_a += a.to_s -line_a.draw( disp_char, disp_size, disp_gap, false ) - -print "" -# draw second line -d = max_len - len_b-1 -var line_b = op_char.to_s -for i in [0..d[ do line_b += " " -line_b += b.to_s -line_b.draw( disp_char, disp_size, disp_gap, false ) - -# draw ----- -print "" -for i in [0..disp_size*max_len+(max_len-1)*disp_gap] do - printn "_" -end -print "" -print "" - -# draw result -d = max_len - len_res -var line_res = "" -for i in [0..d[ do line_res += " " -line_res += result.to_s -line_res.draw( disp_char, disp_size, disp_gap, false ) diff --git a/tests/examplefiles/drop_privileges.nit b/tests/examplefiles/drop_privileges.nit deleted file mode 100644 index 932a87be..00000000 --- a/tests/examplefiles/drop_privileges.nit +++ /dev/null @@ -1,46 +0,0 @@ -# This file is part of NIT ( http://www.nitlanguage.org ). 
-# -# Copyright 2013 Alexis Laferrière -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Example using the privileges module to drop privileges from root -module drop_privileges - -import privileges - -# basic command line options -var opts = new OptionContext -var opt_ug = new OptionUserAndGroup.for_dropping_privileges -opt_ug.mandatory = true -opts.add_option(opt_ug) - -# parse and check command line options -opts.parse(args) -if not opts.errors.is_empty then - print opts.errors - print "Usage: drop_privileges [options]" - opts.usage - exit 1 -end - -# original user -print "before {sys.uid}:{sys.gid}" - -# make the switch -var user_group = opt_ug.value -assert user_group != null -user_group.drop_privileges - -# final user -print "after {sys.uid}:{sys.egid}" diff --git a/tests/examplefiles/extern_methods.nit b/tests/examplefiles/extern_methods.nit deleted file mode 100644 index 00c6b684..00000000 --- a/tests/examplefiles/extern_methods.nit +++ /dev/null @@ -1,69 +0,0 @@ -# This file is part of NIT ( http://www.nitlanguage.org ). -# -# Copyright 2012-2013 Alexis Laferrière -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# This module illustrates some uses of the FFI, specifically -# how to use extern methods. Which means to implement a Nit method in C. -module extern_methods - -redef enum Int - # Returns self'th fibonnaci number - # implemented here in C for optimization purposes - fun fib : Int import fib `{ - if ( recv < 2 ) - return recv; - else - return Int_fib( recv-1 ) + Int_fib( recv-2 ); - `} - - # System call to sleep for "self" seconds - fun sleep `{ - sleep( recv ); - `} - - # Return atan2l( self, x ) from libmath - fun atan_with( x : Int ) : Float `{ - return atan2( recv, x ); - `} - - # This method callback to Nit methods from C code - # It will use from C code: - # * the local fib method - # * the + operator, a method of Int - # * to_s, a method of all objects - # * String.to_cstring, a method of String to return an equivalent char* - fun foo import fib, +, to_s, String.to_cstring `{ - long recv_fib = Int_fib( recv ); - long recv_plus_fib = Int__plus( recv, recv_fib ); - - String nit_string = Int_to_s( recv_plus_fib ); - char *c_string = String_to_cstring( nit_string ); - - printf( "from C: self + fib(self) = %s\n", c_string ); - `} - - # Equivalent to foo but written in pure Nit - fun bar do print "from Nit: self + fib(self) = {self+self.fib}" -end - -print 12.fib - -print "sleeping 1 second..." 
-1.sleep - -print 100.atan_with( 200 ) -8.foo -8.bar - diff --git a/tests/examplefiles/fibonacci.nit b/tests/examplefiles/fibonacci.nit deleted file mode 100644 index e1a72c9e..00000000 --- a/tests/examplefiles/fibonacci.nit +++ /dev/null @@ -1,43 +0,0 @@ -# This file is part of NIT ( http://www.nitlanguage.org ). -# -# Copyright 2004-2008 Jean Privat -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# A simple exemple of refinement where a method is added to the integer class. -module fibonacci - -redef class Int - # Calculate the self-th element of the fibonacci sequence. - fun fibonacci: Int - do - if self < 2 then - return 1 - else - return (self-2).fibonacci + (self-1).fibonacci - end - end -end - -# Print usage and exit. -fun usage -do - print "Usage: fibonnaci " - exit 0 -end - -# Main part -if args.length != 1 then - usage -end -print args.first.to_i.fibonacci diff --git a/tests/examplefiles/hello_world.nit b/tests/examplefiles/hello_world.nit deleted file mode 100644 index da6849ae..00000000 --- a/tests/examplefiles/hello_world.nit +++ /dev/null @@ -1 +0,0 @@ -print "hello world" diff --git a/tests/examplefiles/html_page.nit b/tests/examplefiles/html_page.nit deleted file mode 100644 index cf76665d..00000000 --- a/tests/examplefiles/html_page.nit +++ /dev/null @@ -1,105 +0,0 @@ -# This file is part of NIT ( http://www.nitlanguage.org ). -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import html - -class NitHomepage - super HTMLPage - - redef fun head do - add("meta").attr("charset", "utf-8") - add("title").text("Nit") - add("link").attr("rel", "icon").attr("href", "http://nitlanguage.org/favicon.ico").attr("type", "image/x-icon") - add("link").attr("rel", "stylesheet").attr("href", "http://nitlanguage.org/style.css").attr("type", "text/css") - add("link").attr("rel", "stylesheet").attr("href", "http://nitlanguage.org/local.css").attr("type", "text/css") - end - - redef fun body do - open("article").add_class("page") - open("section").add_class("pageheader") - add_html("theNitProgramming Language") - open("header").add_class("header") - open("div").add_class("topsubtitle") - add("p").text("A Fun Language for Serious Programming") - close("div") - close("header") - close("section") - - open("div").attr("id", "pagebody") - open("section").attr("id", "content") - add("h1").text("# What is Nit?") - add("p").text("Nit is an object-oriented programming language. 
The goal of Nit is to propose a robust statically typed programming language where structure is not a pain.") - add("p").text("So, what does the famous hello world program look like, in Nit?") - add_html("
print 'Hello, World!'
") - - add("h1").text("# Feature Highlights") - add("h2").text("Usability") - add("p").text("Nit's goal is to be usable by real programmers for real projects") - - open("ul") - open("li") - add("a").attr("href", "http://en.wikipedia.org/wiki/KISS_principle").text("KISS principle") - close("li") - add("li").text("Script-like language without verbosity nor cryptic statements") - add("li").text("Painless static types: static typing should help programmers") - add("li").text("Efficient development, efficient execution, efficient evolution.") - close("ul") - - add("h2").text("Robustness") - add("p").text("Nit will help you to write bug-free programs") - - open("ul") - add("li").text("Strong static typing") - add("li").text("No more NullPointerException") - close("ul") - - add("h2").text("Object-Oriented") - add("p").text("Nit's guideline is to follow the most powerful OO principles") - - open("ul") - open("li") - add("a").attr("href", "./everything_is_an_object/").text("Everything is an object") - close("li") - open("li") - add("a").attr("href", "./multiple_inheritance/").text("Multiple inheritance") - close("li") - open("li") - add("a").attr("href", "./refinement/").text("Open classes") - close("li") - open("li") - add("a").attr("href", "./virtual_types/").text("Virtual types") - close("li") - close("ul") - - - add("h1").text("# Getting Started") - add("p").text("Get Nit from its Git repository:") - - add_html("
$ git clone http://nitlanguage.org/nit.git
") - add("p").text("Build the compiler (may be long):") - add_html("
$ cd nit\n")
-					add_html("$ make
") - add("p").text("Compile a program:") - add_html("
$ bin/nitc examples/hello_world.nit
") - add("p").text("Execute the program:") - add_html("
$ ./hello_world
") - close("section") - close("div") - close("article") - end -end - -var page = new NitHomepage -page.write_to stdout -page.write_to_file("nit.html") diff --git a/tests/examplefiles/int_stack.nit b/tests/examplefiles/int_stack.nit deleted file mode 100644 index 1109bbbc..00000000 --- a/tests/examplefiles/int_stack.nit +++ /dev/null @@ -1,100 +0,0 @@ -# This file is part of NIT ( http://www.nitlanguage.org ). -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# An example that defines and uses stacks of integers. -# The implementation is done with a simple linked list. -# It features: free constructors, nullable types and some adaptive typing. -module int_stack - -# A stack of integer implemented by a simple linked list. -# Note that this is only a toy class since a real linked list will gain to use -# generics and extends interfaces, like Collection, from the standard library. -class IntStack - # The head node of the list. - # Null means that the stack is empty. - private var head: nullable ISNode = null - - # Add a new integer in the stack. - fun push(val: Int) - do - self.head = new ISNode(val, self.head) - end - - # Remove and return the last pushed integer. - # Return null if the stack is empty. - fun pop: nullable Int - do - var head = self.head - if head == null then return null - # Note: the followings are statically safe because of the - # previous 'if'. - var val = head.val - self.head = head.next - return val - end - - # Return the sum of all integers of the stack. - # Return 0 if the stack is empty. - fun sumall: Int - do - var sum = 0 - var cur = self.head - while cur != null do - # Note: the followings are statically safe because of - # the condition of the 'while'. - sum += cur.val - cur = cur.next - end - return sum - end - - # Note: Because all attributes have a default value, a free constructor - # "init()" is implicitly defined. -end - -# A node of a IntStack -private class ISNode - # The integer value stored in the node. - var val: Int - - # The next node, if any. - var next: nullable ISNode - - # Note: A free constructor "init(val: Int, next: nullable ISNode)" is - # implicitly defined. -end - -var l = new IntStack -l.push(1) -l.push(2) -l.push(3) - -print l.sumall - -# Note: the 'for' control structure cannot be used on IntStack in its current state. -# It requires a more advanced topic. -# However, why not using the 'loop' control structure? -loop - var i = l.pop - if i == null then break - # The following is statically safe because of the previous 'if'. - print i * 10 -end - -# Note: 'or else' is used to give an alternative of a null expression. 
-l.push(5) -print l.pop or else 0 # l.pop gives 5, so print 5 -print l.pop or else 0 # l.pop gives null, so print the alternative: 0 - - diff --git a/tests/examplefiles/opengles2_hello_triangle.nit b/tests/examplefiles/opengles2_hello_triangle.nit deleted file mode 100644 index 2b39b1ba..00000000 --- a/tests/examplefiles/opengles2_hello_triangle.nit +++ /dev/null @@ -1,193 +0,0 @@ -# This file is part of NIT ( http://www.nitlanguage.org ). -# -# Copyright 2014 Alexis Laferrière -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Basic example of OpenGL ES 2.0 usage from the book OpenGL ES 2.0 Programming Guide. -# -# Code reference: -# https://code.google.com/p/opengles-book-samples/source/browse/trunk/LinuxX11/Chapter_2/Hello_Triangle/Hello_Triangle.c -module opengles2_hello_triangle - -import glesv2 -import egl -import mnit_linux # for sdl -import x11 - -if "NIT_TESTING".environ == "true" then exit(0) - -var window_width = 800 -var window_height = 600 - -# -## SDL -# -var sdl_display = new SDLDisplay(window_width, window_height) -var sdl_wm_info = new SDLSystemWindowManagerInfo -var x11_window_handle = sdl_wm_info.x11_window_handle - -# -## X11 -# -var x_display = x_open_default_display -assert x_display != 0 else print "x11 fail" - -# -## EGL -# -var egl_display = new EGLDisplay(x_display) -assert egl_display.is_valid else print "EGL display is not valid" -egl_display.initialize - -print "EGL version: {egl_display.version}" -print "EGL vendor: {egl_display.vendor}" -print "EGL extensions: {egl_display.extensions.join(", ")}" -print "EGL client APIs: {egl_display.client_apis.join(", ")}" - -assert egl_display.is_valid else print egl_display.error - -var config_chooser = new EGLConfigChooser -#config_chooser.surface_type_egl -config_chooser.blue_size = 8 -config_chooser.green_size = 8 -config_chooser.red_size = 8 -#config_chooser.alpha_size = 8 -#config_chooser.depth_size = 8 -#config_chooser.stencil_size = 8 -#config_chooser.sample_buffers = 1 -config_chooser.close - -var configs = config_chooser.choose(egl_display) -assert configs != null else print "choosing config failed: {egl_display.error}" -assert not configs.is_empty else print "no EGL config" - -print "{configs.length} EGL configs available" -for config in configs do - var attribs = config.attribs(egl_display) - print "* caveats: {attribs.caveat}" - print " conformant to: {attribs.conformant}" - print " size of RGBA: {attribs.red_size} {attribs.green_size} {attribs.blue_size} {attribs.alpha_size}" - print " buffer, depth, stencil: {attribs.buffer_size} {attribs.depth_size} {attribs.stencil_size}" -end - -var config = configs.first - -var format = config.attribs(egl_display).native_visual_id - -# TODO android part -# Opengles1Display_midway_init(recv, format); - -var surface = egl_display.create_window_surface(config, x11_window_handle, [0]) -assert surface.is_ok else print egl_display.error - -var context = egl_display.create_context(config) -assert context.is_ok else print egl_display.error - -var make_current_res = 
egl_display.make_current(surface, surface, context) -assert make_current_res - -var width = surface.attribs(egl_display).width -var height = surface.attribs(egl_display).height -print "Width: {width}" -print "Height: {height}" - -assert egl_bind_opengl_es_api else print "eglBingAPI failed: {egl_display.error}" - -# -## GLESv2 -# - -print "Can compile shaders? {gl_shader_compiler}" -assert_no_gl_error - -assert gl_shader_compiler else print "Cannot compile shaders" - -# gl program -print gl_error.to_s -var program = new GLProgram -if not program.is_ok then - print "Program is not ok: {gl_error.to_s}\nLog:" - print program.info_log - abort -end -assert_no_gl_error - -# vertex shader -var vertex_shader = new GLVertexShader -assert vertex_shader.is_ok else print "Vertex shader is not ok: {gl_error}" -vertex_shader.source = """ -attribute vec4 vPosition; -void main() -{ - gl_Position = vPosition; -} """ -vertex_shader.compile -assert vertex_shader.is_compiled else print "Vertex shader compilation failed with: {vertex_shader.info_log} {program.info_log}" -assert_no_gl_error - -# fragment shader -var fragment_shader = new GLFragmentShader -assert fragment_shader.is_ok else print "Fragment shader is not ok: {gl_error}" -fragment_shader.source = """ -precision mediump float; -void main() -{ - gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0); -} -""" -fragment_shader.compile -assert fragment_shader.is_compiled else print "Fragment shader compilation failed with: {fragment_shader.info_log}" -assert_no_gl_error - -program.attach_shader vertex_shader -program.attach_shader fragment_shader -program.bind_attrib_location(0, "vPosition") -program.link -assert program.is_linked else print "Linking failed: {program.info_log}" -assert_no_gl_error - -# draw! -var vertices = [0.0, 0.5, 0.0, -0.5, -0.5, 0.0, 0.5, -0.5, 0.0] -var vertex_array = new VertexArray(0, 3, vertices) -vertex_array.attrib_pointer -gl_clear_color(0.5, 0.0, 0.5, 1.0) -for i in [0..10000[ do - printn "." - assert_no_gl_error - gl_viewport(0, 0, width, height) - gl_clear_color_buffer - program.use - vertex_array.enable - vertex_array.draw_arrays_triangles - egl_display.swap_buffers(surface) -end - -# delete -program.delete -vertex_shader.delete -fragment_shader.delete - -# -## EGL -# -# close -egl_display.make_current(new EGLSurface.none, new EGLSurface.none, new EGLContext.none) -egl_display.destroy_context(context) -egl_display.destroy_surface(surface) - -# -## SDL -# -# close -sdl_display.destroy diff --git a/tests/examplefiles/print_arguments.nit b/tests/examplefiles/print_arguments.nit deleted file mode 100644 index 3bdddc62..00000000 --- a/tests/examplefiles/print_arguments.nit +++ /dev/null @@ -1,22 +0,0 @@ -# This file is part of NIT ( http://www.nitlanguage.org ). -# -# Copyright 2004-2008 Jean Privat -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# How to print arguments of the command line. 
-module print_arguments - -for a in args do - print a -end diff --git a/tests/examplefiles/procedural_array.nit b/tests/examplefiles/procedural_array.nit deleted file mode 100644 index 838bda02..00000000 --- a/tests/examplefiles/procedural_array.nit +++ /dev/null @@ -1,48 +0,0 @@ -# This file is part of NIT ( http://www.nitlanguage.org ). -# -# Copyright 2004-2008 Jean Privat -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# A procedural program (without explicit class definition). -# This program manipulates arrays of integers. -module procedural_array - -# The sum of the elements of `a'. -# Uses a 'for' control structure. -fun array_sum(a: Array[Int]): Int -do - var sum = 0 - for i in a do - sum = sum + i - end - return sum -end - -# The sum of the elements of `a' (alternative version). -# Uses a 'while' control structure. -fun array_sum_alt(a: Array[Int]): Int -do - var sum = 0 - var i = 0 - while i < a.length do - sum = sum + a[i] - i = i + 1 - end - return sum -end - -# The main part of the program. -var a = [10, 5, 8, 9] -print(array_sum(a)) -print(array_sum_alt(a)) diff --git a/tests/examplefiles/socket_client.nit b/tests/examplefiles/socket_client.nit deleted file mode 100644 index 0ba19132..00000000 --- a/tests/examplefiles/socket_client.nit +++ /dev/null @@ -1,38 +0,0 @@ -# This file is part of NIT ( http://www.nitlanguage.org ). -# -# Copyright 2013 Matthieu Lucas -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Client sample using the Socket module which connect to the server sample. -module socket_client - -import socket - -if args.length < 2 then - print "Usage : socket_client " - return -end - -var s = new Socket.client(args[0], args[1].to_i) -print "[HOST ADDRESS] : {s.address}" -print "[HOST] : {s.host}" -print "[PORT] : {s.port}" -print "Connecting ... {s.connected}" -if s.connected then - print "Writing ... Hello server !" - s.write("Hello server !") - print "[Response from server] : {s.read(100)}" - print "Closing ..." - s.close -end diff --git a/tests/examplefiles/socket_server.nit b/tests/examplefiles/socket_server.nit deleted file mode 100644 index aa77a759..00000000 --- a/tests/examplefiles/socket_server.nit +++ /dev/null @@ -1,52 +0,0 @@ -# This file is part of NIT ( http://www.nitlanguage.org ). -# -# Copyright 2013 Matthieu Lucas -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Server sample using the Socket module which allow client to connect -module socket_server - -import socket - -if args.is_empty then - print "Usage : socket_server " - return -end - -var socket = new Socket.server(args[0].to_i, 1) -print "[PORT] : {socket.port.to_s}" - -var clients = new Array[Socket] -var max = socket -loop - var fs = new SocketObserver(true, true, true) - fs.readset.set(socket) - - for c in clients do fs.readset.set(c) - - if fs.select(max, 4, 0) == 0 then - print "Error occured in select {sys.errno.strerror}" - break - end - - if fs.readset.is_set(socket) then - var ns = socket.accept - print "Accepting {ns.address} ... " - print "[Message from {ns.address}] : {ns.read(100)}" - ns.write("Goodbye client.") - print "Closing {ns.address} ..." - ns.close - end -end - diff --git a/tests/examplefiles/tmpl_composer.nit b/tests/examplefiles/tmpl_composer.nit deleted file mode 100644 index 6160b1a8..00000000 --- a/tests/examplefiles/tmpl_composer.nit +++ /dev/null @@ -1,94 +0,0 @@ -# This file is part of NIT ( http://www.nitlanguage.org ). -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -import template - -### Here, definition of the specific templates - -# The root template for composers -class TmplComposers - super Template - - # Short list of composers - var composers = new Array[TmplComposer] - - # Detailled list of composers - var composer_details = new Array[TmplComposerDetail] - - # Add a composer in both lists - fun add_composer(firstname, lastname: String, birth, death: Int) - do - composers.add(new TmplComposer(lastname)) - composer_details.add(new TmplComposerDetail(firstname, lastname, birth, death)) - end - - redef fun rendering do - add """ -COMPOSERS -========= -""" - add_all composers - add """ - -DETAILS -======= -""" - add_all composer_details - end -end - -# A composer in the short list of composers -class TmplComposer - super Template - - # Short name - var name: String - - init(name: String) do self.name = name - - redef fun rendering do add "- {name}\n" -end - -# A composer in the detailled list of composers -class TmplComposerDetail - super Template - - var firstname: String - var lastname: String - var birth: Int - var death: Int - - init(firstname, lastname: String, birth, death: Int) do - self.firstname = firstname - self.lastname = lastname - self.birth = birth - self.death = death - end - - redef fun rendering do add """ - -COMPOSER: {{{firstname}}} {{{lastname}}} -BIRTH...: {{{birth}}} -DEATH...: {{{death}}} -""" - -end - -### Here a simple usage of the templates - -var f = new TmplComposers -f.add_composer("Johann Sebastian", "Bach", 1685, 1750) -f.add_composer("George Frideric", "Handel", 1685, 1759) -f.add_composer("Wolfgang Amadeus", "Mozart", 1756, 1791) -f.write_to(stdout) diff --git a/tests/examplefiles/websocket_server.nit b/tests/examplefiles/websocket_server.nit deleted file mode 100644 index 38029c37..00000000 --- a/tests/examplefiles/websocket_server.nit +++ /dev/null @@ -1,46 +0,0 @@ -# This file is part of NIT ( http://www.nitlanguage.org ). -# -# Copyright 2014 Lucas Bajolet -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Sample module for a minimal chat server using Websockets on port 8088 -module websocket_server - -import websocket - -var sock = new WebSocket(8088, 1) - -var msg: String - -if sock.listener.eof then - print sys.errno.strerror -end - -sock.accept - -while not sock.listener.eof do - if not sock.connected then sock.accept - if sys.stdin.poll_in then - msg = gets - printn "Received message : {msg}" - if msg == "exit" then sock.close - if msg == "disconnect" then sock.disconnect_client - sock.write(msg) - end - if sock.can_read(10) then - msg = sock.read_line - if msg != "" then print msg - end -end - -- cgit v1.2.1 From a180cc3373345b3cb0ddecf14bcc31a1a1e53d51 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Tue, 16 Sep 2014 09:58:24 +0200 Subject: add link to docs on styles. manual merge of PR#393. 
--- doc/faq.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/faq.rst b/doc/faq.rst index 0f65b9fe..f040e053 100644 --- a/doc/faq.rst +++ b/doc/faq.rst @@ -58,7 +58,7 @@ Usage as a library is thoroughly demonstrated in the Documentation section. How do I make a new style? -------------------------- -Please see the documentation on styles. +Please see the :doc:`documentation on styles `. How can I report a bug or suggest a feature? -------------------------------------------- -- cgit v1.2.1 From dd633af87bff78758abd120a68eb71635458b729 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Tue, 16 Sep 2014 10:02:49 +0200 Subject: stan changelog --- CHANGES | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/CHANGES b/CHANGES index db3ffd9c..2f47139b 100644 --- a/CHANGES +++ b/CHANGES @@ -73,7 +73,7 @@ Version 2.0 - Ruby lexer: fix lexing of Name::Space tokens (#860) and of symbols in hashes (#873). -- Stan lexer: update for version 1.3.0 of the language (PR#162). +- Stan lexer: update for version 2.4.0 of the language (PR#162, PR#255, PR#377). - JavaScript lexer: add the "yield" keyword (PR#196). @@ -92,8 +92,6 @@ Version 2.0 - Rebol lexer: fix comment detection and analyse_text (PR#261). -- Stan lexer: update to v2.0.1 (PR#255). - - LLVM lexer: update keywords to v3.4 (PR#258). - PHP lexer: add new keywords and binary literals (PR#222). -- cgit v1.2.1 From 44a52b8a665328afece9780ea34687ad2b27b245 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Tue, 16 Sep 2014 10:10:31 +0200 Subject: Add optional whitespace in F90 "end" constructs. --- pygments/lexers/compiled.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/pygments/lexers/compiled.py b/pygments/lexers/compiled.py index 61b4ffe4..dd1249fc 100644 --- a/pygments/lexers/compiled.py +++ b/pygments/lexers/compiled.py @@ -1586,10 +1586,11 @@ class FortranLexer(RegexLexer): r'ASSOCIATE|ASYNCHRONOUS|BACKSPACE|BIND|BLOCK|BLOCKDATA|BYTE|CALL|' r'CASE|CLASS|CLOSE|CODIMENSION|COMMON|CONCURRRENT|CONTIGUOUS|' r'CONTAINS|CONTINUE|CRITICAL|CYCLE|DATA|DEALLOCATE|DECODE|DEFERRED|' - r'DIMENSION|DO|ELEMENTAL|ELSE|ENCODE|ENDASSOCIATE|ENDBLOCK|' - r'ENDBLOCKDATA|ENDCRITICAL|ENDDO|ENDFILE|ENDFORALL|ENDFUNCTION|ENDIF|' - r'ENDINTERFACE|ENDMODULE|ENDPROCEDURE|ENDPROGRAM|ENDSELECT|' - r'ENDSUBMODULE|ENDSUBROUTINE|ENDTYPE|ENDWHERE|ENTRY|ENUM|ENUMERATOR|' + r'DIMENSION|DO|ELEMENTAL|ELSE|ENCODE|END\s*ASSOCIATE|END\s*BLOCK|' + r'END\s*BLOCKDATA|END\s*CRITICAL|END\s*DO|END\s*FILE|END\s*FORALL|' + r'END\s*FUNCTION|END\s*IF|END\s*INTERFACE|END\s*MODULE|END\s*PROCEDURE|' + r'END\s*PROGRAM|END\s*SELECT|END\s*SUBMODULE|END\s*SUBROUTINE|' + r'END\s*TYPE|END\s*WHERE|ENTRY|ENUM|ENUMERATOR|' r'EQUIVALENCE|EXIT|EXTENDS|EXTERNAL|EXTRINSIC|FINAL|FORALL|FORMAT|' r'FUNCTION|GENERIC|GOTO|IF|IMAGES|IMPLICIT|IMPORT|IMPURE|INCLUDE|' r'INQUIRE|INTENT|INTERFACE|INTRINSIC|IS|LOCK|MEMORY|MODULE|NAMELIST|' @@ -1627,14 +1628,14 @@ class FortranLexer(RegexLexer): r'BesY|BesYN|BGE|BGT|BLE|BLT|Bit_Size|BTest|CAbs|CCos|Ceiling|' r'CExp|Char|ChDir|ChMod|CLog|Cmplx|Command_Argument_Count|Complex|' r'Conjg|Cos|CosH|Count|CPU_Time|CShift|CSin|CSqRt|CTime|C_Funloc|' - r'C_Loc|C_Associated|C_Null_Ptr|C_Null_Funptr|C_F_Pointer| + r'C_Loc|C_Associated|C_Null_Ptr|C_Null_Funptr|C_F_Pointer|' r'C_F_ProcPointer|C_Null_Char|C_Alert|C_Backspace|C_Form_Feed|' - r'C_FunLoc|C_Loc|C_SizeofC_New_Line|' + r'C_FunLoc|C_Loc|C_Sizeof|C_New_Line|' r'C_Carriage_Return|C_Horizontal_Tab|C_Vertical_Tab|' r'DAbs|DACos|DASin|DATan|Date_and_Time|DbesJ|' 
r'DbesJ|DbesJN|DbesY|DbesY|DbesYN|Dble|DCos|DCosH|DDiM|DErF|DErFC|' r'DExp|Digits|DiM|DInt|DLog|DLog|DMax|DMin|DMod|DNInt|Dot_Product|' - r'DProd|DSign|DSinH|DShiftL|DShiftR|DSin|DSqRt|DTanH|DTan|DTime| + r'DProd|DSign|DSinH|DShiftL|DShiftR|DSin|DSqRt|DTanH|DTan|DTime|' r'EOShift|Epsilon|ErF|ErFC|ErFC_Scaled|ETime|Execute_Command_Line|' r'Exit|Exp|Exponent|Extends_Type_Of|FDate|FGet|FGetC|' r'FindLoc|Float|Floor|Flush|FNum|FPutC|FPut|Fraction|FSeek|FStat|' -- cgit v1.2.1 From f023ece2fab081c16a763ca43b513552fc87f207 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Tue, 16 Sep 2014 10:10:49 +0200 Subject: ignore tags file --- .hgignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.hgignore b/.hgignore index 57aaeff5..a70d8593 100644 --- a/.hgignore +++ b/.hgignore @@ -9,3 +9,4 @@ Pygments.egg-info/* .ropeproject tests/examplefiles/output .idea/ +.tags -- cgit v1.2.1 From a54efd963ebd1a74eff5f1d6f7efcdd8895980e3 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Tue, 16 Sep 2014 10:32:39 +0200 Subject: pygments.lexer: small PEP8 overhaul. --- pygments/lexer.py | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/pygments/lexer.py b/pygments/lexer.py index 0ede7927..5214d43e 100644 --- a/pygments/lexer.py +++ b/pygments/lexer.py @@ -8,17 +8,20 @@ :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ -import re, itertools + +import re +import itertools from pygments.filter import apply_filters, Filter from pygments.filters import get_filter_by_name from pygments.token import Error, Text, Other, _TokenType from pygments.util import get_bool_opt, get_int_opt, get_list_opt, \ - make_analysator, text_type, add_metaclass, iteritems + make_analysator, text_type, add_metaclass, iteritems __all__ = ['Lexer', 'RegexLexer', 'ExtendedRegexLexer', 'DelegatingLexer', - 'LexerContext', 'include', 'inherit', 'bygroups', 'using', 'this', 'default'] + 'LexerContext', 'include', 'inherit', 'bygroups', 'using', 'this', + 'default'] _encoding_map = [(b'\xef\xbb\xbf', 'utf-8'), @@ -160,7 +163,7 @@ class Lexer(object): break # no BOM found, so use chardet if decoded is None: - enc = chardet.detect(text[:1024]) # Guess using first 1KB + enc = chardet.detect(text[:1024]) # Guess using first 1KB decoded = text.decode(enc.get('encoding') or 'utf-8', 'replace') text = decoded @@ -237,7 +240,7 @@ class DelegatingLexer(Lexer): self.root_lexer.get_tokens_unprocessed(buffered)) -#------------------------------------------------------------------------------- +# ------------------------------------------------------------------------------ # RegexLexer and ExtendedRegexLexer # @@ -406,7 +409,7 @@ class RegexLexerMeta(LexerMeta): def _process_token(cls, token): """Preprocess the token component of a token definition.""" assert type(token) is _TokenType or callable(token), \ - 'token type must be simple type or callable, not %r' % (token,) + 'token type must be simple type or callable, not %r' % (token,) return token def _process_new_state(cls, new_state, unprocessed, processed): @@ -439,7 +442,7 @@ class RegexLexerMeta(LexerMeta): for istate in new_state: assert (istate in unprocessed or istate in ('#pop', '#push')), \ - 'unknown new state ' + istate + 'unknown new state ' + istate return new_state else: assert False, 'unknown new state def %r' % new_state @@ -645,7 +648,7 @@ class LexerContext(object): def __init__(self, text, pos, stack=None, end=None): self.text = text self.pos = pos - self.end = end or len(text) # end=0 not 
supported ;-) + self.end = end or len(text) # end=0 not supported ;-) self.stack = stack or ['root'] def __repr__(self): -- cgit v1.2.1 From ee3d33a5fee236ccff80b6e0571b56e073883dce Mon Sep 17 00:00:00 2001 From: Paulo Moura Date: Tue, 16 Sep 2014 09:45:35 +0000 Subject: When analyzing, replace the too broad `:-` sequence by a regular expression matching directives. --- pygments/lexers/other.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py index 4f11f35d..d21f7b30 100644 --- a/pygments/lexers/other.py +++ b/pygments/lexers/other.py @@ -694,13 +694,14 @@ class LogtalkLexer(RegexLexer): def analyse_text(text): if ':- object(' in text: return True - if ':- protocol(' in text: + elif ':- protocol(' in text: return True - if ':- category(' in text: + elif ':- category(' in text: return True - if ':-' in text: - return True - return False + elif re.search('^:-\s[a-z]', text, re.M): + return 0.9 + else: + return False def _shortened(word): -- cgit v1.2.1 From d59a849ebf7430f7ec8e3895933e32fa63ce79bd Mon Sep 17 00:00:00 2001 From: Paulo Moura Date: Tue, 16 Sep 2014 10:06:49 +0000 Subject: Style change: use `1.0`and `0.0` instead of, respectively, `True` and `False` in the analyse_text() function. --- pygments/lexers/other.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py index d21f7b30..37deca6e 100644 --- a/pygments/lexers/other.py +++ b/pygments/lexers/other.py @@ -693,15 +693,15 @@ class LogtalkLexer(RegexLexer): def analyse_text(text): if ':- object(' in text: - return True + return 1.0 elif ':- protocol(' in text: - return True + return 1.0 elif ':- category(' in text: - return True + return 1.0 elif re.search('^:-\s[a-z]', text, re.M): return 0.9 else: - return False + return 0.0 def _shortened(word): -- cgit v1.2.1 From 5e5586a698e82c7b596ab2e47f035d2aa941b400 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Tue, 16 Sep 2014 13:09:41 +0200 Subject: fix global instantiation of GosuLexer. --- pygments/lexers/jvm.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pygments/lexers/jvm.py b/pygments/lexers/jvm.py index 0d16e2fd..d422f5fd 100644 --- a/pygments/lexers/jvm.py +++ b/pygments/lexers/jvm.py @@ -413,11 +413,11 @@ class GosuTemplateLexer(Lexer): aliases = ['gst'] filenames = ['*.gst'] mimetypes = ['text/x-gosu-template'] - lexer = GosuLexer() def get_tokens_unprocessed(self, text): + lexer = GosuLexer() stack = ['templateText'] - for item in self.lexer.get_tokens_unprocessed(text, stack): + for item in lexer.get_tokens_unprocessed(text, stack): yield item -- cgit v1.2.1 From 4ebcf72d1a077c29d94a0cefce3f068ce41a37eb Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Tue, 16 Sep 2014 14:06:54 +0200 Subject: Add module to optimize regexes that consist of a long |-separated list of literals. 
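As a rough sketch of how the new helper is meant to be used (the keyword list here is invented purely for illustration; only the regex_opt() signature added below is assumed):

    from pygments.regexopt import regex_opt

    # Any long list of literal keywords benefits the same way.
    KEYWORDS = ('case', 'char', 'class', 'const', 'continue')

    # regex_opt() escapes the words and folds common prefixes/suffixes
    # into a single optimized, compiled pattern instead of a plain
    # 'case|char|class|...' alternation.
    rex = regex_opt(KEYWORDS, prefix=r'\b', suffix=r'\b')

    assert all(rex.match(kw) for kw in KEYWORDS)   # every keyword still matches
    assert not rex.match('classic')                # the \b suffix rejects partial words

The trade-off is the same one the randomized test added below checks: the optimized regex must accept exactly the listed words and nothing else, while being cheaper to run than a long literal alternation.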
--- CHANGES | 4 +++ pygments/lexer.py | 24 +++++++++++-- pygments/regexopt.py | 93 ++++++++++++++++++++++++++++++++++++++++++++++++++ tests/test_regexopt.py | 39 +++++++++++++++++++++ 4 files changed, 157 insertions(+), 3 deletions(-) create mode 100644 pygments/regexopt.py create mode 100644 tests/test_regexopt.py diff --git a/CHANGES b/CHANGES index 2f47139b..08c50ee2 100644 --- a/CHANGES +++ b/CHANGES @@ -43,6 +43,10 @@ Version 2.0 * APL (#969) * Nit (PR#375) +- Added a helper to "optimize" regular expressions that match one of many + literal words; this can save 20% and more lexing time with lexers that + highlight many keywords or builtins. + - New styles: "xcode" and "igor", similar to the default highlighting of the respective IDEs. diff --git a/pygments/lexer.py b/pygments/lexer.py index 5214d43e..f3543d41 100644 --- a/pygments/lexer.py +++ b/pygments/lexer.py @@ -17,11 +17,11 @@ from pygments.filters import get_filter_by_name from pygments.token import Error, Text, Other, _TokenType from pygments.util import get_bool_opt, get_int_opt, get_list_opt, \ make_analysator, text_type, add_metaclass, iteritems - +from pygments.regexopt import regex_opt __all__ = ['Lexer', 'RegexLexer', 'ExtendedRegexLexer', 'DelegatingLexer', 'LexerContext', 'include', 'inherit', 'bygroups', 'using', 'this', - 'default'] + 'default', 'words'] _encoding_map = [(b'\xef\xbb\xbf', 'utf-8'), @@ -390,12 +390,27 @@ class default: """ Indicates a state or state action (e.g. #pop) to apply. For example default('#pop') is equivalent to ('', Token, '#pop') - Note that state tuples may be used as well + Note that state tuples may be used as well. + + .. versionadded:: 2.0 """ def __init__(self, state): self.state = state +class words: + """ + Indicates a list of literal words that is transformed into an optimized + regex that matches any of the words. + + .. versionadded:: 2.0 + """ + def __init__(self, words, prefix='', suffix=''): + self.words = words + self.prefix = prefix + self.suffix = suffix + + class RegexLexerMeta(LexerMeta): """ Metaclass for RegexLexer, creates the self._tokens attribute from @@ -404,6 +419,9 @@ class RegexLexerMeta(LexerMeta): def _process_regex(cls, regex, rflags): """Preprocess the regular expression component of a token definition.""" + if isinstance(regex, words): + return regex_opt(regex.words, rflags, prefix=regex.prefix, + suffix=regex.suffix).match return re.compile(regex, rflags).match def _process_token(cls, token): diff --git a/pygments/regexopt.py b/pygments/regexopt.py new file mode 100644 index 00000000..cc924ea0 --- /dev/null +++ b/pygments/regexopt.py @@ -0,0 +1,93 @@ +# -*- coding: utf-8 -*- +""" + pygments.regexopt + ~~~~~~~~~~~~~~~~~ + + An algorithm that generates optimized regexes for matching long lists of + literal strings. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. 
+""" + +import re +from re import escape +from os.path import commonprefix +from itertools import groupby +from operator import itemgetter + +CS_ESCAPE = re.compile(r'[\^\\\-\]]') +FIRST_ELEMENT = itemgetter(0) + + +def make_charset(letters): + return '[' + CS_ESCAPE.sub(lambda m: '\\' + m.group(), ''.join(letters)) + ']' + + +def regex_opt_inner(strings, open_paren): + """Return a regex that matches any string in the sorted list of strings.""" + close_paren = open_paren and ')' or '' + # print strings, repr(open_paren) + if not strings: + # print '-> nothing left' + return '' + first = strings[0] + if len(strings) == 1: + # print '-> only 1 string' + return open_paren + escape(first) + close_paren + if not first: + # print '-> first string empty' + return open_paren + regex_opt_inner(strings[1:], '(?:') \ + + '?' + close_paren + if len(first) == 1: + # multiple one-char strings? make a charset + oneletter = [] + rest = [] + for s in strings: + if len(s) == 1: + oneletter.append(s) + else: + rest.append(s) + if len(oneletter) > 1: # do we have more than one oneletter string? + if rest: + # print '-> 1-character + rest' + return open_paren + regex_opt_inner(rest, '') + '|' \ + + make_charset(oneletter) + close_paren + # print '-> only 1-character' + return make_charset(oneletter) + prefix = commonprefix(strings) + if prefix: + plen = len(prefix) + # we have a prefix for all strings + # print '-> prefix:', prefix + return open_paren + escape(prefix) \ + + regex_opt_inner([s[plen:] for s in strings], '(?:') \ + + close_paren + # is there a suffix? + strings_rev = [s[::-1] for s in strings] + suffix = commonprefix(strings_rev) + if suffix: + slen = len(suffix) + # print '-> suffix:', suffix[::-1] + return open_paren \ + + regex_opt_inner(sorted(s[:-slen] for s in strings), '(?:') \ + + escape(suffix[::-1]) + close_paren + # recurse on common 1-string prefixes + # print '-> last resort' + return open_paren + \ + '|'.join(regex_opt_inner(list(group[1]), '') + for group in groupby(strings, lambda s: s[0] == first[0])) \ + + close_paren + + +def regex_opt(strings, flags=0, prefix='', suffix=''): + """Return a compiled regex that matches any string in the given list. + + The strings to match must be literal strings, not regexes. They will be + regex-escaped. + + *prefix* and *suffix* are pre- and appended to the final regex. + """ + strings = sorted(strings) + rex = prefix + regex_opt_inner(strings, '(') + suffix + return re.compile(rex, flags) diff --git a/tests/test_regexopt.py b/tests/test_regexopt.py new file mode 100644 index 00000000..5dc8f9af --- /dev/null +++ b/tests/test_regexopt.py @@ -0,0 +1,39 @@ +# -*- coding: utf-8 -*- +""" + Tests for pygments.regexopt + ~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. 
+""" + +import random +import unittest +import itertools + +from pygments.regexopt import regex_opt + +ALPHABET = ['a', 'b', 'c', 'd', 'e'] +N_TRIES = 15 + + +class RegexOptTestCase(unittest.TestCase): + + def generate_keywordlist(self, length): + return [''.join(p) for p in + itertools.combinations_with_replacement(ALPHABET, length)] + + def test_randomly(self): + # generate a list of all possible keywords of a certain length using + # a restricted alphabet, then choose some to match and make sure only + # those do + for n in range(3, N_TRIES): + kwlist = self.generate_keywordlist(n) + to_match = random.sample(kwlist, + random.randint(1, len(kwlist) - 1)) + no_match = set(kwlist) - set(to_match) + rex = regex_opt(to_match, True) + for w in to_match: + self.assertTrue(rex.match(w)) + for w in no_match: + self.assertFalse(rex.match(w)) -- cgit v1.2.1 From 28eb10102b3e624ab92573c1c3b40afbbb359b92 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Tue, 16 Sep 2014 14:16:05 +0200 Subject: convert FortranLexer to use "words" function --- pygments/lexers/compiled.py | 152 ++++++++++++++++++++++++-------------------- 1 file changed, 82 insertions(+), 70 deletions(-) diff --git a/pygments/lexers/compiled.py b/pygments/lexers/compiled.py index dd1249fc..17027bfc 100644 --- a/pygments/lexers/compiled.py +++ b/pygments/lexers/compiled.py @@ -13,10 +13,10 @@ import re from string import Template from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, \ - this, combined, inherit, do_insertions, default + this, combined, inherit, do_insertions, default, words from pygments.util import get_bool_opt, get_list_opt from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ - Number, Punctuation, Error, Literal, Generic + Number, Punctuation, Error, Literal, Generic from pygments.scanner import Scanner # backwards compatibility @@ -1582,35 +1582,37 @@ class FortranLexer(RegexLexer): ], 'core': [ # Statements - (r'\b(ABSTRACT|ACCEPT|ALL|ALLSTOP|ALLOCATABLE|ALLOCATE|ARRAY|ASSIGN|' - r'ASSOCIATE|ASYNCHRONOUS|BACKSPACE|BIND|BLOCK|BLOCKDATA|BYTE|CALL|' - r'CASE|CLASS|CLOSE|CODIMENSION|COMMON|CONCURRRENT|CONTIGUOUS|' - r'CONTAINS|CONTINUE|CRITICAL|CYCLE|DATA|DEALLOCATE|DECODE|DEFERRED|' - r'DIMENSION|DO|ELEMENTAL|ELSE|ENCODE|END\s*ASSOCIATE|END\s*BLOCK|' - r'END\s*BLOCKDATA|END\s*CRITICAL|END\s*DO|END\s*FILE|END\s*FORALL|' - r'END\s*FUNCTION|END\s*IF|END\s*INTERFACE|END\s*MODULE|END\s*PROCEDURE|' - r'END\s*PROGRAM|END\s*SELECT|END\s*SUBMODULE|END\s*SUBROUTINE|' - r'END\s*TYPE|END\s*WHERE|ENTRY|ENUM|ENUMERATOR|' - r'EQUIVALENCE|EXIT|EXTENDS|EXTERNAL|EXTRINSIC|FINAL|FORALL|FORMAT|' - r'FUNCTION|GENERIC|GOTO|IF|IMAGES|IMPLICIT|IMPORT|IMPURE|INCLUDE|' - r'INQUIRE|INTENT|INTERFACE|INTRINSIC|IS|LOCK|MEMORY|MODULE|NAMELIST|' - r'NULLIFY|NONE|NON_INTRINSIC|NON_OVERRIDABLE|NOPASS|OPEN|OPTIONAL|' - r'OPTIONS|PARAMETER|PASS|PAUSE|POINTER|PRINT|PRIVATE|PROGRAM|' - r'PROTECTED|PUBLIC|PURE|READ|RECURSIVE|RESULT|RETURN|REWIND|SAVE|' - r'SELECT|SEQUENCE|STOP|SUBMODULE|SUBROUTINE|SYNC|SYNCALL|SYNCIMAGES|' - r'SYNCMEMORY|TARGET|THEN|TYPE|UNLOCK|USE|VALUE|VOLATILE|WHERE|WRITE|' - r'WHILE)\s*\b', + (words(( + 'ABSTRACT', 'ACCEPT', 'ALL', 'ALLSTOP', 'ALLOCATABLE', 'ALLOCATE', + 'ARRAY', 'ASSIGN', 'ASSOCIATE', 'ASYNCHRONOUS', 'BACKSPACE', 'BIND', + 'BLOCK', 'BLOCKDATA', 'BYTE', 'CALL', 'CASE', 'CLASS', 'CLOSE', + 'CODIMENSION', 'COMMON', 'CONCURRRENT', 'CONTIGUOUS', 'CONTAINS', + 'CONTINUE', 'CRITICAL', 'CYCLE', 'DATA', 'DEALLOCATE', 'DECODE', + 'DEFERRED', 'DIMENSION', 'DO', 'ELEMENTAL', 'ELSE', 
'ENCODE', 'END', + 'ENTRY', 'ENUM', 'ENUMERATOR', 'EQUIVALENCE', 'EXIT', 'EXTENDS', + 'EXTERNAL', 'EXTRINSIC', 'FILE', 'FINAL', 'FORALL', 'FORMAT', + 'FUNCTION', 'GENERIC', 'GOTO', 'IF', 'IMAGES', 'IMPLICIT', + 'IMPORT', 'IMPURE', 'INCLUDE', 'INQUIRE', 'INTENT', 'INTERFACE', + 'INTRINSIC', 'IS', 'LOCK', 'MEMORY', 'MODULE', 'NAMELIST', 'NULLIFY', + 'NONE', 'NON_INTRINSIC', 'NON_OVERRIDABLE', 'NOPASS', 'OPEN', 'OPTIONAL', + 'OPTIONS', 'PARAMETER', 'PASS', 'PAUSE', 'POINTER', 'PRINT', 'PRIVATE', + 'PROGRAM', 'PROCEDURE', 'PROTECTED', 'PUBLIC', 'PURE', 'READ', + 'RECURSIVE', 'RESULT', 'RETURN', 'REWIND', 'SAVE', 'SELECT', 'SEQUENCE', + 'STOP', 'SUBMODULE', 'SUBROUTINE', 'SYNC', 'SYNCALL', 'SYNCIMAGES', + 'SYNCMEMORY', 'TARGET', 'THEN', 'TYPE', 'UNLOCK', 'USE', 'VALUE', + 'VOLATILE', 'WHERE', 'WRITE', 'WHILE'), prefix='\\b', suffix='\\s*\\b'), Keyword), # Data Types - (r'\b(CHARACTER|COMPLEX|DOUBLE PRECISION|DOUBLE COMPLEX|INTEGER|' - r'LOGICAL|REAL|C_INT|C_SHORT|C_LONG|C_LONG_LONG|C_SIGNED_CHAR|' - r'C_SIZE_T|C_INT8_T|C_INT16_T|C_INT32_T|C_INT64_T|C_INT_LEAST8_T|' - r'C_INT_LEAST16_T|C_INT_LEAST32_T|C_INT_LEAST64_T|C_INT_FAST8_T|' - r'C_INT_FAST16_T|C_INT_FAST32_T|C_INT_FAST64_T|C_INTMAX_T|' - r'C_INTPTR_T|C_FLOAT|C_DOUBLE|C_LONG_DOUBLE|C_FLOAT_COMPLEX|' - r'C_DOUBLE_COMPLEX|C_LONG_DOUBLE_COMPLEX|C_BOOL|C_CHAR|C_PTR|' - r'C_FUNPTR)\s*\b', + (words(( + 'CHARACTER', 'COMPLEX', 'DOUBLE PRECISION', 'DOUBLE COMPLEX', 'INTEGER', + 'LOGICAL', 'REAL', 'C_INT', 'C_SHORT', 'C_LONG', 'C_LONG_LONG', 'C_SIGNED_CHAR', + 'C_SIZE_T', 'C_INT8_T', 'C_INT16_T', 'C_INT32_T', 'C_INT64_T', 'C_INT_LEAST8_T', + 'C_INT_LEAST16_T', 'C_INT_LEAST32_T', 'C_INT_LEAST64_T', 'C_INT_FAST8_T', + 'C_INT_FAST16_T', 'C_INT_FAST32_T', 'C_INT_FAST64_T', 'C_INTMAX_T', + 'C_INTPTR_T', 'C_FLOAT', 'C_DOUBLE', 'C_LONG_DOUBLE', 'C_FLOAT_COMPLEX', + 'C_DOUBLE_COMPLEX', 'C_LONG_DOUBLE_COMPLEX', 'C_BOOL', 'C_CHAR', 'C_PTR', + 'C_FUNPTR'), prefix='\\b', suffix='\\s*\\b'), Keyword.Type), # Operators @@ -1619,48 +1621,58 @@ class FortranLexer(RegexLexer): (r'(::)', Keyword.Declaration), (r'[()\[\],:&%;]', Punctuation), - - # Intrinsics - (r'\b(Abort|Abs|Access|AChar|ACos|ACosH|AdjustL|AdjustR|AImag|AInt|' - r'Alarm|All|Allocated|ALog|AMax|AMin|AMod|And|ANInt|Any|ASin|ASinH|' - r'Associated|ATan|ATanH|Atomic_Define|Atomic_Ref|BesJ|BesJN|' - r'Bessel_J0|Bessel_J1|Bessel_JN|Bessel_Y0|Bessel_Y1|Bessel_YN|' - r'BesY|BesYN|BGE|BGT|BLE|BLT|Bit_Size|BTest|CAbs|CCos|Ceiling|' - r'CExp|Char|ChDir|ChMod|CLog|Cmplx|Command_Argument_Count|Complex|' - r'Conjg|Cos|CosH|Count|CPU_Time|CShift|CSin|CSqRt|CTime|C_Funloc|' - r'C_Loc|C_Associated|C_Null_Ptr|C_Null_Funptr|C_F_Pointer|' - r'C_F_ProcPointer|C_Null_Char|C_Alert|C_Backspace|C_Form_Feed|' - r'C_FunLoc|C_Loc|C_Sizeof|C_New_Line|' - r'C_Carriage_Return|C_Horizontal_Tab|C_Vertical_Tab|' - r'DAbs|DACos|DASin|DATan|Date_and_Time|DbesJ|' - r'DbesJ|DbesJN|DbesY|DbesY|DbesYN|Dble|DCos|DCosH|DDiM|DErF|DErFC|' - r'DExp|Digits|DiM|DInt|DLog|DLog|DMax|DMin|DMod|DNInt|Dot_Product|' - r'DProd|DSign|DSinH|DShiftL|DShiftR|DSin|DSqRt|DTanH|DTan|DTime|' - r'EOShift|Epsilon|ErF|ErFC|ErFC_Scaled|ETime|Execute_Command_Line|' - r'Exit|Exp|Exponent|Extends_Type_Of|FDate|FGet|FGetC|' - r'FindLoc|Float|Floor|Flush|FNum|FPutC|FPut|Fraction|FSeek|FStat|' - r'FTell|Gamma|GError|GetArg|Get_Command|Get_Command_Argument|' - r'Get_Environment_Variable|GetCWD|GetEnv|GetGId|GetLog|GetPId|' - r'GetUId|GMTime|HostNm|Huge|Hypot|IAbs|IAChar|IAll|IAnd|IAny|' - r'IArgC|IBClr|IBits|IBSet|IChar|IDate|IDiM|IDInt|IDNInt|IEOr|' - 
r'IErrNo|IFix|Imag|ImagPart|Image_Index|Index|Int|IOr|IParity|' - r'IRand|IsaTty|IShft|IShftC|ISign|Iso_C_Binding|Is_Contiguous|' - r'Is_Iostat_End|Is_Iostat_Eor|ITime|Kill|Kind|LBound|LCoBound|' - r'Len|Len_Trim|LGe|LGt|Link|LLe|LLt|LnBlnk|Loc|Log|Log_Gamma|' - r'Logical|Long|LShift|LStat|LTime|MaskL|MaskR|MatMul|Max|' - r'MaxExponent|MaxLoc|MaxVal|MClock|Merge|Merge_Bits|Move_Alloc|' - r'Min|MinExponent|MinLoc|MinVal|Mod|Modulo|MvBits|Nearest|' - r'New_Line|NInt|Norm2|Not|Null|Num_Images|Or|Pack|Parity|PError|' - r'Precision|Present|Product|Radix|Rand|Random_Number|' - r'Random_Seed|Range|Real|RealPart|Rename|Repeat|Reshape|' - r'RRSpacing|RShift|Same_Type_As|Scale|Scan|Second|' - r'Selected_Char_Kind|Selected_Int_Kind|Selected_Real_Kind|' - r'Set_Exponent|Shape|ShiftA|ShiftL|ShiftR|Short|Sign|Signal|SinH|' - r'Sin|Sleep|Sngl|Spacing|Spread|SqRt|SRand|Stat|Storage_Size|Sum|' - r'SymLnk|System|System_Clock|Tan|TanH|Time|This_Image|Tiny|' - r'TrailZ|Transfer|Transpose|Trim|TtyNam|UBound|UCoBound|UMask|' - r'Unlink|Unpack|Verify|XOr|ZAbs|ZCos|ZExp|' - r'ZLog|ZSin|ZSqRt)\s*\b', + # Intrinsics + (words(( + 'Abort', 'Abs', 'Access', 'AChar', 'ACos', 'ACosH', 'AdjustL', + 'AdjustR', 'AImag', 'AInt', 'Alarm', 'All', 'Allocated', 'ALog', + 'AMax', 'AMin', 'AMod', 'And', 'ANInt', 'Any', 'ASin', 'ASinH', + 'Associated', 'ATan', 'ATanH', 'Atomic_Define', 'Atomic_Ref', + 'BesJ', 'BesJN', 'Bessel_J0', 'Bessel_J1', 'Bessel_JN', 'Bessel_Y0', + 'Bessel_Y1', 'Bessel_YN', 'BesY', 'BesYN', 'BGE', 'BGT', 'BLE', + 'BLT', 'Bit_Size', 'BTest', 'CAbs', 'CCos', 'Ceiling', 'CExp', + 'Char', 'ChDir', 'ChMod', 'CLog', 'Cmplx', 'Command_Argument_Count', + 'Complex', 'Conjg', 'Cos', 'CosH', 'Count', 'CPU_Time', 'CShift', + 'CSin', 'CSqRt', 'CTime', 'C_Funloc', 'C_Loc', 'C_Associated', + 'C_Null_Ptr', 'C_Null_Funptr', 'C_F_Pointer', 'C_F_ProcPointer', + 'C_Null_Char', 'C_Alert', 'C_Backspace', 'C_Form_Feed', 'C_FunLoc', + 'C_Loc', 'C_Sizeof', 'C_New_Line', 'C_Carriage_Return', + 'C_Horizontal_Tab', 'C_Vertical_Tab', 'DAbs', 'DACos', 'DASin', + 'DATan', 'Date_and_Time', 'DbesJ', 'DbesJ', 'DbesJN', 'DbesY', + 'DbesY', 'DbesYN', 'Dble', 'DCos', 'DCosH', 'DDiM', 'DErF', + 'DErFC', 'DExp', 'Digits', 'DiM', 'DInt', 'DLog', 'DLog', 'DMax', + 'DMin', 'DMod', 'DNInt', 'Dot_Product', 'DProd', 'DSign', 'DSinH', + 'DShiftL', 'DShiftR', 'DSin', 'DSqRt', 'DTanH', 'DTan', 'DTime', + 'EOShift', 'Epsilon', 'ErF', 'ErFC', 'ErFC_Scaled', 'ETime', + 'Execute_Command_Line', 'Exit', 'Exp', 'Exponent', 'Extends_Type_Of', + 'FDate', 'FGet', 'FGetC', 'FindLoc', 'Float', 'Floor', 'Flush', + 'FNum', 'FPutC', 'FPut', 'Fraction', 'FSeek', 'FStat', 'FTell', + 'Gamma', 'GError', 'GetArg', 'Get_Command', 'Get_Command_Argument', + 'Get_Environment_Variable', 'GetCWD', 'GetEnv', 'GetGId', 'GetLog', + 'GetPId', 'GetUId', 'GMTime', 'HostNm', 'Huge', 'Hypot', 'IAbs', + 'IAChar', 'IAll', 'IAnd', 'IAny', 'IArgC', 'IBClr', 'IBits', + 'IBSet', 'IChar', 'IDate', 'IDiM', 'IDInt', 'IDNInt', 'IEOr', + 'IErrNo', 'IFix', 'Imag', 'ImagPart', 'Image_Index', 'Index', + 'Int', 'IOr', 'IParity', 'IRand', 'IsaTty', 'IShft', 'IShftC', + 'ISign', 'Iso_C_Binding', 'Is_Contiguous', 'Is_Iostat_End', + 'Is_Iostat_Eor', 'ITime', 'Kill', 'Kind', 'LBound', 'LCoBound', + 'Len', 'Len_Trim', 'LGe', 'LGt', 'Link', 'LLe', 'LLt', 'LnBlnk', + 'Loc', 'Log', 'Log_Gamma', 'Logical', 'Long', 'LShift', 'LStat', + 'LTime', 'MaskL', 'MaskR', 'MatMul', 'Max', 'MaxExponent', + 'MaxLoc', 'MaxVal', 'MClock', 'Merge', 'Merge_Bits', 'Move_Alloc', + 'Min', 'MinExponent', 'MinLoc', 'MinVal', 'Mod', 
'Modulo', 'MvBits', + 'Nearest', 'New_Line', 'NInt', 'Norm2', 'Not', 'Null', 'Num_Images', + 'Or', 'Pack', 'Parity', 'PError', 'Precision', 'Present', 'Product', + 'Radix', 'Rand', 'Random_Number', 'Random_Seed', 'Range', 'Real', + 'RealPart', 'Rename', 'Repeat', 'Reshape', 'RRSpacing', 'RShift', + 'Same_Type_As', 'Scale', 'Scan', 'Second', 'Selected_Char_Kind', + 'Selected_Int_Kind', 'Selected_Real_Kind', 'Set_Exponent', 'Shape', + 'ShiftA', 'ShiftL', 'ShiftR', 'Short', 'Sign', 'Signal', 'SinH', + 'Sin', 'Sleep', 'Sngl', 'Spacing', 'Spread', 'SqRt', 'SRand', + 'Stat', 'Storage_Size', 'Sum', 'SymLnk', 'System', 'System_Clock', + 'Tan', 'TanH', 'Time', 'This_Image', 'Tiny', 'TrailZ', 'Transfer', + 'Transpose', 'Trim', 'TtyNam', 'UBound', 'UCoBound', 'UMask', + 'Unlink', 'Unpack', 'Verify', 'XOr', 'ZAbs', 'ZCos', 'ZExp', + 'ZLog', 'ZSin', 'ZSqRt'), prefix='\\b', suffix='\\s*\\b'), Name.Builtin), # Booleans -- cgit v1.2.1 From 50528f3f2415449cbd6cc557525bf87b05ca5b99 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Tue, 16 Sep 2014 15:02:47 +0200 Subject: convert C and C-like lexers to use "words" function --- pygments/lexers/compiled.py | 115 +++++++++++++++++++++++++------------------- 1 file changed, 66 insertions(+), 49 deletions(-) diff --git a/pygments/lexers/compiled.py b/pygments/lexers/compiled.py index 17027bfc..7ab5f7ee 100644 --- a/pygments/lexers/compiled.py +++ b/pygments/lexers/compiled.py @@ -58,7 +58,7 @@ class CFamilyLexer(RegexLexer): bygroups(using(this), Comment.Preproc), 'macro'), (r'\n', Text), (r'\s+', Text), - (r'\\\n', Text), # line continuation + (r'\\\n', Text), # line continuation (r'//(\n|(.|\n)*?[^\\]\n)', Comment.Single), (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline), ], @@ -73,19 +73,25 @@ class CFamilyLexer(RegexLexer): (r'\*/', Error), (r'[~!%^&*+=|?:<>/-]', Operator), (r'[()\[\],.]', Punctuation), - (r'(auto|break|case|const|continue|default|do|else|enum|extern|' - r'for|goto|if|register|restricted|return|sizeof|static|struct|' - r'switch|typedef|union|volatile|while)\b', Keyword), + (words(('auto', 'break', 'case', 'const', 'continue', 'default', 'do', + 'else', 'enum', 'extern', 'for', 'goto', 'if', 'register', + 'restricted', 'return', 'sizeof', 'static', 'struct', + 'switch', 'typedef', 'union', 'volatile', 'while'), + suffix=r'\b'), Keyword), (r'(bool|int|long|float|short|double|char|unsigned|signed|void|' r'[a-z_][a-z0-9_]*_t)\b', Keyword.Type), - (r'(_{0,2}inline|naked|restrict|thread|typename)\b', Keyword.Reserved), + (words(('inline', '_inline', '__inline', 'naked', 'restrict', + 'thread', 'typename'), suffix=r'\b'), Keyword.Reserved), # Vector intrinsics - (r'(__(m128i|m128d|m128|m64))\b', Keyword.Reserved), + (r'(__m(128i|128d|128|64))\b', Keyword.Reserved), # Microsoft-isms - (r'__(asm|int8|based|except|int16|stdcall|cdecl|fastcall|int32|' - r'declspec|finally|int64|try|leave|wchar_t|w64|unaligned|' - r'raise|noop|identifier|forceinline|assume)\b', Keyword.Reserved), + (words(( + 'asm', 'int8', 'based', 'except', 'int16', 'stdcall', 'cdecl', + 'fastcall', 'int32', 'declspec', 'finally', 'int64', 'try', + 'leave', 'wchar_t', 'w64', 'unaligned', 'raise', 'noop', + 'identifier', 'forceinline', 'assume'), + prefix=r'__', suffix=r'\b'), Keyword.Reserved), (r'(true|false|NULL)\b', Name.Builtin), (r'([a-zA-Z_]\w*)(\s*)(:)(?!:)', bygroups(Name.Label, Text, Punctuation)), ('[a-zA-Z_]\w*', Name), @@ -109,7 +115,7 @@ class CFamilyLexer(RegexLexer): Punctuation)), default('statement'), ], - 'statement' : [ + 'statement': [ include('whitespace'), 
include('statements'), ('[{}]', Punctuation), @@ -126,9 +132,9 @@ class CFamilyLexer(RegexLexer): (r'"', String, '#pop'), (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|' r'u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})', String.Escape), - (r'[^\\"\n]+', String), # all other characters - (r'\\\n', String), # line continuation - (r'\\', String), # stray backslash + (r'[^\\"\n]+', String), # all other characters + (r'\\\n', String), # line continuation + (r'\\', String), # stray backslash ], 'macro': [ (r'[^/\n]+', Comment.Preproc), @@ -206,24 +212,27 @@ class CppLexer(CFamilyLexer): tokens = { 'statements': [ - (r'(asm|catch|const_cast|delete|dynamic_cast|explicit|' - r'export|friend|mutable|namespace|new|operator|' - r'private|protected|public|reinterpret_cast|' - r'restrict|static_cast|template|this|throw|throws|' - r'typeid|typename|using|virtual|' - r'constexpr|nullptr|decltype|thread_local|' - r'alignas|alignof|static_assert|noexcept|override|final)\b', Keyword), - (r'(char16_t|char32_t)\b', Keyword.Type), + (words(( + 'asm', 'catch', 'const_cast', 'delete', 'dynamic_cast', 'explicit', + 'export', 'friend', 'mutable', 'namespace', 'new', 'operator', + 'private', 'protected', 'public', 'reinterpret_cast', + 'restrict', 'static_cast', 'template', 'this', 'throw', 'throws', + 'typeid', 'typename', 'using', 'virtual', + 'constexpr', 'nullptr', 'decltype', 'thread_local', + 'alignas', 'alignof', 'static_assert', 'noexcept', 'override', + 'final'), suffix=r'\b'), Keyword), + (r'char(16_t|32_t)\b', Keyword.Type), (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'), inherit, ], 'root': [ inherit, # C++ Microsoft-isms - (r'__(virtual_inheritance|uuidof|super|single_inheritance|' - r'multiple_inheritance|interface|event)\b', Keyword.Reserved), + (words(('virtual_inheritance', 'uuidof', 'super', 'single_inheritance', + 'multiple_inheritance', 'interface', 'event'), + prefix=r'__', suffix=r'\b'), Keyword.Reserved), # Offload C++ extensions, http://offload.codeplay.com/ - (r'(__offload|__blockingoffload|__outer)\b', Keyword.Pseudo), + (r'__(offload|blockingoffload|outer)\b', Keyword.Pseudo), ], 'classname': [ (r'[a-zA-Z_]\w*', Name.Class, '#pop'), @@ -252,15 +261,16 @@ class PikeLexer(CppLexer): tokens = { 'statements': [ - (r'(catch|new|private|protected|public|gauge|' - r'throw|throws|class|interface|implement|abstract|extends|from|' - r'this|super|new|constant|final|static|import|use|extern|' - r'inline|proto|break|continue|if|else|for|' - r'while|do|switch|case|as|in|version|return|true|false|null|' - r'__VERSION__|__MAJOR__|__MINOR__|__BUILD__|__REAL_VERSION__|' - r'__REAL_MAJOR__|__REAL_MINOR__|__REAL_BUILD__|__DATE__|__TIME__|' - r'__FILE__|__DIR__|__LINE__|__AUTO_BIGNUM__|__NT__|__PIKE__|' - r'__amigaos__|_Pragma|static_assert|defined|sscanf)\b', + (words(( + 'catch', 'new', 'private', 'protected', 'public', 'gauge', + 'throw', 'throws', 'class', 'interface', 'implement', 'abstract', 'extends', 'from', + 'this', 'super', 'new', 'constant', 'final', 'static', 'import', 'use', 'extern', + 'inline', 'proto', 'break', 'continue', 'if', 'else', 'for', + 'while', 'do', 'switch', 'case', 'as', 'in', 'version', 'return', 'true', 'false', 'null', + '__VERSION__', '__MAJOR__', '__MINOR__', '__BUILD__', '__REAL_VERSION__', + '__REAL_MAJOR__', '__REAL_MINOR__', '__REAL_BUILD__', '__DATE__', '__TIME__', + '__FILE__', '__DIR__', '__LINE__', '__AUTO_BIGNUM__', '__NT__', '__PIKE__', + '__amigaos__', '_Pragma', 'static_assert', 'defined', 'sscanf'), suffix=r'\b'), Keyword), 
(r'(bool|int|long|float|short|double|char|string|object|void|mapping|' r'array|multiset|program|function|lambda|mixed|' @@ -303,7 +313,7 @@ class SwigLexer(CppLexer): } # This is a far from complete set of SWIG directives - swig_directives = ( + swig_directives = set(( # Most common directives '%apply', '%define', '%director', '%enddef', '%exception', '%extend', '%feature', '%fragment', '%ignore', '%immutable', '%import', '%include', @@ -321,7 +331,8 @@ class SwigLexer(CppLexer): '%nestedworkaround', '%perlcode', '%pythonabc', '%pythonappend', '%pythoncallback', '%pythoncode', '%pythondynamic', '%pythonmaybecall', '%pythonnondynamic', '%pythonprepend', '%refobject', '%shadow', '%sizeof', - '%trackobjects', '%types', '%unrefobject', '%varargs', '%warn', '%warnfilter') + '%trackobjects', '%types', '%unrefobject', '%varargs', '%warn', + '%warnfilter')) def analyse_text(text): rv = 0 @@ -351,15 +362,18 @@ class ECLexer(CLexer): tokens = { 'statements': [ - (r'(virtual|class|private|public|property|import|delete|new|new0|' - r'renew|renew0|define|get|set|remote|dllexport|dllimport|stdcall|' - r'subclass|__on_register_module|namespace|using|typed_object|' - r'any_object|incref|register|watch|stopwatching|firewatchers|' - r'watchable|class_designer|class_fixed|class_no_expansion|isset|' - r'class_default_property|property_category|class_data|' - r'class_property|virtual|thisclass|' - r'dbtable|dbindex|database_open|dbfield)\b', Keyword), - (r'(uint|uint16|uint32|uint64|bool|byte|unichar|int64)\b', + (words(( + 'virtual', 'class', 'private', 'public', 'property', 'import', + 'delete', 'new', 'new0', 'renew', 'renew0', 'define', 'get', + 'set', 'remote', 'dllexport', 'dllimport', 'stdcall', 'subclass', + '__on_register_module', 'namespace', 'using', 'typed_object', + 'any_object', 'incref', 'register', 'watch', 'stopwatching', 'firewatchers', + 'watchable', 'class_designer', 'class_fixed', 'class_no_expansion', 'isset', + 'class_default_property', 'property_category', 'class_data', + 'class_property', 'virtual', 'thisclass', 'dbtable', 'dbindex', + 'database_open', 'dbfield'), suffix=r'\b'), Keyword), + (words(('uint', 'uint16', 'uint32', 'uint64', 'bool', 'byte', + 'unichar', 'int64'), suffix=r'\b'), Keyword.Type), (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'), (r'(null|value|this)\b', Name.Builtin), @@ -387,12 +401,15 @@ class NesCLexer(CLexer): tokens = { 'statements': [ - (r'(abstract|as|async|atomic|call|command|component|components|' - r'configuration|event|extends|generic|implementation|includes|' - r'interface|module|new|norace|post|provides|signal|task|uses)\b', + (words(( + 'abstract', 'as', 'async', 'atomic', 'call', 'command', 'component', + 'components', 'configuration', 'event', 'extends', 'generic', + 'implementation', 'includes', 'interface', 'module', 'new', 'norace', + 'post', 'provides', 'signal', 'task', 'uses'), suffix=r'\b'), Keyword), - (r'(nx_struct|nx_union|nx_int8_t|nx_int16_t|nx_int32_t|nx_int64_t|' - r'nx_uint8_t|nx_uint16_t|nx_uint32_t|nx_uint64_t)\b', + (words(('nx_struct', 'nx_union', 'nx_int8_t', 'nx_int16_t', 'nx_int32_t', + 'nx_int64_t', 'nx_uint8_t', 'nx_uint16_t', 'nx_uint32_t', + 'nx_uint64_t'), suffix=r'\b'), Keyword.Type), inherit, ], -- cgit v1.2.1 From 9866d8da397c9b43ec104489e8cd56f449e90d1c Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Tue, 16 Sep 2014 15:07:38 +0200 Subject: Nit lexer: untabify and apply words(). 
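
This patch, like the others in the series, swaps a hand-written alternation regex for the words() helper. words() itself lives in pygments/lexer.py and is not shown here; the short sketch below instead exercises regex_opt from the pygments/regexopt.py module added at the top of this series, on a made-up keyword list, to illustrate the kind of compact, escaped pattern these conversions produce. Treat it as an illustration, not the exact code path words() takes.

from pygments.regexopt import regex_opt

# Hypothetical keyword list, for illustration only.
keywords = ('if', 'else', 'elif', 'end')

# regex_opt sorts and escapes the literals, factors out shared prefixes,
# and compiles the result; prefix/suffix are glued on verbatim.
rex = regex_opt(keywords, prefix=r'\b', suffix=r'\b')

print(rex.pattern)                   # \b(e(?:l(?:if|se)|nd)|if)\b
assert rex.match('elif')
assert not rex.match('elsewhere')    # the \b suffix rejects partial words
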
--- pygments/lexers/compiled.py | 85 +++++++++++++++++++++++---------------------- 1 file changed, 44 insertions(+), 41 deletions(-) diff --git a/pygments/lexers/compiled.py b/pygments/lexers/compiled.py index 7ab5f7ee..6353aa15 100644 --- a/pygments/lexers/compiled.py +++ b/pygments/lexers/compiled.py @@ -5245,45 +5245,48 @@ class SwiftLexer(ObjectiveCLexer): class NitLexer(RegexLexer): - """ - For `nit `_ source. - - .. versionadded:: 2.0 - """ - - name = 'Nit' - aliases = ['nit'] - filenames = ['*.nit'] - tokens = { - 'root': [ - (r'#.*?$', Comment.Single), - (r'(package|module|import|class|abstract|interface|' - 'universal|enum|end|fun|type|init|redef|isa|do|' - 'readable|writable|var|intern|extern|public|protected|' - 'private|intrude|if|then|else|while|loop|for|in|and|' - 'or|not|implies|return|continue|break|abort|assert|' - 'new|is|once|super|self|true|false|nullable|null|as|' - 'isset|label|__debug__)(?=( |\n|\t|\r|\())', Keyword), - (r'[A-Z][A-Za-z0-9_]*', Name.Class), - (r'"""(([^\'\\]|\\.)|\\r|\\n)*(({{?)?(""?{{?)*""""*)', String), #Simple long string - (r'\'\'\'(((\\.|[^\'\\])|\\r|\\n)|\'((\\.|[^\'\\])|\\r|\\n)|' - r'\'\'((\\.|[^\'\\])|\\r|\\n))*\'\'\'', String), #Simple long string alt - (r'"""(([^\'\\]|\\.)|\\r|\\n)*((""?)?({{?""?)*{{{{*)', String), #Start long string - (r'}}}(((\\.|[^\'\\])|\\r|\\n))*(""?)?({{?""?)*{{{{*', String), #Mid long string - (r'}}}(((\\.|[^\'\\])|\\r|\\n))*({{?)?(""?{{?)*""""*', String), #End long string - (r'"(\\.|([^"}{\\]))*"', String), #Simple String - (r'"(\\.|([^"}{\\]))*{', String), #Start string - (r'}(\\.|([^"}{\\]))*{', String), #Mid String - (r'}(\\.|([^"}{\\]))*"', String), #End String - (r'(\'[^\'\\]\')|(\'\\.\')', String.Char), - (r'[0-9]+', Number.Integer), - (r'[0-9]*.[0-9]+', Number.Float), - (r'0(x|X)[0-9A-Fa-f]+', Number.Hex), - (r'[a-z][A-Za-z0-9_]*', Name), - (r'_[A-Za-z0-9_]+', Name.Variable.Instance), - (r'==|!=|<==>|>=|>>|>|<=|<<|<|\+|-|=|/|\*|%|\+=|-=|!|@', Operator), - (r'\(|\)|\[|\]|,|\.\.\.|\.\.|\.|::|:', Punctuation), - (r'`{[^`]*`}', Text), # Extern blocks won't be Lexed by Nit - ('(\r|\n| |\t)+', Text)] - } + """ + For `nit `_ source. + + .. 
versionadded:: 2.0 + """ + name = 'Nit' + aliases = ['nit'] + filenames = ['*.nit'] + tokens = { + 'root': [ + (r'#.*?$', Comment.Single), + (words(( + 'package', 'module', 'import', 'class', 'abstract', 'interface', + 'universal', 'enum', 'end', 'fun', 'type', 'init', 'redef', + 'isa', 'do', 'readable', 'writable', 'var', 'intern', 'extern', + 'public', 'protected', 'private', 'intrude', 'if', 'then', + 'else', 'while', 'loop', 'for', 'in', 'and', 'or', 'not', + 'implies', 'return', 'continue', 'break', 'abort', 'assert', + 'new', 'is', 'once', 'super', 'self', 'true', 'false', 'nullable', + 'null', 'as', 'isset', 'label', '__debug__'), suffix='(?=( |\n|\t|\r|\())'), + Keyword), + (r'[A-Z][A-Za-z0-9_]*', Name.Class), + (r'"""(([^\'\\]|\\.)|\\r|\\n)*(({{?)?(""?{{?)*""""*)', String), # Simple long string + (r'\'\'\'(((\\.|[^\'\\])|\\r|\\n)|\'((\\.|[^\'\\])|\\r|\\n)|' + r'\'\'((\\.|[^\'\\])|\\r|\\n))*\'\'\'', String), # Simple long string alt + (r'"""(([^\'\\]|\\.)|\\r|\\n)*((""?)?({{?""?)*{{{{*)', String), # Start long string + (r'}}}(((\\.|[^\'\\])|\\r|\\n))*(""?)?({{?""?)*{{{{*', String), # Mid long string + (r'}}}(((\\.|[^\'\\])|\\r|\\n))*({{?)?(""?{{?)*""""*', String), # End long string + (r'"(\\.|([^"}{\\]))*"', String), # Simple String + (r'"(\\.|([^"}{\\]))*{', String), # Start string + (r'}(\\.|([^"}{\\]))*{', String), # Mid String + (r'}(\\.|([^"}{\\]))*"', String), # End String + (r'(\'[^\'\\]\')|(\'\\.\')', String.Char), + (r'[0-9]+', Number.Integer), + (r'[0-9]*.[0-9]+', Number.Float), + (r'0(x|X)[0-9A-Fa-f]+', Number.Hex), + (r'[a-z][A-Za-z0-9_]*', Name), + (r'_[A-Za-z0-9_]+', Name.Variable.Instance), + (r'==|!=|<==>|>=|>>|>|<=|<<|<|\+|-|=|/|\*|%|\+=|-=|!|@', Operator), + (r'\(|\)|\[|\]|,|\.\.\.|\.\.|\.|::|:', Punctuation), + (r'`{[^`]*`}', Text), # Extern blocks won't be Lexed by Nit + ('(\r|\n| |\t)+', Text), + ], + } -- cgit v1.2.1 From 65aa3ad9a78ea08602def91bde7eda5f9a8b7079 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Tue, 16 Sep 2014 15:08:57 +0200 Subject: Swift: use sets for lookup --- pygments/lexers/compiled.py | 75 +++++++++++++++++++++++++-------------------- 1 file changed, 42 insertions(+), 33 deletions(-) diff --git a/pygments/lexers/compiled.py b/pygments/lexers/compiled.py index 6353aa15..627027c1 100644 --- a/pygments/lexers/compiled.py +++ b/pygments/lexers/compiled.py @@ -488,23 +488,32 @@ class DLexer(RegexLexer): (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline), (r'/\+', Comment.Multiline, 'nested_comment'), # Keywords - (r'(abstract|alias|align|asm|assert|auto|body|break|case|cast' - r'|catch|class|const|continue|debug|default|delegate|delete' - r'|deprecated|do|else|enum|export|extern|finally|final' - r'|foreach_reverse|foreach|for|function|goto|if|immutable|import' - r'|interface|invariant|inout|in|is|lazy|mixin|module|new|nothrow|out' - r'|override|package|pragma|private|protected|public|pure|ref|return' - r'|scope|shared|static|struct|super|switch|synchronized|template|this' - r'|throw|try|typedef|typeid|typeof|union|unittest|version|volatile' - r'|while|with|__gshared|__traits|__vector|__parameters)\b', Keyword - ), - (r'(bool|byte|cdouble|cent|cfloat|char|creal|dchar|double|float' - r'|idouble|ifloat|int|ireal|long|real|short|ubyte|ucent|uint|ulong' - r'|ushort|void|wchar)\b', Keyword.Type - ), + (words(( + 'abstract', 'alias', 'align', 'asm', 'assert', 'auto', 'body', + 'break', 'case', 'cast', 'catch', 'class', 'const', 'continue', + 'debug', 'default', 'delegate', 'delete', 'deprecated', 'do', 'else', + 'enum', 'export', 'extern', 
'finally', 'final', 'foreach_reverse', + 'foreach', 'for', 'function', 'goto', 'if', 'immutable', 'import', + 'interface', 'invariant', 'inout', 'in', 'is', 'lazy', 'mixin', + 'module', 'new', 'nothrow', 'out', 'override', 'package', 'pragma', + 'private', 'protected', 'public', 'pure', 'ref', 'return', 'scope', + 'shared', 'static', 'struct', 'super', 'switch', 'synchronized', + 'template', 'this', 'throw', 'try', 'typedef', 'typeid', 'typeof', + 'union', 'unittest', 'version', 'volatile', 'while', 'with', + '__gshared', '__traits', '__vector', '__parameters'), + suffix=r'\b'), + Keyword), + (words(( + 'bool', 'byte', 'cdouble', 'cent', 'cfloat', 'char', 'creal', + 'dchar', 'double', 'float', 'idouble', 'ifloat', 'int', 'ireal', + 'long', 'real', 'short', 'ubyte', 'ucent', 'uint', 'ulong', + 'ushort', 'void', 'wchar'), suffix=r'\b'), + Keyword.Type), (r'(false|true|null)\b', Keyword.Constant), - (r'(__FILE__|__MODULE__|__LINE__|__FUNCTION__|__PRETTY_FUNCTION__' - r'|__DATE__|__EOF__|__TIME__|__TIMESTAMP__|__VENDOR__|__VERSION__)\b', + (words(( + '__FILE__', '__MODULE__', '__LINE__', '__FUNCTION__', '__PRETTY_FUNCTION__' + '', '__DATE__', '__EOF__', '__TIME__', '__TIMESTAMP__', '__VENDOR__', + '__VERSION__'), suffix=r'\b'), Keyword.Pseudo), (r'macro\b', Keyword.Reserved), (r'(string|wstring|dstring|size_t|ptrdiff_t)\b', Name.Builtin), @@ -5202,6 +5211,7 @@ class MqlLexer(CppLexer): ], } + class SwiftLexer(ObjectiveCLexer): """ For `Swift `_ source. @@ -5211,25 +5221,24 @@ class SwiftLexer(ObjectiveCLexer): aliases = ['swift'] mimetypes = ['text/x-swift'] - keywords_decl = ['class', 'deinit', 'enum', 'extension', 'func', 'import', - 'init', 'let', 'protocol', 'static', 'struct', 'subscript', - 'typealias', 'var'] - keywords_stmt = ['break', 'case', 'continue', 'default', 'do', 'else', - 'fallthrough', 'if', 'in', 'for', 'return', 'switch', - 'where', 'while'] - keywords_type = ['as', 'dynamicType', 'is', 'new', 'super', 'self', 'Self', - 'Type', '__COLUMN__', '__FILE__', '__FUNCTION__', - '__LINE__'] - keywords_resrv = ['associativity', 'didSet', 'get', 'infix', 'inout', 'left', - 'mutating', 'none', 'nonmutating', 'operator', 'override', - 'postfix', 'precedence', 'prefix', 'right', 'set', - 'unowned', 'unowned(safe)', 'unowned(unsafe)', 'weak', - 'willSet'] - operators = ['->'] + keywords_decl = set(('class', 'deinit', 'enum', 'extension', 'func', 'import', + 'init', 'let', 'protocol', 'static', 'struct', 'subscript', + 'typealias', 'var')) + keywords_stmt = set(('break', 'case', 'continue', 'default', 'do', 'else', + 'fallthrough', 'if', 'in', 'for', 'return', 'switch', + 'where', 'while')) + keywords_type = set(('as', 'dynamicType', 'is', 'new', 'super', 'self', 'Self', + 'Type', '__COLUMN__', '__FILE__', '__FUNCTION__', + '__LINE__')) + keywords_resrv = set(('associativity', 'didSet', 'get', 'infix', 'inout', 'left', + 'mutating', 'none', 'nonmutating', 'operator', 'override', + 'postfix', 'precedence', 'prefix', 'right', 'set', + 'unowned', 'unowned(safe)', 'unowned(unsafe)', 'weak', + 'willSet')) + operators = set(('->',)) def get_tokens_unprocessed(self, text): - for index, token, value in \ - ObjectiveCLexer.get_tokens_unprocessed(self, text): + for index, token, value in ObjectiveCLexer.get_tokens_unprocessed(self, text): if token is Name: if value in self.keywords_decl: token = Keyword -- cgit v1.2.1 From 80ab5a3cdb3dee2a12351ba95c94aff7c3f82643 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Tue, 16 Sep 2014 15:12:52 +0200 Subject: Mql: switch to words() --- 
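
Before the Mql diff below, a brief note on the patch just above ("Swift: use sets for lookup"): get_tokens_unprocessed tests membership for every Name token it emits, so turning the keyword lists into sets replaces a linear scan with a hash lookup. A rough, illustrative timing sketch (numbers will vary by machine):

import timeit

# The list mirrors the Swift lexer's keywords_decl; 'var' sits at the
# end, so the list lookup pays for a full scan on every hit.
setup = """
keywords_decl = ['class', 'deinit', 'enum', 'extension', 'func', 'import',
                 'init', 'let', 'protocol', 'static', 'struct', 'subscript',
                 'typealias', 'var']
keywords_set = set(keywords_decl)
"""
print(timeit.timeit("'var' in keywords_decl", setup=setup))  # linear scan
print(timeit.timeit("'var' in keywords_set", setup=setup))   # hash lookup
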
pygments/lexers/compiled.py | 855 ++++++++++++++++++++++---------------------- 1 file changed, 433 insertions(+), 422 deletions(-) diff --git a/pygments/lexers/compiled.py b/pygments/lexers/compiled.py index 627027c1..0fc41199 100644 --- a/pygments/lexers/compiled.py +++ b/pygments/lexers/compiled.py @@ -4783,430 +4783,441 @@ class MqlLexer(CppLexer): tokens = { 'statements': [ - (r'(input|_Digits|_Point|_LastError|_Period|_RandomSeed|' - r'_StopFlag|_Symbol|_UninitReason|' - r'Ask|Bars|Bid|Close|Digits|High|Low|Open|Point|Time|Volume)\b', + (words(( + 'input', '_Digits', '_Point', '_LastError', '_Period', '_RandomSeed', + '_StopFlag', '_Symbol', '_UninitReason', 'Ask', 'Bars', 'Bid', + 'Close', 'Digits', 'High', 'Low', 'Open', 'Point', 'Time', + 'Volume'), suffix=r'\b'), Keyword), - (r'(void|char|uchar|bool|short|ushort|int|uint|color|long|ulong|datetime|' - r'float|double|string)\b', + (words(( + 'void', 'char', 'uchar', 'bool', 'short', 'ushort', 'int', 'uint', + 'color', 'long', 'ulong', 'datetime', 'float', 'double', + 'string'), suffix=r'\b'), Keyword.Type), - (r'(Alert|CheckPointer|Comment|DebugBreak|ExpertRemove|' - r'GetPointer|GetTickCount|MessageBox|PeriodSeconds|PlaySound|' - r'Print|PrintFormat|ResetLastError|ResourceCreate|ResourceFree|' - r'ResourceReadImage|ResourceSave|SendFTP|SendMail|SendNotification|' - r'Sleep|TerminalClose|TesterStatistics|ZeroMemory|' - r'ArrayBsearch|ArrayCopy|ArrayCompare|ArrayFree|ArrayGetAsSeries|' - r'ArrayInitialize|ArrayFill|ArrayIsSeries|ArrayIsDynamic|' - r'ArrayMaximum|ArrayMinimum|ArrayRange|ArrayResize|' - r'ArraySetAsSeries|ArraySize|ArraySort|ArrayCopyRates|' - r'ArrayCopySeries|ArrayDimension|' - r'CharToString|DoubleToString|EnumToString|NormalizeDouble|' - r'StringToDouble|StringToInteger|StringToTime|TimeToString|' - r'IntegerToString|ShortToString|ShortArrayToString|' - r'StringToShortArray|CharArrayToString|StringToCharArray|' - r'ColorToARGB|ColorToString|StringToColor|StringFormat|' - r'CharToStr|DoubleToStr|StrToDouble|StrToInteger|StrToTime|TimeToStr|' - r'MathAbs|MathArccos|MathArcsin|MathArctan|MathCeil|MathCos|MathExp|' - r'MathFloor|MathLog|MathMax|MathMin|MathMod|MathPow|MathRand|' - r'MathRound|MathSin|MathSqrt|MathSrand|MathTan|MathIsValidNumber|' - r'StringAdd|StringBufferLen|StringCompare|StringConcatenate|StringFill|' - r'StringFind|StringGetCharacter|StringInit|StringLen|StringReplace|' - r'StringSetCharacter|StringSplit|StringSubstr|StringToLower|StringToUpper|' - r'StringTrimLeft|StringTrimRight|StringGetChar|StringSetChar|' - r'TimeCurrent|TimeTradeServer|TimeLocal|TimeGMT|TimeDaylightSavings|' - r'TimeGMTOffset|TimeToStruct|StructToTime|Day|DayOfWeek|DayOfYear|' - r'Hour|Minute|Month|Seconds|TimeDay|TimeDayOfWeek|TimeDayOfYear|TimeHour|' - r'TimeMinute|TimeMonth|TimeSeconds|TimeYear|Year|' - r'AccountInfoDouble|AccountInfoInteger|AccountInfoString|AccountBalance|' - r'AccountCredit|AccountCompany|AccountCurrency|AccountEquity|' - r'AccountFreeMargin|AccountFreeMarginCheck|AccountFreeMarginMode|' - r'AccountLeverage|AccountMargin|AccountName|AccountNumber|AccountProfit|' - r'AccountServer|AccountStopoutLevel|AccountStopoutMode|' - r'GetLastError|IsStopped|UninitializeReason|MQLInfoInteger|MQLInfoString|' - r'Symbol|Period|Digits|Point|IsConnected|IsDemo|IsDllsAllowed|' - r'IsExpertEnabled|IsLibrariesAllowed|IsOptimization|IsTesting|' - r'IsTradeAllowed|' - r'IsTradeContextBusy|IsVisualMode|TerminalCompany|TerminalName|' - r'TerminalPath|' - r'SymbolsTotal|SymbolName|SymbolSelect|SymbolIsSynchronized|' - 
r'SymbolInfoDouble|' - r'SymbolInfoInteger|SymbolInfoString|SymbolInfoTick|' - r'SymbolInfoSessionQuote|' - r'SymbolInfoSessionTrade|MarketInfo|' - r'SeriesInfoInteger|CopyRates|CopyTime|CopyOpen|' - r'CopyHigh|CopyLow|CopyClose|' - r'CopyTickVolume|CopyRealVolume|CopySpread|iBars|iBarShift|iClose|' - r'iHigh|iHighest|iLow|iLowest|iOpen|iTime|iVolume|' - r'HideTestIndicators|Period|RefreshRates|Symbol|WindowBarsPerChart|' - r'WindowExpertName|WindowFind|WindowFirstVisibleBar|WindowHandle|' - r'WindowIsVisible|WindowOnDropped|WindowPriceMax|WindowPriceMin|' - r'WindowPriceOnDropped|WindowRedraw|WindowScreenShot|' - r'WindowTimeOnDropped|WindowsTotal|WindowXOnDropped|WindowYOnDropped|' - r'OrderClose|OrderCloseBy|OrderClosePrice|OrderCloseTime|OrderComment|' - r'OrderCommission|OrderDelete|OrderExpiration|OrderLots|OrderMagicNumber|' - r'OrderModify|OrderOpenPrice|OrderOpenTime|OrderPrint|OrderProfit|' - r'OrderSelect|OrderSend|OrdersHistoryTotal|OrderStopLoss|OrdersTotal|' - r'OrderSwap|OrderSymbol|OrderTakeProfit|OrderTicket|OrderType|' - r'GlobalVariableCheck|GlobalVariableTime|' - r'GlobalVariableDel|GlobalVariableGet|GlobalVariableName|' - r'GlobalVariableSet|GlobalVariablesFlush|GlobalVariableTemp|' - r'GlobalVariableSetOnCondition|GlobalVariablesDeleteAll|' - r'GlobalVariablesTotal|GlobalVariableCheck|GlobalVariableTime|' - r'GlobalVariableDel|GlobalVariableGet|' - r'GlobalVariableName|GlobalVariableSet|GlobalVariablesFlush|' - r'GlobalVariableTemp|GlobalVariableSetOnCondition|' - r'GlobalVariablesDeleteAll|GlobalVariablesTotal|' - r'GlobalVariableCheck|GlobalVariableTime|GlobalVariableDel|' - r'GlobalVariableGet|GlobalVariableName|GlobalVariableSet|' - r'GlobalVariablesFlush|GlobalVariableTemp|' - r'GlobalVariableSetOnCondition|GlobalVariablesDeleteAll|' - r'GlobalVariablesTotal|' - r'FileFindFirst|FileFindNext|FileFindClose|FileOpen|FileDelete|' - r'FileFlush|FileGetInteger|FileIsEnding|FileIsLineEnding|' - r'FileClose|FileIsExist|FileCopy|FileMove|FileReadArray|' - r'FileReadBool|FileReadDatetime|FileReadDouble|FileReadFloat|' - r'FileReadInteger|FileReadLong|FileReadNumber|FileReadString|' - r'FileReadStruct|FileSeek|FileSize|FileTell|FileWrite|' - r'FileWriteArray|FileWriteDouble|FileWriteFloat|FileWriteInteger|' - r'FileWriteLong|FileWriteString|FileWriteStruct|FolderCreate|' - r'FolderDelete|FolderClean|FileOpenHistory|' - r'IndicatorSetDouble|IndicatorSetInteger|IndicatorSetString|' - r'SetIndexBuffer|IndicatorBuffers|IndicatorCounted|IndicatorDigits|' - r'IndicatorShortName|SetIndexArrow|SetIndexDrawBegin|' - r'SetIndexEmptyValue|SetIndexLabel|SetIndexShift|' - r'SetIndexStyle|SetLevelStyle|SetLevelValue|' - r'ObjectCreate|ObjectName|ObjectDelete|ObjectsDeleteAll|' - r'ObjectFind|ObjectGetTimeByValue|ObjectGetValueByTime|' - r'ObjectMove|ObjectsTotal|ObjectGetDouble|ObjectGetInteger|' - r'ObjectGetString|ObjectSetDouble|ObjectSetInteger|' - r'ObjectSetString|TextSetFont|TextOut|TextGetSize|' - r'ObjectDescription|ObjectGet|ObjectGetFiboDescription|' - r'ObjectGetShiftByValue|ObjectGetValueByShift|ObjectSet|' - r'ObjectSetFiboDescription|ObjectSetText|ObjectType|' - r'iAC|iAD|iADX|iAlligator|iAO|iATR|iBearsPower|' - r'iBands|iBandsOnArray|iBullsPower|iCCI|iCCIOnArray|' - r'iCustom|iDeMarker|iEnvelopes|iEnvelopesOnArray|' - r'iForce|iFractals|iGator|iIchimoku|iBWMFI|iMomentum|' - r'iMomentumOnArray|iMFI|iMA|iMAOnArray|iOsMA|iMACD|' - r'iOBV|iSAR|iRSI|iRSIOnArray|iRVI|iStdDev|iStdDevOnArray|' - r'iStochastic|iWPR|' - r'EventSetMillisecondTimer|EventSetTimer|' - 
r'EventKillTimer|EventChartCustom)\b', Name.Function), - (r'(CHARTEVENT_KEYDOWN|CHARTEVENT_MOUSE_MOVE|' - r'CHARTEVENT_OBJECT_CREATE|' - r'CHARTEVENT_OBJECT_CHANGE|CHARTEVENT_OBJECT_DELETE|' - r'CHARTEVENT_CLICK|' - r'CHARTEVENT_OBJECT_CLICK|CHARTEVENT_OBJECT_DRAG|' - r'CHARTEVENT_OBJECT_ENDEDIT|' - r'CHARTEVENT_CHART_CHANGE|CHARTEVENT_CUSTOM|' - r'CHARTEVENT_CUSTOM_LAST|' - r'PERIOD_CURRENT|PERIOD_M1|PERIOD_M2|PERIOD_M3|' - r'PERIOD_M4|PERIOD_M5|' - r'PERIOD_M6|PERIOD_M10|PERIOD_M12|PERIOD_M15|' - r'PERIOD_M20|PERIOD_M30|' - r'PERIOD_H1|PERIOD_H2|PERIOD_H3|PERIOD_H4|' - r'PERIOD_H6|PERIOD_H8|' - r'PERIOD_H12|PERIOD_D1|PERIOD_W1|PERIOD_MN1|' - r'CHART_IS_OBJECT|CHART_BRING_TO_TOP|' - r'CHART_MOUSE_SCROLL|CHART_EVENT_MOUSE_MOVE|' - r'CHART_EVENT_OBJECT_CREATE|' - r'CHART_EVENT_OBJECT_DELETE|CHART_MODE|CHART_FOREGROUND|' - r'CHART_SHIFT|' - r'CHART_AUTOSCROLL|CHART_SCALE|CHART_SCALEFIX|' - r'CHART_SCALEFIX_11|' - r'CHART_SCALE_PT_PER_BAR|CHART_SHOW_OHLC|' - r'CHART_SHOW_BID_LINE|' - r'CHART_SHOW_ASK_LINE|CHART_SHOW_LAST_LINE|' - r'CHART_SHOW_PERIOD_SEP|' - r'CHART_SHOW_GRID|CHART_SHOW_VOLUMES|' - r'CHART_SHOW_OBJECT_DESCR|' - r'CHART_VISIBLE_BARS|CHART_WINDOWS_TOTAL|' - r'CHART_WINDOW_IS_VISIBLE|' - r'CHART_WINDOW_HANDLE|CHART_WINDOW_YDISTANCE|' - r'CHART_FIRST_VISIBLE_BAR|' - r'CHART_WIDTH_IN_BARS|CHART_WIDTH_IN_PIXELS|' - r'CHART_HEIGHT_IN_PIXELS|' - r'CHART_COLOR_BACKGROUND|CHART_COLOR_FOREGROUND|' - r'CHART_COLOR_GRID|' - r'CHART_COLOR_VOLUME|CHART_COLOR_CHART_UP|' - r'CHART_COLOR_CHART_DOWN|' - r'CHART_COLOR_CHART_LINE|CHART_COLOR_CANDLE_BULL|' - r'CHART_COLOR_CANDLE_BEAR|' - r'CHART_COLOR_BID|CHART_COLOR_ASK|CHART_COLOR_LAST|' - r'CHART_COLOR_STOP_LEVEL|' - r'CHART_SHOW_TRADE_LEVELS|CHART_DRAG_TRADE_LEVELS|' - r'CHART_SHOW_DATE_SCALE|' - r'CHART_SHOW_PRICE_SCALE|CHART_SHIFT_SIZE|' - r'CHART_FIXED_POSITION|' - r'CHART_FIXED_MAX|CHART_FIXED_MIN|CHART_POINTS_PER_BAR|' - r'CHART_PRICE_MIN|' - r'CHART_PRICE_MAX|CHART_COMMENT|CHART_BEGIN|' - r'CHART_CURRENT_POS|CHART_END|' - r'CHART_BARS|CHART_CANDLES|CHART_LINE|CHART_VOLUME_HIDE|' - r'CHART_VOLUME_TICK|CHART_VOLUME_REAL|' - r'OBJ_VLINE|OBJ_HLINE|OBJ_TREND|OBJ_TRENDBYANGLE|OBJ_CYCLES|' - r'OBJ_CHANNEL|OBJ_STDDEVCHANNEL|OBJ_REGRESSION|OBJ_PITCHFORK|' - r'OBJ_GANNLINE|OBJ_GANNFAN|OBJ_GANNGRID|OBJ_FIBO|' - r'OBJ_FIBOTIMES|OBJ_FIBOFAN|OBJ_FIBOARC|OBJ_FIBOCHANNEL|' - r'OBJ_EXPANSION|OBJ_RECTANGLE|OBJ_TRIANGLE|OBJ_ELLIPSE|' - r'OBJ_ARROW_THUMB_UP|OBJ_ARROW_THUMB_DOWN|' - r'OBJ_ARROW_UP|OBJ_ARROW_DOWN|' - r'OBJ_ARROW_STOP|OBJ_ARROW_CHECK|OBJ_ARROW_LEFT_PRICE|' - r'OBJ_ARROW_RIGHT_PRICE|OBJ_ARROW_BUY|OBJ_ARROW_SELL|' - r'OBJ_ARROW|' - r'OBJ_TEXT|OBJ_LABEL|OBJ_BUTTON|OBJ_BITMAP|' - r'OBJ_BITMAP_LABEL|' - r'OBJ_EDIT|OBJ_EVENT|OBJ_RECTANGLE_LABEL|' - r'OBJPROP_TIME1|OBJPROP_PRICE1|OBJPROP_TIME2|' - r'OBJPROP_PRICE2|OBJPROP_TIME3|' - r'OBJPROP_PRICE3|OBJPROP_COLOR|OBJPROP_STYLE|' - r'OBJPROP_WIDTH|' - r'OBJPROP_BACK|OBJPROP_RAY|OBJPROP_ELLIPSE|' - r'OBJPROP_SCALE|' - r'OBJPROP_ANGLE|OBJPROP_ARROWCODE|OBJPROP_TIMEFRAMES|' - r'OBJPROP_DEVIATION|OBJPROP_FONTSIZE|OBJPROP_CORNER|' - r'OBJPROP_XDISTANCE|OBJPROP_YDISTANCE|OBJPROP_FIBOLEVELS|' - r'OBJPROP_LEVELCOLOR|OBJPROP_LEVELSTYLE|OBJPROP_LEVELWIDTH|' - r'OBJPROP_FIRSTLEVEL|OBJPROP_COLOR|OBJPROP_STYLE|OBJPROP_WIDTH|' - r'OBJPROP_BACK|OBJPROP_ZORDER|OBJPROP_FILL|OBJPROP_HIDDEN|' - r'OBJPROP_SELECTED|OBJPROP_READONLY|OBJPROP_TYPE|OBJPROP_TIME|' - r'OBJPROP_SELECTABLE|OBJPROP_CREATETIME|OBJPROP_LEVELS|' - r'OBJPROP_LEVELCOLOR|OBJPROP_LEVELSTYLE|OBJPROP_LEVELWIDTH|' - 
r'OBJPROP_ALIGN|OBJPROP_FONTSIZE|OBJPROP_RAY_RIGHT|OBJPROP_RAY|' - r'OBJPROP_ELLIPSE|OBJPROP_ARROWCODE|OBJPROP_TIMEFRAMES|OBJPROP_ANCHOR|' - r'OBJPROP_XDISTANCE|OBJPROP_YDISTANCE|OBJPROP_DRAWLINES|OBJPROP_STATE|' - r'OBJPROP_CHART_ID|OBJPROP_XSIZE|OBJPROP_YSIZE|OBJPROP_XOFFSET|' - r'OBJPROP_YOFFSET|OBJPROP_PERIOD|OBJPROP_DATE_SCALE|OBJPROP_PRICE_SCALE|' - r'OBJPROP_CHART_SCALE|OBJPROP_BGCOLOR|OBJPROP_CORNER|OBJPROP_BORDER_TYPE|' - r'OBJPROP_BORDER_COLOR|OBJPROP_PRICE|OBJPROP_LEVELVALUE|OBJPROP_SCALE|' - r'OBJPROP_ANGLE|OBJPROP_DEVIATION|' - r'OBJPROP_NAME|OBJPROP_TEXT|OBJPROP_TOOLTIP|OBJPROP_LEVELTEXT|' - r'OBJPROP_FONT|OBJPROP_BMPFILE|OBJPROP_SYMBOL|' - r'BORDER_FLAT|BORDER_RAISED|BORDER_SUNKEN|ALIGN_LEFT|ALIGN_CENTER|' - r'ALIGN_RIGHT|ANCHOR_LEFT_UPPER|ANCHOR_LEFT|ANCHOR_LEFT_LOWER|' - r'ANCHOR_LOWER|ANCHOR_RIGHT_LOWER|ANCHOR_RIGHT|ANCHOR_RIGHT_UPPER|' - r'ANCHOR_UPPER|ANCHOR_CENTER|ANCHOR_TOP|ANCHOR_BOTTOM|' - r'CORNER_LEFT_UPPER|CORNER_LEFT_LOWER|CORNER_RIGHT_LOWER|' - r'CORNER_RIGHT_UPPER|' - r'OBJ_NO_PERIODS|EMPTY|OBJ_PERIOD_M1|OBJ_PERIOD_M5|OBJ_PERIOD_M15|' - r'OBJ_PERIOD_M30|OBJ_PERIOD_H1|OBJ_PERIOD_H4|OBJ_PERIOD_D1|' - r'OBJ_PERIOD_W1|OBJ_PERIOD_MN1|OBJ_ALL_PERIODS|' - r'GANN_UP_TREND|GANN_DOWN_TREND|' - r'((clr)?(Black|DarkGreen|DarkSlateGray|Olive|' - r'Green|Teal|Navy|Purple|' - r'Maroon|Indigo|MidnightBlue|DarkBlue|' - r'DarkOliveGreen|SaddleBrown|' - r'ForestGreen|OliveDrab|SeaGreen|' - r'DarkGoldenrod|DarkSlateBlue|' - r'Sienna|MediumBlue|Brown|DarkTurquoise|' - r'DimGray|LightSeaGreen|' - r'DarkViolet|FireBrick|MediumVioletRed|' - r'MediumSeaGreen|Chocolate|' - r'Crimson|SteelBlue|Goldenrod|MediumSpringGreen|' - r'LawnGreen|CadetBlue|' - r'DarkOrchid|YellowGreen|LimeGreen|OrangeRed|' - r'DarkOrange|Orange|' - r'Gold|Yellow|Chartreuse|Lime|SpringGreen|' - r'Aqua|DeepSkyBlue|Blue|' - r'Magenta|Red|Gray|SlateGray|Peru|BlueViolet|' - r'LightSlateGray|DeepPink|' - r'MediumTurquoise|DodgerBlue|Turquoise|RoyalBlue|' - r'SlateBlue|DarkKhaki|' - r'IndianRed|MediumOrchid|GreenYellow|' - r'MediumAquamarine|DarkSeaGreen|' - r'Tomato|RosyBrown|Orchid|MediumPurple|' - r'PaleVioletRed|Coral|CornflowerBlue|' - r'DarkGray|SandyBrown|MediumSlateBlue|' - r'Tan|DarkSalmon|BurlyWood|' - r'HotPink|Salmon|Violet|LightCoral|SkyBlue|' - r'LightSalmon|Plum|' - r'Khaki|LightGreen|Aquamarine|Silver|' - r'LightSkyBlue|LightSteelBlue|' - r'LightBlue|PaleGreen|Thistle|PowderBlue|' - r'PaleGoldenrod|PaleTurquoise|' - r'LightGray|Wheat|NavajoWhite|Moccasin|' - r'LightPink|Gainsboro|PeachPuff|' - r'Pink|Bisque|LightGoldenrod|BlanchedAlmond|' - r'LemonChiffon|Beige|' - r'AntiqueWhite|PapayaWhip|Cornsilk|' - r'LightYellow|LightCyan|Linen|' - r'Lavender|MistyRose|OldLace|WhiteSmoke|' - r'Seashell|Ivory|Honeydew|' - r'AliceBlue|LavenderBlush|MintCream|Snow|White))|' - r'SYMBOL_THUMBSUP|SYMBOL_THUMBSDOWN|' - r'SYMBOL_ARROWUP|SYMBOL_ARROWDOWN|' - r'SYMBOL_STOPSIGN|SYMBOL_CHECKSIGN|' - r'SYMBOL_LEFTPRICE|SYMBOL_RIGHTPRICE|' - r'PRICE_CLOSE|PRICE_OPEN|PRICE_HIGH|PRICE_LOW|' - r'PRICE_MEDIAN|PRICE_TYPICAL|PRICE_WEIGHTED|' - r'VOLUME_TICK|VOLUME_REAL|' - r'STO_LOWHIGH|STO_CLOSECLOSE|' - r'MODE_OPEN|MODE_LOW|MODE_HIGH|MODE_CLOSE|MODE_VOLUME|MODE_TIME|' - r'MODE_SMA|MODE_EMA|MODE_SMMA|MODE_LWMA|' - r'MODE_MAIN|MODE_SIGNAL|MODE_MAIN|' - r'MODE_PLUSDI|MODE_MINUSDI|MODE_UPPER|' - r'MODE_LOWER|MODE_GATORJAW|MODE_GATORTEETH|' - r'MODE_GATORLIPS|MODE_TENKANSEN|' - r'MODE_KIJUNSEN|MODE_SENKOUSPANA|' - r'MODE_SENKOUSPANB|MODE_CHINKOUSPAN|' - r'DRAW_LINE|DRAW_SECTION|DRAW_HISTOGRAM|' - r'DRAW_ARROW|DRAW_ZIGZAG|DRAW_NONE|' - 
r'STYLE_SOLID|STYLE_DASH|STYLE_DOT|' - r'STYLE_DASHDOT|STYLE_DASHDOTDOT|' - r'DRAW_NONE|DRAW_LINE|DRAW_SECTION|DRAW_HISTOGRAM|' - r'DRAW_ARROW|DRAW_ZIGZAG|DRAW_FILLING|' - r'INDICATOR_DATA|INDICATOR_COLOR_INDEX|' - r'INDICATOR_CALCULATIONS|INDICATOR_DIGITS|' - r'INDICATOR_HEIGHT|INDICATOR_LEVELS|' - r'INDICATOR_LEVELCOLOR|INDICATOR_LEVELSTYLE|' - r'INDICATOR_LEVELWIDTH|INDICATOR_MINIMUM|' - r'INDICATOR_MAXIMUM|INDICATOR_LEVELVALUE|' - r'INDICATOR_SHORTNAME|INDICATOR_LEVELTEXT|' - r'TERMINAL_BUILD|TERMINAL_CONNECTED|' - r'TERMINAL_DLLS_ALLOWED|TERMINAL_TRADE_ALLOWED|' - r'TERMINAL_EMAIL_ENABLED|' - r'TERMINAL_FTP_ENABLED|TERMINAL_MAXBARS|' - r'TERMINAL_CODEPAGE|TERMINAL_CPU_CORES|' - r'TERMINAL_DISK_SPACE|TERMINAL_MEMORY_PHYSICAL|' - r'TERMINAL_MEMORY_TOTAL|' - r'TERMINAL_MEMORY_AVAILABLE|TERMINAL_MEMORY_USED|' - r'TERMINAL_X64|' - r'TERMINAL_OPENCL_SUPPORT|TERMINAL_LANGUAGE|' - r'TERMINAL_COMPANY|TERMINAL_NAME|' - r'TERMINAL_PATH|TERMINAL_DATA_PATH|' - r'TERMINAL_COMMONDATA_PATH|' - r'MQL_PROGRAM_TYPE|MQL_DLLS_ALLOWED|' - r'MQL_TRADE_ALLOWED|MQL_DEBUG|' - r'MQL_PROFILER|MQL_TESTER|MQL_OPTIMIZATION|' - r'MQL_VISUAL_MODE|' - r'MQL_FRAME_MODE|MQL_LICENSE_TYPE|MQL_PROGRAM_NAME|' - r'MQL_PROGRAM_PATH|' - r'PROGRAM_SCRIPT|PROGRAM_EXPERT|' - r'PROGRAM_INDICATOR|LICENSE_FREE|' - r'LICENSE_DEMO|LICENSE_FULL|LICENSE_TIME|' - r'MODE_LOW|MODE_HIGH|MODE_TIME|MODE_BID|' - r'MODE_ASK|MODE_POINT|' - r'MODE_DIGITS|MODE_SPREAD|MODE_STOPLEVEL|' - r'MODE_LOTSIZE|MODE_TICKVALUE|' - r'MODE_TICKSIZE|MODE_SWAPLONG|' - r'MODE_SWAPSHORT|MODE_STARTING|' - r'MODE_EXPIRATION|MODE_TRADEALLOWED|' - r'MODE_MINLOT|MODE_LOTSTEP|MODE_MAXLOT|' - r'MODE_SWAPTYPE|MODE_PROFITCALCMODE|' - r'MODE_MARGINCALCMODE|MODE_MARGININIT|' - r'MODE_MARGINMAINTENANCE|MODE_MARGINHEDGED|' - r'MODE_MARGINREQUIRED|MODE_FREEZELEVEL|' - r'SUNDAY|MONDAY|TUESDAY|WEDNESDAY|THURSDAY|' - r'FRIDAY|SATURDAY|' - r'ACCOUNT_LOGIN|ACCOUNT_TRADE_MODE|' - r'ACCOUNT_LEVERAGE|' - r'ACCOUNT_LIMIT_ORDERS|ACCOUNT_MARGIN_SO_MODE|' - r'ACCOUNT_TRADE_ALLOWED|ACCOUNT_TRADE_EXPERT|' - r'ACCOUNT_BALANCE|' - r'ACCOUNT_CREDIT|ACCOUNT_PROFIT|ACCOUNT_EQUITY|' - r'ACCOUNT_MARGIN|' - r'ACCOUNT_FREEMARGIN|ACCOUNT_MARGIN_LEVEL|' - r'ACCOUNT_MARGIN_SO_CALL|' - r'ACCOUNT_MARGIN_SO_SO|ACCOUNT_NAME|' - r'ACCOUNT_SERVER|ACCOUNT_CURRENCY|' - r'ACCOUNT_COMPANY|ACCOUNT_TRADE_MODE_DEMO|' - r'ACCOUNT_TRADE_MODE_CONTEST|' - r'ACCOUNT_TRADE_MODE_REAL|ACCOUNT_STOPOUT_MODE_PERCENT|' - r'ACCOUNT_STOPOUT_MODE_MONEY|' - r'STAT_INITIAL_DEPOSIT|STAT_WITHDRAWAL|STAT_PROFIT|' - r'STAT_GROSS_PROFIT|' - r'STAT_GROSS_LOSS|STAT_MAX_PROFITTRADE|' - r'STAT_MAX_LOSSTRADE|STAT_CONPROFITMAX|' - r'STAT_CONPROFITMAX_TRADES|STAT_MAX_CONWINS|' - r'STAT_MAX_CONPROFIT_TRADES|' - r'STAT_CONLOSSMAX|STAT_CONLOSSMAX_TRADES|' - r'STAT_MAX_CONLOSSES|' - r'STAT_MAX_CONLOSS_TRADES|STAT_BALANCEMIN|' - r'STAT_BALANCE_DD|' - r'STAT_BALANCEDD_PERCENT|STAT_BALANCE_DDREL_PERCENT|' - r'STAT_BALANCE_DD_RELATIVE|STAT_EQUITYMIN|' - r'STAT_EQUITY_DD|' - r'STAT_EQUITYDD_PERCENT|STAT_EQUITY_DDREL_PERCENT|' - r'STAT_EQUITY_DD_RELATIVE|STAT_EXPECTED_PAYOFF|' - r'STAT_PROFIT_FACTOR|' - r'STAT_RECOVERY_FACTOR|STAT_SHARPE_RATIO|' - r'STAT_MIN_MARGINLEVEL|' - r'STAT_CUSTOM_ONTESTER|STAT_DEALS|STAT_TRADES|' - r'STAT_PROFIT_TRADES|' - r'STAT_LOSS_TRADES|STAT_SHORT_TRADES|STAT_LONG_TRADES|' - r'STAT_PROFIT_SHORTTRADES|STAT_PROFIT_LONGTRADES|' - r'STAT_PROFITTRADES_AVGCON|STAT_LOSSTRADES_AVGCON|' - r'SERIES_BARS_COUNT|SERIES_FIRSTDATE|SERIES_LASTBAR_DATE|' - r'SERIES_SERVER_FIRSTDATE|SERIES_TERMINAL_FIRSTDATE|' - r'SERIES_SYNCHRONIZED|' - 
r'OP_BUY|OP_SELL|OP_BUYLIMIT|OP_SELLLIMIT|' - r'OP_BUYSTOP|OP_SELLSTOP|' - r'TRADE_ACTION_DEAL|TRADE_ACTION_PENDING|' - r'TRADE_ACTION_SLTP|' - r'TRADE_ACTION_MODIFY|TRADE_ACTION_REMOVE|' - r'__DATE__|__DATETIME__|__LINE__|__FILE__|' - r'__PATH__|__FUNCTION__|' - r'__FUNCSIG__|__MQLBUILD__|__MQL4BUILD__|' - r'M_E|M_LOG2E|M_LOG10E|M_LN2|M_LN10|' - r'M_PI|M_PI_2|M_PI_4|M_1_PI|' - r'M_2_PI|M_2_SQRTPI|M_SQRT2|M_SQRT1_2|' - r'CHAR_MIN|CHAR_MAX|UCHAR_MAX|' - r'SHORT_MIN|SHORT_MAX|USHORT_MAX|' - r'INT_MIN|INT_MAX|UINT_MAX|' - r'LONG_MIN|LONG_MAX|ULONG_MAX|' - r'DBL_MIN|DBL_MAX|DBL_EPSILON|DBL_DIG|DBL_MANT_DIG|' - r'DBL_MAX_10_EXP|DBL_MAX_EXP|DBL_MIN_10_EXP|DBL_MIN_EXP|' - r'FLT_MIN|FLT_MAX|FLT_EPSILON|' - r'FLT_DIG|FLT_MANT_DIG|FLT_MAX_10_EXP|' - r'FLT_MAX_EXP|FLT_MIN_10_EXP|FLT_MIN_EXP|REASON_PROGRAM' - r'REASON_REMOVE|REASON_RECOMPILE|' - r'REASON_CHARTCHANGE|REASON_CHARTCLOSE|' - r'REASON_PARAMETERS|REASON_ACCOUNT|' - r'REASON_TEMPLATE|REASON_INITFAILED|' - r'REASON_CLOSE|POINTER_INVALID' - r'POINTER_DYNAMIC|POINTER_AUTOMATIC|' - r'NULL|EMPTY|EMPTY_VALUE|CLR_NONE|WHOLE_ARRAY|' - r'CHARTS_MAX|clrNONE|EMPTY_VALUE|INVALID_HANDLE|' - r'IS_DEBUG_MODE|IS_PROFILE_MODE|NULL|WHOLE_ARRAY|WRONG_VALUE|' - r'ERR_NO_ERROR|ERR_NO_RESULT|ERR_COMMON_ERROR|' - r'ERR_INVALID_TRADE_PARAMETERS|' - r'ERR_SERVER_BUSY|ERR_OLD_VERSION|ERR_NO_CONNECTION|' - r'ERR_NOT_ENOUGH_RIGHTS|' - r'ERR_TOO_FREQUENT_REQUESTS|ERR_MALFUNCTIONAL_TRADE|' - r'ERR_ACCOUNT_DISABLED|' - r'ERR_INVALID_ACCOUNT|ERR_TRADE_TIMEOUT|' - r'ERR_INVALID_PRICE|ERR_INVALID_STOPS|' - r'ERR_INVALID_TRADE_VOLUME|ERR_MARKET_CLOSED|' - r'ERR_TRADE_DISABLED|' - r'ERR_NOT_ENOUGH_MONEY|ERR_PRICE_CHANGED|' - r'ERR_OFF_QUOTES|ERR_BROKER_BUSY|' - r'ERR_REQUOTE|ERR_ORDER_LOCKED|' - r'ERR_LONG_POSITIONS_ONLY_ALLOWED|ERR_TOO_MANY_REQUESTS|' - r'ERR_TRADE_MODIFY_DENIED|ERR_TRADE_CONTEXT_BUSY|' - r'ERR_TRADE_EXPIRATION_DENIED|' - r'ERR_TRADE_TOO_MANY_ORDERS|ERR_TRADE_HEDGE_PROHIBITED|' - r'ERR_TRADE_PROHIBITED_BY_FIFO|' - r'FILE_READ|FILE_WRITE|FILE_BIN|FILE_CSV|FILE_TXT|' - r'FILE_ANSI|FILE_UNICODE|' - r'FILE_SHARE_READ|FILE_SHARE_WRITE|FILE_REWRITE|' - r'FILE_COMMON|FILE_EXISTS|' - r'FILE_CREATE_DATE|FILE_MODIFY_DATE|' - r'FILE_ACCESS_DATE|FILE_SIZE|FILE_POSITION|' - r'FILE_END|FILE_LINE_END|FILE_IS_COMMON|' - r'FILE_IS_TEXT|FILE_IS_BINARY|' - r'FILE_IS_CSV|FILE_IS_ANSI|FILE_IS_READABLE|FILE_IS_WRITABLE|' - r'SEEK_SET|SEEK_CUR|SEEK_END|CP_ACP|' - r'CP_OEMCP|CP_MACCP|CP_THREAD_ACP|' - r'CP_SYMBOL|CP_UTF7|CP_UTF8|IDOK|IDCANCEL|IDABORT|' - r'IDRETRY|IDIGNORE|IDYES|IDNO|IDTRYAGAIN|IDCONTINUE|' - r'MB_OK|MB_OKCANCEL|MB_ABORTRETRYIGNORE|MB_YESNOCANCEL|' - r'MB_YESNO|MB_RETRYCANCEL|' - r'MB_CANCELTRYCONTINUE|MB_ICONSTOP|MB_ICONERROR|' - r'MB_ICONHAND|MB_ICONQUESTION|' - r'MB_ICONEXCLAMATION|MB_ICONWARNING|' - r'MB_ICONINFORMATION|MB_ICONASTERISK|' - r'MB_DEFBUTTON1|MB_DEFBUTTON2|MB_DEFBUTTON3|MB_DEFBUTTON4)\b', - Name.Constant), + (words(( + 'Alert', 'CheckPointer', 'Comment', 'DebugBreak', 'ExpertRemove', + 'GetPointer', 'GetTickCount', 'MessageBox', 'PeriodSeconds', 'PlaySound', + 'Print', 'PrintFormat', 'ResetLastError', 'ResourceCreate', 'ResourceFree', + 'ResourceReadImage', 'ResourceSave', 'SendFTP', 'SendMail', 'SendNotification', + 'Sleep', 'TerminalClose', 'TesterStatistics', 'ZeroMemory', + 'ArrayBsearch', 'ArrayCopy', 'ArrayCompare', 'ArrayFree', 'ArrayGetAsSeries', + 'ArrayInitialize', 'ArrayFill', 'ArrayIsSeries', 'ArrayIsDynamic', + 'ArrayMaximum', 'ArrayMinimum', 'ArrayRange', 'ArrayResize', + 'ArraySetAsSeries', 'ArraySize', 'ArraySort', 'ArrayCopyRates', 
+ 'ArrayCopySeries', 'ArrayDimension', + 'CharToString', 'DoubleToString', 'EnumToString', 'NormalizeDouble', + 'StringToDouble', 'StringToInteger', 'StringToTime', 'TimeToString', + 'IntegerToString', 'ShortToString', 'ShortArrayToString', + 'StringToShortArray', 'CharArrayToString', 'StringToCharArray', + 'ColorToARGB', 'ColorToString', 'StringToColor', 'StringFormat', + 'CharToStr', 'DoubleToStr', 'StrToDouble', 'StrToInteger', 'StrToTime', 'TimeToStr', + 'MathAbs', 'MathArccos', 'MathArcsin', 'MathArctan', 'MathCeil', 'MathCos', 'MathExp', + 'MathFloor', 'MathLog', 'MathMax', 'MathMin', 'MathMod', 'MathPow', 'MathRand', + 'MathRound', 'MathSin', 'MathSqrt', 'MathSrand', 'MathTan', 'MathIsValidNumber', + 'StringAdd', 'StringBufferLen', 'StringCompare', 'StringConcatenate', 'StringFill', + 'StringFind', 'StringGetCharacter', 'StringInit', 'StringLen', 'StringReplace', + 'StringSetCharacter', 'StringSplit', 'StringSubstr', 'StringToLower', 'StringToUpper', + 'StringTrimLeft', 'StringTrimRight', 'StringGetChar', 'StringSetChar', + 'TimeCurrent', 'TimeTradeServer', 'TimeLocal', 'TimeGMT', 'TimeDaylightSavings', + 'TimeGMTOffset', 'TimeToStruct', 'StructToTime', 'Day', 'DayOfWeek', 'DayOfYear', + 'Hour', 'Minute', 'Month', 'Seconds', 'TimeDay', 'TimeDayOfWeek', 'TimeDayOfYear', 'TimeHour', + 'TimeMinute', 'TimeMonth', 'TimeSeconds', 'TimeYear', 'Year', + 'AccountInfoDouble', 'AccountInfoInteger', 'AccountInfoString', 'AccountBalance', + 'AccountCredit', 'AccountCompany', 'AccountCurrency', 'AccountEquity', + 'AccountFreeMargin', 'AccountFreeMarginCheck', 'AccountFreeMarginMode', + 'AccountLeverage', 'AccountMargin', 'AccountName', 'AccountNumber', 'AccountProfit', + 'AccountServer', 'AccountStopoutLevel', 'AccountStopoutMode', + 'GetLastError', 'IsStopped', 'UninitializeReason', 'MQLInfoInteger', 'MQLInfoString', + 'Symbol', 'Period', 'Digits', 'Point', 'IsConnected', 'IsDemo', 'IsDllsAllowed', + 'IsExpertEnabled', 'IsLibrariesAllowed', 'IsOptimization', 'IsTesting', + 'IsTradeAllowed', + 'IsTradeContextBusy', 'IsVisualMode', 'TerminalCompany', 'TerminalName', + 'TerminalPath', + 'SymbolsTotal', 'SymbolName', 'SymbolSelect', 'SymbolIsSynchronized', + 'SymbolInfoDouble', + 'SymbolInfoInteger', 'SymbolInfoString', 'SymbolInfoTick', + 'SymbolInfoSessionQuote', + 'SymbolInfoSessionTrade', 'MarketInfo', + 'SeriesInfoInteger', 'CopyRates', 'CopyTime', 'CopyOpen', + 'CopyHigh', 'CopyLow', 'CopyClose', + 'CopyTickVolume', 'CopyRealVolume', 'CopySpread', 'iBars', 'iBarShift', 'iClose', + 'iHigh', 'iHighest', 'iLow', 'iLowest', 'iOpen', 'iTime', 'iVolume', + 'HideTestIndicators', 'Period', 'RefreshRates', 'Symbol', 'WindowBarsPerChart', + 'WindowExpertName', 'WindowFind', 'WindowFirstVisibleBar', 'WindowHandle', + 'WindowIsVisible', 'WindowOnDropped', 'WindowPriceMax', 'WindowPriceMin', + 'WindowPriceOnDropped', 'WindowRedraw', 'WindowScreenShot', + 'WindowTimeOnDropped', 'WindowsTotal', 'WindowXOnDropped', 'WindowYOnDropped', + 'OrderClose', 'OrderCloseBy', 'OrderClosePrice', 'OrderCloseTime', 'OrderComment', + 'OrderCommission', 'OrderDelete', 'OrderExpiration', 'OrderLots', 'OrderMagicNumber', + 'OrderModify', 'OrderOpenPrice', 'OrderOpenTime', 'OrderPrint', 'OrderProfit', + 'OrderSelect', 'OrderSend', 'OrdersHistoryTotal', 'OrderStopLoss', 'OrdersTotal', + 'OrderSwap', 'OrderSymbol', 'OrderTakeProfit', 'OrderTicket', 'OrderType', + 'GlobalVariableCheck', 'GlobalVariableTime', + 'GlobalVariableDel', 'GlobalVariableGet', 'GlobalVariableName', + 'GlobalVariableSet', 'GlobalVariablesFlush', 
'GlobalVariableTemp', + 'GlobalVariableSetOnCondition', 'GlobalVariablesDeleteAll', + 'GlobalVariablesTotal', 'GlobalVariableCheck', 'GlobalVariableTime', + 'GlobalVariableDel', 'GlobalVariableGet', + 'GlobalVariableName', 'GlobalVariableSet', 'GlobalVariablesFlush', + 'GlobalVariableTemp', 'GlobalVariableSetOnCondition', + 'GlobalVariablesDeleteAll', 'GlobalVariablesTotal', + 'GlobalVariableCheck', 'GlobalVariableTime', 'GlobalVariableDel', + 'GlobalVariableGet', 'GlobalVariableName', 'GlobalVariableSet', + 'GlobalVariablesFlush', 'GlobalVariableTemp', + 'GlobalVariableSetOnCondition', 'GlobalVariablesDeleteAll', + 'GlobalVariablesTotal', + 'FileFindFirst', 'FileFindNext', 'FileFindClose', 'FileOpen', 'FileDelete', + 'FileFlush', 'FileGetInteger', 'FileIsEnding', 'FileIsLineEnding', + 'FileClose', 'FileIsExist', 'FileCopy', 'FileMove', 'FileReadArray', + 'FileReadBool', 'FileReadDatetime', 'FileReadDouble', 'FileReadFloat', + 'FileReadInteger', 'FileReadLong', 'FileReadNumber', 'FileReadString', + 'FileReadStruct', 'FileSeek', 'FileSize', 'FileTell', 'FileWrite', + 'FileWriteArray', 'FileWriteDouble', 'FileWriteFloat', 'FileWriteInteger', + 'FileWriteLong', 'FileWriteString', 'FileWriteStruct', 'FolderCreate', + 'FolderDelete', 'FolderClean', 'FileOpenHistory', + 'IndicatorSetDouble', 'IndicatorSetInteger', 'IndicatorSetString', + 'SetIndexBuffer', 'IndicatorBuffers', 'IndicatorCounted', 'IndicatorDigits', + 'IndicatorShortName', 'SetIndexArrow', 'SetIndexDrawBegin', + 'SetIndexEmptyValue', 'SetIndexLabel', 'SetIndexShift', + 'SetIndexStyle', 'SetLevelStyle', 'SetLevelValue', + 'ObjectCreate', 'ObjectName', 'ObjectDelete', 'ObjectsDeleteAll', + 'ObjectFind', 'ObjectGetTimeByValue', 'ObjectGetValueByTime', + 'ObjectMove', 'ObjectsTotal', 'ObjectGetDouble', 'ObjectGetInteger', + 'ObjectGetString', 'ObjectSetDouble', 'ObjectSetInteger', + 'ObjectSetString', 'TextSetFont', 'TextOut', 'TextGetSize', + 'ObjectDescription', 'ObjectGet', 'ObjectGetFiboDescription', + 'ObjectGetShiftByValue', 'ObjectGetValueByShift', 'ObjectSet', + 'ObjectSetFiboDescription', 'ObjectSetText', 'ObjectType', + 'iAC', 'iAD', 'iADX', 'iAlligator', 'iAO', 'iATR', 'iBearsPower', + 'iBands', 'iBandsOnArray', 'iBullsPower', 'iCCI', 'iCCIOnArray', + 'iCustom', 'iDeMarker', 'iEnvelopes', 'iEnvelopesOnArray', + 'iForce', 'iFractals', 'iGator', 'iIchimoku', 'iBWMFI', 'iMomentum', + 'iMomentumOnArray', 'iMFI', 'iMA', 'iMAOnArray', 'iOsMA', 'iMACD', + 'iOBV', 'iSAR', 'iRSI', 'iRSIOnArray', 'iRVI', 'iStdDev', 'iStdDevOnArray', + 'iStochastic', 'iWPR', + 'EventSetMillisecondTimer', 'EventSetTimer', + 'EventKillTimer', 'EventChartCustom'), suffix=r'\b'), + Name.Function), + (words(( + 'CHARTEVENT_KEYDOWN', 'CHARTEVENT_MOUSE_MOVE', + 'CHARTEVENT_OBJECT_CREATE', + 'CHARTEVENT_OBJECT_CHANGE', 'CHARTEVENT_OBJECT_DELETE', + 'CHARTEVENT_CLICK', + 'CHARTEVENT_OBJECT_CLICK', 'CHARTEVENT_OBJECT_DRAG', + 'CHARTEVENT_OBJECT_ENDEDIT', + 'CHARTEVENT_CHART_CHANGE', 'CHARTEVENT_CUSTOM', + 'CHARTEVENT_CUSTOM_LAST', + 'PERIOD_CURRENT', 'PERIOD_M1', 'PERIOD_M2', 'PERIOD_M3', + 'PERIOD_M4', 'PERIOD_M5', + 'PERIOD_M6', 'PERIOD_M10', 'PERIOD_M12', 'PERIOD_M15', + 'PERIOD_M20', 'PERIOD_M30', + 'PERIOD_H1', 'PERIOD_H2', 'PERIOD_H3', 'PERIOD_H4', + 'PERIOD_H6', 'PERIOD_H8', + 'PERIOD_H12', 'PERIOD_D1', 'PERIOD_W1', 'PERIOD_MN1', + 'CHART_IS_OBJECT', 'CHART_BRING_TO_TOP', + 'CHART_MOUSE_SCROLL', 'CHART_EVENT_MOUSE_MOVE', + 'CHART_EVENT_OBJECT_CREATE', + 'CHART_EVENT_OBJECT_DELETE', 'CHART_MODE', 'CHART_FOREGROUND', + 'CHART_SHIFT', + 'CHART_AUTOSCROLL', 
'CHART_SCALE', 'CHART_SCALEFIX', + 'CHART_SCALEFIX_11', + 'CHART_SCALE_PT_PER_BAR', 'CHART_SHOW_OHLC', + 'CHART_SHOW_BID_LINE', + 'CHART_SHOW_ASK_LINE', 'CHART_SHOW_LAST_LINE', + 'CHART_SHOW_PERIOD_SEP', + 'CHART_SHOW_GRID', 'CHART_SHOW_VOLUMES', + 'CHART_SHOW_OBJECT_DESCR', + 'CHART_VISIBLE_BARS', 'CHART_WINDOWS_TOTAL', + 'CHART_WINDOW_IS_VISIBLE', + 'CHART_WINDOW_HANDLE', 'CHART_WINDOW_YDISTANCE', + 'CHART_FIRST_VISIBLE_BAR', + 'CHART_WIDTH_IN_BARS', 'CHART_WIDTH_IN_PIXELS', + 'CHART_HEIGHT_IN_PIXELS', + 'CHART_COLOR_BACKGROUND', 'CHART_COLOR_FOREGROUND', + 'CHART_COLOR_GRID', + 'CHART_COLOR_VOLUME', 'CHART_COLOR_CHART_UP', + 'CHART_COLOR_CHART_DOWN', + 'CHART_COLOR_CHART_LINE', 'CHART_COLOR_CANDLE_BULL', + 'CHART_COLOR_CANDLE_BEAR', + 'CHART_COLOR_BID', 'CHART_COLOR_ASK', 'CHART_COLOR_LAST', + 'CHART_COLOR_STOP_LEVEL', + 'CHART_SHOW_TRADE_LEVELS', 'CHART_DRAG_TRADE_LEVELS', + 'CHART_SHOW_DATE_SCALE', + 'CHART_SHOW_PRICE_SCALE', 'CHART_SHIFT_SIZE', + 'CHART_FIXED_POSITION', + 'CHART_FIXED_MAX', 'CHART_FIXED_MIN', 'CHART_POINTS_PER_BAR', + 'CHART_PRICE_MIN', + 'CHART_PRICE_MAX', 'CHART_COMMENT', 'CHART_BEGIN', + 'CHART_CURRENT_POS', 'CHART_END', + 'CHART_BARS', 'CHART_CANDLES', 'CHART_LINE', 'CHART_VOLUME_HIDE', + 'CHART_VOLUME_TICK', 'CHART_VOLUME_REAL', + 'OBJ_VLINE', 'OBJ_HLINE', 'OBJ_TREND', 'OBJ_TRENDBYANGLE', 'OBJ_CYCLES', + 'OBJ_CHANNEL', 'OBJ_STDDEVCHANNEL', 'OBJ_REGRESSION', 'OBJ_PITCHFORK', + 'OBJ_GANNLINE', 'OBJ_GANNFAN', 'OBJ_GANNGRID', 'OBJ_FIBO', + 'OBJ_FIBOTIMES', 'OBJ_FIBOFAN', 'OBJ_FIBOARC', 'OBJ_FIBOCHANNEL', + 'OBJ_EXPANSION', 'OBJ_RECTANGLE', 'OBJ_TRIANGLE', 'OBJ_ELLIPSE', + 'OBJ_ARROW_THUMB_UP', 'OBJ_ARROW_THUMB_DOWN', + 'OBJ_ARROW_UP', 'OBJ_ARROW_DOWN', + 'OBJ_ARROW_STOP', 'OBJ_ARROW_CHECK', 'OBJ_ARROW_LEFT_PRICE', + 'OBJ_ARROW_RIGHT_PRICE', 'OBJ_ARROW_BUY', 'OBJ_ARROW_SELL', + 'OBJ_ARROW', + 'OBJ_TEXT', 'OBJ_LABEL', 'OBJ_BUTTON', 'OBJ_BITMAP', + 'OBJ_BITMAP_LABEL', + 'OBJ_EDIT', 'OBJ_EVENT', 'OBJ_RECTANGLE_LABEL', + 'OBJPROP_TIME1', 'OBJPROP_PRICE1', 'OBJPROP_TIME2', + 'OBJPROP_PRICE2', 'OBJPROP_TIME3', + 'OBJPROP_PRICE3', 'OBJPROP_COLOR', 'OBJPROP_STYLE', + 'OBJPROP_WIDTH', + 'OBJPROP_BACK', 'OBJPROP_RAY', 'OBJPROP_ELLIPSE', + 'OBJPROP_SCALE', + 'OBJPROP_ANGLE', 'OBJPROP_ARROWCODE', 'OBJPROP_TIMEFRAMES', + 'OBJPROP_DEVIATION', 'OBJPROP_FONTSIZE', 'OBJPROP_CORNER', + 'OBJPROP_XDISTANCE', 'OBJPROP_YDISTANCE', 'OBJPROP_FIBOLEVELS', + 'OBJPROP_LEVELCOLOR', 'OBJPROP_LEVELSTYLE', 'OBJPROP_LEVELWIDTH', + 'OBJPROP_FIRSTLEVEL', 'OBJPROP_COLOR', 'OBJPROP_STYLE', 'OBJPROP_WIDTH', + 'OBJPROP_BACK', 'OBJPROP_ZORDER', 'OBJPROP_FILL', 'OBJPROP_HIDDEN', + 'OBJPROP_SELECTED', 'OBJPROP_READONLY', 'OBJPROP_TYPE', 'OBJPROP_TIME', + 'OBJPROP_SELECTABLE', 'OBJPROP_CREATETIME', 'OBJPROP_LEVELS', + 'OBJPROP_LEVELCOLOR', 'OBJPROP_LEVELSTYLE', 'OBJPROP_LEVELWIDTH', + 'OBJPROP_ALIGN', 'OBJPROP_FONTSIZE', 'OBJPROP_RAY_RIGHT', 'OBJPROP_RAY', + 'OBJPROP_ELLIPSE', 'OBJPROP_ARROWCODE', 'OBJPROP_TIMEFRAMES', 'OBJPROP_ANCHOR', + 'OBJPROP_XDISTANCE', 'OBJPROP_YDISTANCE', 'OBJPROP_DRAWLINES', 'OBJPROP_STATE', + 'OBJPROP_CHART_ID', 'OBJPROP_XSIZE', 'OBJPROP_YSIZE', 'OBJPROP_XOFFSET', + 'OBJPROP_YOFFSET', 'OBJPROP_PERIOD', 'OBJPROP_DATE_SCALE', 'OBJPROP_PRICE_SCALE', + 'OBJPROP_CHART_SCALE', 'OBJPROP_BGCOLOR', 'OBJPROP_CORNER', 'OBJPROP_BORDER_TYPE', + 'OBJPROP_BORDER_COLOR', 'OBJPROP_PRICE', 'OBJPROP_LEVELVALUE', 'OBJPROP_SCALE', + 'OBJPROP_ANGLE', 'OBJPROP_DEVIATION', + 'OBJPROP_NAME', 'OBJPROP_TEXT', 'OBJPROP_TOOLTIP', 'OBJPROP_LEVELTEXT', + 'OBJPROP_FONT', 'OBJPROP_BMPFILE', 'OBJPROP_SYMBOL', + 
'BORDER_FLAT', 'BORDER_RAISED', 'BORDER_SUNKEN', 'ALIGN_LEFT', 'ALIGN_CENTER', + 'ALIGN_RIGHT', 'ANCHOR_LEFT_UPPER', 'ANCHOR_LEFT', 'ANCHOR_LEFT_LOWER', + 'ANCHOR_LOWER', 'ANCHOR_RIGHT_LOWER', 'ANCHOR_RIGHT', 'ANCHOR_RIGHT_UPPER', + 'ANCHOR_UPPER', 'ANCHOR_CENTER', 'ANCHOR_TOP', 'ANCHOR_BOTTOM', + 'CORNER_LEFT_UPPER', 'CORNER_LEFT_LOWER', 'CORNER_RIGHT_LOWER', + 'CORNER_RIGHT_UPPER', + 'OBJ_NO_PERIODS', 'EMPTY', 'OBJ_PERIOD_M1', 'OBJ_PERIOD_M5', 'OBJ_PERIOD_M15', + 'OBJ_PERIOD_M30', 'OBJ_PERIOD_H1', 'OBJ_PERIOD_H4', 'OBJ_PERIOD_D1', + 'OBJ_PERIOD_W1', 'OBJ_PERIOD_MN1', 'OBJ_ALL_PERIODS', + 'GANN_UP_TREND', 'GANN_DOWN_TREND', + 'SYMBOL_THUMBSUP', 'SYMBOL_THUMBSDOWN', + 'SYMBOL_ARROWUP', 'SYMBOL_ARROWDOWN', + 'SYMBOL_STOPSIGN', 'SYMBOL_CHECKSIGN', + 'SYMBOL_LEFTPRICE', 'SYMBOL_RIGHTPRICE', + 'PRICE_CLOSE', 'PRICE_OPEN', 'PRICE_HIGH', 'PRICE_LOW', + 'PRICE_MEDIAN', 'PRICE_TYPICAL', 'PRICE_WEIGHTED', + 'VOLUME_TICK', 'VOLUME_REAL', + 'STO_LOWHIGH', 'STO_CLOSECLOSE', + 'MODE_OPEN', 'MODE_LOW', 'MODE_HIGH', 'MODE_CLOSE', 'MODE_VOLUME', 'MODE_TIME', + 'MODE_SMA', 'MODE_EMA', 'MODE_SMMA', 'MODE_LWMA', + 'MODE_MAIN', 'MODE_SIGNAL', 'MODE_MAIN', + 'MODE_PLUSDI', 'MODE_MINUSDI', 'MODE_UPPER', + 'MODE_LOWER', 'MODE_GATORJAW', 'MODE_GATORTEETH', + 'MODE_GATORLIPS', 'MODE_TENKANSEN', + 'MODE_KIJUNSEN', 'MODE_SENKOUSPANA', + 'MODE_SENKOUSPANB', 'MODE_CHINKOUSPAN', + 'DRAW_LINE', 'DRAW_SECTION', 'DRAW_HISTOGRAM', + 'DRAW_ARROW', 'DRAW_ZIGZAG', 'DRAW_NONE', + 'STYLE_SOLID', 'STYLE_DASH', 'STYLE_DOT', + 'STYLE_DASHDOT', 'STYLE_DASHDOTDOT', + 'DRAW_NONE', 'DRAW_LINE', 'DRAW_SECTION', 'DRAW_HISTOGRAM', + 'DRAW_ARROW', 'DRAW_ZIGZAG', 'DRAW_FILLING', + 'INDICATOR_DATA', 'INDICATOR_COLOR_INDEX', + 'INDICATOR_CALCULATIONS', 'INDICATOR_DIGITS', + 'INDICATOR_HEIGHT', 'INDICATOR_LEVELS', + 'INDICATOR_LEVELCOLOR', 'INDICATOR_LEVELSTYLE', + 'INDICATOR_LEVELWIDTH', 'INDICATOR_MINIMUM', + 'INDICATOR_MAXIMUM', 'INDICATOR_LEVELVALUE', + 'INDICATOR_SHORTNAME', 'INDICATOR_LEVELTEXT', + 'TERMINAL_BUILD', 'TERMINAL_CONNECTED', + 'TERMINAL_DLLS_ALLOWED', 'TERMINAL_TRADE_ALLOWED', + 'TERMINAL_EMAIL_ENABLED', + 'TERMINAL_FTP_ENABLED', 'TERMINAL_MAXBARS', + 'TERMINAL_CODEPAGE', 'TERMINAL_CPU_CORES', + 'TERMINAL_DISK_SPACE', 'TERMINAL_MEMORY_PHYSICAL', + 'TERMINAL_MEMORY_TOTAL', + 'TERMINAL_MEMORY_AVAILABLE', 'TERMINAL_MEMORY_USED', + 'TERMINAL_X64', + 'TERMINAL_OPENCL_SUPPORT', 'TERMINAL_LANGUAGE', + 'TERMINAL_COMPANY', 'TERMINAL_NAME', + 'TERMINAL_PATH', 'TERMINAL_DATA_PATH', + 'TERMINAL_COMMONDATA_PATH', + 'MQL_PROGRAM_TYPE', 'MQL_DLLS_ALLOWED', + 'MQL_TRADE_ALLOWED', 'MQL_DEBUG', + 'MQL_PROFILER', 'MQL_TESTER', 'MQL_OPTIMIZATION', + 'MQL_VISUAL_MODE', + 'MQL_FRAME_MODE', 'MQL_LICENSE_TYPE', 'MQL_PROGRAM_NAME', + 'MQL_PROGRAM_PATH', + 'PROGRAM_SCRIPT', 'PROGRAM_EXPERT', + 'PROGRAM_INDICATOR', 'LICENSE_FREE', + 'LICENSE_DEMO', 'LICENSE_FULL', 'LICENSE_TIME', + 'MODE_LOW', 'MODE_HIGH', 'MODE_TIME', 'MODE_BID', + 'MODE_ASK', 'MODE_POINT', + 'MODE_DIGITS', 'MODE_SPREAD', 'MODE_STOPLEVEL', + 'MODE_LOTSIZE', 'MODE_TICKVALUE', + 'MODE_TICKSIZE', 'MODE_SWAPLONG', + 'MODE_SWAPSHORT', 'MODE_STARTING', + 'MODE_EXPIRATION', 'MODE_TRADEALLOWED', + 'MODE_MINLOT', 'MODE_LOTSTEP', 'MODE_MAXLOT', + 'MODE_SWAPTYPE', 'MODE_PROFITCALCMODE', + 'MODE_MARGINCALCMODE', 'MODE_MARGININIT', + 'MODE_MARGINMAINTENANCE', 'MODE_MARGINHEDGED', + 'MODE_MARGINREQUIRED', 'MODE_FREEZELEVEL', + 'SUNDAY', 'MONDAY', 'TUESDAY', 'WEDNESDAY', 'THURSDAY', + 'FRIDAY', 'SATURDAY', + 'ACCOUNT_LOGIN', 'ACCOUNT_TRADE_MODE', + 'ACCOUNT_LEVERAGE', + 'ACCOUNT_LIMIT_ORDERS', 
'ACCOUNT_MARGIN_SO_MODE', + 'ACCOUNT_TRADE_ALLOWED', 'ACCOUNT_TRADE_EXPERT', + 'ACCOUNT_BALANCE', + 'ACCOUNT_CREDIT', 'ACCOUNT_PROFIT', 'ACCOUNT_EQUITY', + 'ACCOUNT_MARGIN', + 'ACCOUNT_FREEMARGIN', 'ACCOUNT_MARGIN_LEVEL', + 'ACCOUNT_MARGIN_SO_CALL', + 'ACCOUNT_MARGIN_SO_SO', 'ACCOUNT_NAME', + 'ACCOUNT_SERVER', 'ACCOUNT_CURRENCY', + 'ACCOUNT_COMPANY', 'ACCOUNT_TRADE_MODE_DEMO', + 'ACCOUNT_TRADE_MODE_CONTEST', + 'ACCOUNT_TRADE_MODE_REAL', 'ACCOUNT_STOPOUT_MODE_PERCENT', + 'ACCOUNT_STOPOUT_MODE_MONEY', + 'STAT_INITIAL_DEPOSIT', 'STAT_WITHDRAWAL', 'STAT_PROFIT', + 'STAT_GROSS_PROFIT', + 'STAT_GROSS_LOSS', 'STAT_MAX_PROFITTRADE', + 'STAT_MAX_LOSSTRADE', 'STAT_CONPROFITMAX', + 'STAT_CONPROFITMAX_TRADES', 'STAT_MAX_CONWINS', + 'STAT_MAX_CONPROFIT_TRADES', + 'STAT_CONLOSSMAX', 'STAT_CONLOSSMAX_TRADES', + 'STAT_MAX_CONLOSSES', + 'STAT_MAX_CONLOSS_TRADES', 'STAT_BALANCEMIN', + 'STAT_BALANCE_DD', + 'STAT_BALANCEDD_PERCENT', 'STAT_BALANCE_DDREL_PERCENT', + 'STAT_BALANCE_DD_RELATIVE', 'STAT_EQUITYMIN', + 'STAT_EQUITY_DD', + 'STAT_EQUITYDD_PERCENT', 'STAT_EQUITY_DDREL_PERCENT', + 'STAT_EQUITY_DD_RELATIVE', 'STAT_EXPECTED_PAYOFF', + 'STAT_PROFIT_FACTOR', + 'STAT_RECOVERY_FACTOR', 'STAT_SHARPE_RATIO', + 'STAT_MIN_MARGINLEVEL', + 'STAT_CUSTOM_ONTESTER', 'STAT_DEALS', 'STAT_TRADES', + 'STAT_PROFIT_TRADES', + 'STAT_LOSS_TRADES', 'STAT_SHORT_TRADES', 'STAT_LONG_TRADES', + 'STAT_PROFIT_SHORTTRADES', 'STAT_PROFIT_LONGTRADES', + 'STAT_PROFITTRADES_AVGCON', 'STAT_LOSSTRADES_AVGCON', + 'SERIES_BARS_COUNT', 'SERIES_FIRSTDATE', 'SERIES_LASTBAR_DATE', + 'SERIES_SERVER_FIRSTDATE', 'SERIES_TERMINAL_FIRSTDATE', + 'SERIES_SYNCHRONIZED', + 'OP_BUY', 'OP_SELL', 'OP_BUYLIMIT', 'OP_SELLLIMIT', + 'OP_BUYSTOP', 'OP_SELLSTOP', + 'TRADE_ACTION_DEAL', 'TRADE_ACTION_PENDING', + 'TRADE_ACTION_SLTP', + 'TRADE_ACTION_MODIFY', 'TRADE_ACTION_REMOVE', + '__DATE__', '__DATETIME__', '__LINE__', '__FILE__', + '__PATH__', '__FUNCTION__', + '__FUNCSIG__', '__MQLBUILD__', '__MQL4BUILD__', + 'M_E', 'M_LOG2E', 'M_LOG10E', 'M_LN2', 'M_LN10', + 'M_PI', 'M_PI_2', 'M_PI_4', 'M_1_PI', + 'M_2_PI', 'M_2_SQRTPI', 'M_SQRT2', 'M_SQRT1_2', + 'CHAR_MIN', 'CHAR_MAX', 'UCHAR_MAX', + 'SHORT_MIN', 'SHORT_MAX', 'USHORT_MAX', + 'INT_MIN', 'INT_MAX', 'UINT_MAX', + 'LONG_MIN', 'LONG_MAX', 'ULONG_MAX', + 'DBL_MIN', 'DBL_MAX', 'DBL_EPSILON', 'DBL_DIG', 'DBL_MANT_DIG', + 'DBL_MAX_10_EXP', 'DBL_MAX_EXP', 'DBL_MIN_10_EXP', 'DBL_MIN_EXP', + 'FLT_MIN', 'FLT_MAX', 'FLT_EPSILON', + 'FLT_DIG', 'FLT_MANT_DIG', 'FLT_MAX_10_EXP', + 'FLT_MAX_EXP', 'FLT_MIN_10_EXP', 'FLT_MIN_EXP', 'REASON_PROGRAM' + 'REASON_REMOVE', 'REASON_RECOMPILE', + 'REASON_CHARTCHANGE', 'REASON_CHARTCLOSE', + 'REASON_PARAMETERS', 'REASON_ACCOUNT', + 'REASON_TEMPLATE', 'REASON_INITFAILED', + 'REASON_CLOSE', 'POINTER_INVALID' + 'POINTER_DYNAMIC', 'POINTER_AUTOMATIC', + 'NULL', 'EMPTY', 'EMPTY_VALUE', 'CLR_NONE', 'WHOLE_ARRAY', + 'CHARTS_MAX', 'clrNONE', 'EMPTY_VALUE', 'INVALID_HANDLE', + 'IS_DEBUG_MODE', 'IS_PROFILE_MODE', 'NULL', 'WHOLE_ARRAY', 'WRONG_VALUE', + 'ERR_NO_ERROR', 'ERR_NO_RESULT', 'ERR_COMMON_ERROR', + 'ERR_INVALID_TRADE_PARAMETERS', + 'ERR_SERVER_BUSY', 'ERR_OLD_VERSION', 'ERR_NO_CONNECTION', + 'ERR_NOT_ENOUGH_RIGHTS', + 'ERR_TOO_FREQUENT_REQUESTS', 'ERR_MALFUNCTIONAL_TRADE', + 'ERR_ACCOUNT_DISABLED', + 'ERR_INVALID_ACCOUNT', 'ERR_TRADE_TIMEOUT', + 'ERR_INVALID_PRICE', 'ERR_INVALID_STOPS', + 'ERR_INVALID_TRADE_VOLUME', 'ERR_MARKET_CLOSED', + 'ERR_TRADE_DISABLED', + 'ERR_NOT_ENOUGH_MONEY', 'ERR_PRICE_CHANGED', + 'ERR_OFF_QUOTES', 'ERR_BROKER_BUSY', + 'ERR_REQUOTE', 'ERR_ORDER_LOCKED', + 
'ERR_LONG_POSITIONS_ONLY_ALLOWED', 'ERR_TOO_MANY_REQUESTS', + 'ERR_TRADE_MODIFY_DENIED', 'ERR_TRADE_CONTEXT_BUSY', + 'ERR_TRADE_EXPIRATION_DENIED', + 'ERR_TRADE_TOO_MANY_ORDERS', 'ERR_TRADE_HEDGE_PROHIBITED', + 'ERR_TRADE_PROHIBITED_BY_FIFO', + 'FILE_READ', 'FILE_WRITE', 'FILE_BIN', 'FILE_CSV', 'FILE_TXT', + 'FILE_ANSI', 'FILE_UNICODE', + 'FILE_SHARE_READ', 'FILE_SHARE_WRITE', 'FILE_REWRITE', + 'FILE_COMMON', 'FILE_EXISTS', + 'FILE_CREATE_DATE', 'FILE_MODIFY_DATE', + 'FILE_ACCESS_DATE', 'FILE_SIZE', 'FILE_POSITION', + 'FILE_END', 'FILE_LINE_END', 'FILE_IS_COMMON', + 'FILE_IS_TEXT', 'FILE_IS_BINARY', + 'FILE_IS_CSV', 'FILE_IS_ANSI', 'FILE_IS_READABLE', 'FILE_IS_WRITABLE', + 'SEEK_SET', 'SEEK_CUR', 'SEEK_END', 'CP_ACP', + 'CP_OEMCP', 'CP_MACCP', 'CP_THREAD_ACP', + 'CP_SYMBOL', 'CP_UTF7', 'CP_UTF8', 'IDOK', 'IDCANCEL', 'IDABORT', + 'IDRETRY', 'IDIGNORE', 'IDYES', 'IDNO', 'IDTRYAGAIN', 'IDCONTINUE', + 'MB_OK', 'MB_OKCANCEL', 'MB_ABORTRETRYIGNORE', 'MB_YESNOCANCEL', + 'MB_YESNO', 'MB_RETRYCANCEL', + 'MB_CANCELTRYCONTINUE', 'MB_ICONSTOP', 'MB_ICONERROR', + 'MB_ICONHAND', 'MB_ICONQUESTION', + 'MB_ICONEXCLAMATION', 'MB_ICONWARNING', + 'MB_ICONINFORMATION', 'MB_ICONASTERISK', + 'MB_DEFBUTTON1', 'MB_DEFBUTTON2', 'MB_DEFBUTTON3', + 'MB_DEFBUTTON4'), suffix=r'\b'), + Name.Constant), + (words(( + 'Black', 'DarkGreen', 'DarkSlateGray', 'Olive', + 'Green', 'Teal', 'Navy', 'Purple', + 'Maroon', 'Indigo', 'MidnightBlue', 'DarkBlue', + 'DarkOliveGreen', 'SaddleBrown', + 'ForestGreen', 'OliveDrab', 'SeaGreen', + 'DarkGoldenrod', 'DarkSlateBlue', + 'Sienna', 'MediumBlue', 'Brown', 'DarkTurquoise', + 'DimGray', 'LightSeaGreen', + 'DarkViolet', 'FireBrick', 'MediumVioletRed', + 'MediumSeaGreen', 'Chocolate', + 'Crimson', 'SteelBlue', 'Goldenrod', 'MediumSpringGreen', + 'LawnGreen', 'CadetBlue', + 'DarkOrchid', 'YellowGreen', 'LimeGreen', 'OrangeRed', + 'DarkOrange', 'Orange', + 'Gold', 'Yellow', 'Chartreuse', 'Lime', 'SpringGreen', + 'Aqua', 'DeepSkyBlue', 'Blue', + 'Magenta', 'Red', 'Gray', 'SlateGray', 'Peru', 'BlueViolet', + 'LightSlateGray', 'DeepPink', + 'MediumTurquoise', 'DodgerBlue', 'Turquoise', 'RoyalBlue', + 'SlateBlue', 'DarkKhaki', + 'IndianRed', 'MediumOrchid', 'GreenYellow', + 'MediumAquamarine', 'DarkSeaGreen', + 'Tomato', 'RosyBrown', 'Orchid', 'MediumPurple', + 'PaleVioletRed', 'Coral', 'CornflowerBlue', + 'DarkGray', 'SandyBrown', 'MediumSlateBlue', + 'Tan', 'DarkSalmon', 'BurlyWood', + 'HotPink', 'Salmon', 'Violet', 'LightCoral', 'SkyBlue', + 'LightSalmon', 'Plum', + 'Khaki', 'LightGreen', 'Aquamarine', 'Silver', + 'LightSkyBlue', 'LightSteelBlue', + 'LightBlue', 'PaleGreen', 'Thistle', 'PowderBlue', + 'PaleGoldenrod', 'PaleTurquoise', + 'LightGray', 'Wheat', 'NavajoWhite', 'Moccasin', + 'LightPink', 'Gainsboro', 'PeachPuff', + 'Pink', 'Bisque', 'LightGoldenrod', 'BlanchedAlmond', + 'LemonChiffon', 'Beige', + 'AntiqueWhite', 'PapayaWhip', 'Cornsilk', + 'LightYellow', 'LightCyan', 'Linen', + 'Lavender', 'MistyRose', 'OldLace', 'WhiteSmoke', + 'Seashell', 'Ivory', 'Honeydew', + 'AliceBlue', 'LavenderBlush', 'MintCream', 'Snow', + 'White'), prefix='(clr)?', suffix=r'\b'), + Name.Constant), inherit, ], } -- cgit v1.2.1 From dfd22ba7b6725043a86475a2657ed5207e3162db Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Tue, 16 Sep 2014 15:19:01 +0200 Subject: More words() in c-like languages --- pygments/lexers/compiled.py | 123 ++++++++++++++++++++++++++------------------ 1 file changed, 72 insertions(+), 51 deletions(-) diff --git a/pygments/lexers/compiled.py b/pygments/lexers/compiled.py index 
0fc41199..ef1987ea 100644 --- a/pygments/lexers/compiled.py +++ b/pygments/lexers/compiled.py @@ -3914,10 +3914,10 @@ class LogosLexer(ObjectiveCppLexer): (r'(%hook|%group)(\s+)([a-zA-Z$_][\w$]+)', bygroups(Keyword, Text, Name.Class), '#pop'), (r'(%subclass)(\s+)', bygroups(Keyword, Text), - ('#pop', 'logos_classname')), + ('#pop', 'logos_classname')), inherit, ], - 'logos_init_directive' : [ + 'logos_init_directive': [ ('\s+', Text), (',', Punctuation, ('logos_init_directive', '#pop')), ('([a-zA-Z$_][\w$]*)(\s*)(=)(\s*)([^);]*)', @@ -3925,7 +3925,7 @@ class LogosLexer(ObjectiveCppLexer): ('([a-zA-Z$_][\w$]*)', Name.Class), ('\)', Punctuation, '#pop'), ], - 'logos_classname' : [ + 'logos_classname': [ ('([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?', bygroups(Name.Class, Text, Name.Class), '#pop'), ('([a-zA-Z$_][\w$]*)', Name.Class, '#pop') @@ -3979,11 +3979,14 @@ class ChapelLexer(RegexLexer): (r'(false|nil|true)\b', Keyword.Constant), (r'(bool|complex|imag|int|opaque|range|real|string|uint)\b', Keyword.Type), - (r'(align|atomic|begin|break|by|cobegin|coforall|continue|' - r'delete|dmapped|do|domain|else|enum|export|extern|for|forall|' - r'if|index|inline|iter|label|lambda|let|local|new|noinit|on|' - r'otherwise|pragma|reduce|return|scan|select|serial|single|sparse|' - r'subdomain|sync|then|use|when|where|while|with|yield|zip)\b', + (words(( + 'align', 'atomic', 'begin', 'break', 'by', 'cobegin', 'coforall', + 'continue', 'delete', 'dmapped', 'do', 'domain', 'else', 'enum', + 'export', 'extern', 'for', 'forall', 'if', 'index', 'inline', + 'iter', 'label', 'lambda', 'let', 'local', 'new', 'noinit', 'on', + 'otherwise', 'pragma', 'reduce', 'return', 'scan', 'select', + 'serial', 'single', 'sparse', 'subdomain', 'sync', 'then', 'use', + 'when', 'where', 'while', 'with', 'yield', 'zip'), suffix=r'\b'), Keyword), (r'(proc)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'procname'), (r'(class|module|record|union)(\s+)', bygroups(Keyword, Text), @@ -4049,15 +4052,19 @@ class EiffelLexer(RegexLexer): (r'[^\S\n]+', Text), (r'--.*?\n', Comment.Single), (r'[^\S\n]+', Text), - # Please note that keyword and operator are case insensitive. + # Please note that keyword and operator are case insensitive. 
(r'(?i)(true|false|void|current|result|precursor)\b', Keyword.Constant), (r'(?i)(and(\s+then)?|not|xor|implies|or(\s+else)?)\b', Operator.Word), - (r'(?i)\b(across|agent|alias|all|as|assign|attached|attribute|check|' - r'class|convert|create|debug|deferred|detachable|do|else|elseif|' - r'end|ensure|expanded|export|external|feature|from|frozen|if|' - r'inherit|inspect|invariant|like|local|loop|none|note|obsolete|' - r'old|once|only|redefine|rename|require|rescue|retry|select|' - r'separate|then|undefine|until|variant|when)\b',Keyword.Reserved), + (words(( + 'across', 'agent', 'alias', 'all', 'as', 'assign', 'attached', + 'attribute', 'check', 'class', 'convert', 'create', 'debug', + 'deferred', 'detachable', 'do', 'else', 'elseif', 'end', 'ensure', + 'expanded', 'export', 'external', 'feature', 'from', 'frozen', 'if', + 'inherit', 'inspect', 'invariant', 'like', 'local', 'loop', 'none', + 'note', 'obsolete', 'old', 'once', 'only', 'redefine', 'rename', + 'require', 'rescue', 'retry', 'select', 'separate', 'then', + 'undefine', 'until', 'variant', 'when'), prefix=r'(?i)\b', suffix=r'\b'), + Keyword.Reserved), (r'"\[(([^\]%]|\n)|%(.|\n)|\][^"])*?\]"', String), (r'"([^"%\n]|%.)*?"', String), include('numbers'), @@ -4195,33 +4202,39 @@ class Inform6Lexer(RegexLexer): (r'(#r\$)(%s)' % _name, bygroups(Operator, Name.Function), '#pop'), (r'#', Name.Builtin, ('#pop', 'system-constant')), # System functions - (r'(child|children|elder|eldest|glk|indirect|metaclass|parent|' - r'random|sibling|younger|youngest)\b', Name.Builtin, '#pop'), + (words(( + 'child', 'children', 'elder', 'eldest', 'glk', 'indirect', 'metaclass', + 'parent', 'random', 'sibling', 'younger', 'youngest'), suffix=r'\b'), + Name.Builtin, '#pop'), # Metaclasses (r'(?i)(Class|Object|Routine|String)\b', Name.Builtin, '#pop'), # Veneer routines - (r'(?i)(Box__Routine|CA__Pr|CDefArt|CInDefArt|Cl__Ms|' - r'Copy__Primitive|CP__Tab|DA__Pr|DB__Pr|DefArt|Dynam__String|' - r'EnglishNumber|Glk__Wrap|IA__Pr|IB__Pr|InDefArt|Main__|' - r'Meta__class|OB__Move|OB__Remove|OC__Cl|OP__Pr|Print__Addr|' - r'Print__PName|PrintShortName|RA__Pr|RA__Sc|RL__Pr|R_Process|' - r'RT__ChG|RT__ChGt|RT__ChLDB|RT__ChLDW|RT__ChPR|RT__ChPrintA|' - r'RT__ChPrintC|RT__ChPrintO|RT__ChPrintS|RT__ChPS|RT__ChR|' - r'RT__ChSTB|RT__ChSTW|RT__ChT|RT__Err|RT__TrPS|RV__Pr|' - r'Symb__Tab|Unsigned__Compare|WV__Pr|Z__Region)\b', Name.Builtin, - '#pop'), + (words(( + 'Box__Routine', 'CA__Pr', 'CDefArt', 'CInDefArt', 'Cl__Ms', + 'Copy__Primitive', 'CP__Tab', 'DA__Pr', 'DB__Pr', 'DefArt', 'Dynam__String', + 'EnglishNumber', 'Glk__Wrap', 'IA__Pr', 'IB__Pr', 'InDefArt', 'Main__', + 'Meta__class', 'OB__Move', 'OB__Remove', 'OC__Cl', 'OP__Pr', 'Print__Addr', + 'Print__PName', 'PrintShortName', 'RA__Pr', 'RA__Sc', 'RL__Pr', 'R_Process', + 'RT__ChG', 'RT__ChGt', 'RT__ChLDB', 'RT__ChLDW', 'RT__ChPR', 'RT__ChPrintA', + 'RT__ChPrintC', 'RT__ChPrintO', 'RT__ChPrintS', 'RT__ChPS', 'RT__ChR', + 'RT__ChSTB', 'RT__ChSTW', 'RT__ChT', 'RT__Err', 'RT__TrPS', 'RV__Pr', + 'Symb__Tab', 'Unsigned__Compare', 'WV__Pr', 'Z__Region'), + prefix='(?i)', suffix=r'\b'), + Name.Builtin, '#pop'), # Other built-in symbols - (r'(?i)(call|copy|create|DEBUG|destroy|DICT_CHAR_SIZE|' - r'DICT_ENTRY_BYTES|DICT_IS_UNICODE|DICT_WORD_SIZE|false|' - r'FLOAT_INFINITY|FLOAT_NAN|FLOAT_NINFINITY|GOBJFIELD_CHAIN|' - r'GOBJFIELD_CHILD|GOBJFIELD_NAME|GOBJFIELD_PARENT|' - r'GOBJFIELD_PROPTAB|GOBJFIELD_SIBLING|GOBJ_EXT_START|' - r'GOBJ_TOTAL_LENGTH|Grammar__Version|INDIV_PROP_START|INFIX|' - 
r'infix__watching|MODULE_MODE|name|nothing|NUM_ATTR_BYTES|print|' - r'print_to_array|recreate|remaining|self|sender|STRICT_MODE|' - r'sw__var|sys__glob0|sys__glob1|sys__glob2|sys_statusline_flag|' - r'TARGET_GLULX|TARGET_ZCODE|temp__global2|temp__global3|' - r'temp__global4|temp_global|true|USE_MODULES|WORDSIZE)\b', + (words(( + 'call', 'copy', 'create', 'DEBUG', 'destroy', 'DICT_CHAR_SIZE', + 'DICT_ENTRY_BYTES', 'DICT_IS_UNICODE', 'DICT_WORD_SIZE', 'false', + 'FLOAT_INFINITY', 'FLOAT_NAN', 'FLOAT_NINFINITY', 'GOBJFIELD_CHAIN', + 'GOBJFIELD_CHILD', 'GOBJFIELD_NAME', 'GOBJFIELD_PARENT', + 'GOBJFIELD_PROPTAB', 'GOBJFIELD_SIBLING', 'GOBJ_EXT_START', + 'GOBJ_TOTAL_LENGTH', 'Grammar__Version', 'INDIV_PROP_START', 'INFIX', + 'infix__watching', 'MODULE_MODE', 'name', 'nothing', 'NUM_ATTR_BYTES', 'print', + 'print_to_array', 'recreate', 'remaining', 'self', 'sender', 'STRICT_MODE', + 'sw__var', 'sys__glob0', 'sys__glob1', 'sys__glob2', 'sys_statusline_flag', + 'TARGET_GLULX', 'TARGET_ZCODE', 'temp__global2', 'temp__global3', + 'temp__global4', 'temp_global', 'true', 'USE_MODULES', 'WORDSIZE'), + prefix='(?i)', suffix=r'\b'), Name.Builtin, '#pop'), # Other values (_name, Name, '#pop') @@ -4290,9 +4303,11 @@ class Inform6Lexer(RegexLexer): (r';', Punctuation, '#pop'), (r'\[', Punctuation, ('default', 'statements', 'locals', 'routine-name?')), - (r'(?i)(abbreviate|endif|dictionary|ifdef|iffalse|ifndef|ifnot|' - r'iftrue|ifv3|ifv5|release|serial|switches|system_file|version)' - r'\b', Keyword, 'default'), + (words(( + 'abbreviate', 'endif', 'dictionary', 'ifdef', 'iffalse', 'ifndef', 'ifnot', + 'iftrue', 'ifv3', 'ifv5', 'release', 'serial', 'switches', 'system_file', + 'version'), prefix='(?i)', suffix=r'\b'), + Keyword, 'default'), (r'(?i)(array|global)\b', Keyword, ('default', 'directive-keyword?', '_global')), (r'(?i)attribute\b', Keyword, ('default', 'alias?', '_constant')), @@ -4423,11 +4438,13 @@ class Inform6Lexer(RegexLexer): # Keywords used in directives '_directive-keyword!': [ include('_whitespace'), - (r'(additive|alias|buffer|class|creature|data|error|fatalerror|' - r'first|has|held|initial|initstr|last|long|meta|multi|' - r'multiexcept|multiheld|multiinside|noun|number|only|private|' - r'replace|reverse|scope|score|special|string|table|terminating|' - r'time|topic|warning|with)\b', Keyword, '#pop'), + (words(( + 'additive', 'alias', 'buffer', 'class', 'creature', 'data', 'error', 'fatalerror', + 'first', 'has', 'held', 'initial', 'initstr', 'last', 'long', 'meta', 'multi', + 'multiexcept', 'multiheld', 'multiinside', 'noun', 'number', 'only', 'private', + 'replace', 'reverse', 'scope', 'score', 'special', 'string', 'table', 'terminating', + 'time', 'topic', 'warning', 'with'), suffix=r'\b'), + Keyword, '#pop'), (r'[%s]{1,2}>|[+=]' % _dash, Punctuation, '#pop') ], '_directive-keyword': [ @@ -4445,8 +4462,10 @@ class Inform6Lexer(RegexLexer): ], 'trace-keyword?': [ include('_whitespace'), - (r'(assembly|dictionary|expressions|lines|linker|objects|off|on|' - r'symbols|tokens|verbs)\b', Keyword, '#pop'), + (words(( + 'assembly', 'dictionary', 'expressions', 'lines', 'linker', + 'objects', 'off', 'on', 'symbols', 'tokens', 'verbs'), suffix=r'\b'), + Keyword, '#pop'), default('#pop') ], @@ -4455,9 +4474,11 @@ class Inform6Lexer(RegexLexer): include('_whitespace'), (r'\]', Punctuation, '#pop'), (r'[;{}]', Punctuation), - (r'(box|break|continue|default|give|inversion|new_line|quit|read|' - r'remove|return|rfalse|rtrue|spaces|string|until)\b', Keyword, - 'default'), + (words(( + 'box', 'break', 
'continue', 'default', 'give', 'inversion', + 'new_line', 'quit', 'read', 'remove', 'return', 'rfalse', 'rtrue', + 'spaces', 'string', 'until'), suffix=r'\b'), + Keyword, 'default'), (r'(do|else)\b', Keyword), (r'(font|style)\b', Keyword, ('default', 'miscellaneous-keyword?')), -- cgit v1.2.1 From 6c718a847c4be44b1722159782727080c27d597c Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Tue, 16 Sep 2014 15:23:42 +0200 Subject: More words() in Rust and Monkey languages --- pygments/lexers/compiled.py | 114 ++++++++++++++++++++++---------------------- 1 file changed, 58 insertions(+), 56 deletions(-) diff --git a/pygments/lexers/compiled.py b/pygments/lexers/compiled.py index ef1987ea..3f3ec82f 100644 --- a/pygments/lexers/compiled.py +++ b/pygments/lexers/compiled.py @@ -3377,46 +3377,49 @@ class RustLexer(RegexLexer): (r'/\*', Comment.Multiline, 'comment'), # Keywords - (r'(as|box|break|continue' - r'|do|else|enum|extern' - r'|fn|for|if|impl|in' - r'|loop|match|mut|priv|proc|pub' - r'|ref|return|static|\'static|struct|trait|true|type' - r'|unsafe|while)\b', + (words(( + 'as', 'box', 'break', 'continue', 'do', 'else', 'enum', 'extern', + 'fn', 'for', 'if', 'impl', 'in', 'loop', 'match', 'mut', 'priv', + 'proc', 'pub', 'ref', 'return', 'static', '\'static', 'struct', + 'trait', 'true', 'type', 'unsafe', 'while'), suffix=r'\b'), Keyword), - (r'(alignof|be|const|offsetof|pure|sizeof|typeof|once|unsized' - r'|yield)\b', Keyword.Reserved), + (words(('alignof', 'be', 'const', 'offsetof', 'pure', 'sizeof', + 'typeof', 'once', 'unsized', 'yield'), suffix=r'\b'), + Keyword.Reserved), (r'(mod|use)\b', Keyword.Namespace), (r'(true|false)\b', Keyword.Constant), (r'let\b', Keyword.Declaration), - (r'(u8|u16|u32|u64|i8|i16|i32|i64|uint|int|f32|f64' - r'|str|bool)\b', Keyword.Type), + (words(('u8', 'u16', 'u32', 'u64', 'i8', 'i16', 'i32', 'i64', 'uint', + 'int', 'f32', 'f64', 'str', 'bool'), suffix=r'\b'), + Keyword.Type), (r'self\b', Name.Builtin.Pseudo), # Prelude - (r'(Freeze|Pod|Send|Sized|Add|Sub|Mul|Div|Rem|Neg|Not|BitAnd' - r'|BitOr|BitXor|Drop|Shl|Shr|Index|Option|Some|None|Result' - r'|Ok|Err|from_str|range|print|println|Any|AnyOwnExt|AnyRefExt' - r'|AnyMutRefExt|Ascii|AsciiCast|OnwedAsciiCast|AsciiStr' - r'|IntoBytes|Bool|ToCStr|Char|Clone|DeepClone|Eq|ApproxEq' - r'|Ord|TotalEq|Ordering|Less|Equal|Greater|Equiv|Container' - r'|Mutable|Map|MutableMap|Set|MutableSet|Default|FromStr' - r'|Hash|FromIterator|Extendable|Iterator|DoubleEndedIterator' - r'|RandomAccessIterator|CloneableIterator|OrdIterator' - r'|MutableDoubleEndedIterator|ExactSize|Times|Algebraic' - r'|Trigonometric|Exponential|Hyperbolic|Bitwise|BitCount' - r'|Bounded|Integer|Fractional|Real|RealExt|Num|NumCast' - r'|CheckedAdd|CheckedSub|CheckedMul|Orderable|Signed' - r'|Unsigned|Round|Primitive|Int|Float|ToStrRadix' - r'|ToPrimitive|FromPrimitive|GenericPath|Path|PosixPath' - r'|WindowsPath|RawPtr|Buffer|Writer|Reader|Seek' - r'|SendStr|SendStrOwned|SendStrStatic|IntoSendStr|Str' - r'|StrVector|StrSlice|OwnedStr|IterBytes|ToStr|IntoStr' - r'|CopyableTuple|ImmutableTuple|ImmutableTuple\d+' - r'|Tuple\d+|ImmutableEqVector|ImmutableTotalOrdVector' - r'|ImmutableCopyableVector|OwnedVector|OwnedCopyableVector' - r'|OwnedEqVector|MutableVector|MutableTotalOrdVector' - r'|Vector|VectorVector|CopyableVector|ImmutableVector' - r'|Port|Chan|SharedChan|spawn|drop)\b', Name.Builtin), + (words(( + 'Freeze', 'Pod', 'Send', 'Sized', 'Add', 'Sub', 'Mul', 'Div', 'Rem', 'Neg', 'Not', 'BitAnd', + 'BitOr', 'BitXor', 'Drop', 'Shl', 'Shr', 'Index', 
'Option', 'Some', 'None', 'Result', + 'Ok', 'Err', 'from_str', 'range', 'print', 'println', 'Any', 'AnyOwnExt', 'AnyRefExt', + 'AnyMutRefExt', 'Ascii', 'AsciiCast', 'OnwedAsciiCast', 'AsciiStr', + 'IntoBytes', 'Bool', 'ToCStr', 'Char', 'Clone', 'DeepClone', 'Eq', 'ApproxEq', + 'Ord', 'TotalEq', 'Ordering', 'Less', 'Equal', 'Greater', 'Equiv', 'Container', + 'Mutable', 'Map', 'MutableMap', 'Set', 'MutableSet', 'Default', 'FromStr', + 'Hash', 'FromIterator', 'Extendable', 'Iterator', 'DoubleEndedIterator', + 'RandomAccessIterator', 'CloneableIterator', 'OrdIterator', + 'MutableDoubleEndedIterator', 'ExactSize', 'Times', 'Algebraic', + 'Trigonometric', 'Exponential', 'Hyperbolic', 'Bitwise', 'BitCount', + 'Bounded', 'Integer', 'Fractional', 'Real', 'RealExt', 'Num', 'NumCast', + 'CheckedAdd', 'CheckedSub', 'CheckedMul', 'Orderable', 'Signed', + 'Unsigned', 'Round', 'Primitive', 'Int', 'Float', 'ToStrRadix', + 'ToPrimitive', 'FromPrimitive', 'GenericPath', 'Path', 'PosixPath', + 'WindowsPath', 'RawPtr', 'Buffer', 'Writer', 'Reader', 'Seek', + 'SendStr', 'SendStrOwned', 'SendStrStatic', 'IntoSendStr', 'Str', + 'StrVector', 'StrSlice', 'OwnedStr', 'IterBytes', 'ToStr', 'IntoStr', + 'CopyableTuple', 'ImmutableTuple', 'ImmutableEqVector', 'ImmutableTotalOrdVector', + 'ImmutableCopyableVector', 'OwnedVector', 'OwnedCopyableVector', + 'OwnedEqVector', 'MutableVector', 'MutableTotalOrdVector', + 'Vector', 'VectorVector', 'CopyableVector', 'ImmutableVector', + 'Port', 'Chan', 'SharedChan', 'spawn', 'drop'), suffix=r'\b'), + Name.Builtin), + (r'(ImmutableTuple\d+|Tuple\d+)\b', Name.Builtin), # Borrowed pointer (r'(&)(\'[A-Za-z_]\w*)?', bygroups(Operator, Name)), # Labels @@ -3510,27 +3513,26 @@ class CudaLexer(CLexer): aliases = ['cuda', 'cu'] mimetypes = ['text/x-cuda'] - function_qualifiers = ['__device__', '__global__', '__host__', - '__noinline__', '__forceinline__'] - variable_qualifiers = ['__device__', '__constant__', '__shared__', - '__restrict__'] - vector_types = ['char1', 'uchar1', 'char2', 'uchar2', 'char3', 'uchar3', - 'char4', 'uchar4', 'short1', 'ushort1', 'short2', 'ushort2', - 'short3', 'ushort3', 'short4', 'ushort4', 'int1', 'uint1', - 'int2', 'uint2', 'int3', 'uint3', 'int4', 'uint4', 'long1', - 'ulong1', 'long2', 'ulong2', 'long3', 'ulong3', 'long4', - 'ulong4', 'longlong1', 'ulonglong1', 'longlong2', - 'ulonglong2', 'float1', 'float2', 'float3', 'float4', - 'double1', 'double2', 'dim3'] - variables = ['gridDim', 'blockIdx', 'blockDim', 'threadIdx', 'warpSize'] - functions = ['__threadfence_block', '__threadfence', '__threadfence_system', - '__syncthreads', '__syncthreads_count', '__syncthreads_and', - '__syncthreads_or'] - execution_confs = ['<<<', '>>>'] + function_qualifiers = set(('__device__', '__global__', '__host__', + '__noinline__', '__forceinline__')) + variable_qualifiers = set(('__device__', '__constant__', '__shared__', + '__restrict__')) + vector_types = set(('char1', 'uchar1', 'char2', 'uchar2', 'char3', 'uchar3', + 'char4', 'uchar4', 'short1', 'ushort1', 'short2', 'ushort2', + 'short3', 'ushort3', 'short4', 'ushort4', 'int1', 'uint1', + 'int2', 'uint2', 'int3', 'uint3', 'int4', 'uint4', 'long1', + 'ulong1', 'long2', 'ulong2', 'long3', 'ulong3', 'long4', + 'ulong4', 'longlong1', 'ulonglong1', 'longlong2', + 'ulonglong2', 'float1', 'float2', 'float3', 'float4', + 'double1', 'double2', 'dim3')) + variables = set(('gridDim', 'blockIdx', 'blockDim', 'threadIdx', 'warpSize')) + functions = set(('__threadfence_block', '__threadfence', '__threadfence_system', + 
'__syncthreads', '__syncthreads_count', '__syncthreads_and', + '__syncthreads_or')) + execution_confs = set(('<<<', '>>>')) def get_tokens_unprocessed(self, text): - for index, token, value in \ - CLexer.get_tokens_unprocessed(self, text): + for index, token, value in CLexer.get_tokens_unprocessed(self, text): if token is Name: if value in self.variable_qualifiers: token = Keyword.Type @@ -3575,7 +3577,7 @@ class MonkeyLexer(RegexLexer): tokens = { 'root': [ - #Text + # Text (r'\s+', Text), # Comments (r"'.*", Comment), @@ -3668,7 +3670,7 @@ class MonkeyLexer(RegexLexer): (r'~q|~n|~r|~t|~z|~~', String.Escape), (r'"', String.Double, '#pop'), ], - 'comment' : [ + 'comment': [ (r'(?i)^#rem.*?', Comment.Multiline, "#push"), (r'(?i)^#end.*?', Comment.Multiline, "#pop"), (r'\n', Comment.Multiline), @@ -3839,7 +3841,7 @@ class CobolLexer(RegexLexer): r'DATE-OF-INTEGER|DATE-TO-YYYYMMDD|DAY-OF-INTEGER|DAY-TO-YYYYDDD|' r'EXCEPTION-(?:FILE|LOCATION|STATEMENT|STATUS)|EXP10|EXP|E|' r'FACTORIAL|FRACTION-PART|INTEGER-OF-(?:DATE|DAY|PART)|INTEGER|' - r'LENGTH|LOCALE-(?:DATE|TIME(?:-FROM-SECONDS)?)|LOG10|LOG|' + r'LENGTH|LOCALE-(?:DATE|TIME(?:-FROM-SECONDS)?)|LOG(?:10)?|' r'LOWER-CASE|MAX|MEAN|MEDIAN|MIDRANGE|MIN|MOD|NUMVAL(?:-C)?|' r'ORD(?:-MAX|-MIN)?|PI|PRESENT-VALUE|RANDOM|RANGE|REM|REVERSE|' r'SECONDS-FROM-FORMATTED-TIME|SECONDS-PAST-MIDNIGHT|SIGN|SIN|SQRT|' -- cgit v1.2.1 From f600d259a0a1053dca8a4ee1eef327970c917d92 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Tue, 16 Sep 2014 15:31:05 +0200 Subject: some PEP8 and minor words() introduction --- pygments/lexers/compiled.py | 122 ++++++++++++++++++++++---------------------- 1 file changed, 61 insertions(+), 61 deletions(-) diff --git a/pygments/lexers/compiled.py b/pygments/lexers/compiled.py index 3f3ec82f..9abe5485 100644 --- a/pygments/lexers/compiled.py +++ b/pygments/lexers/compiled.py @@ -3015,7 +3015,7 @@ class NimrodLexer(RegexLexer): 'is', 'isnot', 'iterator', 'lambda', 'let', 'macro', 'method', 'mod', 'not', 'notin', 'object', 'of', 'or', 'out', 'proc', 'ptr', 'raise', 'ref', 'return', 'shl', 'shr', 'template', 'try', - 'tuple', 'type' , 'when', 'while', 'with', 'without', 'xor' + 'tuple', 'type', 'when', 'while', 'with', 'without', 'xor' ] keywordsPseudo = [ @@ -3061,7 +3061,7 @@ class NimrodLexer(RegexLexer): (r'\b((?![_\d])\w)(((?!_)\w)|(_(?!_)\w))*', Name), # Numbers (r'[0-9][0-9_]*(?=([eE.]|\'[fF](32|64)))', - Number.Float, ('float-suffix', 'float-number')), + Number.Float, ('float-suffix', 'float-number')), (r'0[xX][a-f0-9][a-f0-9_]*', Number.Hex, 'int-suffix'), (r'0[bB][01][01_]*', Number.Bin, 'int-suffix'), (r'0o[0-7][0-7_]*', Number.Oct, 'int-suffix'), @@ -3071,9 +3071,9 @@ class NimrodLexer(RegexLexer): (r'.+$', Error), ], 'chars': [ - (r'\\([\\abcefnrtvl"\']|x[a-f0-9]{2}|[0-9]{1,3})', String.Escape), - (r"'", String.Char, '#pop'), - (r".", String.Char) + (r'\\([\\abcefnrtvl"\']|x[a-f0-9]{2}|[0-9]{1,3})', String.Escape), + (r"'", String.Char, '#pop'), + (r".", String.Char) ], 'strings': [ (r'(?\?]*?', - ) ) - + ) tokens = { 'comments': [ - (r'(?s)/\*.*?\*/', Comment.Multiline), #Multiline - (r'//.*?\n', Comment.Single), #Single line - #todo: highlight references in fandocs - (r'\*\*.*?\n', Comment.Special), #Fandoc - (r'#.*\n', Comment.Single) #Shell-style + (r'(?s)/\*.*?\*/', Comment.Multiline), # Multiline + (r'//.*?\n', Comment.Single), # Single line + # TODO: highlight references in fandocs + (r'\*\*.*?\n', Comment.Special), # Fandoc + (r'#.*\n', Comment.Single) # Shell-style ], 'literals': [ - 
(r'\b-?[\d_]+(ns|ms|sec|min|hr|day)', Number), #Duration + (r'\b-?[\d_]+(ns|ms|sec|min|hr|day)', Number), # Duration (r'\b-?[\d_]*\.[\d_]+(ns|ms|sec|min|hr|day)', Number), - #Duration with dot - (r'\b-?(\d+)?\.\d+(f|F|d|D)?', Number.Float), #Float/Decimal - (r'\b-?0x[0-9a-fA-F_]+', Number.Hex), #Hex - (r'\b-?[\d_]+', Number.Integer), #Int - (r"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char), #Char - (r'"', Punctuation, 'insideStr'), #Opening quote - (r'`', Punctuation, 'insideUri'), #Opening accent - (r'\b(true|false|null)\b', Keyword.Constant), #Bool & null - (r'(?:(\w+)(::))?(\w+)(<\|)(.*?)(\|>)', #DSL + # Duration with dot + (r'\b-?(\d+)?\.\d+(f|F|d|D)?', Number.Float), # Float/Decimal + (r'\b-?0x[0-9a-fA-F_]+', Number.Hex), # Hex + (r'\b-?[\d_]+', Number.Integer), # Int + (r"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char), # Char + (r'"', Punctuation, 'insideStr'), # Opening quote + (r'`', Punctuation, 'insideUri'), # Opening accent + (r'\b(true|false|null)\b', Keyword.Constant), # Bool & null + (r'(?:(\w+)(::))?(\w+)(<\|)(.*?)(\|>)', # DSL bygroups(Name.Namespace, Punctuation, Name.Class, Punctuation, String, Punctuation)), - (r'(?:(\w+)(::))?(\w+)?(#)(\w+)?', #Type/slot literal + (r'(?:(\w+)(::))?(\w+)?(#)(\w+)?', # Type/slot literal bygroups(Name.Namespace, Punctuation, Name.Class, Punctuation, Name.Function)), (r'\[,\]', Literal), # Empty list (s(r'($type)(\[,\])'), # Typed empty list - bygroups(using(this, state = 'inType'), Literal)), + bygroups(using(this, state='inType'), Literal)), (r'\[:\]', Literal), # Empty Map (s(r'($type)(\[:\])'), - bygroups(using(this, state = 'inType'), Literal)), + bygroups(using(this, state='inType'), Literal)), ], 'insideStr': [ - (r'\\\\', String.Escape), #Escaped backslash - (r'\\"', String.Escape), #Escaped " - (r'\\`', String.Escape), #Escaped ` - (r'\$\w+', String.Interpol), #Subst var - (r'\${.*?}', String.Interpol), #Subst expr - (r'"', Punctuation, '#pop'), #Closing quot - (r'.', String) #String content - ], - 'insideUri': [ #TODO: remove copy/paste str/uri - (r'\\\\', String.Escape), #Escaped backslash - (r'\\"', String.Escape), #Escaped " - (r'\\`', String.Escape), #Escaped ` - (r'\$\w+', String.Interpol), #Subst var - (r'\${.*?}', String.Interpol), #Subst expr - (r'`', Punctuation, '#pop'), #Closing tick - (r'.', String.Backtick) #URI content + (r'\\\\', String.Escape), # Escaped backslash + (r'\\"', String.Escape), # Escaped " + (r'\\`', String.Escape), # Escaped ` + (r'\$\w+', String.Interpol), # Subst var + (r'\${.*?}', String.Interpol), # Subst expr + (r'"', Punctuation, '#pop'), # Closing quot + (r'.', String) # String content + ], + 'insideUri': [ # TODO: remove copy/paste str/uri + (r'\\\\', String.Escape), # Escaped backslash + (r'\\"', String.Escape), # Escaped " + (r'\\`', String.Escape), # Escaped ` + (r'\$\w+', String.Interpol), # Subst var + (r'\${.*?}', String.Interpol), # Subst expr + (r'`', Punctuation, '#pop'), # Closing tick + (r'.', String.Backtick) # URI content ], 'protectionKeywords': [ (r'\b(public|protected|private|internal)\b', Keyword), @@ -3214,8 +3213,10 @@ class FantomLexer(RegexLexer): r'readonly)\b', Keyword) ], 'otherKeywords': [ - (r'\b(try|catch|throw|finally|for|if|else|while|as|is|isnot|' - r'switch|case|default|continue|break|do|return|get|set)\b', + (words(( + 'try', 'catch', 'throw', 'finally', 'for', 'if', 'else', 'while', + 'as', 'is', 'isnot', 'switch', 'case', 'default', 'continue', + 'break', 'do', 'return', 'get', 'set'), prefix=r'\b', suffix=r'\b'), Keyword), (r'\b(it|this|super)\b', 
Name.Builtin.Pseudo), ], @@ -3242,7 +3243,6 @@ class FantomLexer(RegexLexer): (r'(class|mixin)(\s+)(\w+)', bygroups(Keyword, Text, Name.Class), 'inheritance'), # Inheritance list - ### Type var := val (s(r'($type)([ \t]+)($id)(\s*)(:=)'), bygroups(using(this, state = 'inType'), Text, @@ -3267,8 +3267,8 @@ class FantomLexer(RegexLexer): 'insideMethodDeclArgs'), ### Type name ( #### - (s(r'($type)([ \t]+)' #Return type and whitespace - r'($id)(\s*)(\()'), #method name + open brace + (s(r'($type)([ \t]+)' # Return type and whitespace + r'($id)(\s*)(\()'), # method name + open brace bygroups(using(this, state = 'inType'), Text, Name.Function, Text, Punctuation), 'insideMethodDeclArgs'), @@ -3317,27 +3317,27 @@ class FantomLexer(RegexLexer): include('root'), ], 'inheritance': [ - (r'\s+', Text), #Whitespace + (r'\s+', Text), # Whitespace (r':|,', Punctuation), (r'(?:(\w+)(::))?(\w+)', bygroups(Name.Namespace, Punctuation, Name.Class)), (r'{', Punctuation, '#pop') ], 'using': [ - (r'[ \t]+', Text), # consume whitespaces + (r'[ \t]+', Text), # consume whitespaces (r'(\[)(\w+)(\])', - bygroups(Punctuation, Comment.Special, Punctuation)), #ffi + bygroups(Punctuation, Comment.Special, Punctuation)), # ffi (r'(\")?([\w\.]+)(\")?', - bygroups(Punctuation, Name.Namespace, Punctuation)), #podname + bygroups(Punctuation, Name.Namespace, Punctuation)), # podname (r'::', Punctuation, 'usingClass'), default('#pop') ], 'usingClass': [ - (r'[ \t]+', Text), # consume whitespaces + (r'[ \t]+', Text), # consume whitespaces (r'(as)(\s+)(\w+)', bygroups(Keyword.Declaration, Text, Name.Class), '#pop:2'), (r'[\w\$]+', Name.Class), - default('#pop:2') # jump out to root state + default('#pop:2') # jump out to root state ], 'facet': [ (r'\s+', Text), -- cgit v1.2.1 From dafe7af681e20e43429a37fbe9a9202fc5cfe14a Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Tue, 16 Sep 2014 15:46:14 +0200 Subject: More words() for c-like languages. 
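For reference, a minimal sketch of the words() idiom these changes rely on: the helper wraps a tuple of literal keywords and is expanded by RegexLexer into a single optimized regular expression with the given prefix/suffix, so long hand-written r'(foo|bar|...)\b' patterns can be replaced by plain word tuples. ToyLexer and its token rules below are invented purely for illustration and are not part of this change.

    from pygments.lexer import RegexLexer, words
    from pygments.token import Keyword, Name, Text

    class ToyLexer(RegexLexer):
        """Invented example lexer, only to show the words() idiom."""
        name = 'Toy'

        tokens = {
            'root': [
                # Behaves like (r'\b(if|else|while|return)\b', Keyword),
                # but stays readable as the keyword list grows.
                (words(('if', 'else', 'while', 'return'),
                       prefix=r'\b', suffix=r'\b'), Keyword),
                (r'[A-Za-z_]\w*', Name),
                (r'\s+', Text),
            ],
        }

    # Example: list(ToyLexer().get_tokens('while x return')) yields Keyword
    # tokens for "while" and "return" and a Name token for "x".
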
--- pygments/lexers/compiled.py | 346 ++++++++++++++++++++++++-------------------- 1 file changed, 186 insertions(+), 160 deletions(-) diff --git a/pygments/lexers/compiled.py b/pygments/lexers/compiled.py index 9abe5485..31cd5f98 100644 --- a/pygments/lexers/compiled.py +++ b/pygments/lexers/compiled.py @@ -1881,36 +1881,46 @@ class CythonLexer(RegexLexer): include('numbers'), ], 'keywords': [ - (r'(assert|break|by|continue|ctypedef|del|elif|else|except\??|exec|' - r'finally|for|gil|global|if|include|lambda|nogil|pass|print|raise|' - r'return|try|while|yield|as|with)\b', Keyword), + (words(( + 'assert', 'break', 'by', 'continue', 'ctypedef', 'del', 'elif', + 'else', 'except', 'except?', 'exec', 'finally', 'for', 'gil', + 'global', 'if', 'include', 'lambda', 'nogil', 'pass', 'print', + 'raise', 'return', 'try', 'while', 'yield', 'as', 'with'), suffix=r'\b'), + Keyword), (r'(DEF|IF|ELIF|ELSE)\b', Comment.Preproc), ], 'builtins': [ - (r'(?>', Name.Label), (r'([a-z0-9_]+)(\s*)(:)(\s*)(declare|begin|loop|for|while)', bygroups(Name.Label, Text, Punctuation, Text, Keyword.Reserved)), - (r'\b(abort|abs|abstract|accept|access|aliased|all|array|at|begin|' - r'body|case|constant|declare|delay|delta|digits|do|else|elsif|end|' - r'entry|exception|exit|interface|for|goto|if|is|limited|loop|new|' - r'null|of|or|others|out|overriding|pragma|protected|raise|range|' - r'record|renames|requeue|return|reverse|select|separate|subtype|' - r'synchronized|task|tagged|terminate|then|type|until|when|while|' - r'xor)\b', + (words(( + 'abort', 'abs', 'abstract', 'accept', 'access', 'aliased', 'all', + 'array', 'at', 'begin', 'body', 'case', 'constant', 'declare', + 'delay', 'delta', 'digits', 'do', 'else', 'elsif', 'end', 'entry', + 'exception', 'exit', 'interface', 'for', 'goto', 'if', 'is', 'limited', + 'loop', 'new', 'null', 'of', 'or', 'others', 'out', 'overriding', + 'pragma', 'protected', 'raise', 'range', 'record', 'renames', 'requeue', + 'return', 'reverse', 'select', 'separate', 'subtype', 'synchronized', + 'task', 'tagged', 'terminate', 'then', 'type', 'until', 'when', + 'while', 'xor'), prefix=r'\b', suffix=r'\b'), Keyword.Reserved), (r'"[^"]*"', String), include('attribute'), @@ -2544,22 +2577,22 @@ class AdaLexer(RegexLexer): (r'[*<>+=/&-]', Operator), (r'\n+', Text), ], - 'numbers' : [ + 'numbers': [ (r'[0-9_]+#[0-9a-f]+#', Number.Hex), (r'[0-9_]+\.[0-9_]*', Number.Float), (r'[0-9_]+', Number.Integer), ], - 'attribute' : [ + 'attribute': [ (r"(')(\w+)", bygroups(Punctuation, Name.Attribute)), ], - 'subprogram' : [ + 'subprogram': [ (r'\(', Punctuation, ('#pop', 'formal_part')), (r';', Punctuation, '#pop'), (r'is\b', Keyword.Reserved, '#pop'), (r'"[^"]+"|[a-z0-9_]+', Name.Function), include('root'), ], - 'end' : [ + 'end': [ ('(if|case|record|loop|select)', Keyword.Reserved), ('"[^"]+"|[\w.]+', Name.Function), ('\s+', Text), @@ -2574,13 +2607,13 @@ class AdaLexer(RegexLexer): (r'(null record)(;)', bygroups(Keyword.Reserved, Punctuation), '#pop'), include('root'), ], - 'array_def' : [ + 'array_def': [ (r';', Punctuation, '#pop'), (r'([a-z0-9_]+)(\s+)(range)', bygroups(Keyword.Type, Text, Keyword.Reserved)), include('root'), ], - 'record_def' : [ + 'record_def': [ (r'end record', Keyword.Reserved, '#pop'), include('root'), ], @@ -2588,7 +2621,7 @@ class AdaLexer(RegexLexer): (r'[a-z0-9_.]+', Name.Namespace, '#pop'), default('#pop'), ], - 'formal_part' : [ + 'formal_part': [ (r'\)', Punctuation, '#pop'), (r'[a-z0-9_]+', Name.Variable), (r',|:[^=]', Punctuation), @@ -2795,8 +2828,7 @@ class 
Modula2Lexer(RegexLexer): RegexLexer.__init__(self, **options) def get_tokens_unprocessed(self, text): - for index, token, value in \ - RegexLexer.get_tokens_unprocessed(self, text): + for index, token, value in RegexLexer.get_tokens_unprocessed(self, text): # check for reserved words and pervasives if token is Name: if value in self.reserved_words: @@ -2825,7 +2857,7 @@ class BlitzMaxLexer(RegexLexer): bmax_name = r'[a-z_]\w*' bmax_var = (r'(%s)(?:(?:([ \t]*)(%s)|([ \t]*:[ \t]*\b(?:Shl|Shr|Sar|Mod)\b)' r'|([ \t]*)(:)([ \t]*)(?:%s|(%s)))(?:([ \t]*)(Ptr))?)') % \ - (bmax_name, bmax_sktypes, bmax_lktypes, bmax_name) + (bmax_name, bmax_sktypes, bmax_lktypes, bmax_name) bmax_func = bmax_var + r'?((?:[ \t]|\.\.\n)*)([(])' flags = re.MULTILINE | re.IGNORECASE @@ -2833,7 +2865,7 @@ class BlitzMaxLexer(RegexLexer): 'root': [ # Text (r'[ \t]+', Text), - (r'\.\.\n', Text), # Line continuation + (r'\.\.\n', Text), # Line continuation # Comments (r"'.*?\n", Comment.Single), (r'([ \t]*)\bRem\n(\n|.)*?\s*\bEnd([ \t]*)Rem', Comment.Multiline), @@ -2870,26 +2902,23 @@ class BlitzMaxLexer(RegexLexer): (r'\b(Ptr)\b', Keyword.Type), (r'\b(Pi|True|False|Null|Self|Super)\b', Keyword.Constant), (r'\b(Local|Global|Const|Field)\b', Keyword.Declaration), - (r'\b(TNullMethodException|TNullFunctionException|' - r'TNullObjectException|TArrayBoundsException|' - r'TRuntimeException)\b', Name.Exception), - (r'\b(Strict|SuperStrict|Module|ModuleInfo|' - r'End|Return|Continue|Exit|Public|Private|' - r'Var|VarPtr|Chr|Len|Asc|SizeOf|Sgn|Abs|Min|Max|' - r'New|Release|Delete|' - r'Incbin|IncbinPtr|IncbinLen|' - r'Framework|Include|Import|Extern|EndExtern|' - r'Function|EndFunction|' - r'Type|EndType|Extends|' - r'Method|EndMethod|' - r'Abstract|Final|' - r'If|Then|Else|ElseIf|EndIf|' - r'For|To|Next|Step|EachIn|' - r'While|Wend|EndWhile|' - r'Repeat|Until|Forever|' - r'Select|Case|Default|EndSelect|' - r'Try|Catch|EndTry|Throw|Assert|' - r'Goto|DefData|ReadData|RestoreData)\b', Keyword.Reserved), + (words(( + 'TNullMethodException', 'TNullFunctionException', + 'TNullObjectException', 'TArrayBoundsException', + 'TRuntimeException'), prefix=r'\b', suffix=r'\b'), Name.Exception), + (words(( + 'Strict', 'SuperStrict', 'Module', 'ModuleInfo', + 'End', 'Return', 'Continue', 'Exit', 'Public', 'Private', + 'Var', 'VarPtr', 'Chr', 'Len', 'Asc', 'SizeOf', 'Sgn', 'Abs', 'Min', 'Max', + 'New', 'Release', 'Delete', 'Incbin', 'IncbinPtr', 'IncbinLen', + 'Framework', 'Include', 'Import', 'Extern', 'EndExtern', + 'Function', 'EndFunction', 'Type', 'EndType', 'Extends', 'Method', 'EndMethod', + 'Abstract', 'Final', 'If', 'Then', 'Else', 'ElseIf', 'EndIf', + 'For', 'To', 'Next', 'Step', 'EachIn', 'While', 'Wend', 'EndWhile', + 'Repeat', 'Until', 'Forever', 'Select', 'Case', 'Default', 'EndSelect', + 'Try', 'Catch', 'EndTry', 'Throw', 'Assert', 'Goto', 'DefData', 'ReadData', + 'RestoreData'), prefix=r'\b', suffix=r'\b'), + Keyword.Reserved), # Final resolve (for variable names and such) (r'(%s)' % (bmax_name), Name.Variable), ], @@ -2913,13 +2942,10 @@ class BlitzBasicLexer(RegexLexer): filenames = ['*.bb', '*.decls'] mimetypes = ['text/x-bb'] - bb_vopwords = (r'\b(Shl|Shr|Sar|Mod|Or|And|Not|' - r'Abs|Sgn|Handle|Int|Float|Str|' - r'First|Last|Before|After)\b') bb_sktypes = r'@{1,2}|[#$%]' bb_name = r'[a-z]\w*' bb_var = (r'(%s)(?:([ \t]*)(%s)|([ \t]*)([.])([ \t]*)(?:(%s)))?') % \ - (bb_name, bb_sktypes, bb_name) + (bb_name, bb_sktypes, bb_name) flags = re.MULTILINE | re.IGNORECASE tokens = { @@ -2937,7 +2963,12 @@ class 
BlitzBasicLexer(RegexLexer): (r'\$[0-9a-f]+', Number.Hex), (r'\%[10]+', Number.Bin), # Other - (r'(?:%s|([+\-*/~=<>^]))' % (bb_vopwords), Operator), + (words(('Shl', 'Shr', 'Sar', 'Mod', 'Or', 'And', 'Not', + 'Abs', 'Sgn', 'Handle', 'Int', 'Float', 'Str', + 'First', 'Last', 'Before', 'After'), + prefix=r'\b', suffix=r'\b'), + Operator), + (r'([+\-*/~=<>^])', Operator), (r'[(),:\[\]\\]', Punctuation), (r'\.([ \t]*)(%s)' % bb_name, Name.Label), # Identifiers @@ -2948,30 +2979,25 @@ class BlitzBasicLexer(RegexLexer): (r'\b(Object)\b([ \t]*)([.])([ \t]*)(%s)\b' % (bb_name), bygroups(Operator, Text, Punctuation, Text, Name.Class)), (r'\b%s\b([ \t]*)(\()' % bb_var, - bygroups(Name.Function, Text, Keyword.Type,Text, Punctuation, + bygroups(Name.Function, Text, Keyword.Type, Text, Punctuation, Text, Name.Class, Text, Punctuation)), (r'\b(Function)\b([ \t]+)%s' % bb_var, bygroups(Keyword.Reserved, Text, Name.Function, Text, Keyword.Type, - Text, Punctuation, Text, Name.Class)), + Text, Punctuation, Text, Name.Class)), (r'\b(Type)([ \t]+)(%s)' % (bb_name), bygroups(Keyword.Reserved, Text, Name.Class)), # Keywords (r'\b(Pi|True|False|Null)\b', Keyword.Constant), (r'\b(Local|Global|Const|Field|Dim)\b', Keyword.Declaration), - (r'\b(End|Return|Exit|' - r'Chr|Len|Asc|' - r'New|Delete|Insert|' - r'Include|' - r'Function|' - r'Type|' - r'If|Then|Else|ElseIf|EndIf|' - r'For|To|Next|Step|Each|' - r'While|Wend|' - r'Repeat|Until|Forever|' - r'Select|Case|Default|' - r'Goto|Gosub|Data|Read|Restore)\b', Keyword.Reserved), + (words(( + 'End', 'Return', 'Exit', 'Chr', 'Len', 'Asc', 'New', 'Delete', 'Insert', + 'Include', 'Function', 'Type', 'If', 'Then', 'Else', 'ElseIf', 'EndIf', + 'For', 'To', 'Next', 'Step', 'Each', 'While', 'Wend', + 'Repeat', 'Until', 'Forever', 'Select', 'Case', 'Default', + 'Goto', 'Gosub', 'Data', 'Read', 'Restore'), prefix=r'\b', suffix=r'\b'), + Keyword.Reserved), # Final resolve (for variable names and such) -# (r'(%s)' % (bb_name), Name.Variable), + # (r'(%s)' % (bb_name), Name.Variable), (bb_var, bygroups(Name.Variable, Text, Keyword.Type, Text, Punctuation, Text, Name.Class)), ], -- cgit v1.2.1 From 39eb79a325171a943af3fb74c031f42be1bc78c8 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Tue, 16 Sep 2014 15:55:21 +0200 Subject: fix up words() use in the rest of compiled.py --- pygments/lexers/compiled.py | 212 +++++++++++++++++++++++--------------------- 1 file changed, 113 insertions(+), 99 deletions(-) diff --git a/pygments/lexers/compiled.py b/pygments/lexers/compiled.py index 31cd5f98..cdf0a6fc 100644 --- a/pygments/lexers/compiled.py +++ b/pygments/lexers/compiled.py @@ -482,7 +482,7 @@ class DLexer(RegexLexer): 'root': [ (r'\n', Text), (r'\s+', Text), - #(r'\\\n', Text), # line continuations + # (r'\\\n', Text), # line continuations # Comments (r'//(.*?)\n', Comment.Single), (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline), @@ -537,8 +537,7 @@ class DLexer(RegexLexer): # CharacterLiteral (r"""'(\\['"?\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}""" r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|\\&\w+;|.)'""", - String.Char - ), + String.Char), # StringLiteral # -- WysiwygString (r'r"[^"]*"[cwd]?', String), @@ -549,8 +548,7 @@ class DLexer(RegexLexer): # -- EscapeSequence (r"\\(['\"?\\abfnrtv]|x[0-9a-fA-F]{2}|[0-7]{1,3}" r"|u[0-9a-fA-F]{4}|U[0-9a-fA-F]{8}|&\w+;)", - String - ), + String), # -- HexString (r'x"[0-9a-fA-F_\s]*"[cwd]?', String), # -- DelimitedString @@ -567,8 +565,7 @@ class DLexer(RegexLexer): # Tokens 
(r'(~=|\^=|%=|\*=|==|!>=|!<=|!<>=|!<>|!<|!>|!=|>>>=|>>>|>>=|>>|>=' r'|<>=|<>|<<=|<<|<=|\+\+|\+=|--|-=|\|\||\|=|&&|&=|\.\.\.|\.\.|/=)' - r'|[/.&|\-+<>!()\[\]{}?,;:$=*%^~]', Punctuation - ), + r'|[/.&|\-+<>!()\[\]{}?,;:$=*%^~]', Punctuation), # Identifier (r'[a-zA-Z_]\w*', Name), # Line @@ -656,7 +653,7 @@ class DelphiLexer(Lexer): filenames = ['*.pas'] mimetypes = ['text/x-pascal'] - TURBO_PASCAL_KEYWORDS = [ + TURBO_PASCAL_KEYWORDS = ( 'absolute', 'and', 'array', 'asm', 'begin', 'break', 'case', 'const', 'constructor', 'continue', 'destructor', 'div', 'do', 'downto', 'else', 'end', 'file', 'for', 'function', 'goto', @@ -665,41 +662,41 @@ class DelphiLexer(Lexer): 'or', 'packed', 'procedure', 'program', 'record', 'reintroduce', 'repeat', 'self', 'set', 'shl', 'shr', 'string', 'then', 'to', 'type', 'unit', 'until', 'uses', 'var', 'while', 'with', 'xor' - ] + ) - DELPHI_KEYWORDS = [ + DELPHI_KEYWORDS = ( 'as', 'class', 'except', 'exports', 'finalization', 'finally', 'initialization', 'is', 'library', 'on', 'property', 'raise', 'threadvar', 'try' - ] + ) - FREE_PASCAL_KEYWORDS = [ + FREE_PASCAL_KEYWORDS = ( 'dispose', 'exit', 'false', 'new', 'true' - ] + ) - BLOCK_KEYWORDS = set([ + BLOCK_KEYWORDS = set(( 'begin', 'class', 'const', 'constructor', 'destructor', 'end', 'finalization', 'function', 'implementation', 'initialization', 'label', 'library', 'operator', 'procedure', 'program', 'property', 'record', 'threadvar', 'type', 'unit', 'uses', 'var' - ]) + )) - FUNCTION_MODIFIERS = set([ + FUNCTION_MODIFIERS = set(( 'alias', 'cdecl', 'export', 'inline', 'interrupt', 'nostackframe', 'pascal', 'register', 'safecall', 'softfloat', 'stdcall', 'varargs', 'name', 'dynamic', 'near', 'virtual', 'external', 'override', 'assembler' - ]) + )) # XXX: those aren't global. but currently we know no way for defining # them just for the type context. 
- DIRECTIVES = set([ + DIRECTIVES = set(( 'absolute', 'abstract', 'assembler', 'cppdecl', 'default', 'far', 'far16', 'forward', 'index', 'oldfpccall', 'private', 'protected', 'published', 'public' - ]) + )) - BUILTIN_TYPES = set([ + BUILTIN_TYPES = set(( 'ansichar', 'ansistring', 'bool', 'boolean', 'byte', 'bytebool', 'cardinal', 'char', 'comp', 'currency', 'double', 'dword', 'extended', 'int64', 'integer', 'iunknown', 'longbool', 'longint', @@ -713,10 +710,10 @@ class DelphiLexer(Lexer): 'shortstring', 'single', 'smallint', 'string', 'tclass', 'tdate', 'tdatetime', 'textfile', 'thandle', 'tobject', 'ttime', 'variant', 'widechar', 'widestring', 'word', 'wordbool' - ]) + )) BUILTIN_UNITS = { - 'System': [ + 'System': ( 'abs', 'acquireexceptionobject', 'addr', 'ansitoutf8', 'append', 'arctan', 'assert', 'assigned', 'assignfile', 'beginthread', 'blockread', 'blockwrite', 'break', 'chdir', @@ -748,8 +745,8 @@ class DelphiLexer(Lexer): 'widecharlentostring', 'widecharlentostrvar', 'widechartostring', 'widechartostrvar', 'widestringtoucs4string', 'write', 'writeln' - ], - 'SysUtils': [ + ), + 'SysUtils': ( 'abort', 'addexitproc', 'addterminateproc', 'adjustlinebreaks', 'allocmem', 'ansicomparefilename', 'ansicomparestr', 'ansicomparetext', 'ansidequotedstr', 'ansiextractquotedstr', @@ -815,8 +812,8 @@ class DelphiLexer(Lexer): 'widefmtstr', 'wideformat', 'wideformatbuf', 'widelowercase', 'widesamestr', 'widesametext', 'wideuppercase', 'win32check', 'wraptext' - ], - 'Classes': [ + ), + 'Classes': ( 'activateclassgroup', 'allocatehwnd', 'bintohex', 'checksynchronize', 'collectionsequal', 'countgenerations', 'deallocatehwnd', 'equalrect', 'extractstrings', 'findclass', 'findglobalcomponent', 'getclass', @@ -831,8 +828,8 @@ class DelphiLexer(Lexer): 'teststreamformat', 'unregisterclass', 'unregisterclasses', 'unregisterintegerconsts', 'unregistermoduleclasses', 'writecomponentresfile' - ], - 'Math': [ + ), + 'Math': ( 'arccos', 'arccosh', 'arccot', 'arccoth', 'arccsc', 'arccsch', 'arcsec', 'arcsech', 'arcsin', 'arcsinh', 'arctan2', 'arctanh', 'ceil', 'comparevalue', 'cosecant', 'cosh', 'cot', 'cotan', 'coth', 'csc', @@ -852,10 +849,10 @@ class DelphiLexer(Lexer): 'sign', 'simpleroundto', 'sincos', 'sinh', 'slndepreciation', 'stddev', 'sum', 'sumint', 'sumofsquares', 'sumsandsquares', 'syddepreciation', 'tan', 'tanh', 'totalvariance', 'variance' - ] + ) } - ASM_REGISTERS = set([ + ASM_REGISTERS = set(( 'ah', 'al', 'ax', 'bh', 'bl', 'bp', 'bx', 'ch', 'cl', 'cr0', 'cr1', 'cr2', 'cr3', 'cr4', 'cs', 'cx', 'dh', 'di', 'dl', 'dr0', 'dr1', 'dr2', 'dr3', 'dr4', 'dr5', 'dr6', 'dr7', 'ds', 'dx', @@ -864,9 +861,9 @@ class DelphiLexer(Lexer): 'mm7', 'si', 'sp', 'ss', 'st0', 'st1', 'st2', 'st3', 'st4', 'st5', 'st6', 'st7', 'xmm0', 'xmm1', 'xmm2', 'xmm3', 'xmm4', 'xmm5', 'xmm6', 'xmm7' - ]) + )) - ASM_INSTRUCTIONS = set([ + ASM_INSTRUCTIONS = set(( 'aaa', 'aad', 'aam', 'aas', 'adc', 'add', 'and', 'arpl', 'bound', 'bsf', 'bsr', 'bswap', 'bt', 'btc', 'btr', 'bts', 'call', 'cbw', 'cdq', 'clc', 'cld', 'cli', 'clts', 'cmc', 'cmova', 'cmovae', @@ -905,7 +902,7 @@ class DelphiLexer(Lexer): 'sysret', 'test', 'ud1', 'ud2', 'umov', 'verr', 'verw', 'wait', 'wbinvd', 'wrmsr', 'wrshr', 'xadd', 'xbts', 'xchg', 'xlat', 'xlatb', 'xor' - ]) + )) def __init__(self, **options): Lexer.__init__(self, **options) @@ -999,12 +996,12 @@ class DelphiLexer(Lexer): # is in the set of registered modifiers. 
highlight # it as pseudo keyword elif in_function_block and \ - lowercase_name in self.FUNCTION_MODIFIERS: + lowercase_name in self.FUNCTION_MODIFIERS: token = Keyword.Pseudo # if we are in a property highlight some more # modifiers elif in_property_block and \ - lowercase_name in ('read', 'write'): + lowercase_name in ('read', 'write'): token = Keyword.Pseudo next_token_is_function = True # if the last iteration set next_token_is_function @@ -1131,27 +1128,27 @@ class DylanLexer(RegexLexer): flags = re.IGNORECASE - builtins = set([ + builtins = set(( 'subclass', 'abstract', 'block', 'concrete', 'constant', 'class', 'compiler-open', 'compiler-sideways', 'domain', 'dynamic', 'each-subclass', 'exception', 'exclude', 'function', 'generic', 'handler', 'inherited', 'inline', 'inline-only', 'instance', 'interface', 'import', 'keyword', 'library', 'macro', 'method', 'module', 'open', 'primary', 'required', 'sealed', 'sideways', - 'singleton', 'slot', 'thread', 'variable', 'virtual']) + 'singleton', 'slot', 'thread', 'variable', 'virtual')) - keywords = set([ + keywords = set(( 'above', 'afterwards', 'begin', 'below', 'by', 'case', 'cleanup', 'create', 'define', 'else', 'elseif', 'end', 'export', 'finally', 'for', 'from', 'if', 'in', 'let', 'local', 'otherwise', 'rename', 'select', 'signal', 'then', 'to', 'unless', 'until', 'use', 'when', - 'while']) + 'while')) - operators = set([ + operators = set(( '~', '+', '-', '*', '|', '^', '=', '==', '~=', '~==', '<', '<=', - '>', '>=', '&', '|']) + '>', '>=', '&', '|')) - functions = set([ + functions = set(( 'abort', 'abs', 'add', 'add!', 'add-method', 'add-new', 'add-new!', 'all-superclasses', 'always', 'any?', 'applicable-method?', 'apply', 'aref', 'aref-setter', 'as', 'as-lowercase', 'as-lowercase!', @@ -1185,7 +1182,7 @@ class DylanLexer(RegexLexer): 'subtype?', 'table-protocol', 'tail', 'tail-setter', 'third', 'third-setter', 'truncate', 'truncate/', 'type-error-expected-type', 'type-error-value', 'type-for-copy', 'type-union', 'union', 'values', - 'vector', 'zero?']) + 'vector', 'zero?')) valid_name = '\\\\?[a-z0-9' + re.escape('!&*<>|^$%@_-+~?/=') + ']+' @@ -1219,7 +1216,7 @@ class DylanLexer(RegexLexer): (r'([a-z0-9-]+)(:)([ \t]*)(.*(?:\n[ \t].+)*)', bygroups(Name.Attribute, Operator, Text, String)), - ('', Text, 'code') # no header match, switch to code + ('', Text, 'code') # no header match, switch to code ], 'code': [ # Whitespace @@ -1297,14 +1294,14 @@ class DylanLexer(RegexLexer): ], 'keyword': [ (r'"', String.Symbol, '#pop'), - (r'[^\\"]+', String.Symbol), # all other characters + (r'[^\\"]+', String.Symbol), # all other characters ], 'string': [ (r'"', String, '#pop'), (r'\\([\\abfnrtv"\']|x[a-f0-9]{2,4}|[0-7]{1,3})', String.Escape), - (r'[^\\"\n]+', String), # all other characters - (r'\\\n', String), # line continuation - (r'\\', String), # stray backslash + (r'[^\\"\n]+', String), # all other characters + (r'\\\n', String), # line continuation + (r'\\', String), # stray backslash ] } @@ -1358,7 +1355,7 @@ class DylanConsoleLexer(Lexer): filenames = ['*.dylan-console'] mimetypes = ['text/x-dylan-console'] - _line_re = re.compile('.*?\n') + _line_re = re.compile('.*?\n') _prompt_re = re.compile('\?| ') def get_tokens_unprocessed(self, text): @@ -1377,7 +1374,7 @@ class DylanConsoleLexer(Lexer): else: if curcode: for item in do_insertions(insertions, - dylexer.get_tokens_unprocessed(curcode)): + dylexer.get_tokens_unprocessed(curcode)): yield item curcode = '' insertions = [] @@ -1425,13 +1422,18 @@ def objective(baselexer): (r'@\(', 
Literal, 'literal_number'), (r'@\[', Literal, 'literal_array'), (r'@\{', Literal, 'literal_dictionary'), - (r'(@selector|@private|@protected|@public|@encode|' - r'@synchronized|@try|@throw|@catch|@finally|@end|@property|@synthesize|' - r'__bridge|__bridge_transfer|__autoreleasing|__block|__weak|__strong|' - r'weak|strong|copy|retain|assign|unsafe_unretained|atomic|nonatomic|' - r'readonly|readwrite|setter|getter|typeof|in|out|inout|release|class|' - r'@dynamic|@optional|@required|@autoreleasepool)\b', Keyword), - (r'(id|instancetype|Class|IMP|SEL|BOOL|IBOutlet|IBAction|unichar)\b', + (words(( + '@selector', '@private', '@protected', '@public', '@encode', + '@synchronized', '@try', '@throw', '@catch', '@finally', + '@end', '@property', '@synthesize', '__bridge', '__bridge_transfer', + '__autoreleasing', '__block', '__weak', '__strong', 'weak', 'strong', + 'copy', 'retain', 'assign', 'unsafe_unretained', 'atomic', 'nonatomic', + 'readonly', 'readwrite', 'setter', 'getter', 'typeof', 'in', + 'out', 'inout', 'release', 'class', '@dynamic', '@optional', + '@required', '@autoreleasepool'), suffix=r'\b'), + Keyword), + (words(('id', 'instancetype', 'Class', 'IMP', 'SEL', 'BOOL', + 'IBOutlet', 'IBAction', 'unichar'), suffix=r'\b'), Keyword.Type), (r'@(true|false|YES|NO)\n', Name.Builtin), (r'(YES|NO|nil|self|super)\b', Name.Builtin), @@ -1447,7 +1449,7 @@ def objective(baselexer): (r'@', Punctuation), inherit, ], - 'oc_classname' : [ + 'oc_classname': [ # interface definition that inherits ('([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?(\s*)({)', bygroups(Name.Class, Text, Name.Class, Text, Punctuation), @@ -1465,28 +1467,28 @@ def objective(baselexer): bygroups(Name.Class, Text, Punctuation), ('#pop', 'oc_ivars')), ('([a-zA-Z$_][\w$]*)', Name.Class, '#pop') ], - 'oc_forward_classname' : [ - ('([a-zA-Z$_][\w$]*)(\s*,\s*)', - bygroups(Name.Class, Text), 'oc_forward_classname'), - ('([a-zA-Z$_][\w$]*)(\s*;?)', - bygroups(Name.Class, Text), '#pop') + 'oc_forward_classname': [ + ('([a-zA-Z$_][\w$]*)(\s*,\s*)', + bygroups(Name.Class, Text), 'oc_forward_classname'), + ('([a-zA-Z$_][\w$]*)(\s*;?)', + bygroups(Name.Class, Text), '#pop') ], - 'oc_ivars' : [ - include('whitespace'), - include('statements'), - (';', Punctuation), - ('{', Punctuation, '#push'), - ('}', Punctuation, '#pop'), + 'oc_ivars': [ + include('whitespace'), + include('statements'), + (';', Punctuation), + ('{', Punctuation, '#push'), + ('}', Punctuation, '#pop'), ], 'root': [ - # methods - (r'^([-+])(\s*)' # method marker - r'(\(.*?\))?(\s*)' # return type - r'([a-zA-Z$_][\w$]*:?)', # begin of method name - bygroups(Punctuation, Text, using(this), - Text, Name.Function), - 'method'), - inherit, + # methods + (r'^([-+])(\s*)' # method marker + r'(\(.*?\))?(\s*)' # return type + r'([a-zA-Z$_][\w$]*:?)', # begin of method name + bygroups(Punctuation, Text, using(this), + Text, Name.Function), + 'method'), + inherit, ], 'method': [ include('whitespace'), @@ -1626,7 +1628,7 @@ class FortranLexer(RegexLexer): 'RECURSIVE', 'RESULT', 'RETURN', 'REWIND', 'SAVE', 'SELECT', 'SEQUENCE', 'STOP', 'SUBMODULE', 'SUBROUTINE', 'SYNC', 'SYNCALL', 'SYNCIMAGES', 'SYNCMEMORY', 'TARGET', 'THEN', 'TYPE', 'UNLOCK', 'USE', 'VALUE', - 'VOLATILE', 'WHERE', 'WRITE', 'WHILE'), prefix='\\b', suffix='\\s*\\b'), + 'VOLATILE', 'WHERE', 'WRITE', 'WHILE'), prefix=r'\b', suffix=r'\s*\b'), Keyword), # Data Types @@ -1638,7 +1640,7 @@ class FortranLexer(RegexLexer): 'C_INT_FAST16_T', 'C_INT_FAST32_T', 'C_INT_FAST64_T', 'C_INTMAX_T', 'C_INTPTR_T', 'C_FLOAT', 'C_DOUBLE', 
'C_LONG_DOUBLE', 'C_FLOAT_COMPLEX', 'C_DOUBLE_COMPLEX', 'C_LONG_DOUBLE_COMPLEX', 'C_BOOL', 'C_CHAR', 'C_PTR', - 'C_FUNPTR'), prefix='\\b', suffix='\\s*\\b'), + 'C_FUNPTR'), prefix=r'\b', suffix=r'\s*\b'), Keyword.Type), # Operators @@ -1698,7 +1700,7 @@ class FortranLexer(RegexLexer): 'Tan', 'TanH', 'Time', 'This_Image', 'Tiny', 'TrailZ', 'Transfer', 'Transpose', 'Trim', 'TtyNam', 'UBound', 'UCoBound', 'UMask', 'Unlink', 'Unpack', 'Verify', 'XOr', 'ZAbs', 'ZCos', 'ZExp', - 'ZLog', 'ZSin', 'ZSqRt'), prefix='\\b', suffix='\\s*\\b'), + 'ZLog', 'ZSin', 'ZSqRt'), prefix=r'\b', suffix=r'\s*\b'), Name.Builtin), # Booleans @@ -1738,26 +1740,38 @@ class GLShaderLexer(RegexLexer): (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline), (r'\+|-|~|!=?|\*|/|%|<<|>>|<=?|>=?|==?|&&?|\^|\|\|?', Operator), - (r'[?:]', Operator), # quick hack for ternary + (r'[?:]', Operator), # quick hack for ternary (r'\bdefined\b', Operator), (r'[;{}(),\[\]]', Punctuation), - #FIXME when e is present, no decimal point needed + # FIXME when e is present, no decimal point needed (r'[+-]?\d*\.\d+([eE][-+]?\d+)?', Number.Float), (r'[+-]?\d+\.\d*([eE][-+]?\d+)?', Number.Float), (r'0[xX][0-9a-fA-F]*', Number.Hex), (r'0[0-7]*', Number.Oct), (r'[1-9][0-9]*', Number.Integer), - (r'\b(attribute|const|uniform|varying|centroid|break|continue|' - r'do|for|while|if|else|in|out|inout|float|int|void|bool|true|' - r'false|invariant|discard|return|mat[234]|mat[234]x[234]|' - r'vec[234]|[ib]vec[234]|sampler[123]D|samplerCube|' - r'sampler[12]DShadow|struct)\b', Keyword), - (r'\b(asm|class|union|enum|typedef|template|this|packed|goto|' - r'switch|default|inline|noinline|volatile|public|static|extern|' - r'external|interface|long|short|double|half|fixed|unsigned|' - r'lowp|mediump|highp|precision|input|output|hvec[234]|' - r'[df]vec[234]|sampler[23]DRect|sampler2DRectShadow|sizeof|' - r'cast|namespace|using)\b', Keyword), #future use + (words(( + 'attribute', 'const', 'uniform', 'varying', 'centroid', 'break', + 'continue', 'do', 'for', 'while', 'if', 'else', 'in', 'out', + 'inout', 'float', 'int', 'void', 'bool', 'true', 'false', + 'invariant', 'discard', 'return', 'mat2', 'mat3', 'mat4', + 'mat2x2', 'mat3x2', 'mat4x2', 'mat2x3', 'mat3x3', 'mat4x3', + 'mat2x4', 'mat3x4', 'mat4x4', 'vec2', 'vec3', 'vec4', + 'ivec2', 'ivec3', 'ivec4', 'bvec2', 'bvec3', 'bvec4', + 'sampler1D', 'sampler2D', 'sampler3D', 'samplerCube', + 'sampler1DShadow', 'sampler2DShadow', 'struct'), + prefix=r'\b', suffix=r'\b'), + Keyword), + (words(( + 'asm', 'class', 'union', 'enum', 'typedef', 'template', 'this', + 'packed', 'goto', 'switch', 'default', 'inline', 'noinline', + 'volatile', 'public', 'static', 'extern', 'external', 'interface', + 'long', 'short', 'double', 'half', 'fixed', 'unsigned', 'lowp', + 'mediump', 'highp', 'precision', 'input', 'output', + 'hvec2', 'hvec3', 'hvec4', 'dvec2', 'dvec3', 'dvec4', + 'fvec2', 'fvec3', 'fvec4', 'sampler2DRect', 'sampler3DRect', + 'sampler2DRectShadow', 'sizeof', 'cast', 'namespace', 'using'), + prefix=r'\b', suffix=r'\b'), + Keyword), # future use (r'[a-zA-Z_][a-zA-Z_0-9]*', Name), (r'\.', Punctuation), (r'\s+', Text), @@ -1794,29 +1808,29 @@ class PrologLexer(RegexLexer): (r':-|-->', Punctuation), (r'"(?:\\x[0-9a-fA-F]+\\|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|' r'\\[0-7]+\\|\\["\nabcefnrstv]|[^\\"])*"', String.Double), - (r"'(?:''|[^'])*'", String.Atom), # quoted atom + (r"'(?:''|[^'])*'", String.Atom), # quoted atom # Needs to not be followed by an atom. 
- #(r'=(?=\s|[a-zA-Z\[])', Operator), + # (r'=(?=\s|[a-zA-Z\[])', Operator), (r'is\b', Operator), (r'(<|>|=<|>=|==|=:=|=|/|//|\*|\+|-)(?=\s|[a-zA-Z0-9\[])', Operator), (r'(mod|div|not)\b', Operator), - (r'_', Keyword), # The don't-care variable + (r'_', Keyword), # The don't-care variable (r'([a-z]+)(:)', bygroups(Name.Namespace, Punctuation)), (u'([a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]' u'[\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*)' u'(\\s*)(:-|-->)', - bygroups(Name.Function, Text, Operator)), # function defn + bygroups(Name.Function, Text, Operator)), # function defn (u'([a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]' u'[\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*)' u'(\\s*)(\\()', bygroups(Name.Function, Text, Punctuation)), (u'[a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]' u'[\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*', - String.Atom), # atom, characters + String.Atom), # atom, characters # This one includes ! (u'[#&*+\\-./:<=>?@\\\\^~\u00a1-\u00bf\u2010-\u303f]+', - String.Atom), # atom, graphics + String.Atom), # atom, graphics (r'[A-Z_]\w*', Name.Variable), (u'\\s+|[\u2000-\u200f\ufff0-\ufffe\uffef]', Text), ], -- cgit v1.2.1 From aa1e7d94fdc1722809a8f045edec4a3454c58ad0 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Tue, 16 Sep 2014 16:08:41 +0200 Subject: support submodules in _mapping regeneration --- pygments/lexers/_mapping.py | 30 ++++++++++++++++-------------- 1 file changed, 16 insertions(+), 14 deletions(-) diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index d505dcf5..631a893a 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -367,20 +367,22 @@ if __name__ == '__main__': # lookup lexers found_lexers = [] sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..')) - for filename in os.listdir('.'): - if filename.endswith('.py') and not filename.startswith('_'): - module_name = 'pygments.lexers.%s' % filename[:-3] - print(module_name) - module = __import__(module_name, None, None, ['']) - for lexer_name in module.__all__: - lexer = getattr(module, lexer_name) - found_lexers.append( - '%r: %r' % (lexer_name, - (module_name, - lexer.name, - tuple(lexer.aliases), - tuple(lexer.filenames), - tuple(lexer.mimetypes)))) + for root, dirs, files in os.walk('.'): + for filename in files: + if filename.endswith('.py') and not filename.startswith('_'): + module_name = 'pygments.lexers%s.%s' % ( + root[1:].replace('/', '.'), filename[:-3]) + print(module_name) + module = __import__(module_name, None, None, ['']) + for lexer_name in module.__all__: + lexer = getattr(module, lexer_name) + found_lexers.append( + '%r: %r' % (lexer_name, + (module_name, + lexer.name, + tuple(lexer.aliases), + tuple(lexer.filenames), + tuple(lexer.mimetypes)))) # sort them, that should make the diff files for svn smaller found_lexers.sort() -- cgit v1.2.1 From 98dac60db6d954fae554b9b77eec2197f533fdaf Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Tue, 16 Sep 2014 16:45:53 +0200 Subject: fix PEP8 --- pygments/lexers/graph.py | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/pygments/lexers/graph.py b/pygments/lexers/graph.py index 6aa446c7..04ec3262 100644 --- a/pygments/lexers/graph.py +++ b/pygments/lexers/graph.py @@ -12,8 +12,8 @@ import re from pygments.lexer import RegexLexer, include, bygroups, using, this -from pygments.token import Keyword, Punctuation, Text, Comment, Operator, Name,\ - String, Number, Generic, Whitespace +from pygments.token import Keyword, Punctuation, Comment, Operator, Name,\ 
+ String, Number, Whitespace __all__ = ['CypherLexer'] @@ -30,7 +30,7 @@ class CypherLexer(RegexLexer): """ name = 'Cypher' aliases = ['cypher'] - filenames = ['*.cyp','*.cypher'] + filenames = ['*.cyp', '*.cypher'] flags = re.MULTILINE | re.IGNORECASE @@ -43,7 +43,7 @@ class CypherLexer(RegexLexer): include('strings'), include('whitespace'), include('barewords'), - ], + ], 'comment': [ (r'^.*//.*\n', Comment.Single), ], @@ -77,5 +77,3 @@ class CypherLexer(RegexLexer): (r'\d+', Number), ], } - - -- cgit v1.2.1 From ca80bbb894f18e7ccbda553d33b50ed222e7727f Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Tue, 16 Sep 2014 18:58:00 +0200 Subject: split up lexers.compiled into multiple submodules --- pygments/lexers/_mapping.py | 84 +- pygments/lexers/c_like/__init__.py | 0 pygments/lexers/c_like/c_cpp.py | 231 ++ pygments/lexers/c_like/d.py | 179 ++ pygments/lexers/c_like/go.py | 101 + pygments/lexers/c_like/objective.py | 322 +++ pygments/lexers/c_like/other.py | 839 ++++++ pygments/lexers/c_like/rust.py | 161 ++ pygments/lexers/cobol.py | 231 ++ pygments/lexers/compiled.py | 5386 +---------------------------------- pygments/lexers/dotnet.py | 80 +- pygments/lexers/eiffel.py | 65 + pygments/lexers/fortran.py | 160 ++ pygments/lexers/graphics.py | 73 + pygments/lexers/inform.py | 724 +++++ pygments/lexers/misc/__init__.py | 10 + pygments/lexers/misc/blitz.py | 318 +++ pygments/lexers/misc/chapel.py | 98 + pygments/lexers/misc/dylan.py | 289 ++ pygments/lexers/misc/fantom.py | 250 ++ pygments/lexers/misc/felix.py | 273 ++ pygments/lexers/misc/nimrod.py | 159 ++ pygments/lexers/misc/nit.py | 64 + pygments/lexers/misc/ooc.py | 85 + pygments/lexers/pascal.py | 833 ++++++ pygments/lexers/prolog.py | 87 + pygments/lexers/python.py | 196 ++ 27 files changed, 5853 insertions(+), 5445 deletions(-) create mode 100644 pygments/lexers/c_like/__init__.py create mode 100644 pygments/lexers/c_like/c_cpp.py create mode 100644 pygments/lexers/c_like/d.py create mode 100644 pygments/lexers/c_like/go.py create mode 100644 pygments/lexers/c_like/objective.py create mode 100644 pygments/lexers/c_like/other.py create mode 100644 pygments/lexers/c_like/rust.py create mode 100644 pygments/lexers/cobol.py create mode 100644 pygments/lexers/eiffel.py create mode 100644 pygments/lexers/fortran.py create mode 100644 pygments/lexers/graphics.py create mode 100644 pygments/lexers/inform.py create mode 100644 pygments/lexers/misc/__init__.py create mode 100644 pygments/lexers/misc/blitz.py create mode 100644 pygments/lexers/misc/chapel.py create mode 100644 pygments/lexers/misc/dylan.py create mode 100644 pygments/lexers/misc/fantom.py create mode 100644 pygments/lexers/misc/felix.py create mode 100644 pygments/lexers/misc/nimrod.py create mode 100644 pygments/lexers/misc/nit.py create mode 100644 pygments/lexers/misc/ooc.py create mode 100644 pygments/lexers/pascal.py create mode 100644 pygments/lexers/prolog.py create mode 100644 pygments/lexers/python.py diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index 631a893a..716e0009 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -20,7 +20,7 @@ LEXERS = { 'APLLexer': ('pygments.lexers.other', 'APL', ('apl',), ('*.apl',), ()), 'ActionScript3Lexer': ('pygments.lexers.web', 'ActionScript 3', ('as3', 'actionscript3'), ('*.as',), ('application/x-actionscript3', 'text/x-actionscript3', 'text/actionscript3')), 'ActionScriptLexer': ('pygments.lexers.web', 'ActionScript', ('as', 'actionscript'), ('*.as',), 
('application/x-actionscript', 'text/x-actionscript', 'text/actionscript')), - 'AdaLexer': ('pygments.lexers.compiled', 'Ada', ('ada', 'ada95', 'ada2005'), ('*.adb', '*.ads', '*.ada'), ('text/x-ada',)), + 'AdaLexer': ('pygments.lexers.pascal', 'Ada', ('ada', 'ada95', 'ada2005'), ('*.adb', '*.ads', '*.ada'), ('text/x-ada',)), 'AgdaLexer': ('pygments.lexers.functional', 'Agda', ('agda',), ('*.agda',), ('text/x-agda',)), 'AlloyLexer': ('pygments.lexers.other', 'Alloy', ('alloy',), ('*.als',), ('text/x-alloy',)), 'AmbientTalkLexer': ('pygments.lexers.other', 'AmbientTalk', ('at', 'ambienttalk', 'ambienttalk/2'), ('*.at',), ('text/x-ambienttalk',)), @@ -46,13 +46,13 @@ LEXERS = { 'BashSessionLexer': ('pygments.lexers.shell', 'Bash Session', ('console',), ('*.sh-session',), ('application/x-shell-session',)), 'BatchLexer': ('pygments.lexers.shell', 'Batchfile', ('bat', 'batch', 'dosbatch', 'winbatch'), ('*.bat', '*.cmd'), ('application/x-dos-batch',)), 'BefungeLexer': ('pygments.lexers.other', 'Befunge', ('befunge',), ('*.befunge',), ('application/x-befunge',)), - 'BlitzBasicLexer': ('pygments.lexers.compiled', 'BlitzBasic', ('blitzbasic', 'b3d', 'bplus'), ('*.bb', '*.decls'), ('text/x-bb',)), - 'BlitzMaxLexer': ('pygments.lexers.compiled', 'BlitzMax', ('blitzmax', 'bmax'), ('*.bmx',), ('text/x-bmx',)), + 'BlitzBasicLexer': ('pygments.lexers.misc.blitz', 'BlitzBasic', ('blitzbasic', 'b3d', 'bplus'), ('*.bb', '*.decls'), ('text/x-bb',)), + 'BlitzMaxLexer': ('pygments.lexers.misc.blitz', 'BlitzMax', ('blitzmax', 'bmax'), ('*.bmx',), ('text/x-bmx',)), 'BooLexer': ('pygments.lexers.dotnet', 'Boo', ('boo',), ('*.boo',), ('text/x-boo',)), 'BrainfuckLexer': ('pygments.lexers.other', 'Brainfuck', ('brainfuck', 'bf'), ('*.bf', '*.b'), ('application/x-brainfuck',)), 'BroLexer': ('pygments.lexers.other', 'Bro', ('bro',), ('*.bro',), ()), 'BugsLexer': ('pygments.lexers.math', 'BUGS', ('bugs', 'winbugs', 'openbugs'), ('*.bug',), ()), - 'CLexer': ('pygments.lexers.compiled', 'C', ('c',), ('*.c', '*.h', '*.idc'), ('text/x-chdr', 'text/x-csrc')), + 'CLexer': ('pygments.lexers.c_like.c_cpp', 'C', ('c',), ('*.c', '*.h', '*.idc'), ('text/x-chdr', 'text/x-csrc')), 'CMakeLexer': ('pygments.lexers.text', 'CMake', ('cmake',), ('*.cmake', 'CMakeLists.txt'), ('text/x-cmake',)), 'CObjdumpLexer': ('pygments.lexers.asm', 'c-objdump', ('c-objdump',), ('*.c-objdump',), ('text/x-c-objdump',)), 'CSharpAspxLexer': ('pygments.lexers.dotnet', 'aspx-cs', ('aspx-cs',), ('*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd'), ()), @@ -62,24 +62,24 @@ LEXERS = { 'CeylonLexer': ('pygments.lexers.jvm', 'Ceylon', ('ceylon',), ('*.ceylon',), ('text/x-ceylon',)), 'Cfengine3Lexer': ('pygments.lexers.other', 'CFEngine3', ('cfengine3', 'cf3'), ('*.cf',), ()), 'ChaiscriptLexer': ('pygments.lexers.agile', 'ChaiScript', ('chai', 'chaiscript'), ('*.chai',), ('text/x-chaiscript', 'application/x-chaiscript')), - 'ChapelLexer': ('pygments.lexers.compiled', 'Chapel', ('chapel', 'chpl'), ('*.chpl',), ()), + 'ChapelLexer': ('pygments.lexers.misc.chapel', 'Chapel', ('chapel', 'chpl'), ('*.chpl',), ()), 'CheetahHtmlLexer': ('pygments.lexers.templates', 'HTML+Cheetah', ('html+cheetah', 'html+spitfire', 'htmlcheetah'), (), ('text/html+cheetah', 'text/html+spitfire')), 'CheetahJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Cheetah', ('js+cheetah', 'javascript+cheetah', 'js+spitfire', 'javascript+spitfire'), (), ('application/x-javascript+cheetah', 'text/x-javascript+cheetah', 'text/javascript+cheetah', 'application/x-javascript+spitfire', 
'text/x-javascript+spitfire', 'text/javascript+spitfire')), 'CheetahLexer': ('pygments.lexers.templates', 'Cheetah', ('cheetah', 'spitfire'), ('*.tmpl', '*.spt'), ('application/x-cheetah', 'application/x-spitfire')), 'CheetahXmlLexer': ('pygments.lexers.templates', 'XML+Cheetah', ('xml+cheetah', 'xml+spitfire'), (), ('application/xml+cheetah', 'application/xml+spitfire')), 'CirruLexer': ('pygments.lexers.web', 'Cirru', ('cirru',), ('*.cirru', '*.cr'), ('text/x-cirru',)), - 'ClayLexer': ('pygments.lexers.compiled', 'Clay', ('clay',), ('*.clay',), ('text/x-clay',)), + 'ClayLexer': ('pygments.lexers.c_like.other', 'Clay', ('clay',), ('*.clay',), ('text/x-clay',)), 'ClojureLexer': ('pygments.lexers.jvm', 'Clojure', ('clojure', 'clj'), ('*.clj',), ('text/x-clojure', 'application/x-clojure')), 'ClojureScriptLexer': ('pygments.lexers.jvm', 'ClojureScript', ('clojurescript', 'cljs'), ('*.cljs',), ('text/x-clojurescript', 'application/x-clojurescript')), - 'CobolFreeformatLexer': ('pygments.lexers.compiled', 'COBOLFree', ('cobolfree',), ('*.cbl', '*.CBL'), ()), - 'CobolLexer': ('pygments.lexers.compiled', 'COBOL', ('cobol',), ('*.cob', '*.COB', '*.cpy', '*.CPY'), ('text/x-cobol',)), + 'CobolFreeformatLexer': ('pygments.lexers.cobol', 'COBOLFree', ('cobolfree',), ('*.cbl', '*.CBL'), ()), + 'CobolLexer': ('pygments.lexers.cobol', 'COBOL', ('cobol',), ('*.cob', '*.COB', '*.cpy', '*.CPY'), ('text/x-cobol',)), 'CoffeeScriptLexer': ('pygments.lexers.web', 'CoffeeScript', ('coffee-script', 'coffeescript', 'coffee'), ('*.coffee',), ('text/coffeescript',)), 'ColdfusionCFCLexer': ('pygments.lexers.templates', 'Coldfusion CFC', ('cfc',), ('*.cfc',), ()), 'ColdfusionHtmlLexer': ('pygments.lexers.templates', 'Coldfusion HTML', ('cfm',), ('*.cfm', '*.cfml'), ('application/x-coldfusion',)), 'ColdfusionLexer': ('pygments.lexers.templates', 'cfstatement', ('cfs',), (), ()), 'CommonLispLexer': ('pygments.lexers.functional', 'Common Lisp', ('common-lisp', 'cl', 'lisp', 'elisp', 'emacs', 'emacs-lisp'), ('*.cl', '*.lisp', '*.el'), ('text/x-common-lisp',)), 'CoqLexer': ('pygments.lexers.functional', 'Coq', ('coq',), ('*.v',), ('text/x-coq',)), - 'CppLexer': ('pygments.lexers.compiled', 'C++', ('cpp', 'c++'), ('*.cpp', '*.hpp', '*.c++', '*.h++', '*.cc', '*.hh', '*.cxx', '*.hxx', '*.C', '*.H', '*.cp', '*.CPP'), ('text/x-c++hdr', 'text/x-c++src')), + 'CppLexer': ('pygments.lexers.c_like.c_cpp', 'C++', ('cpp', 'c++'), ('*.cpp', '*.hpp', '*.c++', '*.h++', '*.cc', '*.hh', '*.cxx', '*.hxx', '*.C', '*.H', '*.cp', '*.CPP'), ('text/x-c++hdr', 'text/x-c++src')), 'CppObjdumpLexer': ('pygments.lexers.asm', 'cpp-objdump', ('cpp-objdump', 'c++-objdumb', 'cxx-objdump'), ('*.cpp-objdump', '*.c++-objdump', '*.cxx-objdump'), ('text/x-cpp-objdump',)), 'CrocLexer': ('pygments.lexers.agile', 'Croc', ('croc',), ('*.croc',), ('text/x-crocsrc',)), 'CryptolLexer': ('pygments.lexers.functional', 'Cryptol', ('cryptol', 'cry'), ('*.cry',), ('text/x-cryptol',)), @@ -89,28 +89,28 @@ LEXERS = { 'CssLexer': ('pygments.lexers.web', 'CSS', ('css',), ('*.css',), ('text/css',)), 'CssPhpLexer': ('pygments.lexers.templates', 'CSS+PHP', ('css+php',), (), ('text/css+php',)), 'CssSmartyLexer': ('pygments.lexers.templates', 'CSS+Smarty', ('css+smarty',), (), ('text/css+smarty',)), - 'CudaLexer': ('pygments.lexers.compiled', 'CUDA', ('cuda', 'cu'), ('*.cu', '*.cuh'), ('text/x-cuda',)), + 'CudaLexer': ('pygments.lexers.c_like.other', 'CUDA', ('cuda', 'cu'), ('*.cu', '*.cuh'), ('text/x-cuda',)), 'CypherLexer': ('pygments.lexers.graph', 'Cypher', ('cypher',), 
('*.cyp', '*.cypher'), ()), - 'CythonLexer': ('pygments.lexers.compiled', 'Cython', ('cython', 'pyx', 'pyrex'), ('*.pyx', '*.pxd', '*.pxi'), ('text/x-cython', 'application/x-cython')), - 'DLexer': ('pygments.lexers.compiled', 'D', ('d',), ('*.d', '*.di'), ('text/x-dsrc',)), + 'CythonLexer': ('pygments.lexers.python', 'Cython', ('cython', 'pyx', 'pyrex'), ('*.pyx', '*.pxd', '*.pxi'), ('text/x-cython', 'application/x-cython')), + 'DLexer': ('pygments.lexers.c_like.d', 'D', ('d',), ('*.d', '*.di'), ('text/x-dsrc',)), 'DObjdumpLexer': ('pygments.lexers.asm', 'd-objdump', ('d-objdump',), ('*.d-objdump',), ('text/x-d-objdump',)), 'DarcsPatchLexer': ('pygments.lexers.text', 'Darcs Patch', ('dpatch',), ('*.dpatch', '*.darcspatch'), ()), 'DartLexer': ('pygments.lexers.web', 'Dart', ('dart',), ('*.dart',), ('text/x-dart',)), 'DebianControlLexer': ('pygments.lexers.text', 'Debian Control file', ('control', 'debcontrol'), ('control',), ()), - 'DelphiLexer': ('pygments.lexers.compiled', 'Delphi', ('delphi', 'pas', 'pascal', 'objectpascal'), ('*.pas',), ('text/x-pascal',)), + 'DelphiLexer': ('pygments.lexers.pascal', 'Delphi', ('delphi', 'pas', 'pascal', 'objectpascal'), ('*.pas',), ('text/x-pascal',)), 'DgLexer': ('pygments.lexers.agile', 'dg', ('dg',), ('*.dg',), ('text/x-dg',)), 'DiffLexer': ('pygments.lexers.text', 'Diff', ('diff', 'udiff'), ('*.diff', '*.patch'), ('text/x-diff', 'text/x-patch')), 'DjangoLexer': ('pygments.lexers.templates', 'Django/Jinja', ('django', 'jinja'), (), ('application/x-django-templating', 'application/x-jinja')), 'DockerLexer': ('pygments.lexers.text', 'Docker', ('docker', 'dockerfile'), ('Dockerfile', '*.docker'), ('text/x-dockerfile-config',)), 'DtdLexer': ('pygments.lexers.web', 'DTD', ('dtd',), ('*.dtd',), ('application/xml-dtd',)), 'DuelLexer': ('pygments.lexers.web', 'Duel', ('duel', 'jbst', 'jsonml+bst'), ('*.duel', '*.jbst'), ('text/x-duel', 'text/x-jbst')), - 'DylanConsoleLexer': ('pygments.lexers.compiled', 'Dylan session', ('dylan-console', 'dylan-repl'), ('*.dylan-console',), ('text/x-dylan-console',)), - 'DylanLexer': ('pygments.lexers.compiled', 'Dylan', ('dylan',), ('*.dylan', '*.dyl', '*.intr'), ('text/x-dylan',)), - 'DylanLidLexer': ('pygments.lexers.compiled', 'DylanLID', ('dylan-lid', 'lid'), ('*.lid', '*.hdp'), ('text/x-dylan-lid',)), + 'DylanConsoleLexer': ('pygments.lexers.misc.dylan', 'Dylan session', ('dylan-console', 'dylan-repl'), ('*.dylan-console',), ('text/x-dylan-console',)), + 'DylanLexer': ('pygments.lexers.misc.dylan', 'Dylan', ('dylan',), ('*.dylan', '*.dyl', '*.intr'), ('text/x-dylan',)), + 'DylanLidLexer': ('pygments.lexers.misc.dylan', 'DylanLID', ('dylan-lid', 'lid'), ('*.lid', '*.hdp'), ('text/x-dylan-lid',)), 'ECLLexer': ('pygments.lexers.other', 'ECL', ('ecl',), ('*.ecl',), ('application/x-ecl',)), - 'ECLexer': ('pygments.lexers.compiled', 'eC', ('ec',), ('*.ec', '*.eh'), ('text/x-echdr', 'text/x-ecsrc')), + 'ECLexer': ('pygments.lexers.c_like.other', 'eC', ('ec',), ('*.ec', '*.eh'), ('text/x-echdr', 'text/x-ecsrc')), 'EbnfLexer': ('pygments.lexers.text', 'EBNF', ('ebnf',), ('*.ebnf',), ('text/x-ebnf',)), - 'EiffelLexer': ('pygments.lexers.compiled', 'Eiffel', ('eiffel',), ('*.e',), ('text/x-eiffel',)), + 'EiffelLexer': ('pygments.lexers.eiffel', 'Eiffel', ('eiffel',), ('*.e',), ('text/x-eiffel',)), 'ElixirConsoleLexer': ('pygments.lexers.functional', 'Elixir iex session', ('iex',), (), ('text/x-elixir-shellsession',)), 'ElixirLexer': ('pygments.lexers.functional', 'Elixir', ('elixir', 'ex', 'exs'), ('*.ex', '*.exs'), 
('text/x-elixir',)), 'ErbLexer': ('pygments.lexers.templates', 'ERB', ('erb',), (), ('application/x-ruby-templating',)), @@ -122,19 +122,19 @@ LEXERS = { 'FSharpLexer': ('pygments.lexers.dotnet', 'FSharp', ('fsharp',), ('*.fs', '*.fsi'), ('text/x-fsharp',)), 'FactorLexer': ('pygments.lexers.agile', 'Factor', ('factor',), ('*.factor',), ('text/x-factor',)), 'FancyLexer': ('pygments.lexers.agile', 'Fancy', ('fancy', 'fy'), ('*.fy', '*.fancypack'), ('text/x-fancysrc',)), - 'FantomLexer': ('pygments.lexers.compiled', 'Fantom', ('fan',), ('*.fan',), ('application/x-fantom',)), - 'FelixLexer': ('pygments.lexers.compiled', 'Felix', ('felix', 'flx'), ('*.flx', '*.flxh'), ('text/x-felix',)), - 'FortranLexer': ('pygments.lexers.compiled', 'Fortran', ('fortran',), ('*.f', '*.f90', '*.F', '*.F90'), ('text/x-fortran',)), + 'FantomLexer': ('pygments.lexers.misc.fantom', 'Fantom', ('fan',), ('*.fan',), ('application/x-fantom',)), + 'FelixLexer': ('pygments.lexers.misc.felix', 'Felix', ('felix', 'flx'), ('*.flx', '*.flxh'), ('text/x-felix',)), + 'FortranLexer': ('pygments.lexers.fortran', 'Fortran', ('fortran',), ('*.f', '*.f90', '*.F', '*.F90'), ('text/x-fortran',)), 'FoxProLexer': ('pygments.lexers.foxpro', 'FoxPro', ('foxpro', 'vfp', 'clipper', 'xbase'), ('*.PRG', '*.prg'), ()), 'GAPLexer': ('pygments.lexers.math', 'GAP', ('gap',), ('*.g', '*.gd', '*.gi', '*.gap'), ()), - 'GLShaderLexer': ('pygments.lexers.compiled', 'GLSL', ('glsl',), ('*.vert', '*.frag', '*.geo'), ('text/x-glslsrc',)), + 'GLShaderLexer': ('pygments.lexers.graphics', 'GLSL', ('glsl',), ('*.vert', '*.frag', '*.geo'), ('text/x-glslsrc',)), 'GasLexer': ('pygments.lexers.asm', 'GAS', ('gas', 'asm'), ('*.s', '*.S'), ('text/x-gas',)), 'GenshiLexer': ('pygments.lexers.templates', 'Genshi', ('genshi', 'kid', 'xml+genshi', 'xml+kid'), ('*.kid',), ('application/x-genshi', 'application/x-kid')), 'GenshiTextLexer': ('pygments.lexers.templates', 'Genshi Text', ('genshitext',), (), ('application/x-genshi-text', 'text/x-genshi')), 'GettextLexer': ('pygments.lexers.text', 'Gettext Catalog', ('pot', 'po'), ('*.pot', '*.po'), ('application/x-gettext', 'text/x-gettext', 'text/gettext')), 'GherkinLexer': ('pygments.lexers.other', 'Gherkin', ('cucumber', 'gherkin'), ('*.feature',), ('text/x-gherkin',)), 'GnuplotLexer': ('pygments.lexers.other', 'Gnuplot', ('gnuplot',), ('*.plot', '*.plt'), ('text/x-gnuplot',)), - 'GoLexer': ('pygments.lexers.compiled', 'Go', ('go',), ('*.go',), ('text/x-gosrc',)), + 'GoLexer': ('pygments.lexers.c_like.go', 'Go', ('go',), ('*.go',), ('text/x-gosrc',)), 'GoloLexer': ('pygments.lexers.jvm', 'Golo', ('golo',), ('*.golo',), ()), 'GoodDataCLLexer': ('pygments.lexers.other', 'GoodData-CL', ('gooddata-cl',), ('*.gdc',), ('text/x-gooddata-cl',)), 'GosuLexer': ('pygments.lexers.jvm', 'Gosu', ('gosu',), ('*.gs', '*.gsx', '*.gsp', '*.vark'), ('text/x-gosu',)), @@ -158,9 +158,9 @@ LEXERS = { 'IDLLexer': ('pygments.lexers.math', 'IDL', ('idl',), ('*.pro',), ('text/idl',)), 'IdrisLexer': ('pygments.lexers.functional', 'Idris', ('idris', 'idr'), ('*.idr',), ('text/x-idris',)), 'IgorLexer': ('pygments.lexers.math', 'Igor', ('igor', 'igorpro'), ('*.ipf',), ('text/ipf',)), - 'Inform6Lexer': ('pygments.lexers.compiled', 'Inform 6', ('inform6', 'i6'), ('*.inf',), ()), - 'Inform6TemplateLexer': ('pygments.lexers.compiled', 'Inform 6 template', ('i6t',), ('*.i6t',), ()), - 'Inform7Lexer': ('pygments.lexers.compiled', 'Inform 7', ('inform7', 'i7'), ('*.ni', '*.i7x'), ()), + 'Inform6Lexer': ('pygments.lexers.inform', 'Inform 6', ('inform6', 
'i6'), ('*.inf',), ()), + 'Inform6TemplateLexer': ('pygments.lexers.inform', 'Inform 6 template', ('i6t',), ('*.i6t',), ()), + 'Inform7Lexer': ('pygments.lexers.inform', 'Inform 7', ('inform7', 'i7'), ('*.ni', '*.i7x'), ()), 'IniLexer': ('pygments.lexers.text', 'INI', ('ini', 'cfg', 'dosini'), ('*.ini', '*.cfg'), ('text/x-ini',)), 'IoLexer': ('pygments.lexers.agile', 'Io', ('io',), ('*.io',), ('text/x-iosrc',)), 'IokeLexer': ('pygments.lexers.jvm', 'Ioke', ('ioke', 'ik'), ('*.ik',), ('text/x-iokesrc',)), @@ -198,7 +198,7 @@ LEXERS = { 'LiterateIdrisLexer': ('pygments.lexers.functional', 'Literate Idris', ('lidr', 'literate-idris', 'lidris'), ('*.lidr',), ('text/x-literate-idris',)), 'LiveScriptLexer': ('pygments.lexers.web', 'LiveScript', ('live-script', 'livescript'), ('*.ls',), ('text/livescript',)), 'LlvmLexer': ('pygments.lexers.asm', 'LLVM', ('llvm',), ('*.ll',), ('text/x-llvm',)), - 'LogosLexer': ('pygments.lexers.compiled', 'Logos', ('logos',), ('*.x', '*.xi', '*.xm', '*.xmi'), ('text/x-logos',)), + 'LogosLexer': ('pygments.lexers.c_like.objective', 'Logos', ('logos',), ('*.x', '*.xi', '*.xm', '*.xmi'), ('text/x-logos',)), 'LogtalkLexer': ('pygments.lexers.other', 'Logtalk', ('logtalk',), ('*.lgt',), ('text/x-logtalk',)), 'LuaLexer': ('pygments.lexers.agile', 'Lua', ('lua',), ('*.lua', '*.wlua'), ('text/x-lua', 'application/x-lua')), 'MOOCodeLexer': ('pygments.lexers.other', 'MOOCode', ('moocode', 'moo'), ('*.moo',), ('text/x-moocode',)), @@ -216,11 +216,11 @@ LEXERS = { 'MatlabSessionLexer': ('pygments.lexers.math', 'Matlab session', ('matlabsession',), (), ()), 'MiniDLexer': ('pygments.lexers.agile', 'MiniD', ('minid',), ('*.md',), ('text/x-minidsrc',)), 'ModelicaLexer': ('pygments.lexers.other', 'Modelica', ('modelica',), ('*.mo',), ('text/x-modelica',)), - 'Modula2Lexer': ('pygments.lexers.compiled', 'Modula-2', ('modula2', 'm2'), ('*.def', '*.mod'), ('text/x-modula2',)), + 'Modula2Lexer': ('pygments.lexers.pascal', 'Modula-2', ('modula2', 'm2'), ('*.def', '*.mod'), ('text/x-modula2',)), 'MoinWikiLexer': ('pygments.lexers.text', 'MoinMoin/Trac Wiki markup', ('trac-wiki', 'moin'), (), ('text/x-trac-wiki',)), - 'MonkeyLexer': ('pygments.lexers.compiled', 'Monkey', ('monkey',), ('*.monkey',), ('text/x-monkey',)), + 'MonkeyLexer': ('pygments.lexers.misc.blitz', 'Monkey', ('monkey',), ('*.monkey',), ('text/x-monkey',)), 'MoonScriptLexer': ('pygments.lexers.agile', 'MoonScript', ('moon', 'moonscript'), ('*.moon',), ('text/x-moonscript', 'application/x-moonscript')), - 'MqlLexer': ('pygments.lexers.compiled', 'MQL', ('mql', 'mq4', 'mq5', 'mql4', 'mql5'), ('*.mq4', '*.mq5', '*.mqh'), ('text/x-mql',)), + 'MqlLexer': ('pygments.lexers.c_like.other', 'MQL', ('mql', 'mq4', 'mq5', 'mql4', 'mql5'), ('*.mq4', '*.mq5', '*.mqh'), ('text/x-mql',)), 'MscgenLexer': ('pygments.lexers.other', 'Mscgen', ('mscgen', 'msc'), ('*.msc',), ()), 'MuPADLexer': ('pygments.lexers.math', 'MuPAD', ('mupad',), ('*.mu',), ()), 'MxmlLexer': ('pygments.lexers.web', 'MXML', ('mxml',), ('*.mxml',), ()), @@ -234,21 +234,21 @@ LEXERS = { 'NasmLexer': ('pygments.lexers.asm', 'NASM', ('nasm',), ('*.asm', '*.ASM'), ('text/x-nasm',)), 'NasmObjdumpLexer': ('pygments.lexers.asm', 'objdump-nasm', ('objdump-nasm',), ('*.objdump-intel',), ('text/x-nasm-objdump',)), 'NemerleLexer': ('pygments.lexers.dotnet', 'Nemerle', ('nemerle',), ('*.n',), ('text/x-nemerle',)), - 'NesCLexer': ('pygments.lexers.compiled', 'nesC', ('nesc',), ('*.nc',), ('text/x-nescsrc',)), + 'NesCLexer': ('pygments.lexers.c_like.other', 'nesC', ('nesc',), 
('*.nc',), ('text/x-nescsrc',)), 'NewLispLexer': ('pygments.lexers.functional', 'NewLisp', ('newlisp',), ('*.lsp', '*.nl'), ('text/x-newlisp', 'application/x-newlisp')), 'NewspeakLexer': ('pygments.lexers.other', 'Newspeak', ('newspeak',), ('*.ns2',), ('text/x-newspeak',)), 'NginxConfLexer': ('pygments.lexers.text', 'Nginx configuration file', ('nginx',), (), ('text/x-nginx-conf',)), - 'NimrodLexer': ('pygments.lexers.compiled', 'Nimrod', ('nimrod', 'nim'), ('*.nim', '*.nimrod'), ('text/x-nimrod',)), - 'NitLexer': ('pygments.lexers.compiled', 'Nit', ('nit',), ('*.nit',), ()), + 'NimrodLexer': ('pygments.lexers.misc.nimrod', 'Nimrod', ('nimrod', 'nim'), ('*.nim', '*.nimrod'), ('text/x-nimrod',)), + 'NitLexer': ('pygments.lexers.misc.nit', 'Nit', ('nit',), ('*.nit',), ()), 'NixLexer': ('pygments.lexers.functional', 'Nix', ('nixos', 'nix'), ('*.nix',), ('text/x-nix',)), 'NumPyLexer': ('pygments.lexers.math', 'NumPy', ('numpy',), (), ()), 'ObjdumpLexer': ('pygments.lexers.asm', 'objdump', ('objdump',), ('*.objdump',), ('text/x-objdump',)), - 'ObjectiveCLexer': ('pygments.lexers.compiled', 'Objective-C', ('objective-c', 'objectivec', 'obj-c', 'objc'), ('*.m', '*.h'), ('text/x-objective-c',)), - 'ObjectiveCppLexer': ('pygments.lexers.compiled', 'Objective-C++', ('objective-c++', 'objectivec++', 'obj-c++', 'objc++'), ('*.mm', '*.hh'), ('text/x-objective-c++',)), + 'ObjectiveCLexer': ('pygments.lexers.c_like.objective', 'Objective-C', ('objective-c', 'objectivec', 'obj-c', 'objc'), ('*.m', '*.h'), ('text/x-objective-c',)), + 'ObjectiveCppLexer': ('pygments.lexers.c_like.objective', 'Objective-C++', ('objective-c++', 'objectivec++', 'obj-c++', 'objc++'), ('*.mm', '*.hh'), ('text/x-objective-c++',)), 'ObjectiveJLexer': ('pygments.lexers.web', 'Objective-J', ('objective-j', 'objectivej', 'obj-j', 'objj'), ('*.j',), ('text/x-objective-j',)), 'OcamlLexer': ('pygments.lexers.functional', 'OCaml', ('ocaml',), ('*.ml', '*.mli', '*.mll', '*.mly'), ('text/x-ocaml',)), 'OctaveLexer': ('pygments.lexers.math', 'Octave', ('octave',), ('*.m',), ('text/octave',)), - 'OocLexer': ('pygments.lexers.compiled', 'Ooc', ('ooc',), ('*.ooc',), ('text/x-ooc',)), + 'OocLexer': ('pygments.lexers.misc.ooc', 'Ooc', ('ooc',), ('*.ooc',), ('text/x-ooc',)), 'OpaLexer': ('pygments.lexers.functional', 'Opa', ('opa',), ('*.opa',), ('text/x-opa',)), 'OpenEdgeLexer': ('pygments.lexers.other', 'OpenEdge ABL', ('openedge', 'abl', 'progress'), ('*.p', '*.cls'), ('text/x-openedge', 'application/x-openedge')), 'PanLexer': ('pygments.lexers.other', 'Pan', ('pan',), ('*.pan',), ()), @@ -257,14 +257,14 @@ LEXERS = { 'PerlLexer': ('pygments.lexers.agile', 'Perl', ('perl', 'pl'), ('*.pl', '*.pm', '*.t'), ('text/x-perl', 'application/x-perl')), 'PhpLexer': ('pygments.lexers.web', 'PHP', ('php', 'php3', 'php4', 'php5'), ('*.php', '*.php[345]', '*.inc'), ('text/x-php',)), 'PigLexer': ('pygments.lexers.jvm', 'Pig', ('pig',), ('*.pig',), ('text/x-pig',)), - 'PikeLexer': ('pygments.lexers.compiled', 'Pike', ('pike',), ('*.pike', '*.pmod'), ('text/x-pike',)), + 'PikeLexer': ('pygments.lexers.c_like.other', 'Pike', ('pike',), ('*.pike', '*.pmod'), ('text/x-pike',)), 'PlPgsqlLexer': ('pygments.lexers.sql', 'PL/pgSQL', ('plpgsql',), (), ('text/x-plpgsql',)), 'PostScriptLexer': ('pygments.lexers.other', 'PostScript', ('postscript', 'postscr'), ('*.ps', '*.eps'), ('application/postscript',)), 'PostgresConsoleLexer': ('pygments.lexers.sql', 'PostgreSQL console (psql)', ('psql', 'postgresql-console', 'postgres-console'), (), ('text/x-postgresql-psql',)), 
'PostgresLexer': ('pygments.lexers.sql', 'PostgreSQL SQL dialect', ('postgresql', 'postgres'), (), ('text/x-postgresql',)), 'PovrayLexer': ('pygments.lexers.other', 'POVRay', ('pov',), ('*.pov', '*.inc'), ('text/x-povray',)), 'PowerShellLexer': ('pygments.lexers.shell', 'PowerShell', ('powershell', 'posh', 'ps1', 'psm1'), ('*.ps1', '*.psm1'), ('text/x-powershell',)), - 'PrologLexer': ('pygments.lexers.compiled', 'Prolog', ('prolog',), ('*.prolog', '*.pro', '*.pl'), ('text/x-prolog',)), + 'PrologLexer': ('pygments.lexers.prolog', 'Prolog', ('prolog',), ('*.prolog', '*.pro', '*.pl'), ('text/x-prolog',)), 'PropertiesLexer': ('pygments.lexers.text', 'Properties', ('properties', 'jproperties'), ('*.properties',), ('text/x-java-properties',)), 'ProtoBufLexer': ('pygments.lexers.other', 'Protocol Buffer', ('protobuf', 'proto'), ('*.proto',), ()), 'PuppetLexer': ('pygments.lexers.other', 'Puppet', ('puppet',), ('*.pp',), ()), @@ -301,7 +301,7 @@ LEXERS = { 'RstLexer': ('pygments.lexers.text', 'reStructuredText', ('rst', 'rest', 'restructuredtext'), ('*.rst', '*.rest'), ('text/x-rst', 'text/prs.fallenstein.rst')), 'RubyConsoleLexer': ('pygments.lexers.agile', 'Ruby irb session', ('rbcon', 'irb'), (), ('text/x-ruby-shellsession',)), 'RubyLexer': ('pygments.lexers.agile', 'Ruby', ('rb', 'ruby', 'duby'), ('*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec', '*.rbx', '*.duby'), ('text/x-ruby', 'application/x-ruby')), - 'RustLexer': ('pygments.lexers.compiled', 'Rust', ('rust',), ('*.rs',), ('text/x-rustsrc',)), + 'RustLexer': ('pygments.lexers.c_like.rust', 'Rust', ('rust',), ('*.rs',), ('text/x-rustsrc',)), 'SLexer': ('pygments.lexers.math', 'S', ('splus', 's', 'r'), ('*.S', '*.R', '.Rhistory', '.Rprofile', '.Renviron'), ('text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r', 'text/x-R', 'text/x-r-history', 'text/x-r-profile')), 'SMLLexer': ('pygments.lexers.functional', 'Standard ML', ('sml',), ('*.sml', '*.sig', '*.fun'), ('text/x-standardml', 'application/x-standardml')), 'SassLexer': ('pygments.lexers.web', 'Sass', ('sass',), ('*.sass',), ('text/x-sass',)), @@ -324,8 +324,8 @@ LEXERS = { 'SquidConfLexer': ('pygments.lexers.text', 'SquidConf', ('squidconf', 'squid.conf', 'squid'), ('squid.conf',), ('text/x-squidconf',)), 'SspLexer': ('pygments.lexers.templates', 'Scalate Server Page', ('ssp',), ('*.ssp',), ('application/x-ssp',)), 'StanLexer': ('pygments.lexers.math', 'Stan', ('stan',), ('*.stan',), ()), - 'SwiftLexer': ('pygments.lexers.compiled', 'Swift', ('swift',), ('*.swift',), ('text/x-swift',)), - 'SwigLexer': ('pygments.lexers.compiled', 'SWIG', ('swig',), ('*.swg', '*.i'), ('text/swig',)), + 'SwiftLexer': ('pygments.lexers.c_like.objective', 'Swift', ('swift',), ('*.swift',), ('text/x-swift',)), + 'SwigLexer': ('pygments.lexers.c_like.other', 'SWIG', ('swig',), ('*.swg', '*.i'), ('text/swig',)), 'SystemVerilogLexer': ('pygments.lexers.hdl', 'systemverilog', ('systemverilog', 'sv'), ('*.sv', '*.svh'), ('text/x-systemverilog',)), 'TclLexer': ('pygments.lexers.agile', 'Tcl', ('tcl',), ('*.tcl',), ('text/x-tcl', 'text/x-script.tcl', 'application/x-tcl')), 'TcshLexer': ('pygments.lexers.shell', 'Tcsh', ('tcsh', 'csh'), ('*.tcsh', '*.csh'), ('application/x-csh',)), @@ -338,7 +338,7 @@ LEXERS = { 'UrbiscriptLexer': ('pygments.lexers.other', 'UrbiScript', ('urbiscript',), ('*.u',), ('application/x-urbiscript',)), 'VCTreeStatusLexer': ('pygments.lexers.other', 'VCTreeStatus', ('vctreestatus',), (), ()), 'VGLLexer': ('pygments.lexers.other', 'VGL', ('vgl',), ('*.rpf',), ()), - 'ValaLexer': 
('pygments.lexers.compiled', 'Vala', ('vala', 'vapi'), ('*.vala', '*.vapi'), ('text/x-vala',)), + 'ValaLexer': ('pygments.lexers.c_like.other', 'Vala', ('vala', 'vapi'), ('*.vala', '*.vapi'), ('text/x-vala',)), 'VbNetAspxLexer': ('pygments.lexers.dotnet', 'aspx-vb', ('aspx-vb',), ('*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd'), ()), 'VbNetLexer': ('pygments.lexers.dotnet', 'VB.net', ('vb.net', 'vbnet'), ('*.vb', '*.bas'), ('text/x-vbnet', 'text/x-vba')), 'VelocityHtmlLexer': ('pygments.lexers.templates', 'HTML+Velocity', ('html+velocity',), (), ('text/html+velocity',)), diff --git a/pygments/lexers/c_like/__init__.py b/pygments/lexers/c_like/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/pygments/lexers/c_like/c_cpp.py b/pygments/lexers/c_like/c_cpp.py new file mode 100644 index 00000000..88a5bb8a --- /dev/null +++ b/pygments/lexers/c_like/c_cpp.py @@ -0,0 +1,231 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.c_like.c_cpp + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for C/C++ languages. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import re + +from pygments.lexer import RegexLexer, include, bygroups, using, \ + this, inherit, default, words +from pygments.util import get_bool_opt +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation, Error + +__all__ = ['CLexer', 'CppLexer'] + + +class CFamilyLexer(RegexLexer): + """ + For C family source code. This is used as a base class to avoid repetitious + definitions. + """ + + #: optional Comment or Whitespace + _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+' + #: only one /* */ style comment + _ws1 = r'\s*(?:/[*].*?[*]/\s*)*' + + tokens = { + 'whitespace': [ + # preprocessor directives: without whitespace + ('^#if\s+0', Comment.Preproc, 'if0'), + ('^#', Comment.Preproc, 'macro'), + # or with whitespace + ('^(' + _ws1 + r')(#if\s+0)', + bygroups(using(this), Comment.Preproc), 'if0'), + ('^(' + _ws1 + ')(#)', + bygroups(using(this), Comment.Preproc), 'macro'), + (r'\n', Text), + (r'\s+', Text), + (r'\\\n', Text), # line continuation + (r'//(\n|(.|\n)*?[^\\]\n)', Comment.Single), + (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline), + ], + 'statements': [ + (r'L?"', String, 'string'), + (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char), + (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float), + (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float), + (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex), + (r'0[0-7]+[LlUu]*', Number.Oct), + (r'\d+[LlUu]*', Number.Integer), + (r'\*/', Error), + (r'[~!%^&*+=|?:<>/-]', Operator), + (r'[()\[\],.]', Punctuation), + (words(('auto', 'break', 'case', 'const', 'continue', 'default', 'do', + 'else', 'enum', 'extern', 'for', 'goto', 'if', 'register', + 'restricted', 'return', 'sizeof', 'static', 'struct', + 'switch', 'typedef', 'union', 'volatile', 'while'), + suffix=r'\b'), Keyword), + (r'(bool|int|long|float|short|double|char|unsigned|signed|void|' + r'[a-z_][a-z0-9_]*_t)\b', + Keyword.Type), + (words(('inline', '_inline', '__inline', 'naked', 'restrict', + 'thread', 'typename'), suffix=r'\b'), Keyword.Reserved), + # Vector intrinsics + (r'(__m(128i|128d|128|64))\b', Keyword.Reserved), + # Microsoft-isms + (words(( + 'asm', 'int8', 'based', 'except', 'int16', 'stdcall', 'cdecl', + 'fastcall', 'int32', 'declspec', 'finally', 'int64', 'try', + 'leave', 'wchar_t', 'w64', 'unaligned', 'raise', 'noop', + 'identifier', 'forceinline', 'assume'), + prefix=r'__', 
suffix=r'\b'), Keyword.Reserved), + (r'(true|false|NULL)\b', Name.Builtin), + (r'([a-zA-Z_]\w*)(\s*)(:)(?!:)', bygroups(Name.Label, Text, Punctuation)), + ('[a-zA-Z_]\w*', Name), + ], + 'root': [ + include('whitespace'), + # functions + (r'((?:[\w*\s])+?(?:\s|[*]))' # return arguments + r'([a-zA-Z_]\w*)' # method name + r'(\s*\([^;]*?\))' # signature + r'(' + _ws + r')?({)', + bygroups(using(this), Name.Function, using(this), using(this), + Punctuation), + 'function'), + # function declarations + (r'((?:[\w*\s])+?(?:\s|[*]))' # return arguments + r'([a-zA-Z_]\w*)' # method name + r'(\s*\([^;]*?\))' # signature + r'(' + _ws + r')?(;)', + bygroups(using(this), Name.Function, using(this), using(this), + Punctuation)), + default('statement'), + ], + 'statement': [ + include('whitespace'), + include('statements'), + ('[{}]', Punctuation), + (';', Punctuation, '#pop'), + ], + 'function': [ + include('whitespace'), + include('statements'), + (';', Punctuation), + ('{', Punctuation, '#push'), + ('}', Punctuation, '#pop'), + ], + 'string': [ + (r'"', String, '#pop'), + (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|' + r'u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})', String.Escape), + (r'[^\\"\n]+', String), # all other characters + (r'\\\n', String), # line continuation + (r'\\', String), # stray backslash + ], + 'macro': [ + (r'[^/\n]+', Comment.Preproc), + (r'/[*](.|\n)*?[*]/', Comment.Multiline), + (r'//.*?\n', Comment.Single, '#pop'), + (r'/', Comment.Preproc), + (r'(?<=\\)\n', Comment.Preproc), + (r'\n', Comment.Preproc, '#pop'), + ], + 'if0': [ + (r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'), + (r'^\s*#el(?:se|if).*\n', Comment.Preproc, '#pop'), + (r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'), + (r'.*?\n', Comment), + ], + } + + def analyse_text(text): + if re.search('#include <[a-z]+>', text): + return 0.2 + if re.search('using namespace ', text): + return 0.4 diff --git a/pygments/lexers/c_like/d.py b/pygments/lexers/c_like/d.py new file mode 100644 index 00000000..e4aadd40 --- /dev/null +++ b/pygments/lexers/c_like/d.py @@ -0,0 +1,179 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.c_like.d + ~~~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for D languages. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +from pygments.lexer import RegexLexer, include, words +from pygments.token import Text, Comment, Keyword, Name, String, \ + Number, Punctuation + +__all__ = ['DLexer'] + + +class DLexer(RegexLexer): + """ + For D source. + + .. 
versionadded:: 1.2 + """ + name = 'D' + filenames = ['*.d', '*.di'] + aliases = ['d'] + mimetypes = ['text/x-dsrc'] + + tokens = { + 'root': [ + (r'\n', Text), + (r'\s+', Text), + # (r'\\\n', Text), # line continuations + # Comments + (r'//(.*?)\n', Comment.Single), + (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline), + (r'/\+', Comment.Multiline, 'nested_comment'), + # Keywords + (words(( + 'abstract', 'alias', 'align', 'asm', 'assert', 'auto', 'body', + 'break', 'case', 'cast', 'catch', 'class', 'const', 'continue', + 'debug', 'default', 'delegate', 'delete', 'deprecated', 'do', 'else', + 'enum', 'export', 'extern', 'finally', 'final', 'foreach_reverse', + 'foreach', 'for', 'function', 'goto', 'if', 'immutable', 'import', + 'interface', 'invariant', 'inout', 'in', 'is', 'lazy', 'mixin', + 'module', 'new', 'nothrow', 'out', 'override', 'package', 'pragma', + 'private', 'protected', 'public', 'pure', 'ref', 'return', 'scope', + 'shared', 'static', 'struct', 'super', 'switch', 'synchronized', + 'template', 'this', 'throw', 'try', 'typedef', 'typeid', 'typeof', + 'union', 'unittest', 'version', 'volatile', 'while', 'with', + '__gshared', '__traits', '__vector', '__parameters'), + suffix=r'\b'), + Keyword), + (words(( + 'bool', 'byte', 'cdouble', 'cent', 'cfloat', 'char', 'creal', + 'dchar', 'double', 'float', 'idouble', 'ifloat', 'int', 'ireal', + 'long', 'real', 'short', 'ubyte', 'ucent', 'uint', 'ulong', + 'ushort', 'void', 'wchar'), suffix=r'\b'), + Keyword.Type), + (r'(false|true|null)\b', Keyword.Constant), + (words(( + '__FILE__', '__MODULE__', '__LINE__', '__FUNCTION__', '__PRETTY_FUNCTION__' + '', '__DATE__', '__EOF__', '__TIME__', '__TIMESTAMP__', '__VENDOR__', + '__VERSION__'), suffix=r'\b'), + Keyword.Pseudo), + (r'macro\b', Keyword.Reserved), + (r'(string|wstring|dstring|size_t|ptrdiff_t)\b', Name.Builtin), + # FloatLiteral + # -- HexFloat + (r'0[xX]([0-9a-fA-F_]*\.[0-9a-fA-F_]+|[0-9a-fA-F_]+)' + r'[pP][+\-]?[0-9_]+[fFL]?[i]?', Number.Float), + # -- DecimalFloat + (r'[0-9_]+(\.[0-9_]+[eE][+\-]?[0-9_]+|' + r'\.[0-9_]*|[eE][+\-]?[0-9_]+)[fFL]?[i]?', Number.Float), + (r'\.(0|[1-9][0-9_]*)([eE][+\-]?[0-9_]+)?[fFL]?[i]?', Number.Float), + # IntegerLiteral + # -- Binary + (r'0[Bb][01_]+', Number.Bin), + # -- Octal + (r'0[0-7_]+', Number.Oct), + # -- Hexadecimal + (r'0[xX][0-9a-fA-F_]+', Number.Hex), + # -- Decimal + (r'(0|[1-9][0-9_]*)([LUu]|Lu|LU|uL|UL)?', Number.Integer), + # CharacterLiteral + (r"""'(\\['"?\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}""" + r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|\\&\w+;|.)'""", + String.Char), + # StringLiteral + # -- WysiwygString + (r'r"[^"]*"[cwd]?', String), + # -- AlternateWysiwygString + (r'`[^`]*`[cwd]?', String), + # -- DoubleQuotedString + (r'"(\\\\|\\"|[^"])*"[cwd]?', String), + # -- EscapeSequence + (r"\\(['\"?\\abfnrtv]|x[0-9a-fA-F]{2}|[0-7]{1,3}" + r"|u[0-9a-fA-F]{4}|U[0-9a-fA-F]{8}|&\w+;)", + String), + # -- HexString + (r'x"[0-9a-fA-F_\s]*"[cwd]?', String), + # -- DelimitedString + (r'q"\[', String, 'delimited_bracket'), + (r'q"\(', String, 'delimited_parenthesis'), + (r'q"<', String, 'delimited_angle'), + (r'q"{', String, 'delimited_curly'), + (r'q"([a-zA-Z_]\w*)\n.*?\n\1"', String), + (r'q"(.).*?\1"', String), + # -- TokenString + (r'q{', String, 'token_string'), + # Attributes + (r'@([a-zA-Z_]\w*)?', Name.Decorator), + # Tokens + (r'(~=|\^=|%=|\*=|==|!>=|!<=|!<>=|!<>|!<|!>|!=|>>>=|>>>|>>=|>>|>=' + r'|<>=|<>|<<=|<<|<=|\+\+|\+=|--|-=|\|\||\|=|&&|&=|\.\.\.|\.\.|/=)' + r'|[/.&|\-+<>!()\[\]{}?,;:$=*%^~]', Punctuation), + # Identifier 
+ (r'[a-zA-Z_]\w*', Name), + # Line + (r'#line\s.*\n', Comment.Special), + ], + 'nested_comment': [ + (r'[^+/]+', Comment.Multiline), + (r'/\+', Comment.Multiline, '#push'), + (r'\+/', Comment.Multiline, '#pop'), + (r'[+/]', Comment.Multiline), + ], + 'token_string': [ + (r'{', Punctuation, 'token_string_nest'), + (r'}', String, '#pop'), + include('root'), + ], + 'token_string_nest': [ + (r'{', Punctuation, '#push'), + (r'}', Punctuation, '#pop'), + include('root'), + ], + 'delimited_bracket': [ + (r'[^\[\]]+', String), + (r'\[', String, 'delimited_inside_bracket'), + (r'\]"', String, '#pop'), + ], + 'delimited_inside_bracket': [ + (r'[^\[\]]+', String), + (r'\[', String, '#push'), + (r'\]', String, '#pop'), + ], + 'delimited_parenthesis': [ + (r'[^\(\)]+', String), + (r'\(', String, 'delimited_inside_parenthesis'), + (r'\)"', String, '#pop'), + ], + 'delimited_inside_parenthesis': [ + (r'[^\(\)]+', String), + (r'\(', String, '#push'), + (r'\)', String, '#pop'), + ], + 'delimited_angle': [ + (r'[^<>]+', String), + (r'<', String, 'delimited_inside_angle'), + (r'>"', String, '#pop'), + ], + 'delimited_inside_angle': [ + (r'[^<>]+', String), + (r'<', String, '#push'), + (r'>', String, '#pop'), + ], + 'delimited_curly': [ + (r'[^{}]+', String), + (r'{', String, 'delimited_inside_curly'), + (r'}"', String, '#pop'), + ], + 'delimited_inside_curly': [ + (r'[^{}]+', String), + (r'{', String, '#push'), + (r'}', String, '#pop'), + ], + } diff --git a/pygments/lexers/c_like/go.py b/pygments/lexers/c_like/go.py new file mode 100644 index 00000000..04e2dcab --- /dev/null +++ b/pygments/lexers/c_like/go.py @@ -0,0 +1,101 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.c_like.go + ~~~~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for Go languages. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import re + +from pygments.lexer import RegexLexer, bygroups, words +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation + +__all__ = ['GoLexer'] + + +class GoLexer(RegexLexer): + """ + For `Go `_ source. + + .. versionadded:: 1.2 + """ + name = 'Go' + filenames = ['*.go'] + aliases = ['go'] + mimetypes = ['text/x-gosrc'] + + flags = re.MULTILINE | re.UNICODE + + tokens = { + 'root': [ + (r'\n', Text), + (r'\s+', Text), + (r'\\\n', Text), # line continuations + (r'//(.*?)\n', Comment.Single), + (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline), + (r'(import|package)\b', Keyword.Namespace), + (r'(var|func|struct|map|chan|type|interface|const)\b', + Keyword.Declaration), + (words(( + 'break', 'default', 'select', 'case', 'defer', 'go', + 'else', 'goto', 'switch', 'fallthrough', 'if', 'range', + 'continue', 'for', 'return'), suffix=r'\b'), + Keyword), + (r'(true|false|iota|nil)\b', Keyword.Constant), + # It seems the builtin types aren't actually keywords, but + # can be used as functions. So we need two declarations. 
+ (words(( + 'uint', 'uint8', 'uint16', 'uint32', 'uint64', + 'int', 'int8', 'int16', 'int32', 'int64', + 'float', 'float32', 'float64', + 'complex64', 'complex128', 'byte', 'rune', + 'string', 'bool', 'error', 'uintptr', + 'print', 'println', 'panic', 'recover', 'close', 'complex', + 'real', 'imag', 'len', 'cap', 'append', 'copy', 'delete', + 'new', 'make'), suffix=r'\b(\()'), + bygroups(Name.Builtin, Punctuation)), + (words(( + 'uint', 'uint8', 'uint16', 'uint32', 'uint64', + 'int', 'int8', 'int16', 'int32', 'int64', + 'float', 'float32', 'float64', + 'complex64', 'complex128', 'byte', 'rune', + 'string', 'bool', 'error', 'uintptr'), suffix=r'\b'), + Keyword.Type), + # imaginary_lit + (r'\d+i', Number), + (r'\d+\.\d*([Ee][-+]\d+)?i', Number), + (r'\.\d+([Ee][-+]\d+)?i', Number), + (r'\d+[Ee][-+]\d+i', Number), + # float_lit + (r'\d+(\.\d+[eE][+\-]?\d+|' + r'\.\d*|[eE][+\-]?\d+)', Number.Float), + (r'\.\d+([eE][+\-]?\d+)?', Number.Float), + # int_lit + # -- octal_lit + (r'0[0-7]+', Number.Oct), + # -- hex_lit + (r'0[xX][0-9a-fA-F]+', Number.Hex), + # -- decimal_lit + (r'(0|[1-9][0-9]*)', Number.Integer), + # char_lit + (r"""'(\\['"\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}""" + r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|[^\\])'""", + String.Char), + # StringLiteral + # -- raw_string_lit + (r'`[^`]*`', String), + # -- interpreted_string_lit + (r'"(\\\\|\\"|[^"])*"', String), + # Tokens + (r'(<<=|>>=|<<|>>|<=|>=|&\^=|&\^|\+=|-=|\*=|/=|%=|&=|\|=|&&|\|\|' + r'|<-|\+\+|--|==|!=|:=|\.\.\.|[+\-*/%&])', Operator), + (r'[|^<>=!()\[\]{}.,;:]', Punctuation), + # identifier + (r'[^\W\d]\w*', Name.Other), + ] + } diff --git a/pygments/lexers/c_like/objective.py b/pygments/lexers/c_like/objective.py new file mode 100644 index 00000000..6f34bc06 --- /dev/null +++ b/pygments/lexers/c_like/objective.py @@ -0,0 +1,322 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.c_like.objective + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for Objective-C family languages. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import re + +from pygments.lexer import include, bygroups, using, this, words, inherit +from pygments.token import Text, Keyword, Name, String, Operator, \ + Number, Punctuation, Literal + +from pygments.lexers.c_like.c_cpp import CLexer, CppLexer + +__all__ = ['ObjectiveCLexer', 'ObjectiveCppLexer', 'LogosLexer', 'SwiftLexer'] + + +def objective(baselexer): + """ + Generate a subclass of baselexer that accepts the Objective-C syntax + extensions. + """ + + # Have to be careful not to accidentally match JavaDoc/Doxygen syntax here, + # since that's quite common in ordinary C/C++ files. It's OK to match + # JavaDoc/Doxygen keywords that only apply to Objective-C, mind. + # + # The upshot of this is that we CANNOT match @class or @interface + _oc_keywords = re.compile(r'@(?:end|implementation|protocol)') + + # Matches [ ? identifier ( identifier ? ] | identifier? : ) + # (note the identifier is *optional* when there is a ':'!) + _oc_message = re.compile(r'\[\s*[a-zA-Z_]\w*\s+' + r'(?:[a-zA-Z_]\w*\s*\]|' + r'(?:[a-zA-Z_]\w*)?:)') + + class GeneratedObjectiveCVariant(baselexer): + """ + Implements Objective-C syntax on top of an existing C family lexer. 
+ """ + + tokens = { + 'statements': [ + (r'@"', String, 'string'), + (r'@(YES|NO)', Number), + (r"@'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char), + (r'@(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float), + (r'@(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float), + (r'@0x[0-9a-fA-F]+[Ll]?', Number.Hex), + (r'@0[0-7]+[Ll]?', Number.Oct), + (r'@\d+[Ll]?', Number.Integer), + (r'@\(', Literal, 'literal_number'), + (r'@\[', Literal, 'literal_array'), + (r'@\{', Literal, 'literal_dictionary'), + (words(( + '@selector', '@private', '@protected', '@public', '@encode', + '@synchronized', '@try', '@throw', '@catch', '@finally', + '@end', '@property', '@synthesize', '__bridge', '__bridge_transfer', + '__autoreleasing', '__block', '__weak', '__strong', 'weak', 'strong', + 'copy', 'retain', 'assign', 'unsafe_unretained', 'atomic', 'nonatomic', + 'readonly', 'readwrite', 'setter', 'getter', 'typeof', 'in', + 'out', 'inout', 'release', 'class', '@dynamic', '@optional', + '@required', '@autoreleasepool'), suffix=r'\b'), + Keyword), + (words(('id', 'instancetype', 'Class', 'IMP', 'SEL', 'BOOL', + 'IBOutlet', 'IBAction', 'unichar'), suffix=r'\b'), + Keyword.Type), + (r'@(true|false|YES|NO)\n', Name.Builtin), + (r'(YES|NO|nil|self|super)\b', Name.Builtin), + # Carbon types + (r'(Boolean|UInt8|SInt8|UInt16|SInt16|UInt32|SInt32)\b', Keyword.Type), + # Carbon built-ins + (r'(TRUE|FALSE)\b', Name.Builtin), + (r'(@interface|@implementation)(\s+)', bygroups(Keyword, Text), + ('#pop', 'oc_classname')), + (r'(@class|@protocol)(\s+)', bygroups(Keyword, Text), + ('#pop', 'oc_forward_classname')), + # @ can also prefix other expressions like @{...} or @(...) + (r'@', Punctuation), + inherit, + ], + 'oc_classname': [ + # interface definition that inherits + ('([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?(\s*)({)', + bygroups(Name.Class, Text, Name.Class, Text, Punctuation), + ('#pop', 'oc_ivars')), + ('([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?', + bygroups(Name.Class, Text, Name.Class), '#pop'), + # interface definition for a category + ('([a-zA-Z$_][\w$]*)(\s*)(\([a-zA-Z$_][\w$]*\))(\s*)({)', + bygroups(Name.Class, Text, Name.Label, Text, Punctuation), + ('#pop', 'oc_ivars')), + ('([a-zA-Z$_][\w$]*)(\s*)(\([a-zA-Z$_][\w$]*\))', + bygroups(Name.Class, Text, Name.Label), '#pop'), + # simple interface / implementation + ('([a-zA-Z$_][\w$]*)(\s*)({)', + bygroups(Name.Class, Text, Punctuation), ('#pop', 'oc_ivars')), + ('([a-zA-Z$_][\w$]*)', Name.Class, '#pop') + ], + 'oc_forward_classname': [ + ('([a-zA-Z$_][\w$]*)(\s*,\s*)', + bygroups(Name.Class, Text), 'oc_forward_classname'), + ('([a-zA-Z$_][\w$]*)(\s*;?)', + bygroups(Name.Class, Text), '#pop') + ], + 'oc_ivars': [ + include('whitespace'), + include('statements'), + (';', Punctuation), + ('{', Punctuation, '#push'), + ('}', Punctuation, '#pop'), + ], + 'root': [ + # methods + (r'^([-+])(\s*)' # method marker + r'(\(.*?\))?(\s*)' # return type + r'([a-zA-Z$_][\w$]*:?)', # begin of method name + bygroups(Punctuation, Text, using(this), + Text, Name.Function), + 'method'), + inherit, + ], + 'method': [ + include('whitespace'), + # TODO unsure if ellipses are allowed elsewhere, see + # discussion in Issue 789 + (r',', Punctuation), + (r'\.\.\.', Punctuation), + (r'(\(.*?\))(\s*)([a-zA-Z$_][\w$]*)', + bygroups(using(this), Text, Name.Variable)), + (r'[a-zA-Z$_][\w$]*:', Name.Function), + (';', Punctuation, '#pop'), + ('{', Punctuation, 'function'), + ('', Text, '#pop'), + ], + 'literal_number': [ + (r'\(', Punctuation, 'literal_number_inner'), + 
(r'\)', Literal, '#pop'), + include('statement'), + ], + 'literal_number_inner': [ + (r'\(', Punctuation, '#push'), + (r'\)', Punctuation, '#pop'), + include('statement'), + ], + 'literal_array': [ + (r'\[', Punctuation, 'literal_array_inner'), + (r'\]', Literal, '#pop'), + include('statement'), + ], + 'literal_array_inner': [ + (r'\[', Punctuation, '#push'), + (r'\]', Punctuation, '#pop'), + include('statement'), + ], + 'literal_dictionary': [ + (r'\}', Literal, '#pop'), + include('statement'), + ], + } + + def analyse_text(text): + if _oc_keywords.search(text): + return 1.0 + elif '@"' in text: # strings + return 0.8 + elif re.search('@[0-9]+', text): + return 0.7 + elif _oc_message.search(text): + return 0.8 + return 0 + + def get_tokens_unprocessed(self, text): + from pygments.lexers._cocoabuiltins import COCOA_INTERFACES, \ + COCOA_PROTOCOLS, COCOA_PRIMITIVES + + for index, token, value in \ + baselexer.get_tokens_unprocessed(self, text): + if token is Name or token is Name.Class: + if value in COCOA_INTERFACES or value in COCOA_PROTOCOLS \ + or value in COCOA_PRIMITIVES: + token = Name.Builtin.Pseudo + + yield index, token, value + + return GeneratedObjectiveCVariant + + +class ObjectiveCLexer(objective(CLexer)): + """ + For Objective-C source code with preprocessor directives. + """ + + name = 'Objective-C' + aliases = ['objective-c', 'objectivec', 'obj-c', 'objc'] + filenames = ['*.m', '*.h'] + mimetypes = ['text/x-objective-c'] + priority = 0.05 # Lower than C + + +class ObjectiveCppLexer(objective(CppLexer)): + """ + For Objective-C++ source code with preprocessor directives. + """ + + name = 'Objective-C++' + aliases = ['objective-c++', 'objectivec++', 'obj-c++', 'objc++'] + filenames = ['*.mm', '*.hh'] + mimetypes = ['text/x-objective-c++'] + priority = 0.05 # Lower than C++ + + +class LogosLexer(ObjectiveCppLexer): + """ + For Logos + Objective-C source code with preprocessor directives. + + .. 
versionadded:: 1.6 + """ + + name = 'Logos' + aliases = ['logos'] + filenames = ['*.x', '*.xi', '*.xm', '*.xmi'] + mimetypes = ['text/x-logos'] + priority = 0.25 + + tokens = { + 'statements': [ + (r'(%orig|%log)\b', Keyword), + (r'(%c)\b(\()(\s*)([a-zA-Z$_][\w$]*)(\s*)(\))', + bygroups(Keyword, Punctuation, Text, Name.Class, Text, Punctuation)), + (r'(%init)\b(\()', + bygroups(Keyword, Punctuation), 'logos_init_directive'), + (r'(%init)(?=\s*;)', bygroups(Keyword)), + (r'(%hook|%group)(\s+)([a-zA-Z$_][\w$]+)', + bygroups(Keyword, Text, Name.Class), '#pop'), + (r'(%subclass)(\s+)', bygroups(Keyword, Text), + ('#pop', 'logos_classname')), + inherit, + ], + 'logos_init_directive': [ + ('\s+', Text), + (',', Punctuation, ('logos_init_directive', '#pop')), + ('([a-zA-Z$_][\w$]*)(\s*)(=)(\s*)([^);]*)', + bygroups(Name.Class, Text, Punctuation, Text, Text)), + ('([a-zA-Z$_][\w$]*)', Name.Class), + ('\)', Punctuation, '#pop'), + ], + 'logos_classname': [ + ('([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?', + bygroups(Name.Class, Text, Name.Class), '#pop'), + ('([a-zA-Z$_][\w$]*)', Name.Class, '#pop') + ], + 'root': [ + (r'(%subclass)(\s+)', bygroups(Keyword, Text), + 'logos_classname'), + (r'(%hook|%group)(\s+)([a-zA-Z$_][\w$]+)', + bygroups(Keyword, Text, Name.Class)), + (r'(%config)(\s*\(\s*)(\w+)(\s*=\s*)(.*?)(\s*\)\s*)', + bygroups(Keyword, Text, Name.Variable, Text, String, Text)), + (r'(%ctor)(\s*)({)', bygroups(Keyword, Text, Punctuation), + 'function'), + (r'(%new)(\s*)(\()(\s*.*?\s*)(\))', + bygroups(Keyword, Text, Keyword, String, Keyword)), + (r'(\s*)(%end)(\s*)', bygroups(Text, Keyword, Text)), + inherit, + ], + } + + _logos_keywords = re.compile(r'%(?:hook|ctor|init|c\()') + + def analyse_text(text): + if LogosLexer._logos_keywords.search(text): + return 1.0 + return 0 + + +class SwiftLexer(ObjectiveCLexer): + """ + For `Swift `_ source. + + .. versionadded:: 2.0 + """ + name = 'Swift' + filenames = ['*.swift'] + aliases = ['swift'] + mimetypes = ['text/x-swift'] + + keywords_decl = set(('class', 'deinit', 'enum', 'extension', 'func', 'import', + 'init', 'let', 'protocol', 'static', 'struct', 'subscript', + 'typealias', 'var')) + keywords_stmt = set(('break', 'case', 'continue', 'default', 'do', 'else', + 'fallthrough', 'if', 'in', 'for', 'return', 'switch', + 'where', 'while')) + keywords_type = set(('as', 'dynamicType', 'is', 'new', 'super', 'self', 'Self', + 'Type', '__COLUMN__', '__FILE__', '__FUNCTION__', + '__LINE__')) + keywords_resrv = set(('associativity', 'didSet', 'get', 'infix', 'inout', 'left', + 'mutating', 'none', 'nonmutating', 'operator', 'override', + 'postfix', 'precedence', 'prefix', 'right', 'set', + 'unowned', 'unowned(safe)', 'unowned(unsafe)', 'weak', + 'willSet')) + operators = set(('->',)) + + def get_tokens_unprocessed(self, text): + for index, token, value in ObjectiveCLexer.get_tokens_unprocessed(self, text): + if token is Name: + if value in self.keywords_decl: + token = Keyword + elif value in self.keywords_stmt: + token = Keyword + elif value in self.keywords_type: + token = Keyword.Type + elif value in self.keywords_resrv: + token = Keyword.Reserved + elif value in self.operators: + token = Operator + yield index, token, value diff --git a/pygments/lexers/c_like/other.py b/pygments/lexers/c_like/other.py new file mode 100644 index 00000000..d9fb6f88 --- /dev/null +++ b/pygments/lexers/c_like/other.py @@ -0,0 +1,839 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.c_like.other + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for other C-like languages. 
+ + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import re + +from pygments.lexer import RegexLexer, include, bygroups, inherit, words +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation + +from pygments.lexers.c_like.c_cpp import CLexer, CppLexer + +__all__ = ['PikeLexer', 'NesCLexer', 'ClayLexer', 'ECLexer', 'ValaLexer', + 'CudaLexer', 'SwigLexer', 'MqlLexer'] + + +class PikeLexer(CppLexer): + """ + For `Pike `_ source code. + + .. versionadded:: 2.0 + """ + name = 'Pike' + aliases = ['pike'] + filenames = ['*.pike', '*.pmod'] + mimetypes = ['text/x-pike'] + + tokens = { + 'statements': [ + (words(( + 'catch', 'new', 'private', 'protected', 'public', 'gauge', + 'throw', 'throws', 'class', 'interface', 'implement', 'abstract', 'extends', 'from', + 'this', 'super', 'new', 'constant', 'final', 'static', 'import', 'use', 'extern', + 'inline', 'proto', 'break', 'continue', 'if', 'else', 'for', + 'while', 'do', 'switch', 'case', 'as', 'in', 'version', 'return', 'true', 'false', 'null', + '__VERSION__', '__MAJOR__', '__MINOR__', '__BUILD__', '__REAL_VERSION__', + '__REAL_MAJOR__', '__REAL_MINOR__', '__REAL_BUILD__', '__DATE__', '__TIME__', + '__FILE__', '__DIR__', '__LINE__', '__AUTO_BIGNUM__', '__NT__', '__PIKE__', + '__amigaos__', '_Pragma', 'static_assert', 'defined', 'sscanf'), suffix=r'\b'), + Keyword), + (r'(bool|int|long|float|short|double|char|string|object|void|mapping|' + r'array|multiset|program|function|lambda|mixed|' + r'[a-z_][a-z0-9_]*_t)\b', + Keyword.Type), + (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'), + (r'[~!%^&*+=|?:<>/-@]', Operator), + inherit, + ], + 'classname': [ + (r'[a-zA-Z_]\w*', Name.Class, '#pop'), + # template specification + (r'\s*(?=>)', Text, '#pop'), + ], + } + + +class NesCLexer(CLexer): + """ + For `nesC `_ source code with preprocessor + directives. + + .. versionadded:: 2.0 + """ + name = 'nesC' + aliases = ['nesc'] + filenames = ['*.nc'] + mimetypes = ['text/x-nescsrc'] + + tokens = { + 'statements': [ + (words(( + 'abstract', 'as', 'async', 'atomic', 'call', 'command', 'component', + 'components', 'configuration', 'event', 'extends', 'generic', + 'implementation', 'includes', 'interface', 'module', 'new', 'norace', + 'post', 'provides', 'signal', 'task', 'uses'), suffix=r'\b'), + Keyword), + (words(('nx_struct', 'nx_union', 'nx_int8_t', 'nx_int16_t', 'nx_int32_t', + 'nx_int64_t', 'nx_uint8_t', 'nx_uint16_t', 'nx_uint32_t', + 'nx_uint64_t'), suffix=r'\b'), + Keyword.Type), + inherit, + ], + } + + +class ClayLexer(RegexLexer): + """ + For `Clay `_ source. + + .. 
versionadded:: 2.0 + """ + name = 'Clay' + filenames = ['*.clay'] + aliases = ['clay'] + mimetypes = ['text/x-clay'] + tokens = { + 'root': [ + (r'\s', Text), + (r'//.*?$', Comment.Singleline), + (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline), + (r'\b(public|private|import|as|record|variant|instance' + r'|define|overload|default|external|alias' + r'|rvalue|ref|forward|inline|noinline|forceinline' + r'|enum|var|and|or|not|if|else|goto|return|while' + r'|switch|case|break|continue|for|in|true|false|try|catch|throw' + r'|finally|onerror|staticassert|eval|when|newtype' + r'|__FILE__|__LINE__|__COLUMN__|__ARG__' + r')\b', Keyword), + (r'[~!%^&*+=|:<>/-]', Operator), + (r'[#(){}\[\],;.]', Punctuation), + (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex), + (r'\d+[LlUu]*', Number.Integer), + (r'\b(true|false)\b', Name.Builtin), + (r'(?i)[a-z_?][a-z_?0-9]*', Name), + (r'"""', String, 'tdqs'), + (r'"', String, 'dqs'), + ], + 'strings': [ + (r'(?i)\\(x[0-9a-f]{2}|.)', String.Escape), + (r'.', String), + ], + 'nl': [ + (r'\n', String), + ], + 'dqs': [ + (r'"', String, '#pop'), + include('strings'), + ], + 'tdqs': [ + (r'"""', String, '#pop'), + include('strings'), + include('nl'), + ], + } + + +class ECLexer(CLexer): + """ + For eC source code with preprocessor directives. + + .. versionadded:: 1.5 + """ + name = 'eC' + aliases = ['ec'] + filenames = ['*.ec', '*.eh'] + mimetypes = ['text/x-echdr', 'text/x-ecsrc'] + + tokens = { + 'statements': [ + (words(( + 'virtual', 'class', 'private', 'public', 'property', 'import', + 'delete', 'new', 'new0', 'renew', 'renew0', 'define', 'get', + 'set', 'remote', 'dllexport', 'dllimport', 'stdcall', 'subclass', + '__on_register_module', 'namespace', 'using', 'typed_object', + 'any_object', 'incref', 'register', 'watch', 'stopwatching', 'firewatchers', + 'watchable', 'class_designer', 'class_fixed', 'class_no_expansion', 'isset', + 'class_default_property', 'property_category', 'class_data', + 'class_property', 'virtual', 'thisclass', 'dbtable', 'dbindex', + 'database_open', 'dbfield'), suffix=r'\b'), Keyword), + (words(('uint', 'uint16', 'uint32', 'uint64', 'bool', 'byte', + 'unichar', 'int64'), suffix=r'\b'), + Keyword.Type), + (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'), + (r'(null|value|this)\b', Name.Builtin), + inherit, + ], + 'classname': [ + (r'[a-zA-Z_]\w*', Name.Class, '#pop'), + # template specification + (r'\s*(?=>)', Text, '#pop'), + ], + } + + +class ValaLexer(RegexLexer): + """ + For Vala source code with preprocessor directives. + + .. 
versionadded:: 1.1 + """ + name = 'Vala' + aliases = ['vala', 'vapi'] + filenames = ['*.vala', '*.vapi'] + mimetypes = ['text/x-vala'] + + tokens = { + 'whitespace': [ + (r'^\s*#if\s+0', Comment.Preproc, 'if0'), + (r'\n', Text), + (r'\s+', Text), + (r'\\\n', Text), # line continuation + (r'//(\n|(.|\n)*?[^\\]\n)', Comment.Single), + (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline), + ], + 'statements': [ + (r'L?"', String, 'string'), + (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", + String.Char), + (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float), + (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float), + (r'0x[0-9a-fA-F]+[Ll]?', Number.Hex), + (r'0[0-7]+[Ll]?', Number.Oct), + (r'\d+[Ll]?', Number.Integer), + (r'[~!%^&*+=|?:<>/-]', Operator), + (r'(\[)(Compact|Immutable|(?:Boolean|Simple)Type)(\])', + bygroups(Punctuation, Name.Decorator, Punctuation)), + # TODO: "correctly" parse complex code attributes + (r'(\[)(CCode|(?:Integer|Floating)Type)', + bygroups(Punctuation, Name.Decorator)), + (r'[()\[\],.]', Punctuation), + (words(( + 'as', 'base', 'break', 'case', 'catch', 'construct', 'continue', + 'default', 'delete', 'do', 'else', 'enum', 'finally', 'for', + 'foreach', 'get', 'if', 'in', 'is', 'lock', 'new', 'out', 'params', + 'return', 'set', 'sizeof', 'switch', 'this', 'throw', 'try', + 'typeof', 'while', 'yield'), suffix=r'\b'), + Keyword), + (words(( + 'abstract', 'const', 'delegate', 'dynamic', 'ensures', 'extern', + 'inline', 'internal', 'override', 'owned', 'private', 'protected', + 'public', 'ref', 'requires', 'signal', 'static', 'throws', 'unowned', + 'var', 'virtual', 'volatile', 'weak', 'yields'), suffix=r'\b'), + Keyword.Declaration), + (r'(namespace|using)(\s+)', bygroups(Keyword.Namespace, Text), + 'namespace'), + (r'(class|errordomain|interface|struct)(\s+)', + bygroups(Keyword.Declaration, Text), 'class'), + (r'(\.)([a-zA-Z_]\w*)', + bygroups(Operator, Name.Attribute)), + # void is an actual keyword, others are in glib-2.0.vapi + (words(( + 'void', 'bool', 'char', 'double', 'float', 'int', 'int8', 'int16', + 'int32', 'int64', 'long', 'short', 'size_t', 'ssize_t', 'string', + 'time_t', 'uchar', 'uint', 'uint8', 'uint16', 'uint32', 'uint64', + 'ulong', 'unichar', 'ushort'), suffix=r'\b'), + Keyword.Type), + (r'(true|false|null)\b', Name.Builtin), + ('[a-zA-Z_]\w*', Name), + ], + 'root': [ + include('whitespace'), + ('', Text, 'statement'), + ], + 'statement': [ + include('whitespace'), + include('statements'), + ('[{}]', Punctuation), + (';', Punctuation, '#pop'), + ], + 'string': [ + (r'"', String, '#pop'), + (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape), + (r'[^\\"\n]+', String), # all other characters + (r'\\\n', String), # line continuation + (r'\\', String), # stray backslash + ], + 'if0': [ + (r'^\s*#if.*?(?`_ + source. + + .. 
versionadded:: 1.6 + """ + name = 'CUDA' + filenames = ['*.cu', '*.cuh'] + aliases = ['cuda', 'cu'] + mimetypes = ['text/x-cuda'] + + function_qualifiers = set(('__device__', '__global__', '__host__', + '__noinline__', '__forceinline__')) + variable_qualifiers = set(('__device__', '__constant__', '__shared__', + '__restrict__')) + vector_types = set(('char1', 'uchar1', 'char2', 'uchar2', 'char3', 'uchar3', + 'char4', 'uchar4', 'short1', 'ushort1', 'short2', 'ushort2', + 'short3', 'ushort3', 'short4', 'ushort4', 'int1', 'uint1', + 'int2', 'uint2', 'int3', 'uint3', 'int4', 'uint4', 'long1', + 'ulong1', 'long2', 'ulong2', 'long3', 'ulong3', 'long4', + 'ulong4', 'longlong1', 'ulonglong1', 'longlong2', + 'ulonglong2', 'float1', 'float2', 'float3', 'float4', + 'double1', 'double2', 'dim3')) + variables = set(('gridDim', 'blockIdx', 'blockDim', 'threadIdx', 'warpSize')) + functions = set(('__threadfence_block', '__threadfence', '__threadfence_system', + '__syncthreads', '__syncthreads_count', '__syncthreads_and', + '__syncthreads_or')) + execution_confs = set(('<<<', '>>>')) + + def get_tokens_unprocessed(self, text): + for index, token, value in CLexer.get_tokens_unprocessed(self, text): + if token is Name: + if value in self.variable_qualifiers: + token = Keyword.Type + elif value in self.vector_types: + token = Keyword.Type + elif value in self.variables: + token = Name.Builtin + elif value in self.execution_confs: + token = Keyword.Pseudo + elif value in self.function_qualifiers: + token = Keyword.Reserved + elif value in self.functions: + token = Name.Function + yield index, token, value + + +class SwigLexer(CppLexer): + """ + For `SWIG `_ source code. + + .. versionadded:: 2.0 + """ + name = 'SWIG' + aliases = ['swig'] + filenames = ['*.swg', '*.i'] + mimetypes = ['text/swig'] + priority = 0.04 # Lower than C/C++ and Objective C/C++ + + tokens = { + 'statements': [ + # SWIG directives + (r'(%[a-z_][a-z0-9_]*)', Name.Function), + # Special variables + ('\$\**\&?\w+', Name), + # Stringification / additional preprocessor directives + (r'##*[a-zA-Z_]\w*', Comment.Preproc), + inherit, + ], + } + + # This is a far from complete set of SWIG directives + swig_directives = set(( + # Most common directives + '%apply', '%define', '%director', '%enddef', '%exception', '%extend', + '%feature', '%fragment', '%ignore', '%immutable', '%import', '%include', + '%inline', '%insert', '%module', '%newobject', '%nspace', '%pragma', + '%rename', '%shared_ptr', '%template', '%typecheck', '%typemap', + # Less common directives + '%arg', '%attribute', '%bang', '%begin', '%callback', '%catches', '%clear', + '%constant', '%copyctor', '%csconst', '%csconstvalue', '%csenum', + '%csmethodmodifiers', '%csnothrowexception', '%default', '%defaultctor', + '%defaultdtor', '%defined', '%delete', '%delobject', '%descriptor', + '%exceptionclass', '%exceptionvar', '%extend_smart_pointer', '%fragments', + '%header', '%ifcplusplus', '%ignorewarn', '%implicit', '%implicitconv', + '%init', '%javaconst', '%javaconstvalue', '%javaenum', '%javaexception', + '%javamethodmodifiers', '%kwargs', '%luacode', '%mutable', '%naturalvar', + '%nestedworkaround', '%perlcode', '%pythonabc', '%pythonappend', + '%pythoncallback', '%pythoncode', '%pythondynamic', '%pythonmaybecall', + '%pythonnondynamic', '%pythonprepend', '%refobject', '%shadow', '%sizeof', + '%trackobjects', '%types', '%unrefobject', '%varargs', '%warn', + '%warnfilter')) + + def analyse_text(text): + rv = 0 + # Search for SWIG directives, which are conventionally at the beginning 
of + # a line. The probability of them being within a line is low, so let another + # lexer win in this case. + matches = re.findall(r'^\s*(%[a-z_][a-z0-9_]*)', text, re.M) + for m in matches: + if m in SwigLexer.swig_directives: + rv = 0.98 + break + else: + rv = 0.91 # Fraction higher than MatlabLexer + return rv + + +class MqlLexer(CppLexer): + """ + For `MQL4 `_ and + `MQL5 `_ source code. + + .. versionadded:: 2.0 + """ + name = 'MQL' + aliases = ['mql', 'mq4', 'mq5', 'mql4', 'mql5'] + filenames = ['*.mq4', '*.mq5', '*.mqh'] + mimetypes = ['text/x-mql'] + + tokens = { + 'statements': [ + (words(( + 'input', '_Digits', '_Point', '_LastError', '_Period', '_RandomSeed', + '_StopFlag', '_Symbol', '_UninitReason', 'Ask', 'Bars', 'Bid', + 'Close', 'Digits', 'High', 'Low', 'Open', 'Point', 'Time', + 'Volume'), suffix=r'\b'), + Keyword), + (words(( + 'void', 'char', 'uchar', 'bool', 'short', 'ushort', 'int', 'uint', + 'color', 'long', 'ulong', 'datetime', 'float', 'double', + 'string'), suffix=r'\b'), + Keyword.Type), + (words(( + 'Alert', 'CheckPointer', 'Comment', 'DebugBreak', 'ExpertRemove', + 'GetPointer', 'GetTickCount', 'MessageBox', 'PeriodSeconds', 'PlaySound', + 'Print', 'PrintFormat', 'ResetLastError', 'ResourceCreate', 'ResourceFree', + 'ResourceReadImage', 'ResourceSave', 'SendFTP', 'SendMail', 'SendNotification', + 'Sleep', 'TerminalClose', 'TesterStatistics', 'ZeroMemory', + 'ArrayBsearch', 'ArrayCopy', 'ArrayCompare', 'ArrayFree', 'ArrayGetAsSeries', + 'ArrayInitialize', 'ArrayFill', 'ArrayIsSeries', 'ArrayIsDynamic', + 'ArrayMaximum', 'ArrayMinimum', 'ArrayRange', 'ArrayResize', + 'ArraySetAsSeries', 'ArraySize', 'ArraySort', 'ArrayCopyRates', + 'ArrayCopySeries', 'ArrayDimension', + 'CharToString', 'DoubleToString', 'EnumToString', 'NormalizeDouble', + 'StringToDouble', 'StringToInteger', 'StringToTime', 'TimeToString', + 'IntegerToString', 'ShortToString', 'ShortArrayToString', + 'StringToShortArray', 'CharArrayToString', 'StringToCharArray', + 'ColorToARGB', 'ColorToString', 'StringToColor', 'StringFormat', + 'CharToStr', 'DoubleToStr', 'StrToDouble', 'StrToInteger', 'StrToTime', 'TimeToStr', + 'MathAbs', 'MathArccos', 'MathArcsin', 'MathArctan', 'MathCeil', 'MathCos', 'MathExp', + 'MathFloor', 'MathLog', 'MathMax', 'MathMin', 'MathMod', 'MathPow', 'MathRand', + 'MathRound', 'MathSin', 'MathSqrt', 'MathSrand', 'MathTan', 'MathIsValidNumber', + 'StringAdd', 'StringBufferLen', 'StringCompare', 'StringConcatenate', 'StringFill', + 'StringFind', 'StringGetCharacter', 'StringInit', 'StringLen', 'StringReplace', + 'StringSetCharacter', 'StringSplit', 'StringSubstr', 'StringToLower', 'StringToUpper', + 'StringTrimLeft', 'StringTrimRight', 'StringGetChar', 'StringSetChar', + 'TimeCurrent', 'TimeTradeServer', 'TimeLocal', 'TimeGMT', 'TimeDaylightSavings', + 'TimeGMTOffset', 'TimeToStruct', 'StructToTime', 'Day', 'DayOfWeek', 'DayOfYear', + 'Hour', 'Minute', 'Month', 'Seconds', 'TimeDay', 'TimeDayOfWeek', 'TimeDayOfYear', 'TimeHour', + 'TimeMinute', 'TimeMonth', 'TimeSeconds', 'TimeYear', 'Year', + 'AccountInfoDouble', 'AccountInfoInteger', 'AccountInfoString', 'AccountBalance', + 'AccountCredit', 'AccountCompany', 'AccountCurrency', 'AccountEquity', + 'AccountFreeMargin', 'AccountFreeMarginCheck', 'AccountFreeMarginMode', + 'AccountLeverage', 'AccountMargin', 'AccountName', 'AccountNumber', 'AccountProfit', + 'AccountServer', 'AccountStopoutLevel', 'AccountStopoutMode', + 'GetLastError', 'IsStopped', 'UninitializeReason', 'MQLInfoInteger', 'MQLInfoString', + 'Symbol', 'Period', 
'Digits', 'Point', 'IsConnected', 'IsDemo', 'IsDllsAllowed', + 'IsExpertEnabled', 'IsLibrariesAllowed', 'IsOptimization', 'IsTesting', + 'IsTradeAllowed', + 'IsTradeContextBusy', 'IsVisualMode', 'TerminalCompany', 'TerminalName', + 'TerminalPath', + 'SymbolsTotal', 'SymbolName', 'SymbolSelect', 'SymbolIsSynchronized', + 'SymbolInfoDouble', + 'SymbolInfoInteger', 'SymbolInfoString', 'SymbolInfoTick', + 'SymbolInfoSessionQuote', + 'SymbolInfoSessionTrade', 'MarketInfo', + 'SeriesInfoInteger', 'CopyRates', 'CopyTime', 'CopyOpen', + 'CopyHigh', 'CopyLow', 'CopyClose', + 'CopyTickVolume', 'CopyRealVolume', 'CopySpread', 'iBars', 'iBarShift', 'iClose', + 'iHigh', 'iHighest', 'iLow', 'iLowest', 'iOpen', 'iTime', 'iVolume', + 'HideTestIndicators', 'Period', 'RefreshRates', 'Symbol', 'WindowBarsPerChart', + 'WindowExpertName', 'WindowFind', 'WindowFirstVisibleBar', 'WindowHandle', + 'WindowIsVisible', 'WindowOnDropped', 'WindowPriceMax', 'WindowPriceMin', + 'WindowPriceOnDropped', 'WindowRedraw', 'WindowScreenShot', + 'WindowTimeOnDropped', 'WindowsTotal', 'WindowXOnDropped', 'WindowYOnDropped', + 'OrderClose', 'OrderCloseBy', 'OrderClosePrice', 'OrderCloseTime', 'OrderComment', + 'OrderCommission', 'OrderDelete', 'OrderExpiration', 'OrderLots', 'OrderMagicNumber', + 'OrderModify', 'OrderOpenPrice', 'OrderOpenTime', 'OrderPrint', 'OrderProfit', + 'OrderSelect', 'OrderSend', 'OrdersHistoryTotal', 'OrderStopLoss', 'OrdersTotal', + 'OrderSwap', 'OrderSymbol', 'OrderTakeProfit', 'OrderTicket', 'OrderType', + 'GlobalVariableCheck', 'GlobalVariableTime', + 'GlobalVariableDel', 'GlobalVariableGet', 'GlobalVariableName', + 'GlobalVariableSet', 'GlobalVariablesFlush', 'GlobalVariableTemp', + 'GlobalVariableSetOnCondition', 'GlobalVariablesDeleteAll', + 'GlobalVariablesTotal', 'GlobalVariableCheck', 'GlobalVariableTime', + 'GlobalVariableDel', 'GlobalVariableGet', + 'GlobalVariableName', 'GlobalVariableSet', 'GlobalVariablesFlush', + 'GlobalVariableTemp', 'GlobalVariableSetOnCondition', + 'GlobalVariablesDeleteAll', 'GlobalVariablesTotal', + 'GlobalVariableCheck', 'GlobalVariableTime', 'GlobalVariableDel', + 'GlobalVariableGet', 'GlobalVariableName', 'GlobalVariableSet', + 'GlobalVariablesFlush', 'GlobalVariableTemp', + 'GlobalVariableSetOnCondition', 'GlobalVariablesDeleteAll', + 'GlobalVariablesTotal', + 'FileFindFirst', 'FileFindNext', 'FileFindClose', 'FileOpen', 'FileDelete', + 'FileFlush', 'FileGetInteger', 'FileIsEnding', 'FileIsLineEnding', + 'FileClose', 'FileIsExist', 'FileCopy', 'FileMove', 'FileReadArray', + 'FileReadBool', 'FileReadDatetime', 'FileReadDouble', 'FileReadFloat', + 'FileReadInteger', 'FileReadLong', 'FileReadNumber', 'FileReadString', + 'FileReadStruct', 'FileSeek', 'FileSize', 'FileTell', 'FileWrite', + 'FileWriteArray', 'FileWriteDouble', 'FileWriteFloat', 'FileWriteInteger', + 'FileWriteLong', 'FileWriteString', 'FileWriteStruct', 'FolderCreate', + 'FolderDelete', 'FolderClean', 'FileOpenHistory', + 'IndicatorSetDouble', 'IndicatorSetInteger', 'IndicatorSetString', + 'SetIndexBuffer', 'IndicatorBuffers', 'IndicatorCounted', 'IndicatorDigits', + 'IndicatorShortName', 'SetIndexArrow', 'SetIndexDrawBegin', + 'SetIndexEmptyValue', 'SetIndexLabel', 'SetIndexShift', + 'SetIndexStyle', 'SetLevelStyle', 'SetLevelValue', + 'ObjectCreate', 'ObjectName', 'ObjectDelete', 'ObjectsDeleteAll', + 'ObjectFind', 'ObjectGetTimeByValue', 'ObjectGetValueByTime', + 'ObjectMove', 'ObjectsTotal', 'ObjectGetDouble', 'ObjectGetInteger', + 'ObjectGetString', 'ObjectSetDouble', 'ObjectSetInteger', + 
'ObjectSetString', 'TextSetFont', 'TextOut', 'TextGetSize', + 'ObjectDescription', 'ObjectGet', 'ObjectGetFiboDescription', + 'ObjectGetShiftByValue', 'ObjectGetValueByShift', 'ObjectSet', + 'ObjectSetFiboDescription', 'ObjectSetText', 'ObjectType', + 'iAC', 'iAD', 'iADX', 'iAlligator', 'iAO', 'iATR', 'iBearsPower', + 'iBands', 'iBandsOnArray', 'iBullsPower', 'iCCI', 'iCCIOnArray', + 'iCustom', 'iDeMarker', 'iEnvelopes', 'iEnvelopesOnArray', + 'iForce', 'iFractals', 'iGator', 'iIchimoku', 'iBWMFI', 'iMomentum', + 'iMomentumOnArray', 'iMFI', 'iMA', 'iMAOnArray', 'iOsMA', 'iMACD', + 'iOBV', 'iSAR', 'iRSI', 'iRSIOnArray', 'iRVI', 'iStdDev', 'iStdDevOnArray', + 'iStochastic', 'iWPR', + 'EventSetMillisecondTimer', 'EventSetTimer', + 'EventKillTimer', 'EventChartCustom'), suffix=r'\b'), + Name.Function), + (words(( + 'CHARTEVENT_KEYDOWN', 'CHARTEVENT_MOUSE_MOVE', + 'CHARTEVENT_OBJECT_CREATE', + 'CHARTEVENT_OBJECT_CHANGE', 'CHARTEVENT_OBJECT_DELETE', + 'CHARTEVENT_CLICK', + 'CHARTEVENT_OBJECT_CLICK', 'CHARTEVENT_OBJECT_DRAG', + 'CHARTEVENT_OBJECT_ENDEDIT', + 'CHARTEVENT_CHART_CHANGE', 'CHARTEVENT_CUSTOM', + 'CHARTEVENT_CUSTOM_LAST', + 'PERIOD_CURRENT', 'PERIOD_M1', 'PERIOD_M2', 'PERIOD_M3', + 'PERIOD_M4', 'PERIOD_M5', + 'PERIOD_M6', 'PERIOD_M10', 'PERIOD_M12', 'PERIOD_M15', + 'PERIOD_M20', 'PERIOD_M30', + 'PERIOD_H1', 'PERIOD_H2', 'PERIOD_H3', 'PERIOD_H4', + 'PERIOD_H6', 'PERIOD_H8', + 'PERIOD_H12', 'PERIOD_D1', 'PERIOD_W1', 'PERIOD_MN1', + 'CHART_IS_OBJECT', 'CHART_BRING_TO_TOP', + 'CHART_MOUSE_SCROLL', 'CHART_EVENT_MOUSE_MOVE', + 'CHART_EVENT_OBJECT_CREATE', + 'CHART_EVENT_OBJECT_DELETE', 'CHART_MODE', 'CHART_FOREGROUND', + 'CHART_SHIFT', + 'CHART_AUTOSCROLL', 'CHART_SCALE', 'CHART_SCALEFIX', + 'CHART_SCALEFIX_11', + 'CHART_SCALE_PT_PER_BAR', 'CHART_SHOW_OHLC', + 'CHART_SHOW_BID_LINE', + 'CHART_SHOW_ASK_LINE', 'CHART_SHOW_LAST_LINE', + 'CHART_SHOW_PERIOD_SEP', + 'CHART_SHOW_GRID', 'CHART_SHOW_VOLUMES', + 'CHART_SHOW_OBJECT_DESCR', + 'CHART_VISIBLE_BARS', 'CHART_WINDOWS_TOTAL', + 'CHART_WINDOW_IS_VISIBLE', + 'CHART_WINDOW_HANDLE', 'CHART_WINDOW_YDISTANCE', + 'CHART_FIRST_VISIBLE_BAR', + 'CHART_WIDTH_IN_BARS', 'CHART_WIDTH_IN_PIXELS', + 'CHART_HEIGHT_IN_PIXELS', + 'CHART_COLOR_BACKGROUND', 'CHART_COLOR_FOREGROUND', + 'CHART_COLOR_GRID', + 'CHART_COLOR_VOLUME', 'CHART_COLOR_CHART_UP', + 'CHART_COLOR_CHART_DOWN', + 'CHART_COLOR_CHART_LINE', 'CHART_COLOR_CANDLE_BULL', + 'CHART_COLOR_CANDLE_BEAR', + 'CHART_COLOR_BID', 'CHART_COLOR_ASK', 'CHART_COLOR_LAST', + 'CHART_COLOR_STOP_LEVEL', + 'CHART_SHOW_TRADE_LEVELS', 'CHART_DRAG_TRADE_LEVELS', + 'CHART_SHOW_DATE_SCALE', + 'CHART_SHOW_PRICE_SCALE', 'CHART_SHIFT_SIZE', + 'CHART_FIXED_POSITION', + 'CHART_FIXED_MAX', 'CHART_FIXED_MIN', 'CHART_POINTS_PER_BAR', + 'CHART_PRICE_MIN', + 'CHART_PRICE_MAX', 'CHART_COMMENT', 'CHART_BEGIN', + 'CHART_CURRENT_POS', 'CHART_END', + 'CHART_BARS', 'CHART_CANDLES', 'CHART_LINE', 'CHART_VOLUME_HIDE', + 'CHART_VOLUME_TICK', 'CHART_VOLUME_REAL', + 'OBJ_VLINE', 'OBJ_HLINE', 'OBJ_TREND', 'OBJ_TRENDBYANGLE', 'OBJ_CYCLES', + 'OBJ_CHANNEL', 'OBJ_STDDEVCHANNEL', 'OBJ_REGRESSION', 'OBJ_PITCHFORK', + 'OBJ_GANNLINE', 'OBJ_GANNFAN', 'OBJ_GANNGRID', 'OBJ_FIBO', + 'OBJ_FIBOTIMES', 'OBJ_FIBOFAN', 'OBJ_FIBOARC', 'OBJ_FIBOCHANNEL', + 'OBJ_EXPANSION', 'OBJ_RECTANGLE', 'OBJ_TRIANGLE', 'OBJ_ELLIPSE', + 'OBJ_ARROW_THUMB_UP', 'OBJ_ARROW_THUMB_DOWN', + 'OBJ_ARROW_UP', 'OBJ_ARROW_DOWN', + 'OBJ_ARROW_STOP', 'OBJ_ARROW_CHECK', 'OBJ_ARROW_LEFT_PRICE', + 'OBJ_ARROW_RIGHT_PRICE', 'OBJ_ARROW_BUY', 'OBJ_ARROW_SELL', + 'OBJ_ARROW', + 'OBJ_TEXT', 
'OBJ_LABEL', 'OBJ_BUTTON', 'OBJ_BITMAP', + 'OBJ_BITMAP_LABEL', + 'OBJ_EDIT', 'OBJ_EVENT', 'OBJ_RECTANGLE_LABEL', + 'OBJPROP_TIME1', 'OBJPROP_PRICE1', 'OBJPROP_TIME2', + 'OBJPROP_PRICE2', 'OBJPROP_TIME3', + 'OBJPROP_PRICE3', 'OBJPROP_COLOR', 'OBJPROP_STYLE', + 'OBJPROP_WIDTH', + 'OBJPROP_BACK', 'OBJPROP_RAY', 'OBJPROP_ELLIPSE', + 'OBJPROP_SCALE', + 'OBJPROP_ANGLE', 'OBJPROP_ARROWCODE', 'OBJPROP_TIMEFRAMES', + 'OBJPROP_DEVIATION', 'OBJPROP_FONTSIZE', 'OBJPROP_CORNER', + 'OBJPROP_XDISTANCE', 'OBJPROP_YDISTANCE', 'OBJPROP_FIBOLEVELS', + 'OBJPROP_LEVELCOLOR', 'OBJPROP_LEVELSTYLE', 'OBJPROP_LEVELWIDTH', + 'OBJPROP_FIRSTLEVEL', 'OBJPROP_COLOR', 'OBJPROP_STYLE', 'OBJPROP_WIDTH', + 'OBJPROP_BACK', 'OBJPROP_ZORDER', 'OBJPROP_FILL', 'OBJPROP_HIDDEN', + 'OBJPROP_SELECTED', 'OBJPROP_READONLY', 'OBJPROP_TYPE', 'OBJPROP_TIME', + 'OBJPROP_SELECTABLE', 'OBJPROP_CREATETIME', 'OBJPROP_LEVELS', + 'OBJPROP_LEVELCOLOR', 'OBJPROP_LEVELSTYLE', 'OBJPROP_LEVELWIDTH', + 'OBJPROP_ALIGN', 'OBJPROP_FONTSIZE', 'OBJPROP_RAY_RIGHT', 'OBJPROP_RAY', + 'OBJPROP_ELLIPSE', 'OBJPROP_ARROWCODE', 'OBJPROP_TIMEFRAMES', 'OBJPROP_ANCHOR', + 'OBJPROP_XDISTANCE', 'OBJPROP_YDISTANCE', 'OBJPROP_DRAWLINES', 'OBJPROP_STATE', + 'OBJPROP_CHART_ID', 'OBJPROP_XSIZE', 'OBJPROP_YSIZE', 'OBJPROP_XOFFSET', + 'OBJPROP_YOFFSET', 'OBJPROP_PERIOD', 'OBJPROP_DATE_SCALE', 'OBJPROP_PRICE_SCALE', + 'OBJPROP_CHART_SCALE', 'OBJPROP_BGCOLOR', 'OBJPROP_CORNER', 'OBJPROP_BORDER_TYPE', + 'OBJPROP_BORDER_COLOR', 'OBJPROP_PRICE', 'OBJPROP_LEVELVALUE', 'OBJPROP_SCALE', + 'OBJPROP_ANGLE', 'OBJPROP_DEVIATION', + 'OBJPROP_NAME', 'OBJPROP_TEXT', 'OBJPROP_TOOLTIP', 'OBJPROP_LEVELTEXT', + 'OBJPROP_FONT', 'OBJPROP_BMPFILE', 'OBJPROP_SYMBOL', + 'BORDER_FLAT', 'BORDER_RAISED', 'BORDER_SUNKEN', 'ALIGN_LEFT', 'ALIGN_CENTER', + 'ALIGN_RIGHT', 'ANCHOR_LEFT_UPPER', 'ANCHOR_LEFT', 'ANCHOR_LEFT_LOWER', + 'ANCHOR_LOWER', 'ANCHOR_RIGHT_LOWER', 'ANCHOR_RIGHT', 'ANCHOR_RIGHT_UPPER', + 'ANCHOR_UPPER', 'ANCHOR_CENTER', 'ANCHOR_TOP', 'ANCHOR_BOTTOM', + 'CORNER_LEFT_UPPER', 'CORNER_LEFT_LOWER', 'CORNER_RIGHT_LOWER', + 'CORNER_RIGHT_UPPER', + 'OBJ_NO_PERIODS', 'EMPTY', 'OBJ_PERIOD_M1', 'OBJ_PERIOD_M5', 'OBJ_PERIOD_M15', + 'OBJ_PERIOD_M30', 'OBJ_PERIOD_H1', 'OBJ_PERIOD_H4', 'OBJ_PERIOD_D1', + 'OBJ_PERIOD_W1', 'OBJ_PERIOD_MN1', 'OBJ_ALL_PERIODS', + 'GANN_UP_TREND', 'GANN_DOWN_TREND', + 'SYMBOL_THUMBSUP', 'SYMBOL_THUMBSDOWN', + 'SYMBOL_ARROWUP', 'SYMBOL_ARROWDOWN', + 'SYMBOL_STOPSIGN', 'SYMBOL_CHECKSIGN', + 'SYMBOL_LEFTPRICE', 'SYMBOL_RIGHTPRICE', + 'PRICE_CLOSE', 'PRICE_OPEN', 'PRICE_HIGH', 'PRICE_LOW', + 'PRICE_MEDIAN', 'PRICE_TYPICAL', 'PRICE_WEIGHTED', + 'VOLUME_TICK', 'VOLUME_REAL', + 'STO_LOWHIGH', 'STO_CLOSECLOSE', + 'MODE_OPEN', 'MODE_LOW', 'MODE_HIGH', 'MODE_CLOSE', 'MODE_VOLUME', 'MODE_TIME', + 'MODE_SMA', 'MODE_EMA', 'MODE_SMMA', 'MODE_LWMA', + 'MODE_MAIN', 'MODE_SIGNAL', 'MODE_MAIN', + 'MODE_PLUSDI', 'MODE_MINUSDI', 'MODE_UPPER', + 'MODE_LOWER', 'MODE_GATORJAW', 'MODE_GATORTEETH', + 'MODE_GATORLIPS', 'MODE_TENKANSEN', + 'MODE_KIJUNSEN', 'MODE_SENKOUSPANA', + 'MODE_SENKOUSPANB', 'MODE_CHINKOUSPAN', + 'DRAW_LINE', 'DRAW_SECTION', 'DRAW_HISTOGRAM', + 'DRAW_ARROW', 'DRAW_ZIGZAG', 'DRAW_NONE', + 'STYLE_SOLID', 'STYLE_DASH', 'STYLE_DOT', + 'STYLE_DASHDOT', 'STYLE_DASHDOTDOT', + 'DRAW_NONE', 'DRAW_LINE', 'DRAW_SECTION', 'DRAW_HISTOGRAM', + 'DRAW_ARROW', 'DRAW_ZIGZAG', 'DRAW_FILLING', + 'INDICATOR_DATA', 'INDICATOR_COLOR_INDEX', + 'INDICATOR_CALCULATIONS', 'INDICATOR_DIGITS', + 'INDICATOR_HEIGHT', 'INDICATOR_LEVELS', + 'INDICATOR_LEVELCOLOR', 'INDICATOR_LEVELSTYLE', + 
'INDICATOR_LEVELWIDTH', 'INDICATOR_MINIMUM', + 'INDICATOR_MAXIMUM', 'INDICATOR_LEVELVALUE', + 'INDICATOR_SHORTNAME', 'INDICATOR_LEVELTEXT', + 'TERMINAL_BUILD', 'TERMINAL_CONNECTED', + 'TERMINAL_DLLS_ALLOWED', 'TERMINAL_TRADE_ALLOWED', + 'TERMINAL_EMAIL_ENABLED', + 'TERMINAL_FTP_ENABLED', 'TERMINAL_MAXBARS', + 'TERMINAL_CODEPAGE', 'TERMINAL_CPU_CORES', + 'TERMINAL_DISK_SPACE', 'TERMINAL_MEMORY_PHYSICAL', + 'TERMINAL_MEMORY_TOTAL', + 'TERMINAL_MEMORY_AVAILABLE', 'TERMINAL_MEMORY_USED', + 'TERMINAL_X64', + 'TERMINAL_OPENCL_SUPPORT', 'TERMINAL_LANGUAGE', + 'TERMINAL_COMPANY', 'TERMINAL_NAME', + 'TERMINAL_PATH', 'TERMINAL_DATA_PATH', + 'TERMINAL_COMMONDATA_PATH', + 'MQL_PROGRAM_TYPE', 'MQL_DLLS_ALLOWED', + 'MQL_TRADE_ALLOWED', 'MQL_DEBUG', + 'MQL_PROFILER', 'MQL_TESTER', 'MQL_OPTIMIZATION', + 'MQL_VISUAL_MODE', + 'MQL_FRAME_MODE', 'MQL_LICENSE_TYPE', 'MQL_PROGRAM_NAME', + 'MQL_PROGRAM_PATH', + 'PROGRAM_SCRIPT', 'PROGRAM_EXPERT', + 'PROGRAM_INDICATOR', 'LICENSE_FREE', + 'LICENSE_DEMO', 'LICENSE_FULL', 'LICENSE_TIME', + 'MODE_LOW', 'MODE_HIGH', 'MODE_TIME', 'MODE_BID', + 'MODE_ASK', 'MODE_POINT', + 'MODE_DIGITS', 'MODE_SPREAD', 'MODE_STOPLEVEL', + 'MODE_LOTSIZE', 'MODE_TICKVALUE', + 'MODE_TICKSIZE', 'MODE_SWAPLONG', + 'MODE_SWAPSHORT', 'MODE_STARTING', + 'MODE_EXPIRATION', 'MODE_TRADEALLOWED', + 'MODE_MINLOT', 'MODE_LOTSTEP', 'MODE_MAXLOT', + 'MODE_SWAPTYPE', 'MODE_PROFITCALCMODE', + 'MODE_MARGINCALCMODE', 'MODE_MARGININIT', + 'MODE_MARGINMAINTENANCE', 'MODE_MARGINHEDGED', + 'MODE_MARGINREQUIRED', 'MODE_FREEZELEVEL', + 'SUNDAY', 'MONDAY', 'TUESDAY', 'WEDNESDAY', 'THURSDAY', + 'FRIDAY', 'SATURDAY', + 'ACCOUNT_LOGIN', 'ACCOUNT_TRADE_MODE', + 'ACCOUNT_LEVERAGE', + 'ACCOUNT_LIMIT_ORDERS', 'ACCOUNT_MARGIN_SO_MODE', + 'ACCOUNT_TRADE_ALLOWED', 'ACCOUNT_TRADE_EXPERT', + 'ACCOUNT_BALANCE', + 'ACCOUNT_CREDIT', 'ACCOUNT_PROFIT', 'ACCOUNT_EQUITY', + 'ACCOUNT_MARGIN', + 'ACCOUNT_FREEMARGIN', 'ACCOUNT_MARGIN_LEVEL', + 'ACCOUNT_MARGIN_SO_CALL', + 'ACCOUNT_MARGIN_SO_SO', 'ACCOUNT_NAME', + 'ACCOUNT_SERVER', 'ACCOUNT_CURRENCY', + 'ACCOUNT_COMPANY', 'ACCOUNT_TRADE_MODE_DEMO', + 'ACCOUNT_TRADE_MODE_CONTEST', + 'ACCOUNT_TRADE_MODE_REAL', 'ACCOUNT_STOPOUT_MODE_PERCENT', + 'ACCOUNT_STOPOUT_MODE_MONEY', + 'STAT_INITIAL_DEPOSIT', 'STAT_WITHDRAWAL', 'STAT_PROFIT', + 'STAT_GROSS_PROFIT', + 'STAT_GROSS_LOSS', 'STAT_MAX_PROFITTRADE', + 'STAT_MAX_LOSSTRADE', 'STAT_CONPROFITMAX', + 'STAT_CONPROFITMAX_TRADES', 'STAT_MAX_CONWINS', + 'STAT_MAX_CONPROFIT_TRADES', + 'STAT_CONLOSSMAX', 'STAT_CONLOSSMAX_TRADES', + 'STAT_MAX_CONLOSSES', + 'STAT_MAX_CONLOSS_TRADES', 'STAT_BALANCEMIN', + 'STAT_BALANCE_DD', + 'STAT_BALANCEDD_PERCENT', 'STAT_BALANCE_DDREL_PERCENT', + 'STAT_BALANCE_DD_RELATIVE', 'STAT_EQUITYMIN', + 'STAT_EQUITY_DD', + 'STAT_EQUITYDD_PERCENT', 'STAT_EQUITY_DDREL_PERCENT', + 'STAT_EQUITY_DD_RELATIVE', 'STAT_EXPECTED_PAYOFF', + 'STAT_PROFIT_FACTOR', + 'STAT_RECOVERY_FACTOR', 'STAT_SHARPE_RATIO', + 'STAT_MIN_MARGINLEVEL', + 'STAT_CUSTOM_ONTESTER', 'STAT_DEALS', 'STAT_TRADES', + 'STAT_PROFIT_TRADES', + 'STAT_LOSS_TRADES', 'STAT_SHORT_TRADES', 'STAT_LONG_TRADES', + 'STAT_PROFIT_SHORTTRADES', 'STAT_PROFIT_LONGTRADES', + 'STAT_PROFITTRADES_AVGCON', 'STAT_LOSSTRADES_AVGCON', + 'SERIES_BARS_COUNT', 'SERIES_FIRSTDATE', 'SERIES_LASTBAR_DATE', + 'SERIES_SERVER_FIRSTDATE', 'SERIES_TERMINAL_FIRSTDATE', + 'SERIES_SYNCHRONIZED', + 'OP_BUY', 'OP_SELL', 'OP_BUYLIMIT', 'OP_SELLLIMIT', + 'OP_BUYSTOP', 'OP_SELLSTOP', + 'TRADE_ACTION_DEAL', 'TRADE_ACTION_PENDING', + 'TRADE_ACTION_SLTP', + 'TRADE_ACTION_MODIFY', 'TRADE_ACTION_REMOVE', + 
'__DATE__', '__DATETIME__', '__LINE__', '__FILE__', + '__PATH__', '__FUNCTION__', + '__FUNCSIG__', '__MQLBUILD__', '__MQL4BUILD__', + 'M_E', 'M_LOG2E', 'M_LOG10E', 'M_LN2', 'M_LN10', + 'M_PI', 'M_PI_2', 'M_PI_4', 'M_1_PI', + 'M_2_PI', 'M_2_SQRTPI', 'M_SQRT2', 'M_SQRT1_2', + 'CHAR_MIN', 'CHAR_MAX', 'UCHAR_MAX', + 'SHORT_MIN', 'SHORT_MAX', 'USHORT_MAX', + 'INT_MIN', 'INT_MAX', 'UINT_MAX', + 'LONG_MIN', 'LONG_MAX', 'ULONG_MAX', + 'DBL_MIN', 'DBL_MAX', 'DBL_EPSILON', 'DBL_DIG', 'DBL_MANT_DIG', + 'DBL_MAX_10_EXP', 'DBL_MAX_EXP', 'DBL_MIN_10_EXP', 'DBL_MIN_EXP', + 'FLT_MIN', 'FLT_MAX', 'FLT_EPSILON', + 'FLT_DIG', 'FLT_MANT_DIG', 'FLT_MAX_10_EXP', + 'FLT_MAX_EXP', 'FLT_MIN_10_EXP', 'FLT_MIN_EXP', 'REASON_PROGRAM' + 'REASON_REMOVE', 'REASON_RECOMPILE', + 'REASON_CHARTCHANGE', 'REASON_CHARTCLOSE', + 'REASON_PARAMETERS', 'REASON_ACCOUNT', + 'REASON_TEMPLATE', 'REASON_INITFAILED', + 'REASON_CLOSE', 'POINTER_INVALID' + 'POINTER_DYNAMIC', 'POINTER_AUTOMATIC', + 'NULL', 'EMPTY', 'EMPTY_VALUE', 'CLR_NONE', 'WHOLE_ARRAY', + 'CHARTS_MAX', 'clrNONE', 'EMPTY_VALUE', 'INVALID_HANDLE', + 'IS_DEBUG_MODE', 'IS_PROFILE_MODE', 'NULL', 'WHOLE_ARRAY', 'WRONG_VALUE', + 'ERR_NO_ERROR', 'ERR_NO_RESULT', 'ERR_COMMON_ERROR', + 'ERR_INVALID_TRADE_PARAMETERS', + 'ERR_SERVER_BUSY', 'ERR_OLD_VERSION', 'ERR_NO_CONNECTION', + 'ERR_NOT_ENOUGH_RIGHTS', + 'ERR_TOO_FREQUENT_REQUESTS', 'ERR_MALFUNCTIONAL_TRADE', + 'ERR_ACCOUNT_DISABLED', + 'ERR_INVALID_ACCOUNT', 'ERR_TRADE_TIMEOUT', + 'ERR_INVALID_PRICE', 'ERR_INVALID_STOPS', + 'ERR_INVALID_TRADE_VOLUME', 'ERR_MARKET_CLOSED', + 'ERR_TRADE_DISABLED', + 'ERR_NOT_ENOUGH_MONEY', 'ERR_PRICE_CHANGED', + 'ERR_OFF_QUOTES', 'ERR_BROKER_BUSY', + 'ERR_REQUOTE', 'ERR_ORDER_LOCKED', + 'ERR_LONG_POSITIONS_ONLY_ALLOWED', 'ERR_TOO_MANY_REQUESTS', + 'ERR_TRADE_MODIFY_DENIED', 'ERR_TRADE_CONTEXT_BUSY', + 'ERR_TRADE_EXPIRATION_DENIED', + 'ERR_TRADE_TOO_MANY_ORDERS', 'ERR_TRADE_HEDGE_PROHIBITED', + 'ERR_TRADE_PROHIBITED_BY_FIFO', + 'FILE_READ', 'FILE_WRITE', 'FILE_BIN', 'FILE_CSV', 'FILE_TXT', + 'FILE_ANSI', 'FILE_UNICODE', + 'FILE_SHARE_READ', 'FILE_SHARE_WRITE', 'FILE_REWRITE', + 'FILE_COMMON', 'FILE_EXISTS', + 'FILE_CREATE_DATE', 'FILE_MODIFY_DATE', + 'FILE_ACCESS_DATE', 'FILE_SIZE', 'FILE_POSITION', + 'FILE_END', 'FILE_LINE_END', 'FILE_IS_COMMON', + 'FILE_IS_TEXT', 'FILE_IS_BINARY', + 'FILE_IS_CSV', 'FILE_IS_ANSI', 'FILE_IS_READABLE', 'FILE_IS_WRITABLE', + 'SEEK_SET', 'SEEK_CUR', 'SEEK_END', 'CP_ACP', + 'CP_OEMCP', 'CP_MACCP', 'CP_THREAD_ACP', + 'CP_SYMBOL', 'CP_UTF7', 'CP_UTF8', 'IDOK', 'IDCANCEL', 'IDABORT', + 'IDRETRY', 'IDIGNORE', 'IDYES', 'IDNO', 'IDTRYAGAIN', 'IDCONTINUE', + 'MB_OK', 'MB_OKCANCEL', 'MB_ABORTRETRYIGNORE', 'MB_YESNOCANCEL', + 'MB_YESNO', 'MB_RETRYCANCEL', + 'MB_CANCELTRYCONTINUE', 'MB_ICONSTOP', 'MB_ICONERROR', + 'MB_ICONHAND', 'MB_ICONQUESTION', + 'MB_ICONEXCLAMATION', 'MB_ICONWARNING', + 'MB_ICONINFORMATION', 'MB_ICONASTERISK', + 'MB_DEFBUTTON1', 'MB_DEFBUTTON2', 'MB_DEFBUTTON3', + 'MB_DEFBUTTON4'), suffix=r'\b'), + Name.Constant), + (words(( + 'Black', 'DarkGreen', 'DarkSlateGray', 'Olive', + 'Green', 'Teal', 'Navy', 'Purple', + 'Maroon', 'Indigo', 'MidnightBlue', 'DarkBlue', + 'DarkOliveGreen', 'SaddleBrown', + 'ForestGreen', 'OliveDrab', 'SeaGreen', + 'DarkGoldenrod', 'DarkSlateBlue', + 'Sienna', 'MediumBlue', 'Brown', 'DarkTurquoise', + 'DimGray', 'LightSeaGreen', + 'DarkViolet', 'FireBrick', 'MediumVioletRed', + 'MediumSeaGreen', 'Chocolate', + 'Crimson', 'SteelBlue', 'Goldenrod', 'MediumSpringGreen', + 'LawnGreen', 'CadetBlue', + 'DarkOrchid', 'YellowGreen', 
'LimeGreen', 'OrangeRed', + 'DarkOrange', 'Orange', + 'Gold', 'Yellow', 'Chartreuse', 'Lime', 'SpringGreen', + 'Aqua', 'DeepSkyBlue', 'Blue', + 'Magenta', 'Red', 'Gray', 'SlateGray', 'Peru', 'BlueViolet', + 'LightSlateGray', 'DeepPink', + 'MediumTurquoise', 'DodgerBlue', 'Turquoise', 'RoyalBlue', + 'SlateBlue', 'DarkKhaki', + 'IndianRed', 'MediumOrchid', 'GreenYellow', + 'MediumAquamarine', 'DarkSeaGreen', + 'Tomato', 'RosyBrown', 'Orchid', 'MediumPurple', + 'PaleVioletRed', 'Coral', 'CornflowerBlue', + 'DarkGray', 'SandyBrown', 'MediumSlateBlue', + 'Tan', 'DarkSalmon', 'BurlyWood', + 'HotPink', 'Salmon', 'Violet', 'LightCoral', 'SkyBlue', + 'LightSalmon', 'Plum', + 'Khaki', 'LightGreen', 'Aquamarine', 'Silver', + 'LightSkyBlue', 'LightSteelBlue', + 'LightBlue', 'PaleGreen', 'Thistle', 'PowderBlue', + 'PaleGoldenrod', 'PaleTurquoise', + 'LightGray', 'Wheat', 'NavajoWhite', 'Moccasin', + 'LightPink', 'Gainsboro', 'PeachPuff', + 'Pink', 'Bisque', 'LightGoldenrod', 'BlanchedAlmond', + 'LemonChiffon', 'Beige', + 'AntiqueWhite', 'PapayaWhip', 'Cornsilk', + 'LightYellow', 'LightCyan', 'Linen', + 'Lavender', 'MistyRose', 'OldLace', 'WhiteSmoke', + 'Seashell', 'Ivory', 'Honeydew', + 'AliceBlue', 'LavenderBlush', 'MintCream', 'Snow', + 'White'), prefix='(clr)?', suffix=r'\b'), + Name.Constant), + inherit, + ], + } diff --git a/pygments/lexers/c_like/rust.py b/pygments/lexers/c_like/rust.py new file mode 100644 index 00000000..0b658c5f --- /dev/null +++ b/pygments/lexers/c_like/rust.py @@ -0,0 +1,161 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.c_like.rust + ~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for Rust languages. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +from pygments.lexer import RegexLexer, include, bygroups, words +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation + +__all__ = ['RustLexer'] + + +class RustLexer(RegexLexer): + """ + Lexer for the Rust programming language (version 0.9). + + .. 
versionadded:: 1.6 + """ + name = 'Rust' + filenames = ['*.rs'] + aliases = ['rust'] + mimetypes = ['text/x-rustsrc'] + + tokens = { + 'root': [ + # Whitespace and Comments + (r'\n', Text), + (r'\s+', Text), + (r'//[/!](.*?)\n', Comment.Doc), + (r'//(.*?)\n', Comment.Single), + (r'/\*', Comment.Multiline, 'comment'), + + # Keywords + (words(( + 'as', 'box', 'break', 'continue', 'do', 'else', 'enum', 'extern', + 'fn', 'for', 'if', 'impl', 'in', 'loop', 'match', 'mut', 'priv', + 'proc', 'pub', 'ref', 'return', 'static', '\'static', 'struct', + 'trait', 'true', 'type', 'unsafe', 'while'), suffix=r'\b'), + Keyword), + (words(('alignof', 'be', 'const', 'offsetof', 'pure', 'sizeof', + 'typeof', 'once', 'unsized', 'yield'), suffix=r'\b'), + Keyword.Reserved), + (r'(mod|use)\b', Keyword.Namespace), + (r'(true|false)\b', Keyword.Constant), + (r'let\b', Keyword.Declaration), + (words(('u8', 'u16', 'u32', 'u64', 'i8', 'i16', 'i32', 'i64', 'uint', + 'int', 'f32', 'f64', 'str', 'bool'), suffix=r'\b'), + Keyword.Type), + (r'self\b', Name.Builtin.Pseudo), + # Prelude + (words(( + 'Freeze', 'Pod', 'Send', 'Sized', 'Add', 'Sub', 'Mul', 'Div', 'Rem', 'Neg', 'Not', 'BitAnd', + 'BitOr', 'BitXor', 'Drop', 'Shl', 'Shr', 'Index', 'Option', 'Some', 'None', 'Result', + 'Ok', 'Err', 'from_str', 'range', 'print', 'println', 'Any', 'AnyOwnExt', 'AnyRefExt', + 'AnyMutRefExt', 'Ascii', 'AsciiCast', 'OnwedAsciiCast', 'AsciiStr', + 'IntoBytes', 'Bool', 'ToCStr', 'Char', 'Clone', 'DeepClone', 'Eq', 'ApproxEq', + 'Ord', 'TotalEq', 'Ordering', 'Less', 'Equal', 'Greater', 'Equiv', 'Container', + 'Mutable', 'Map', 'MutableMap', 'Set', 'MutableSet', 'Default', 'FromStr', + 'Hash', 'FromIterator', 'Extendable', 'Iterator', 'DoubleEndedIterator', + 'RandomAccessIterator', 'CloneableIterator', 'OrdIterator', + 'MutableDoubleEndedIterator', 'ExactSize', 'Times', 'Algebraic', + 'Trigonometric', 'Exponential', 'Hyperbolic', 'Bitwise', 'BitCount', + 'Bounded', 'Integer', 'Fractional', 'Real', 'RealExt', 'Num', 'NumCast', + 'CheckedAdd', 'CheckedSub', 'CheckedMul', 'Orderable', 'Signed', + 'Unsigned', 'Round', 'Primitive', 'Int', 'Float', 'ToStrRadix', + 'ToPrimitive', 'FromPrimitive', 'GenericPath', 'Path', 'PosixPath', + 'WindowsPath', 'RawPtr', 'Buffer', 'Writer', 'Reader', 'Seek', + 'SendStr', 'SendStrOwned', 'SendStrStatic', 'IntoSendStr', 'Str', + 'StrVector', 'StrSlice', 'OwnedStr', 'IterBytes', 'ToStr', 'IntoStr', + 'CopyableTuple', 'ImmutableTuple', 'ImmutableEqVector', 'ImmutableTotalOrdVector', + 'ImmutableCopyableVector', 'OwnedVector', 'OwnedCopyableVector', + 'OwnedEqVector', 'MutableVector', 'MutableTotalOrdVector', + 'Vector', 'VectorVector', 'CopyableVector', 'ImmutableVector', + 'Port', 'Chan', 'SharedChan', 'spawn', 'drop'), suffix=r'\b'), + Name.Builtin), + (r'(ImmutableTuple\d+|Tuple\d+)\b', Name.Builtin), + # Borrowed pointer + (r'(&)(\'[A-Za-z_]\w*)?', bygroups(Operator, Name)), + # Labels + (r'\'[A-Za-z_]\w*:', Name.Label), + # Character Literal + (r"""'(\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}""" + r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|.)'""", + String.Char), + # Lifetime + (r"""'[a-zA-Z_]\w*""", Name.Label), + # Binary Literal + (r'0b[01_]+', Number.Bin, 'number_lit'), + # Octal Literal + (r'0o[0-7_]+', Number.Oct, 'number_lit'), + # Hexadecimal Literal + (r'0[xX][0-9a-fA-F_]+', Number.Hex, 'number_lit'), + # Decimal Literal + (r'[0-9][0-9_]*(\.[0-9_]+[eE][+\-]?[0-9_]+|' + r'\.[0-9_]*|[eE][+\-]?[0-9_]+)', Number.Float, 'number_lit'), + (r'[0-9][0-9_]*', Number.Integer, 'number_lit'), + # String Literal + 
(r'"', String, 'string'), + (r'r(#*)".*?"\1', String.Raw), + + # Operators and Punctuation + (r'[{}()\[\],.;]', Punctuation), + (r'[+\-*/%&|<>^!~@=:?]', Operator), + + # Identifier + (r'[a-zA-Z_]\w*', Name), + + # Attributes + (r'#\[', Comment.Preproc, 'attribute['), + # Macros + (r'([A-Za-z_]\w*)!\s*([A-Za-z_]\w*)?\s*\{', + bygroups(Comment.Preproc, Name), 'macro{'), + (r'([A-Za-z_]\w*)!\s*([A-Za-z_]\w*)?\(', + bygroups(Comment.Preproc, Name), 'macro('), + ], + 'comment': [ + (r'[^*/]+', Comment.Multiline), + (r'/\*', Comment.Multiline, '#push'), + (r'\*/', Comment.Multiline, '#pop'), + (r'[*/]', Comment.Multiline), + ], + 'number_lit': [ + (r'(([ui](8|16|32|64)?)|(f(32|64)?))?', Keyword, '#pop'), + ], + 'string': [ + (r'"', String, '#pop'), + (r"""\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}""" + r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}""", String.Escape), + (r'[^\\"]+', String), + (r'\\', String), + ], + 'macro{': [ + (r'\{', Operator, '#push'), + (r'\}', Operator, '#pop'), + ], + 'macro(': [ + (r'\(', Operator, '#push'), + (r'\)', Operator, '#pop'), + ], + 'attribute_common': [ + (r'"', String, 'string'), + (r'\[', Comment.Preproc, 'attribute['), + (r'\(', Comment.Preproc, 'attribute('), + ], + 'attribute[': [ + include('attribute_common'), + (r'\];?', Comment.Preproc, '#pop'), + (r'[^"\]]+', Comment.Preproc), + ], + 'attribute(': [ + include('attribute_common'), + (r'\);?', Comment.Preproc, '#pop'), + (r'[^"\)]+', Comment.Preproc), + ], + } diff --git a/pygments/lexers/cobol.py b/pygments/lexers/cobol.py new file mode 100644 index 00000000..55c8e959 --- /dev/null +++ b/pygments/lexers/cobol.py @@ -0,0 +1,231 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.cobol + ~~~~~~~~~~~~~~~~~~~~~ + + Lexers for COBOL languages. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import re + +from pygments.lexer import RegexLexer, include +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation, Error + +__all__ = ['CobolLexer', 'CobolFreeformatLexer'] + + +class CobolLexer(RegexLexer): + """ + Lexer for OpenCOBOL code. + + .. versionadded:: 1.6 + """ + name = 'COBOL' + aliases = ['cobol'] + filenames = ['*.cob', '*.COB', '*.cpy', '*.CPY'] + mimetypes = ['text/x-cobol'] + flags = re.IGNORECASE | re.MULTILINE + + # Data Types: by PICTURE and USAGE + # Operators: **, *, +, -, /, <, >, <=, >=, =, <> + # Logical (?): NOT, AND, OR + + # Reserved words: + # http://opencobol.add1tocobol.com/#reserved-words + # Intrinsics: + # http://opencobol.add1tocobol.com/#does-opencobol-implement-any-intrinsic-functions + + tokens = { + 'root': [ + include('comment'), + include('strings'), + include('core'), + include('nums'), + (r'[a-z0-9]([_a-z0-9\-]*[a-z0-9]+)?', Name.Variable), + # (r'[\s]+', Text), + (r'[ \t]+', Text), + ], + 'comment': [ + (r'(^.{6}[*/].*\n|^.{6}|\*>.*\n)', Comment), + ], + 'core': [ + # Figurative constants + (r'(^|(?<=[^0-9a-z_\-]))(ALL\s+)?' 
+ r'((ZEROES)|(HIGH-VALUE|LOW-VALUE|QUOTE|SPACE|ZERO)(S)?)' + r'\s*($|(?=[^0-9a-z_\-]))', + Name.Constant), + + # Reserved words STATEMENTS and other bolds + (r'(^|(?<=[^0-9a-z_\-]))' + r'(ACCEPT|ADD|ALLOCATE|CALL|CANCEL|CLOSE|COMPUTE|' + r'CONFIGURATION|CONTINUE|' + r'DATA|DELETE|DISPLAY|DIVIDE|DIVISION|ELSE|END|END-ACCEPT|' + r'END-ADD|END-CALL|END-COMPUTE|END-DELETE|END-DISPLAY|' + r'END-DIVIDE|END-EVALUATE|END-IF|END-MULTIPLY|END-OF-PAGE|' + r'END-PERFORM|END-READ|END-RETURN|END-REWRITE|END-SEARCH|' + r'END-START|END-STRING|END-SUBTRACT|END-UNSTRING|END-WRITE|' + r'ENVIRONMENT|EVALUATE|EXIT|FD|FILE|FILE-CONTROL|FOREVER|' + r'FREE|GENERATE|GO|GOBACK|' + r'IDENTIFICATION|IF|INITIALIZE|' + r'INITIATE|INPUT-OUTPUT|INSPECT|INVOKE|I-O-CONTROL|LINKAGE|' + r'LOCAL-STORAGE|MERGE|MOVE|MULTIPLY|OPEN|' + r'PERFORM|PROCEDURE|PROGRAM-ID|RAISE|READ|RELEASE|RESUME|' + r'RETURN|REWRITE|SCREEN|' + r'SD|SEARCH|SECTION|SET|SORT|START|STOP|STRING|SUBTRACT|' + r'SUPPRESS|TERMINATE|THEN|UNLOCK|UNSTRING|USE|VALIDATE|' + r'WORKING-STORAGE|WRITE)' + r'\s*($|(?=[^0-9a-z_\-]))', Keyword.Reserved), + + # Reserved words + (r'(^|(?<=[^0-9a-z_\-]))' + r'(ACCESS|ADDRESS|ADVANCING|AFTER|ALL|' + r'ALPHABET|ALPHABETIC|ALPHABETIC-LOWER|ALPHABETIC-UPPER|' + r'ALPHANUMERIC|ALPHANUMERIC-EDITED|ALSO|ALTER|ALTERNATE' + r'ANY|ARE|AREA|AREAS|ARGUMENT-NUMBER|ARGUMENT-VALUE|AS|' + r'ASCENDING|ASSIGN|AT|AUTO|AUTO-SKIP|AUTOMATIC|AUTOTERMINATE|' + r'BACKGROUND-COLOR|BASED|BEEP|BEFORE|BELL|' + r'BLANK|' + r'BLINK|BLOCK|BOTTOM|BY|BYTE-LENGTH|CHAINING|' + r'CHARACTER|CHARACTERS|CLASS|CODE|CODE-SET|COL|COLLATING|' + r'COLS|COLUMN|COLUMNS|COMMA|COMMAND-LINE|COMMIT|COMMON|' + r'CONSTANT|CONTAINS|CONTENT|CONTROL|' + r'CONTROLS|CONVERTING|COPY|CORR|CORRESPONDING|COUNT|CRT|' + r'CURRENCY|CURSOR|CYCLE|DATE|DAY|DAY-OF-WEEK|DE|DEBUGGING|' + r'DECIMAL-POINT|DECLARATIVES|DEFAULT|DELIMITED|' + r'DELIMITER|DEPENDING|DESCENDING|DETAIL|DISK|' + r'DOWN|DUPLICATES|DYNAMIC|EBCDIC|' + r'ENTRY|ENVIRONMENT-NAME|ENVIRONMENT-VALUE|EOL|EOP|' + r'EOS|ERASE|ERROR|ESCAPE|EXCEPTION|' + r'EXCLUSIVE|EXTEND|EXTERNAL|' + r'FILE-ID|FILLER|FINAL|FIRST|FIXED|FLOAT-LONG|FLOAT-SHORT|' + r'FOOTING|FOR|FOREGROUND-COLOR|FORMAT|FROM|FULL|FUNCTION|' + r'FUNCTION-ID|GIVING|GLOBAL|GROUP|' + r'HEADING|HIGHLIGHT|I-O|ID|' + r'IGNORE|IGNORING|IN|INDEX|INDEXED|INDICATE|' + r'INITIAL|INITIALIZED|INPUT|' + r'INTO|INTRINSIC|INVALID|IS|JUST|JUSTIFIED|KEY|LABEL|' + r'LAST|LEADING|LEFT|LENGTH|LIMIT|LIMITS|LINAGE|' + r'LINAGE-COUNTER|LINE|LINES|LOCALE|LOCK|' + r'LOWLIGHT|MANUAL|MEMORY|MINUS|MODE|' + r'MULTIPLE|NATIONAL|NATIONAL-EDITED|NATIVE|' + r'NEGATIVE|NEXT|NO|NULL|NULLS|NUMBER|NUMBERS|NUMERIC|' + r'NUMERIC-EDITED|OBJECT-COMPUTER|OCCURS|OF|OFF|OMITTED|ON|ONLY|' + r'OPTIONAL|ORDER|ORGANIZATION|OTHER|OUTPUT|OVERFLOW|' + r'OVERLINE|PACKED-DECIMAL|PADDING|PAGE|PARAGRAPH|' + r'PLUS|POINTER|POSITION|POSITIVE|PRESENT|PREVIOUS|' + r'PRINTER|PRINTING|PROCEDURE-POINTER|PROCEDURES|' + r'PROCEED|PROGRAM|PROGRAM-POINTER|PROMPT|QUOTE|' + r'QUOTES|RANDOM|RD|RECORD|RECORDING|RECORDS|RECURSIVE|' + r'REDEFINES|REEL|REFERENCE|RELATIVE|REMAINDER|REMOVAL|' + r'RENAMES|REPLACING|REPORT|REPORTING|REPORTS|REPOSITORY|' + r'REQUIRED|RESERVE|RETURNING|REVERSE-VIDEO|REWIND|' + r'RIGHT|ROLLBACK|ROUNDED|RUN|SAME|SCROLL|' + r'SECURE|SEGMENT-LIMIT|SELECT|SENTENCE|SEPARATE|' + r'SEQUENCE|SEQUENTIAL|SHARING|SIGN|SIGNED|SIGNED-INT|' + r'SIGNED-LONG|SIGNED-SHORT|SIZE|SORT-MERGE|SOURCE|' + r'SOURCE-COMPUTER|SPECIAL-NAMES|STANDARD|' + r'STANDARD-1|STANDARD-2|STATUS|SUM|' + r'SYMBOLIC|SYNC|SYNCHRONIZED|TALLYING|TAPE|' + 
r'TEST|THROUGH|THRU|TIME|TIMES|TO|TOP|TRAILING|' + r'TRANSFORM|TYPE|UNDERLINE|UNIT|UNSIGNED|' + r'UNSIGNED-INT|UNSIGNED-LONG|UNSIGNED-SHORT|UNTIL|UP|' + r'UPDATE|UPON|USAGE|USING|VALUE|VALUES|VARYING|WAIT|WHEN|' + r'WITH|WORDS|YYYYDDD|YYYYMMDD)' + r'\s*($|(?=[^0-9a-z_\-]))', Keyword.Pseudo), + + # inactive reserved words + (r'(^|(?<=[^0-9a-z_\-]))' + r'(ACTIVE-CLASS|ALIGNED|ANYCASE|ARITHMETIC|ATTRIBUTE|B-AND|' + r'B-NOT|B-OR|B-XOR|BIT|BOOLEAN|CD|CENTER|CF|CH|CHAIN|CLASS-ID|' + r'CLASSIFICATION|COMMUNICATION|CONDITION|DATA-POINTER|' + r'DESTINATION|DISABLE|EC|EGI|EMI|ENABLE|END-RECEIVE|' + r'ENTRY-CONVENTION|EO|ESI|EXCEPTION-OBJECT|EXPANDS|FACTORY|' + r'FLOAT-BINARY-16|FLOAT-BINARY-34|FLOAT-BINARY-7|' + r'FLOAT-DECIMAL-16|FLOAT-DECIMAL-34|FLOAT-EXTENDED|FORMAT|' + r'FUNCTION-POINTER|GET|GROUP-USAGE|IMPLEMENTS|INFINITY|' + r'INHERITS|INTERFACE|INTERFACE-ID|INVOKE|LC_ALL|LC_COLLATE|' + r'LC_CTYPE|LC_MESSAGES|LC_MONETARY|LC_NUMERIC|LC_TIME|' + r'LINE-COUNTER|MESSAGE|METHOD|METHOD-ID|NESTED|NONE|NORMAL|' + r'OBJECT|OBJECT-REFERENCE|OPTIONS|OVERRIDE|PAGE-COUNTER|PF|PH|' + r'PROPERTY|PROTOTYPE|PURGE|QUEUE|RAISE|RAISING|RECEIVE|' + r'RELATION|REPLACE|REPRESENTS-NOT-A-NUMBER|RESET|RESUME|RETRY|' + r'RF|RH|SECONDS|SEGMENT|SELF|SEND|SOURCES|STATEMENT|STEP|' + r'STRONG|SUB-QUEUE-1|SUB-QUEUE-2|SUB-QUEUE-3|SUPER|SYMBOL|' + r'SYSTEM-DEFAULT|TABLE|TERMINAL|TEXT|TYPEDEF|UCS-4|UNIVERSAL|' + r'USER-DEFAULT|UTF-16|UTF-8|VAL-STATUS|VALID|VALIDATE|' + r'VALIDATE-STATUS)\s*($|(?=[^0-9a-z_\-]))', Error), + + # Data Types + (r'(^|(?<=[^0-9a-z_\-]))' + r'(PIC\s+.+?(?=(\s|\.\s))|PICTURE\s+.+?(?=(\s|\.\s))|' + r'(COMPUTATIONAL)(-[1-5X])?|(COMP)(-[1-5X])?|' + r'BINARY-C-LONG|' + r'BINARY-CHAR|BINARY-DOUBLE|BINARY-LONG|BINARY-SHORT|' + r'BINARY)\s*($|(?=[^0-9a-z_\-]))', Keyword.Type), + + # Operators + (r'(\*\*|\*|\+|-|/|<=|>=|<|>|==|/=|=)', Operator), + + # (r'(::)', Keyword.Declaration), + + (r'([(),;:&%.])', Punctuation), + + # Intrinsics + (r'(^|(?<=[^0-9a-z_\-]))(ABS|ACOS|ANNUITY|ASIN|ATAN|BYTE-LENGTH|' + r'CHAR|COMBINED-DATETIME|CONCATENATE|COS|CURRENT-DATE|' + r'DATE-OF-INTEGER|DATE-TO-YYYYMMDD|DAY-OF-INTEGER|DAY-TO-YYYYDDD|' + r'EXCEPTION-(?:FILE|LOCATION|STATEMENT|STATUS)|EXP10|EXP|E|' + r'FACTORIAL|FRACTION-PART|INTEGER-OF-(?:DATE|DAY|PART)|INTEGER|' + r'LENGTH|LOCALE-(?:DATE|TIME(?:-FROM-SECONDS)?)|LOG(?:10)?|' + r'LOWER-CASE|MAX|MEAN|MEDIAN|MIDRANGE|MIN|MOD|NUMVAL(?:-C)?|' + r'ORD(?:-MAX|-MIN)?|PI|PRESENT-VALUE|RANDOM|RANGE|REM|REVERSE|' + r'SECONDS-FROM-FORMATTED-TIME|SECONDS-PAST-MIDNIGHT|SIGN|SIN|SQRT|' + r'STANDARD-DEVIATION|STORED-CHAR-LENGTH|SUBSTITUTE(?:-CASE)?|' + r'SUM|TAN|TEST-DATE-YYYYMMDD|TEST-DAY-YYYYDDD|TRIM|' + r'UPPER-CASE|VARIANCE|WHEN-COMPILED|YEAR-TO-YYYY)\s*' + r'($|(?=[^0-9a-z_\-]))', Name.Function), + + # Booleans + (r'(^|(?<=[^0-9a-z_\-]))(true|false)\s*($|(?=[^0-9a-z_\-]))', Name.Builtin), + # Comparing Operators + (r'(^|(?<=[^0-9a-z_\-]))(equal|equals|ne|lt|le|gt|ge|' + r'greater|less|than|not|and|or)\s*($|(?=[^0-9a-z_\-]))', Operator.Word), + ], + + # \"[^\"\n]*\"|\'[^\'\n]*\' + 'strings': [ + # apparently strings can be delimited by EOL if they are continued + # in the next line + (r'"[^"\n]*("|\n)', String.Double), + (r"'[^'\n]*('|\n)", String.Single), + ], + + 'nums': [ + (r'\d+(\s*|\.$|$)', Number.Integer), + (r'[+-]?\d*\.\d+([eE][-+]?\d+)?', Number.Float), + (r'[+-]?\d+\.\d*([eE][-+]?\d+)?', Number.Float), + ], + } + + +class CobolFreeformatLexer(CobolLexer): + """ + Lexer for Free format OpenCOBOL code. + + .. 
versionadded:: 1.6 + """ + name = 'COBOLFree' + aliases = ['cobolfree'] + filenames = ['*.cbl', '*.CBL'] + mimetypes = [] + flags = re.IGNORECASE | re.MULTILINE + + tokens = { + 'comment': [ + (r'(\*>.*\n|^\w*\*.*$)', Comment), + ], + } diff --git a/pygments/lexers/compiled.py b/pygments/lexers/compiled.py index cdf0a6fc..a5601b17 100644 --- a/pygments/lexers/compiled.py +++ b/pygments/lexers/compiled.py @@ -3,5373 +3,33 @@ pygments.lexers.compiled ~~~~~~~~~~~~~~~~~~~~~~~~ - Lexers for compiled languages. + Just export lexer classes previously contained in this module. :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ -import re -from string import Template - -from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, \ - this, combined, inherit, do_insertions, default, words -from pygments.util import get_bool_opt, get_list_opt -from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ - Number, Punctuation, Error, Literal, Generic -from pygments.scanner import Scanner - -# backwards compatibility from pygments.lexers.functional import OcamlLexer from pygments.lexers.jvm import JavaLexer, ScalaLexer - -__all__ = ['CLexer', 'CppLexer', 'DLexer', 'DelphiLexer', 'ECLexer', - 'NesCLexer', 'DylanLexer', 'ObjectiveCLexer', 'ObjectiveCppLexer', - 'FortranLexer', 'GLShaderLexer', 'PrologLexer', 'CythonLexer', - 'ValaLexer', 'OocLexer', 'GoLexer', 'FelixLexer', 'AdaLexer', - 'Modula2Lexer', 'BlitzMaxLexer', 'BlitzBasicLexer', 'NimrodLexer', - 'FantomLexer', 'RustLexer', 'CudaLexer', 'MonkeyLexer', 'SwigLexer', - 'DylanLidLexer', 'DylanConsoleLexer', 'CobolLexer', - 'CobolFreeformatLexer', 'LogosLexer', 'ClayLexer', 'PikeLexer', - 'ChapelLexer', 'EiffelLexer', 'Inform6Lexer', 'Inform7Lexer', - 'Inform6TemplateLexer', 'MqlLexer', 'SwiftLexer', 'NitLexer'] - - -class CFamilyLexer(RegexLexer): - """ - For C family source code. This is used as a base class to avoid repetitious - definitions. 
- """ - - #: optional Comment or Whitespace - _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)+' - #: only one /* */ style comment - _ws1 = r'\s*(?:/[*].*?[*]/\s*)*' - - tokens = { - 'whitespace': [ - # preprocessor directives: without whitespace - ('^#if\s+0', Comment.Preproc, 'if0'), - ('^#', Comment.Preproc, 'macro'), - # or with whitespace - ('^(' + _ws1 + r')(#if\s+0)', - bygroups(using(this), Comment.Preproc), 'if0'), - ('^(' + _ws1 + ')(#)', - bygroups(using(this), Comment.Preproc), 'macro'), - (r'\n', Text), - (r'\s+', Text), - (r'\\\n', Text), # line continuation - (r'//(\n|(.|\n)*?[^\\]\n)', Comment.Single), - (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline), - ], - 'statements': [ - (r'L?"', String, 'string'), - (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char), - (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float), - (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float), - (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex), - (r'0[0-7]+[LlUu]*', Number.Oct), - (r'\d+[LlUu]*', Number.Integer), - (r'\*/', Error), - (r'[~!%^&*+=|?:<>/-]', Operator), - (r'[()\[\],.]', Punctuation), - (words(('auto', 'break', 'case', 'const', 'continue', 'default', 'do', - 'else', 'enum', 'extern', 'for', 'goto', 'if', 'register', - 'restricted', 'return', 'sizeof', 'static', 'struct', - 'switch', 'typedef', 'union', 'volatile', 'while'), - suffix=r'\b'), Keyword), - (r'(bool|int|long|float|short|double|char|unsigned|signed|void|' - r'[a-z_][a-z0-9_]*_t)\b', - Keyword.Type), - (words(('inline', '_inline', '__inline', 'naked', 'restrict', - 'thread', 'typename'), suffix=r'\b'), Keyword.Reserved), - # Vector intrinsics - (r'(__m(128i|128d|128|64))\b', Keyword.Reserved), - # Microsoft-isms - (words(( - 'asm', 'int8', 'based', 'except', 'int16', 'stdcall', 'cdecl', - 'fastcall', 'int32', 'declspec', 'finally', 'int64', 'try', - 'leave', 'wchar_t', 'w64', 'unaligned', 'raise', 'noop', - 'identifier', 'forceinline', 'assume'), - prefix=r'__', suffix=r'\b'), Keyword.Reserved), - (r'(true|false|NULL)\b', Name.Builtin), - (r'([a-zA-Z_]\w*)(\s*)(:)(?!:)', bygroups(Name.Label, Text, Punctuation)), - ('[a-zA-Z_]\w*', Name), - ], - 'root': [ - include('whitespace'), - # functions - (r'((?:[\w*\s])+?(?:\s|[*]))' # return arguments - r'([a-zA-Z_]\w*)' # method name - r'(\s*\([^;]*?\))' # signature - r'(' + _ws + r')?({)', - bygroups(using(this), Name.Function, using(this), using(this), - Punctuation), - 'function'), - # function declarations - (r'((?:[\w*\s])+?(?:\s|[*]))' # return arguments - r'([a-zA-Z_]\w*)' # method name - r'(\s*\([^;]*?\))' # signature - r'(' + _ws + r')?(;)', - bygroups(using(this), Name.Function, using(this), using(this), - Punctuation)), - default('statement'), - ], - 'statement': [ - include('whitespace'), - include('statements'), - ('[{}]', Punctuation), - (';', Punctuation, '#pop'), - ], - 'function': [ - include('whitespace'), - include('statements'), - (';', Punctuation), - ('{', Punctuation, '#push'), - ('}', Punctuation, '#pop'), - ], - 'string': [ - (r'"', String, '#pop'), - (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|' - r'u[a-fA-F0-9]{4}|U[a-fA-F0-9]{8}|[0-7]{1,3})', String.Escape), - (r'[^\\"\n]+', String), # all other characters - (r'\\\n', String), # line continuation - (r'\\', String), # stray backslash - ], - 'macro': [ - (r'[^/\n]+', Comment.Preproc), - (r'/[*](.|\n)*?[*]/', Comment.Multiline), - (r'//.*?\n', Comment.Single, '#pop'), - (r'/', Comment.Preproc), - (r'(?<=\\)\n', Comment.Preproc), - (r'\n', Comment.Preproc, '#pop'), - ], - 'if0': [ - (r'^\s*#if.*?(?)', Text, 
'#pop'), - ], - } - - def analyse_text(text): - if re.search('#include <[a-z]+>', text): - return 0.2 - if re.search('using namespace ', text): - return 0.4 - - -class PikeLexer(CppLexer): - """ - For `Pike `_ source code. - - .. versionadded:: 2.0 - """ - name = 'Pike' - aliases = ['pike'] - filenames = ['*.pike', '*.pmod'] - mimetypes = ['text/x-pike'] - - tokens = { - 'statements': [ - (words(( - 'catch', 'new', 'private', 'protected', 'public', 'gauge', - 'throw', 'throws', 'class', 'interface', 'implement', 'abstract', 'extends', 'from', - 'this', 'super', 'new', 'constant', 'final', 'static', 'import', 'use', 'extern', - 'inline', 'proto', 'break', 'continue', 'if', 'else', 'for', - 'while', 'do', 'switch', 'case', 'as', 'in', 'version', 'return', 'true', 'false', 'null', - '__VERSION__', '__MAJOR__', '__MINOR__', '__BUILD__', '__REAL_VERSION__', - '__REAL_MAJOR__', '__REAL_MINOR__', '__REAL_BUILD__', '__DATE__', '__TIME__', - '__FILE__', '__DIR__', '__LINE__', '__AUTO_BIGNUM__', '__NT__', '__PIKE__', - '__amigaos__', '_Pragma', 'static_assert', 'defined', 'sscanf'), suffix=r'\b'), - Keyword), - (r'(bool|int|long|float|short|double|char|string|object|void|mapping|' - r'array|multiset|program|function|lambda|mixed|' - r'[a-z_][a-z0-9_]*_t)\b', - Keyword.Type), - (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'), - (r'[~!%^&*+=|?:<>/-@]', Operator), - inherit, - ], - 'classname': [ - (r'[a-zA-Z_]\w*', Name.Class, '#pop'), - # template specification - (r'\s*(?=>)', Text, '#pop'), - ], - } - - -class SwigLexer(CppLexer): - """ - For `SWIG `_ source code. - - .. versionadded:: 2.0 - """ - name = 'SWIG' - aliases = ['swig'] - filenames = ['*.swg', '*.i'] - mimetypes = ['text/swig'] - priority = 0.04 # Lower than C/C++ and Objective C/C++ - - tokens = { - 'statements': [ - # SWIG directives - (r'(%[a-z_][a-z0-9_]*)', Name.Function), - # Special variables - ('\$\**\&?\w+', Name), - # Stringification / additional preprocessor directives - (r'##*[a-zA-Z_]\w*', Comment.Preproc), - inherit, - ], - } - - # This is a far from complete set of SWIG directives - swig_directives = set(( - # Most common directives - '%apply', '%define', '%director', '%enddef', '%exception', '%extend', - '%feature', '%fragment', '%ignore', '%immutable', '%import', '%include', - '%inline', '%insert', '%module', '%newobject', '%nspace', '%pragma', - '%rename', '%shared_ptr', '%template', '%typecheck', '%typemap', - # Less common directives - '%arg', '%attribute', '%bang', '%begin', '%callback', '%catches', '%clear', - '%constant', '%copyctor', '%csconst', '%csconstvalue', '%csenum', - '%csmethodmodifiers', '%csnothrowexception', '%default', '%defaultctor', - '%defaultdtor', '%defined', '%delete', '%delobject', '%descriptor', - '%exceptionclass', '%exceptionvar', '%extend_smart_pointer', '%fragments', - '%header', '%ifcplusplus', '%ignorewarn', '%implicit', '%implicitconv', - '%init', '%javaconst', '%javaconstvalue', '%javaenum', '%javaexception', - '%javamethodmodifiers', '%kwargs', '%luacode', '%mutable', '%naturalvar', - '%nestedworkaround', '%perlcode', '%pythonabc', '%pythonappend', - '%pythoncallback', '%pythoncode', '%pythondynamic', '%pythonmaybecall', - '%pythonnondynamic', '%pythonprepend', '%refobject', '%shadow', '%sizeof', - '%trackobjects', '%types', '%unrefobject', '%varargs', '%warn', - '%warnfilter')) - - def analyse_text(text): - rv = 0 - # Search for SWIG directives, which are conventionally at the beginning of - # a line. 
The probability of them being within a line is low, so let another - # lexer win in this case. - matches = re.findall(r'^\s*(%[a-z_][a-z0-9_]*)', text, re.M) - for m in matches: - if m in SwigLexer.swig_directives: - rv = 0.98 - break - else: - rv = 0.91 # Fraction higher than MatlabLexer - return rv - - -class ECLexer(CLexer): - """ - For eC source code with preprocessor directives. - - .. versionadded:: 1.5 - """ - name = 'eC' - aliases = ['ec'] - filenames = ['*.ec', '*.eh'] - mimetypes = ['text/x-echdr', 'text/x-ecsrc'] - - tokens = { - 'statements': [ - (words(( - 'virtual', 'class', 'private', 'public', 'property', 'import', - 'delete', 'new', 'new0', 'renew', 'renew0', 'define', 'get', - 'set', 'remote', 'dllexport', 'dllimport', 'stdcall', 'subclass', - '__on_register_module', 'namespace', 'using', 'typed_object', - 'any_object', 'incref', 'register', 'watch', 'stopwatching', 'firewatchers', - 'watchable', 'class_designer', 'class_fixed', 'class_no_expansion', 'isset', - 'class_default_property', 'property_category', 'class_data', - 'class_property', 'virtual', 'thisclass', 'dbtable', 'dbindex', - 'database_open', 'dbfield'), suffix=r'\b'), Keyword), - (words(('uint', 'uint16', 'uint32', 'uint64', 'bool', 'byte', - 'unichar', 'int64'), suffix=r'\b'), - Keyword.Type), - (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'), - (r'(null|value|this)\b', Name.Builtin), - inherit, - ], - 'classname': [ - (r'[a-zA-Z_]\w*', Name.Class, '#pop'), - # template specification - (r'\s*(?=>)', Text, '#pop'), - ], - } - - -class NesCLexer(CLexer): - """ - For `nesC `_ source code with preprocessor - directives. - - .. versionadded:: 2.0 - """ - name = 'nesC' - aliases = ['nesc'] - filenames = ['*.nc'] - mimetypes = ['text/x-nescsrc'] - - tokens = { - 'statements': [ - (words(( - 'abstract', 'as', 'async', 'atomic', 'call', 'command', 'component', - 'components', 'configuration', 'event', 'extends', 'generic', - 'implementation', 'includes', 'interface', 'module', 'new', 'norace', - 'post', 'provides', 'signal', 'task', 'uses'), suffix=r'\b'), - Keyword), - (words(('nx_struct', 'nx_union', 'nx_int8_t', 'nx_int16_t', 'nx_int32_t', - 'nx_int64_t', 'nx_uint8_t', 'nx_uint16_t', 'nx_uint32_t', - 'nx_uint64_t'), suffix=r'\b'), - Keyword.Type), - inherit, - ], - } - - -class ClayLexer(RegexLexer): - """ - For `Clay `_ source. - - .. 
versionadded:: 2.0 - """ - name = 'Clay' - filenames = ['*.clay'] - aliases = ['clay'] - mimetypes = ['text/x-clay'] - tokens = { - 'root': [ - (r'\s', Text), - (r'//.*?$', Comment.Singleline), - (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline), - (r'\b(public|private|import|as|record|variant|instance' - r'|define|overload|default|external|alias' - r'|rvalue|ref|forward|inline|noinline|forceinline' - r'|enum|var|and|or|not|if|else|goto|return|while' - r'|switch|case|break|continue|for|in|true|false|try|catch|throw' - r'|finally|onerror|staticassert|eval|when|newtype' - r'|__FILE__|__LINE__|__COLUMN__|__ARG__' - r')\b', Keyword), - (r'[~!%^&*+=|:<>/-]', Operator), - (r'[#(){}\[\],;.]', Punctuation), - (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex), - (r'\d+[LlUu]*', Number.Integer), - (r'\b(true|false)\b', Name.Builtin), - (r'(?i)[a-z_?][a-z_?0-9]*', Name), - (r'"""', String, 'tdqs'), - (r'"', String, 'dqs'), - ], - 'strings': [ - (r'(?i)\\(x[0-9a-f]{2}|.)', String.Escape), - (r'.', String), - ], - 'nl': [ - (r'\n', String), - ], - 'dqs': [ - (r'"', String, '#pop'), - include('strings'), - ], - 'tdqs': [ - (r'"""', String, '#pop'), - include('strings'), - include('nl'), - ], - } - - -class DLexer(RegexLexer): - """ - For D source. - - .. versionadded:: 1.2 - """ - name = 'D' - filenames = ['*.d', '*.di'] - aliases = ['d'] - mimetypes = ['text/x-dsrc'] - - tokens = { - 'root': [ - (r'\n', Text), - (r'\s+', Text), - # (r'\\\n', Text), # line continuations - # Comments - (r'//(.*?)\n', Comment.Single), - (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline), - (r'/\+', Comment.Multiline, 'nested_comment'), - # Keywords - (words(( - 'abstract', 'alias', 'align', 'asm', 'assert', 'auto', 'body', - 'break', 'case', 'cast', 'catch', 'class', 'const', 'continue', - 'debug', 'default', 'delegate', 'delete', 'deprecated', 'do', 'else', - 'enum', 'export', 'extern', 'finally', 'final', 'foreach_reverse', - 'foreach', 'for', 'function', 'goto', 'if', 'immutable', 'import', - 'interface', 'invariant', 'inout', 'in', 'is', 'lazy', 'mixin', - 'module', 'new', 'nothrow', 'out', 'override', 'package', 'pragma', - 'private', 'protected', 'public', 'pure', 'ref', 'return', 'scope', - 'shared', 'static', 'struct', 'super', 'switch', 'synchronized', - 'template', 'this', 'throw', 'try', 'typedef', 'typeid', 'typeof', - 'union', 'unittest', 'version', 'volatile', 'while', 'with', - '__gshared', '__traits', '__vector', '__parameters'), - suffix=r'\b'), - Keyword), - (words(( - 'bool', 'byte', 'cdouble', 'cent', 'cfloat', 'char', 'creal', - 'dchar', 'double', 'float', 'idouble', 'ifloat', 'int', 'ireal', - 'long', 'real', 'short', 'ubyte', 'ucent', 'uint', 'ulong', - 'ushort', 'void', 'wchar'), suffix=r'\b'), - Keyword.Type), - (r'(false|true|null)\b', Keyword.Constant), - (words(( - '__FILE__', '__MODULE__', '__LINE__', '__FUNCTION__', '__PRETTY_FUNCTION__' - '', '__DATE__', '__EOF__', '__TIME__', '__TIMESTAMP__', '__VENDOR__', - '__VERSION__'), suffix=r'\b'), - Keyword.Pseudo), - (r'macro\b', Keyword.Reserved), - (r'(string|wstring|dstring|size_t|ptrdiff_t)\b', Name.Builtin), - # FloatLiteral - # -- HexFloat - (r'0[xX]([0-9a-fA-F_]*\.[0-9a-fA-F_]+|[0-9a-fA-F_]+)' - r'[pP][+\-]?[0-9_]+[fFL]?[i]?', Number.Float), - # -- DecimalFloat - (r'[0-9_]+(\.[0-9_]+[eE][+\-]?[0-9_]+|' - r'\.[0-9_]*|[eE][+\-]?[0-9_]+)[fFL]?[i]?', Number.Float), - (r'\.(0|[1-9][0-9_]*)([eE][+\-]?[0-9_]+)?[fFL]?[i]?', Number.Float), - # IntegerLiteral - # -- Binary - (r'0[Bb][01_]+', Number.Bin), - # -- Octal - (r'0[0-7_]+', Number.Oct), - # -- 
Hexadecimal - (r'0[xX][0-9a-fA-F_]+', Number.Hex), - # -- Decimal - (r'(0|[1-9][0-9_]*)([LUu]|Lu|LU|uL|UL)?', Number.Integer), - # CharacterLiteral - (r"""'(\\['"?\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}""" - r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|\\&\w+;|.)'""", - String.Char), - # StringLiteral - # -- WysiwygString - (r'r"[^"]*"[cwd]?', String), - # -- AlternateWysiwygString - (r'`[^`]*`[cwd]?', String), - # -- DoubleQuotedString - (r'"(\\\\|\\"|[^"])*"[cwd]?', String), - # -- EscapeSequence - (r"\\(['\"?\\abfnrtv]|x[0-9a-fA-F]{2}|[0-7]{1,3}" - r"|u[0-9a-fA-F]{4}|U[0-9a-fA-F]{8}|&\w+;)", - String), - # -- HexString - (r'x"[0-9a-fA-F_\s]*"[cwd]?', String), - # -- DelimitedString - (r'q"\[', String, 'delimited_bracket'), - (r'q"\(', String, 'delimited_parenthesis'), - (r'q"<', String, 'delimited_angle'), - (r'q"{', String, 'delimited_curly'), - (r'q"([a-zA-Z_]\w*)\n.*?\n\1"', String), - (r'q"(.).*?\1"', String), - # -- TokenString - (r'q{', String, 'token_string'), - # Attributes - (r'@([a-zA-Z_]\w*)?', Name.Decorator), - # Tokens - (r'(~=|\^=|%=|\*=|==|!>=|!<=|!<>=|!<>|!<|!>|!=|>>>=|>>>|>>=|>>|>=' - r'|<>=|<>|<<=|<<|<=|\+\+|\+=|--|-=|\|\||\|=|&&|&=|\.\.\.|\.\.|/=)' - r'|[/.&|\-+<>!()\[\]{}?,;:$=*%^~]', Punctuation), - # Identifier - (r'[a-zA-Z_]\w*', Name), - # Line - (r'#line\s.*\n', Comment.Special), - ], - 'nested_comment': [ - (r'[^+/]+', Comment.Multiline), - (r'/\+', Comment.Multiline, '#push'), - (r'\+/', Comment.Multiline, '#pop'), - (r'[+/]', Comment.Multiline), - ], - 'token_string': [ - (r'{', Punctuation, 'token_string_nest'), - (r'}', String, '#pop'), - include('root'), - ], - 'token_string_nest': [ - (r'{', Punctuation, '#push'), - (r'}', Punctuation, '#pop'), - include('root'), - ], - 'delimited_bracket': [ - (r'[^\[\]]+', String), - (r'\[', String, 'delimited_inside_bracket'), - (r'\]"', String, '#pop'), - ], - 'delimited_inside_bracket': [ - (r'[^\[\]]+', String), - (r'\[', String, '#push'), - (r'\]', String, '#pop'), - ], - 'delimited_parenthesis': [ - (r'[^\(\)]+', String), - (r'\(', String, 'delimited_inside_parenthesis'), - (r'\)"', String, '#pop'), - ], - 'delimited_inside_parenthesis': [ - (r'[^\(\)]+', String), - (r'\(', String, '#push'), - (r'\)', String, '#pop'), - ], - 'delimited_angle': [ - (r'[^<>]+', String), - (r'<', String, 'delimited_inside_angle'), - (r'>"', String, '#pop'), - ], - 'delimited_inside_angle': [ - (r'[^<>]+', String), - (r'<', String, '#push'), - (r'>', String, '#pop'), - ], - 'delimited_curly': [ - (r'[^{}]+', String), - (r'{', String, 'delimited_inside_curly'), - (r'}"', String, '#pop'), - ], - 'delimited_inside_curly': [ - (r'[^{}]+', String), - (r'{', String, '#push'), - (r'}', String, '#pop'), - ], - } - - -class DelphiLexer(Lexer): - """ - For `Delphi `_ (Borland Object Pascal), - Turbo Pascal and Free Pascal source code. - - Additional options accepted: - - `turbopascal` - Highlight Turbo Pascal specific keywords (default: ``True``). - `delphi` - Highlight Borland Delphi specific keywords (default: ``True``). - `freepascal` - Highlight Free Pascal specific keywords (default: ``True``). - `units` - A list of units that should be considered builtin, supported are - ``System``, ``SysUtils``, ``Classes`` and ``Math``. - Default is to consider all of them builtin. 
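A minimal sketch of how the options listed above can be passed when the lexer is constructed (assuming a stock Pygments install of this era, where DelphiLexer is importable from pygments.lexers.compiled; the Pascal one-liner is made up for illustration):

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers.compiled import DelphiLexer

    code = "program Hello; begin WriteLn('Hello, world'); end."

    # disable Free Pascal specific keywords and treat only the System
    # and Math units as builtin, per the options described above
    lexer = DelphiLexer(freepascal=False, units=['System', 'Math'])
    print(highlight(code, lexer, TerminalFormatter()))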
- """ - name = 'Delphi' - aliases = ['delphi', 'pas', 'pascal', 'objectpascal'] - filenames = ['*.pas'] - mimetypes = ['text/x-pascal'] - - TURBO_PASCAL_KEYWORDS = ( - 'absolute', 'and', 'array', 'asm', 'begin', 'break', 'case', - 'const', 'constructor', 'continue', 'destructor', 'div', 'do', - 'downto', 'else', 'end', 'file', 'for', 'function', 'goto', - 'if', 'implementation', 'in', 'inherited', 'inline', 'interface', - 'label', 'mod', 'nil', 'not', 'object', 'of', 'on', 'operator', - 'or', 'packed', 'procedure', 'program', 'record', 'reintroduce', - 'repeat', 'self', 'set', 'shl', 'shr', 'string', 'then', 'to', - 'type', 'unit', 'until', 'uses', 'var', 'while', 'with', 'xor' - ) - - DELPHI_KEYWORDS = ( - 'as', 'class', 'except', 'exports', 'finalization', 'finally', - 'initialization', 'is', 'library', 'on', 'property', 'raise', - 'threadvar', 'try' - ) - - FREE_PASCAL_KEYWORDS = ( - 'dispose', 'exit', 'false', 'new', 'true' - ) - - BLOCK_KEYWORDS = set(( - 'begin', 'class', 'const', 'constructor', 'destructor', 'end', - 'finalization', 'function', 'implementation', 'initialization', - 'label', 'library', 'operator', 'procedure', 'program', 'property', - 'record', 'threadvar', 'type', 'unit', 'uses', 'var' - )) - - FUNCTION_MODIFIERS = set(( - 'alias', 'cdecl', 'export', 'inline', 'interrupt', 'nostackframe', - 'pascal', 'register', 'safecall', 'softfloat', 'stdcall', - 'varargs', 'name', 'dynamic', 'near', 'virtual', 'external', - 'override', 'assembler' - )) - - # XXX: those aren't global. but currently we know no way for defining - # them just for the type context. - DIRECTIVES = set(( - 'absolute', 'abstract', 'assembler', 'cppdecl', 'default', 'far', - 'far16', 'forward', 'index', 'oldfpccall', 'private', 'protected', - 'published', 'public' - )) - - BUILTIN_TYPES = set(( - 'ansichar', 'ansistring', 'bool', 'boolean', 'byte', 'bytebool', - 'cardinal', 'char', 'comp', 'currency', 'double', 'dword', - 'extended', 'int64', 'integer', 'iunknown', 'longbool', 'longint', - 'longword', 'pansichar', 'pansistring', 'pbool', 'pboolean', - 'pbyte', 'pbytearray', 'pcardinal', 'pchar', 'pcomp', 'pcurrency', - 'pdate', 'pdatetime', 'pdouble', 'pdword', 'pextended', 'phandle', - 'pint64', 'pinteger', 'plongint', 'plongword', 'pointer', - 'ppointer', 'pshortint', 'pshortstring', 'psingle', 'psmallint', - 'pstring', 'pvariant', 'pwidechar', 'pwidestring', 'pword', - 'pwordarray', 'pwordbool', 'real', 'real48', 'shortint', - 'shortstring', 'single', 'smallint', 'string', 'tclass', 'tdate', - 'tdatetime', 'textfile', 'thandle', 'tobject', 'ttime', 'variant', - 'widechar', 'widestring', 'word', 'wordbool' - )) - - BUILTIN_UNITS = { - 'System': ( - 'abs', 'acquireexceptionobject', 'addr', 'ansitoutf8', - 'append', 'arctan', 'assert', 'assigned', 'assignfile', - 'beginthread', 'blockread', 'blockwrite', 'break', 'chdir', - 'chr', 'close', 'closefile', 'comptocurrency', 'comptodouble', - 'concat', 'continue', 'copy', 'cos', 'dec', 'delete', - 'dispose', 'doubletocomp', 'endthread', 'enummodules', - 'enumresourcemodules', 'eof', 'eoln', 'erase', 'exceptaddr', - 'exceptobject', 'exclude', 'exit', 'exp', 'filepos', 'filesize', - 'fillchar', 'finalize', 'findclasshinstance', 'findhinstance', - 'findresourcehinstance', 'flush', 'frac', 'freemem', - 'get8087cw', 'getdir', 'getlasterror', 'getmem', - 'getmemorymanager', 'getmodulefilename', 'getvariantmanager', - 'halt', 'hi', 'high', 'inc', 'include', 'initialize', 'insert', - 'int', 'ioresult', 'ismemorymanagerset', 'isvariantmanagerset', - 'length', 'ln', 
'lo', 'low', 'mkdir', 'move', 'new', 'odd', - 'olestrtostring', 'olestrtostrvar', 'ord', 'paramcount', - 'paramstr', 'pi', 'pos', 'pred', 'ptr', 'pucs4chars', 'random', - 'randomize', 'read', 'readln', 'reallocmem', - 'releaseexceptionobject', 'rename', 'reset', 'rewrite', 'rmdir', - 'round', 'runerror', 'seek', 'seekeof', 'seekeoln', - 'set8087cw', 'setlength', 'setlinebreakstyle', - 'setmemorymanager', 'setstring', 'settextbuf', - 'setvariantmanager', 'sin', 'sizeof', 'slice', 'sqr', 'sqrt', - 'str', 'stringofchar', 'stringtoolestr', 'stringtowidechar', - 'succ', 'swap', 'trunc', 'truncate', 'typeinfo', - 'ucs4stringtowidestring', 'unicodetoutf8', 'uniquestring', - 'upcase', 'utf8decode', 'utf8encode', 'utf8toansi', - 'utf8tounicode', 'val', 'vararrayredim', 'varclear', - 'widecharlentostring', 'widecharlentostrvar', - 'widechartostring', 'widechartostrvar', - 'widestringtoucs4string', 'write', 'writeln' - ), - 'SysUtils': ( - 'abort', 'addexitproc', 'addterminateproc', 'adjustlinebreaks', - 'allocmem', 'ansicomparefilename', 'ansicomparestr', - 'ansicomparetext', 'ansidequotedstr', 'ansiextractquotedstr', - 'ansilastchar', 'ansilowercase', 'ansilowercasefilename', - 'ansipos', 'ansiquotedstr', 'ansisamestr', 'ansisametext', - 'ansistrcomp', 'ansistricomp', 'ansistrlastchar', 'ansistrlcomp', - 'ansistrlicomp', 'ansistrlower', 'ansistrpos', 'ansistrrscan', - 'ansistrscan', 'ansistrupper', 'ansiuppercase', - 'ansiuppercasefilename', 'appendstr', 'assignstr', 'beep', - 'booltostr', 'bytetocharindex', 'bytetocharlen', 'bytetype', - 'callterminateprocs', 'changefileext', 'charlength', - 'chartobyteindex', 'chartobytelen', 'comparemem', 'comparestr', - 'comparetext', 'createdir', 'createguid', 'currentyear', - 'currtostr', 'currtostrf', 'date', 'datetimetofiledate', - 'datetimetostr', 'datetimetostring', 'datetimetosystemtime', - 'datetimetotimestamp', 'datetostr', 'dayofweek', 'decodedate', - 'decodedatefully', 'decodetime', 'deletefile', 'directoryexists', - 'diskfree', 'disksize', 'disposestr', 'encodedate', 'encodetime', - 'exceptionerrormessage', 'excludetrailingbackslash', - 'excludetrailingpathdelimiter', 'expandfilename', - 'expandfilenamecase', 'expanduncfilename', 'extractfiledir', - 'extractfiledrive', 'extractfileext', 'extractfilename', - 'extractfilepath', 'extractrelativepath', 'extractshortpathname', - 'fileage', 'fileclose', 'filecreate', 'filedatetodatetime', - 'fileexists', 'filegetattr', 'filegetdate', 'fileisreadonly', - 'fileopen', 'fileread', 'filesearch', 'fileseek', 'filesetattr', - 'filesetdate', 'filesetreadonly', 'filewrite', 'finalizepackage', - 'findclose', 'findcmdlineswitch', 'findfirst', 'findnext', - 'floattocurr', 'floattodatetime', 'floattodecimal', 'floattostr', - 'floattostrf', 'floattotext', 'floattotextfmt', 'fmtloadstr', - 'fmtstr', 'forcedirectories', 'format', 'formatbuf', 'formatcurr', - 'formatdatetime', 'formatfloat', 'freeandnil', 'getcurrentdir', - 'getenvironmentvariable', 'getfileversion', 'getformatsettings', - 'getlocaleformatsettings', 'getmodulename', 'getpackagedescription', - 'getpackageinfo', 'gettime', 'guidtostring', 'incamonth', - 'includetrailingbackslash', 'includetrailingpathdelimiter', - 'incmonth', 'initializepackage', 'interlockeddecrement', - 'interlockedexchange', 'interlockedexchangeadd', - 'interlockedincrement', 'inttohex', 'inttostr', 'isdelimiter', - 'isequalguid', 'isleapyear', 'ispathdelimiter', 'isvalidident', - 'languages', 'lastdelimiter', 'loadpackage', 'loadstr', - 'lowercase', 'msecstotimestamp', 'newstr', 
'nextcharindex', 'now', - 'outofmemoryerror', 'quotedstr', 'raiselastoserror', - 'raiselastwin32error', 'removedir', 'renamefile', 'replacedate', - 'replacetime', 'safeloadlibrary', 'samefilename', 'sametext', - 'setcurrentdir', 'showexception', 'sleep', 'stralloc', 'strbufsize', - 'strbytetype', 'strcat', 'strcharlength', 'strcomp', 'strcopy', - 'strdispose', 'strecopy', 'strend', 'strfmt', 'stricomp', - 'stringreplace', 'stringtoguid', 'strlcat', 'strlcomp', 'strlcopy', - 'strlen', 'strlfmt', 'strlicomp', 'strlower', 'strmove', 'strnew', - 'strnextchar', 'strpas', 'strpcopy', 'strplcopy', 'strpos', - 'strrscan', 'strscan', 'strtobool', 'strtobooldef', 'strtocurr', - 'strtocurrdef', 'strtodate', 'strtodatedef', 'strtodatetime', - 'strtodatetimedef', 'strtofloat', 'strtofloatdef', 'strtoint', - 'strtoint64', 'strtoint64def', 'strtointdef', 'strtotime', - 'strtotimedef', 'strupper', 'supports', 'syserrormessage', - 'systemtimetodatetime', 'texttofloat', 'time', 'timestamptodatetime', - 'timestamptomsecs', 'timetostr', 'trim', 'trimleft', 'trimright', - 'tryencodedate', 'tryencodetime', 'tryfloattocurr', 'tryfloattodatetime', - 'trystrtobool', 'trystrtocurr', 'trystrtodate', 'trystrtodatetime', - 'trystrtofloat', 'trystrtoint', 'trystrtoint64', 'trystrtotime', - 'unloadpackage', 'uppercase', 'widecomparestr', 'widecomparetext', - 'widefmtstr', 'wideformat', 'wideformatbuf', 'widelowercase', - 'widesamestr', 'widesametext', 'wideuppercase', 'win32check', - 'wraptext' - ), - 'Classes': ( - 'activateclassgroup', 'allocatehwnd', 'bintohex', 'checksynchronize', - 'collectionsequal', 'countgenerations', 'deallocatehwnd', 'equalrect', - 'extractstrings', 'findclass', 'findglobalcomponent', 'getclass', - 'groupdescendantswith', 'hextobin', 'identtoint', - 'initinheritedcomponent', 'inttoident', 'invalidpoint', - 'isuniqueglobalcomponentname', 'linestart', 'objectbinarytotext', - 'objectresourcetotext', 'objecttexttobinary', 'objecttexttoresource', - 'pointsequal', 'readcomponentres', 'readcomponentresex', - 'readcomponentresfile', 'rect', 'registerclass', 'registerclassalias', - 'registerclasses', 'registercomponents', 'registerintegerconsts', - 'registernoicon', 'registernonactivex', 'smallpoint', 'startclassgroup', - 'teststreamformat', 'unregisterclass', 'unregisterclasses', - 'unregisterintegerconsts', 'unregistermoduleclasses', - 'writecomponentresfile' - ), - 'Math': ( - 'arccos', 'arccosh', 'arccot', 'arccoth', 'arccsc', 'arccsch', 'arcsec', - 'arcsech', 'arcsin', 'arcsinh', 'arctan2', 'arctanh', 'ceil', - 'comparevalue', 'cosecant', 'cosh', 'cot', 'cotan', 'coth', 'csc', - 'csch', 'cycletodeg', 'cycletograd', 'cycletorad', 'degtocycle', - 'degtograd', 'degtorad', 'divmod', 'doubledecliningbalance', - 'ensurerange', 'floor', 'frexp', 'futurevalue', 'getexceptionmask', - 'getprecisionmode', 'getroundmode', 'gradtocycle', 'gradtodeg', - 'gradtorad', 'hypot', 'inrange', 'interestpayment', 'interestrate', - 'internalrateofreturn', 'intpower', 'isinfinite', 'isnan', 'iszero', - 'ldexp', 'lnxp1', 'log10', 'log2', 'logn', 'max', 'maxintvalue', - 'maxvalue', 'mean', 'meanandstddev', 'min', 'minintvalue', 'minvalue', - 'momentskewkurtosis', 'netpresentvalue', 'norm', 'numberofperiods', - 'payment', 'periodpayment', 'poly', 'popnstddev', 'popnvariance', - 'power', 'presentvalue', 'radtocycle', 'radtodeg', 'radtograd', - 'randg', 'randomrange', 'roundto', 'samevalue', 'sec', 'secant', - 'sech', 'setexceptionmask', 'setprecisionmode', 'setroundmode', - 'sign', 'simpleroundto', 'sincos', 'sinh', 
'slndepreciation', 'stddev', - 'sum', 'sumint', 'sumofsquares', 'sumsandsquares', 'syddepreciation', - 'tan', 'tanh', 'totalvariance', 'variance' - ) - } - - ASM_REGISTERS = set(( - 'ah', 'al', 'ax', 'bh', 'bl', 'bp', 'bx', 'ch', 'cl', 'cr0', - 'cr1', 'cr2', 'cr3', 'cr4', 'cs', 'cx', 'dh', 'di', 'dl', 'dr0', - 'dr1', 'dr2', 'dr3', 'dr4', 'dr5', 'dr6', 'dr7', 'ds', 'dx', - 'eax', 'ebp', 'ebx', 'ecx', 'edi', 'edx', 'es', 'esi', 'esp', - 'fs', 'gs', 'mm0', 'mm1', 'mm2', 'mm3', 'mm4', 'mm5', 'mm6', - 'mm7', 'si', 'sp', 'ss', 'st0', 'st1', 'st2', 'st3', 'st4', 'st5', - 'st6', 'st7', 'xmm0', 'xmm1', 'xmm2', 'xmm3', 'xmm4', 'xmm5', - 'xmm6', 'xmm7' - )) - - ASM_INSTRUCTIONS = set(( - 'aaa', 'aad', 'aam', 'aas', 'adc', 'add', 'and', 'arpl', 'bound', - 'bsf', 'bsr', 'bswap', 'bt', 'btc', 'btr', 'bts', 'call', 'cbw', - 'cdq', 'clc', 'cld', 'cli', 'clts', 'cmc', 'cmova', 'cmovae', - 'cmovb', 'cmovbe', 'cmovc', 'cmovcxz', 'cmove', 'cmovg', - 'cmovge', 'cmovl', 'cmovle', 'cmovna', 'cmovnae', 'cmovnb', - 'cmovnbe', 'cmovnc', 'cmovne', 'cmovng', 'cmovnge', 'cmovnl', - 'cmovnle', 'cmovno', 'cmovnp', 'cmovns', 'cmovnz', 'cmovo', - 'cmovp', 'cmovpe', 'cmovpo', 'cmovs', 'cmovz', 'cmp', 'cmpsb', - 'cmpsd', 'cmpsw', 'cmpxchg', 'cmpxchg486', 'cmpxchg8b', 'cpuid', - 'cwd', 'cwde', 'daa', 'das', 'dec', 'div', 'emms', 'enter', 'hlt', - 'ibts', 'icebp', 'idiv', 'imul', 'in', 'inc', 'insb', 'insd', - 'insw', 'int', 'int01', 'int03', 'int1', 'int3', 'into', 'invd', - 'invlpg', 'iret', 'iretd', 'iretw', 'ja', 'jae', 'jb', 'jbe', - 'jc', 'jcxz', 'jcxz', 'je', 'jecxz', 'jg', 'jge', 'jl', 'jle', - 'jmp', 'jna', 'jnae', 'jnb', 'jnbe', 'jnc', 'jne', 'jng', 'jnge', - 'jnl', 'jnle', 'jno', 'jnp', 'jns', 'jnz', 'jo', 'jp', 'jpe', - 'jpo', 'js', 'jz', 'lahf', 'lar', 'lcall', 'lds', 'lea', 'leave', - 'les', 'lfs', 'lgdt', 'lgs', 'lidt', 'ljmp', 'lldt', 'lmsw', - 'loadall', 'loadall286', 'lock', 'lodsb', 'lodsd', 'lodsw', - 'loop', 'loope', 'loopne', 'loopnz', 'loopz', 'lsl', 'lss', 'ltr', - 'mov', 'movd', 'movq', 'movsb', 'movsd', 'movsw', 'movsx', - 'movzx', 'mul', 'neg', 'nop', 'not', 'or', 'out', 'outsb', 'outsd', - 'outsw', 'pop', 'popa', 'popad', 'popaw', 'popf', 'popfd', 'popfw', - 'push', 'pusha', 'pushad', 'pushaw', 'pushf', 'pushfd', 'pushfw', - 'rcl', 'rcr', 'rdmsr', 'rdpmc', 'rdshr', 'rdtsc', 'rep', 'repe', - 'repne', 'repnz', 'repz', 'ret', 'retf', 'retn', 'rol', 'ror', - 'rsdc', 'rsldt', 'rsm', 'sahf', 'sal', 'salc', 'sar', 'sbb', - 'scasb', 'scasd', 'scasw', 'seta', 'setae', 'setb', 'setbe', - 'setc', 'setcxz', 'sete', 'setg', 'setge', 'setl', 'setle', - 'setna', 'setnae', 'setnb', 'setnbe', 'setnc', 'setne', 'setng', - 'setnge', 'setnl', 'setnle', 'setno', 'setnp', 'setns', 'setnz', - 'seto', 'setp', 'setpe', 'setpo', 'sets', 'setz', 'sgdt', 'shl', - 'shld', 'shr', 'shrd', 'sidt', 'sldt', 'smi', 'smint', 'smintold', - 'smsw', 'stc', 'std', 'sti', 'stosb', 'stosd', 'stosw', 'str', - 'sub', 'svdc', 'svldt', 'svts', 'syscall', 'sysenter', 'sysexit', - 'sysret', 'test', 'ud1', 'ud2', 'umov', 'verr', 'verw', 'wait', - 'wbinvd', 'wrmsr', 'wrshr', 'xadd', 'xbts', 'xchg', 'xlat', - 'xlatb', 'xor' - )) - - def __init__(self, **options): - Lexer.__init__(self, **options) - self.keywords = set() - if get_bool_opt(options, 'turbopascal', True): - self.keywords.update(self.TURBO_PASCAL_KEYWORDS) - if get_bool_opt(options, 'delphi', True): - self.keywords.update(self.DELPHI_KEYWORDS) - if get_bool_opt(options, 'freepascal', True): - self.keywords.update(self.FREE_PASCAL_KEYWORDS) - self.builtins = set() - for unit in 
get_list_opt(options, 'units', list(self.BUILTIN_UNITS)): - self.builtins.update(self.BUILTIN_UNITS[unit]) - - def get_tokens_unprocessed(self, text): - scanner = Scanner(text, re.DOTALL | re.MULTILINE | re.IGNORECASE) - stack = ['initial'] - in_function_block = False - in_property_block = False - was_dot = False - next_token_is_function = False - next_token_is_property = False - collect_labels = False - block_labels = set() - brace_balance = [0, 0] - - while not scanner.eos: - token = Error - - if stack[-1] == 'initial': - if scanner.scan(r'\s+'): - token = Text - elif scanner.scan(r'\{.*?\}|\(\*.*?\*\)'): - if scanner.match.startswith('$'): - token = Comment.Preproc - else: - token = Comment.Multiline - elif scanner.scan(r'//.*?$'): - token = Comment.Single - elif scanner.scan(r'[-+*\/=<>:;,.@\^]'): - token = Operator - # stop label highlighting on next ";" - if collect_labels and scanner.match == ';': - collect_labels = False - elif scanner.scan(r'[\(\)\[\]]+'): - token = Punctuation - # abort function naming ``foo = Function(...)`` - next_token_is_function = False - # if we are in a function block we count the open - # braces because ootherwise it's impossible to - # determine the end of the modifier context - if in_function_block or in_property_block: - if scanner.match == '(': - brace_balance[0] += 1 - elif scanner.match == ')': - brace_balance[0] -= 1 - elif scanner.match == '[': - brace_balance[1] += 1 - elif scanner.match == ']': - brace_balance[1] -= 1 - elif scanner.scan(r'[A-Za-z_][A-Za-z_0-9]*'): - lowercase_name = scanner.match.lower() - if lowercase_name == 'result': - token = Name.Builtin.Pseudo - elif lowercase_name in self.keywords: - token = Keyword - # if we are in a special block and a - # block ending keyword occours (and the parenthesis - # is balanced) we end the current block context - if (in_function_block or in_property_block) and \ - lowercase_name in self.BLOCK_KEYWORDS and \ - brace_balance[0] <= 0 and \ - brace_balance[1] <= 0: - in_function_block = False - in_property_block = False - brace_balance = [0, 0] - block_labels = set() - if lowercase_name in ('label', 'goto'): - collect_labels = True - elif lowercase_name == 'asm': - stack.append('asm') - elif lowercase_name == 'property': - in_property_block = True - next_token_is_property = True - elif lowercase_name in ('procedure', 'operator', - 'function', 'constructor', - 'destructor'): - in_function_block = True - next_token_is_function = True - # we are in a function block and the current name - # is in the set of registered modifiers. highlight - # it as pseudo keyword - elif in_function_block and \ - lowercase_name in self.FUNCTION_MODIFIERS: - token = Keyword.Pseudo - # if we are in a property highlight some more - # modifiers - elif in_property_block and \ - lowercase_name in ('read', 'write'): - token = Keyword.Pseudo - next_token_is_function = True - # if the last iteration set next_token_is_function - # to true we now want this name highlighted as - # function. so do that and reset the state - elif next_token_is_function: - # Look if the next token is a dot. 
If yes it's - # not a function, but a class name and the - # part after the dot a function name - if scanner.test(r'\s*\.\s*'): - token = Name.Class - # it's not a dot, our job is done - else: - token = Name.Function - next_token_is_function = False - # same for properties - elif next_token_is_property: - token = Name.Property - next_token_is_property = False - # Highlight this token as label and add it - # to the list of known labels - elif collect_labels: - token = Name.Label - block_labels.add(scanner.match.lower()) - # name is in list of known labels - elif lowercase_name in block_labels: - token = Name.Label - elif lowercase_name in self.BUILTIN_TYPES: - token = Keyword.Type - elif lowercase_name in self.DIRECTIVES: - token = Keyword.Pseudo - # builtins are just builtins if the token - # before isn't a dot - elif not was_dot and lowercase_name in self.builtins: - token = Name.Builtin - else: - token = Name - elif scanner.scan(r"'"): - token = String - stack.append('string') - elif scanner.scan(r'\#(\d+|\$[0-9A-Fa-f]+)'): - token = String.Char - elif scanner.scan(r'\$[0-9A-Fa-f]+'): - token = Number.Hex - elif scanner.scan(r'\d+(?![eE]|\.[^.])'): - token = Number.Integer - elif scanner.scan(r'\d+(\.\d+([eE][+-]?\d+)?|[eE][+-]?\d+)'): - token = Number.Float - else: - # if the stack depth is deeper than once, pop - if len(stack) > 1: - stack.pop() - scanner.get_char() - - elif stack[-1] == 'string': - if scanner.scan(r"''"): - token = String.Escape - elif scanner.scan(r"'"): - token = String - stack.pop() - elif scanner.scan(r"[^']*"): - token = String - else: - scanner.get_char() - stack.pop() - - elif stack[-1] == 'asm': - if scanner.scan(r'\s+'): - token = Text - elif scanner.scan(r'end'): - token = Keyword - stack.pop() - elif scanner.scan(r'\{.*?\}|\(\*.*?\*\)'): - if scanner.match.startswith('$'): - token = Comment.Preproc - else: - token = Comment.Multiline - elif scanner.scan(r'//.*?$'): - token = Comment.Single - elif scanner.scan(r"'"): - token = String - stack.append('string') - elif scanner.scan(r'@@[A-Za-z_][A-Za-z_0-9]*'): - token = Name.Label - elif scanner.scan(r'[A-Za-z_][A-Za-z_0-9]*'): - lowercase_name = scanner.match.lower() - if lowercase_name in self.ASM_INSTRUCTIONS: - token = Keyword - elif lowercase_name in self.ASM_REGISTERS: - token = Name.Builtin - else: - token = Name - elif scanner.scan(r'[-+*\/=<>:;,.@\^]+'): - token = Operator - elif scanner.scan(r'[\(\)\[\]]+'): - token = Punctuation - elif scanner.scan(r'\$[0-9A-Fa-f]+'): - token = Number.Hex - elif scanner.scan(r'\d+(?![eE]|\.[^.])'): - token = Number.Integer - elif scanner.scan(r'\d+(\.\d+([eE][+-]?\d+)?|[eE][+-]?\d+)'): - token = Number.Float - else: - scanner.get_char() - stack.pop() - - # save the dot!!!11 - if scanner.match.strip(): - was_dot = scanner.match == '.' - yield scanner.start_pos, token, scanner.match or '' - - -class DylanLexer(RegexLexer): - """ - For the `Dylan `_ language. - - .. 
versionadded:: 0.7 - """ - - name = 'Dylan' - aliases = ['dylan'] - filenames = ['*.dylan', '*.dyl', '*.intr'] - mimetypes = ['text/x-dylan'] - - flags = re.IGNORECASE - - builtins = set(( - 'subclass', 'abstract', 'block', 'concrete', 'constant', 'class', - 'compiler-open', 'compiler-sideways', 'domain', 'dynamic', - 'each-subclass', 'exception', 'exclude', 'function', 'generic', - 'handler', 'inherited', 'inline', 'inline-only', 'instance', - 'interface', 'import', 'keyword', 'library', 'macro', 'method', - 'module', 'open', 'primary', 'required', 'sealed', 'sideways', - 'singleton', 'slot', 'thread', 'variable', 'virtual')) - - keywords = set(( - 'above', 'afterwards', 'begin', 'below', 'by', 'case', 'cleanup', - 'create', 'define', 'else', 'elseif', 'end', 'export', 'finally', - 'for', 'from', 'if', 'in', 'let', 'local', 'otherwise', 'rename', - 'select', 'signal', 'then', 'to', 'unless', 'until', 'use', 'when', - 'while')) - - operators = set(( - '~', '+', '-', '*', '|', '^', '=', '==', '~=', '~==', '<', '<=', - '>', '>=', '&', '|')) - - functions = set(( - 'abort', 'abs', 'add', 'add!', 'add-method', 'add-new', 'add-new!', - 'all-superclasses', 'always', 'any?', 'applicable-method?', 'apply', - 'aref', 'aref-setter', 'as', 'as-lowercase', 'as-lowercase!', - 'as-uppercase', 'as-uppercase!', 'ash', 'backward-iteration-protocol', - 'break', 'ceiling', 'ceiling/', 'cerror', 'check-type', 'choose', - 'choose-by', 'complement', 'compose', 'concatenate', 'concatenate-as', - 'condition-format-arguments', 'condition-format-string', 'conjoin', - 'copy-sequence', 'curry', 'default-handler', 'dimension', 'dimensions', - 'direct-subclasses', 'direct-superclasses', 'disjoin', 'do', - 'do-handlers', 'element', 'element-setter', 'empty?', 'error', 'even?', - 'every?', 'false-or', 'fill!', 'find-key', 'find-method', 'first', - 'first-setter', 'floor', 'floor/', 'forward-iteration-protocol', - 'function-arguments', 'function-return-values', - 'function-specializers', 'gcd', 'generic-function-mandatory-keywords', - 'generic-function-methods', 'head', 'head-setter', 'identity', - 'initialize', 'instance?', 'integral?', 'intersection', - 'key-sequence', 'key-test', 'last', 'last-setter', 'lcm', 'limited', - 'list', 'logand', 'logbit?', 'logior', 'lognot', 'logxor', 'make', - 'map', 'map-as', 'map-into', 'max', 'member?', 'merge-hash-codes', - 'min', 'modulo', 'negative', 'negative?', 'next-method', - 'object-class', 'object-hash', 'odd?', 'one-of', 'pair', 'pop', - 'pop-last', 'positive?', 'push', 'push-last', 'range', 'rank', - 'rcurry', 'reduce', 'reduce1', 'remainder', 'remove', 'remove!', - 'remove-duplicates', 'remove-duplicates!', 'remove-key!', - 'remove-method', 'replace-elements!', 'replace-subsequence!', - 'restart-query', 'return-allowed?', 'return-description', - 'return-query', 'reverse', 'reverse!', 'round', 'round/', - 'row-major-index', 'second', 'second-setter', 'shallow-copy', - 'signal', 'singleton', 'size', 'size-setter', 'slot-initialized?', - 'sort', 'sort!', 'sorted-applicable-methods', 'subsequence-position', - 'subtype?', 'table-protocol', 'tail', 'tail-setter', 'third', - 'third-setter', 'truncate', 'truncate/', 'type-error-expected-type', - 'type-error-value', 'type-for-copy', 'type-union', 'union', 'values', - 'vector', 'zero?')) - - valid_name = '\\\\?[a-z0-9' + re.escape('!&*<>|^$%@_-+~?/=') + ']+' - - def get_tokens_unprocessed(self, text): - for index, token, value in RegexLexer.get_tokens_unprocessed(self, text): - if token is Name: - lowercase_value = value.lower() - if 
lowercase_value in self.builtins: - yield index, Name.Builtin, value - continue - if lowercase_value in self.keywords: - yield index, Keyword, value - continue - if lowercase_value in self.functions: - yield index, Name.Builtin, value - continue - if lowercase_value in self.operators: - yield index, Operator, value - continue - yield index, token, value - - tokens = { - 'root': [ - # Whitespace - (r'\s+', Text), - - # single line comment - (r'//.*?\n', Comment.Single), - - # lid header - (r'([a-z0-9-]+)(:)([ \t]*)(.*(?:\n[ \t].+)*)', - bygroups(Name.Attribute, Operator, Text, String)), - - ('', Text, 'code') # no header match, switch to code - ], - 'code': [ - # Whitespace - (r'\s+', Text), - - # single line comment - (r'//.*?\n', Comment.Single), - - # multi-line comment - (r'/\*', Comment.Multiline, 'comment'), - - # strings and characters - (r'"', String, 'string'), - (r"'(\\.|\\[0-7]{1,3}|\\x[a-f0-9]{1,2}|[^\\\'\n])'", String.Char), - - # binary integer - (r'#[bB][01]+', Number.Bin), - - # octal integer - (r'#[oO][0-7]+', Number.Oct), - - # floating point - (r'[-+]?(\d*\.\d+(e[-+]?\d+)?|\d+(\.\d*)?e[-+]?\d+)', Number.Float), - - # decimal integer - (r'[-+]?\d+', Number.Integer), - - # hex integer - (r'#[xX][0-9a-f]+', Number.Hex), - - # Macro parameters - (r'(\?' + valid_name + ')(:)' - r'(token|name|variable|expression|body|case-body|\*)', - bygroups(Name.Tag, Operator, Name.Builtin)), - (r'(\?)(:)(token|name|variable|expression|body|case-body|\*)', - bygroups(Name.Tag, Operator, Name.Builtin)), - (r'\?' + valid_name, Name.Tag), - - # Punctuation - (r'(=>|::|#\(|#\[|##|\?|\?\?|\?=|[(){}\[\],\.;])', Punctuation), - - # Most operators are picked up as names and then re-flagged. - # This one isn't valid in a name though, so we pick it up now. - (r':=', Operator), - - # Pick up #t / #f before we match other stuff with #. - (r'#[tf]', Literal), - - # #"foo" style keywords - (r'#"', String.Symbol, 'keyword'), - - # #rest, #key, #all-keys, etc. - (r'#[a-z0-9-]+', Keyword), - - # required-init-keyword: style keywords. - (valid_name + ':', Keyword), - - # class names - (r'<' + valid_name + '>', Name.Class), - - # define variable forms. - (r'\*' + valid_name + '\*', Name.Variable.Global), - - # define constant forms. - (r'\$' + valid_name, Name.Constant), - - # everything else. We re-flag some of these in the method above. - (valid_name, Name), - ], - 'comment': [ - (r'[^*/]', Comment.Multiline), - (r'/\*', Comment.Multiline, '#push'), - (r'\*/', Comment.Multiline, '#pop'), - (r'[*/]', Comment.Multiline) - ], - 'keyword': [ - (r'"', String.Symbol, '#pop'), - (r'[^\\"]+', String.Symbol), # all other characters - ], - 'string': [ - (r'"', String, '#pop'), - (r'\\([\\abfnrtv"\']|x[a-f0-9]{2,4}|[0-7]{1,3})', String.Escape), - (r'[^\\"\n]+', String), # all other characters - (r'\\\n', String), # line continuation - (r'\\', String), # stray backslash - ] - } - - -class DylanLidLexer(RegexLexer): - """ - For Dylan LID (Library Interchange Definition) files. - - .. versionadded:: 1.6 - """ - - name = 'DylanLID' - aliases = ['dylan-lid', 'lid'] - filenames = ['*.lid', '*.hdp'] - mimetypes = ['text/x-dylan-lid'] - - flags = re.IGNORECASE - - tokens = { - 'root': [ - # Whitespace - (r'\s+', Text), - - # single line comment - (r'//.*?\n', Comment.Single), - - # lid header - (r'(.*?)(:)([ \t]*)(.*(?:\n[ \t].+)*)', - bygroups(Name.Attribute, Operator, Text, String)), - ] - } - - -class DylanConsoleLexer(Lexer): - """ - For Dylan interactive console output like: - - .. sourcecode:: dylan-console - - ? 
let a = 1; - => 1 - ? a - => 1 - - This is based on a copy of the RubyConsoleLexer. - - .. versionadded:: 1.6 - """ - name = 'Dylan session' - aliases = ['dylan-console', 'dylan-repl'] - filenames = ['*.dylan-console'] - mimetypes = ['text/x-dylan-console'] - - _line_re = re.compile('.*?\n') - _prompt_re = re.compile('\?| ') - - def get_tokens_unprocessed(self, text): - dylexer = DylanLexer(**self.options) - - curcode = '' - insertions = [] - for match in self._line_re.finditer(text): - line = match.group() - m = self._prompt_re.match(line) - if m is not None: - end = m.end() - insertions.append((len(curcode), - [(0, Generic.Prompt, line[:end])])) - curcode += line[end:] - else: - if curcode: - for item in do_insertions(insertions, - dylexer.get_tokens_unprocessed(curcode)): - yield item - curcode = '' - insertions = [] - yield match.start(), Generic.Output, line - if curcode: - for item in do_insertions(insertions, - dylexer.get_tokens_unprocessed(curcode)): - yield item - - -def objective(baselexer): - """ - Generate a subclass of baselexer that accepts the Objective-C syntax - extensions. - """ - - # Have to be careful not to accidentally match JavaDoc/Doxygen syntax here, - # since that's quite common in ordinary C/C++ files. It's OK to match - # JavaDoc/Doxygen keywords that only apply to Objective-C, mind. - # - # The upshot of this is that we CANNOT match @class or @interface - _oc_keywords = re.compile(r'@(?:end|implementation|protocol)') - - # Matches [ ? identifier ( identifier ? ] | identifier? : ) - # (note the identifier is *optional* when there is a ':'!) - _oc_message = re.compile(r'\[\s*[a-zA-Z_]\w*\s+' - r'(?:[a-zA-Z_]\w*\s*\]|' - r'(?:[a-zA-Z_]\w*)?:)') - - class GeneratedObjectiveCVariant(baselexer): - """ - Implements Objective-C syntax on top of an existing C family lexer. - """ - - tokens = { - 'statements': [ - (r'@"', String, 'string'), - (r'@(YES|NO)', Number), - (r"@'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char), - (r'@(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float), - (r'@(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float), - (r'@0x[0-9a-fA-F]+[Ll]?', Number.Hex), - (r'@0[0-7]+[Ll]?', Number.Oct), - (r'@\d+[Ll]?', Number.Integer), - (r'@\(', Literal, 'literal_number'), - (r'@\[', Literal, 'literal_array'), - (r'@\{', Literal, 'literal_dictionary'), - (words(( - '@selector', '@private', '@protected', '@public', '@encode', - '@synchronized', '@try', '@throw', '@catch', '@finally', - '@end', '@property', '@synthesize', '__bridge', '__bridge_transfer', - '__autoreleasing', '__block', '__weak', '__strong', 'weak', 'strong', - 'copy', 'retain', 'assign', 'unsafe_unretained', 'atomic', 'nonatomic', - 'readonly', 'readwrite', 'setter', 'getter', 'typeof', 'in', - 'out', 'inout', 'release', 'class', '@dynamic', '@optional', - '@required', '@autoreleasepool'), suffix=r'\b'), - Keyword), - (words(('id', 'instancetype', 'Class', 'IMP', 'SEL', 'BOOL', - 'IBOutlet', 'IBAction', 'unichar'), suffix=r'\b'), - Keyword.Type), - (r'@(true|false|YES|NO)\n', Name.Builtin), - (r'(YES|NO|nil|self|super)\b', Name.Builtin), - # Carbon types - (r'(Boolean|UInt8|SInt8|UInt16|SInt16|UInt32|SInt32)\b', Keyword.Type), - # Carbon built-ins - (r'(TRUE|FALSE)\b', Name.Builtin), - (r'(@interface|@implementation)(\s+)', bygroups(Keyword, Text), - ('#pop', 'oc_classname')), - (r'(@class|@protocol)(\s+)', bygroups(Keyword, Text), - ('#pop', 'oc_forward_classname')), - # @ can also prefix other expressions like @{...} or @(...) 
- (r'@', Punctuation), - inherit, - ], - 'oc_classname': [ - # interface definition that inherits - ('([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?(\s*)({)', - bygroups(Name.Class, Text, Name.Class, Text, Punctuation), - ('#pop', 'oc_ivars')), - ('([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?', - bygroups(Name.Class, Text, Name.Class), '#pop'), - # interface definition for a category - ('([a-zA-Z$_][\w$]*)(\s*)(\([a-zA-Z$_][\w$]*\))(\s*)({)', - bygroups(Name.Class, Text, Name.Label, Text, Punctuation), - ('#pop', 'oc_ivars')), - ('([a-zA-Z$_][\w$]*)(\s*)(\([a-zA-Z$_][\w$]*\))', - bygroups(Name.Class, Text, Name.Label), '#pop'), - # simple interface / implementation - ('([a-zA-Z$_][\w$]*)(\s*)({)', - bygroups(Name.Class, Text, Punctuation), ('#pop', 'oc_ivars')), - ('([a-zA-Z$_][\w$]*)', Name.Class, '#pop') - ], - 'oc_forward_classname': [ - ('([a-zA-Z$_][\w$]*)(\s*,\s*)', - bygroups(Name.Class, Text), 'oc_forward_classname'), - ('([a-zA-Z$_][\w$]*)(\s*;?)', - bygroups(Name.Class, Text), '#pop') - ], - 'oc_ivars': [ - include('whitespace'), - include('statements'), - (';', Punctuation), - ('{', Punctuation, '#push'), - ('}', Punctuation, '#pop'), - ], - 'root': [ - # methods - (r'^([-+])(\s*)' # method marker - r'(\(.*?\))?(\s*)' # return type - r'([a-zA-Z$_][\w$]*:?)', # begin of method name - bygroups(Punctuation, Text, using(this), - Text, Name.Function), - 'method'), - inherit, - ], - 'method': [ - include('whitespace'), - # TODO unsure if ellipses are allowed elsewhere, see - # discussion in Issue 789 - (r',', Punctuation), - (r'\.\.\.', Punctuation), - (r'(\(.*?\))(\s*)([a-zA-Z$_][\w$]*)', - bygroups(using(this), Text, Name.Variable)), - (r'[a-zA-Z$_][\w$]*:', Name.Function), - (';', Punctuation, '#pop'), - ('{', Punctuation, 'function'), - ('', Text, '#pop'), - ], - 'literal_number': [ - (r'\(', Punctuation, 'literal_number_inner'), - (r'\)', Literal, '#pop'), - include('statement'), - ], - 'literal_number_inner': [ - (r'\(', Punctuation, '#push'), - (r'\)', Punctuation, '#pop'), - include('statement'), - ], - 'literal_array': [ - (r'\[', Punctuation, 'literal_array_inner'), - (r'\]', Literal, '#pop'), - include('statement'), - ], - 'literal_array_inner': [ - (r'\[', Punctuation, '#push'), - (r'\]', Punctuation, '#pop'), - include('statement'), - ], - 'literal_dictionary': [ - (r'\}', Literal, '#pop'), - include('statement'), - ], - } - - def analyse_text(text): - if _oc_keywords.search(text): - return 1.0 - elif '@"' in text: # strings - return 0.8 - elif re.search('@[0-9]+', text): - return 0.7 - elif _oc_message.search(text): - return 0.8 - return 0 - - def get_tokens_unprocessed(self, text): - from pygments.lexers._cocoabuiltins import COCOA_INTERFACES, \ - COCOA_PROTOCOLS, COCOA_PRIMITIVES - - for index, token, value in \ - baselexer.get_tokens_unprocessed(self, text): - if token is Name or token is Name.Class: - if value in COCOA_INTERFACES or value in COCOA_PROTOCOLS \ - or value in COCOA_PRIMITIVES: - token = Name.Builtin.Pseudo - - yield index, token, value - - return GeneratedObjectiveCVariant - - -class ObjectiveCLexer(objective(CLexer)): - """ - For Objective-C source code with preprocessor directives. - """ - - name = 'Objective-C' - aliases = ['objective-c', 'objectivec', 'obj-c', 'objc'] - filenames = ['*.m', '*.h'] - mimetypes = ['text/x-objective-c'] - priority = 0.05 # Lower than C - - -class ObjectiveCppLexer(objective(CppLexer)): - """ - For Objective-C++ source code with preprocessor directives. 
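A rough sketch of how the analyse_text() heuristics defined in the objective() factory above feed into lexer guessing (the Objective-C snippet is made up; guess_lexer is the standard Pygments helper):

    from pygments.lexers import guess_lexer

    snippet = '@implementation Foo\n- (void)bar {}\n@end\n'
    # guess_lexer() runs analyse_text() on every registered lexer and
    # returns an instance of the best-scoring one; @implementation and
    # @end match _oc_keywords, so the Objective-C lexers score 1.0 here
    print(guess_lexer(snippet))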
- """ - - name = 'Objective-C++' - aliases = ['objective-c++', 'objectivec++', 'obj-c++', 'objc++'] - filenames = ['*.mm', '*.hh'] - mimetypes = ['text/x-objective-c++'] - priority = 0.05 # Lower than C++ - - -class FortranLexer(RegexLexer): - """ - Lexer for FORTRAN 90 code. - - .. versionadded:: 0.10 - """ - name = 'Fortran' - aliases = ['fortran'] - filenames = ['*.f', '*.f90', '*.F', '*.F90'] - mimetypes = ['text/x-fortran'] - flags = re.IGNORECASE - - # Data Types: INTEGER, REAL, COMPLEX, LOGICAL, CHARACTER and DOUBLE PRECISION - # Operators: **, *, +, -, /, <, >, <=, >=, ==, /= - # Logical (?): NOT, AND, OR, EQV, NEQV - - # Builtins: - # http://gcc.gnu.org/onlinedocs/gcc-3.4.6/g77/Table-of-Intrinsic-Functions.html - - tokens = { - 'root': [ - (r'!.*\n', Comment), - include('strings'), - include('core'), - (r'[a-z]\w*', Name.Variable), - include('nums'), - (r'[\s]+', Text), - ], - 'core': [ - # Statements - (words(( - 'ABSTRACT', 'ACCEPT', 'ALL', 'ALLSTOP', 'ALLOCATABLE', 'ALLOCATE', - 'ARRAY', 'ASSIGN', 'ASSOCIATE', 'ASYNCHRONOUS', 'BACKSPACE', 'BIND', - 'BLOCK', 'BLOCKDATA', 'BYTE', 'CALL', 'CASE', 'CLASS', 'CLOSE', - 'CODIMENSION', 'COMMON', 'CONCURRRENT', 'CONTIGUOUS', 'CONTAINS', - 'CONTINUE', 'CRITICAL', 'CYCLE', 'DATA', 'DEALLOCATE', 'DECODE', - 'DEFERRED', 'DIMENSION', 'DO', 'ELEMENTAL', 'ELSE', 'ENCODE', 'END', - 'ENTRY', 'ENUM', 'ENUMERATOR', 'EQUIVALENCE', 'EXIT', 'EXTENDS', - 'EXTERNAL', 'EXTRINSIC', 'FILE', 'FINAL', 'FORALL', 'FORMAT', - 'FUNCTION', 'GENERIC', 'GOTO', 'IF', 'IMAGES', 'IMPLICIT', - 'IMPORT', 'IMPURE', 'INCLUDE', 'INQUIRE', 'INTENT', 'INTERFACE', - 'INTRINSIC', 'IS', 'LOCK', 'MEMORY', 'MODULE', 'NAMELIST', 'NULLIFY', - 'NONE', 'NON_INTRINSIC', 'NON_OVERRIDABLE', 'NOPASS', 'OPEN', 'OPTIONAL', - 'OPTIONS', 'PARAMETER', 'PASS', 'PAUSE', 'POINTER', 'PRINT', 'PRIVATE', - 'PROGRAM', 'PROCEDURE', 'PROTECTED', 'PUBLIC', 'PURE', 'READ', - 'RECURSIVE', 'RESULT', 'RETURN', 'REWIND', 'SAVE', 'SELECT', 'SEQUENCE', - 'STOP', 'SUBMODULE', 'SUBROUTINE', 'SYNC', 'SYNCALL', 'SYNCIMAGES', - 'SYNCMEMORY', 'TARGET', 'THEN', 'TYPE', 'UNLOCK', 'USE', 'VALUE', - 'VOLATILE', 'WHERE', 'WRITE', 'WHILE'), prefix=r'\b', suffix=r'\s*\b'), - Keyword), - - # Data Types - (words(( - 'CHARACTER', 'COMPLEX', 'DOUBLE PRECISION', 'DOUBLE COMPLEX', 'INTEGER', - 'LOGICAL', 'REAL', 'C_INT', 'C_SHORT', 'C_LONG', 'C_LONG_LONG', 'C_SIGNED_CHAR', - 'C_SIZE_T', 'C_INT8_T', 'C_INT16_T', 'C_INT32_T', 'C_INT64_T', 'C_INT_LEAST8_T', - 'C_INT_LEAST16_T', 'C_INT_LEAST32_T', 'C_INT_LEAST64_T', 'C_INT_FAST8_T', - 'C_INT_FAST16_T', 'C_INT_FAST32_T', 'C_INT_FAST64_T', 'C_INTMAX_T', - 'C_INTPTR_T', 'C_FLOAT', 'C_DOUBLE', 'C_LONG_DOUBLE', 'C_FLOAT_COMPLEX', - 'C_DOUBLE_COMPLEX', 'C_LONG_DOUBLE_COMPLEX', 'C_BOOL', 'C_CHAR', 'C_PTR', - 'C_FUNPTR'), prefix=r'\b', suffix=r'\s*\b'), - Keyword.Type), - - # Operators - (r'(\*\*|\*|\+|-|\/|<|>|<=|>=|==|\/=|=)', Operator), - - (r'(::)', Keyword.Declaration), - - (r'[()\[\],:&%;]', Punctuation), - # Intrinsics - (words(( - 'Abort', 'Abs', 'Access', 'AChar', 'ACos', 'ACosH', 'AdjustL', - 'AdjustR', 'AImag', 'AInt', 'Alarm', 'All', 'Allocated', 'ALog', - 'AMax', 'AMin', 'AMod', 'And', 'ANInt', 'Any', 'ASin', 'ASinH', - 'Associated', 'ATan', 'ATanH', 'Atomic_Define', 'Atomic_Ref', - 'BesJ', 'BesJN', 'Bessel_J0', 'Bessel_J1', 'Bessel_JN', 'Bessel_Y0', - 'Bessel_Y1', 'Bessel_YN', 'BesY', 'BesYN', 'BGE', 'BGT', 'BLE', - 'BLT', 'Bit_Size', 'BTest', 'CAbs', 'CCos', 'Ceiling', 'CExp', - 'Char', 'ChDir', 'ChMod', 'CLog', 'Cmplx', 'Command_Argument_Count', - 'Complex', 'Conjg', 
'Cos', 'CosH', 'Count', 'CPU_Time', 'CShift', - 'CSin', 'CSqRt', 'CTime', 'C_Funloc', 'C_Loc', 'C_Associated', - 'C_Null_Ptr', 'C_Null_Funptr', 'C_F_Pointer', 'C_F_ProcPointer', - 'C_Null_Char', 'C_Alert', 'C_Backspace', 'C_Form_Feed', 'C_FunLoc', - 'C_Loc', 'C_Sizeof', 'C_New_Line', 'C_Carriage_Return', - 'C_Horizontal_Tab', 'C_Vertical_Tab', 'DAbs', 'DACos', 'DASin', - 'DATan', 'Date_and_Time', 'DbesJ', 'DbesJ', 'DbesJN', 'DbesY', - 'DbesY', 'DbesYN', 'Dble', 'DCos', 'DCosH', 'DDiM', 'DErF', - 'DErFC', 'DExp', 'Digits', 'DiM', 'DInt', 'DLog', 'DLog', 'DMax', - 'DMin', 'DMod', 'DNInt', 'Dot_Product', 'DProd', 'DSign', 'DSinH', - 'DShiftL', 'DShiftR', 'DSin', 'DSqRt', 'DTanH', 'DTan', 'DTime', - 'EOShift', 'Epsilon', 'ErF', 'ErFC', 'ErFC_Scaled', 'ETime', - 'Execute_Command_Line', 'Exit', 'Exp', 'Exponent', 'Extends_Type_Of', - 'FDate', 'FGet', 'FGetC', 'FindLoc', 'Float', 'Floor', 'Flush', - 'FNum', 'FPutC', 'FPut', 'Fraction', 'FSeek', 'FStat', 'FTell', - 'Gamma', 'GError', 'GetArg', 'Get_Command', 'Get_Command_Argument', - 'Get_Environment_Variable', 'GetCWD', 'GetEnv', 'GetGId', 'GetLog', - 'GetPId', 'GetUId', 'GMTime', 'HostNm', 'Huge', 'Hypot', 'IAbs', - 'IAChar', 'IAll', 'IAnd', 'IAny', 'IArgC', 'IBClr', 'IBits', - 'IBSet', 'IChar', 'IDate', 'IDiM', 'IDInt', 'IDNInt', 'IEOr', - 'IErrNo', 'IFix', 'Imag', 'ImagPart', 'Image_Index', 'Index', - 'Int', 'IOr', 'IParity', 'IRand', 'IsaTty', 'IShft', 'IShftC', - 'ISign', 'Iso_C_Binding', 'Is_Contiguous', 'Is_Iostat_End', - 'Is_Iostat_Eor', 'ITime', 'Kill', 'Kind', 'LBound', 'LCoBound', - 'Len', 'Len_Trim', 'LGe', 'LGt', 'Link', 'LLe', 'LLt', 'LnBlnk', - 'Loc', 'Log', 'Log_Gamma', 'Logical', 'Long', 'LShift', 'LStat', - 'LTime', 'MaskL', 'MaskR', 'MatMul', 'Max', 'MaxExponent', - 'MaxLoc', 'MaxVal', 'MClock', 'Merge', 'Merge_Bits', 'Move_Alloc', - 'Min', 'MinExponent', 'MinLoc', 'MinVal', 'Mod', 'Modulo', 'MvBits', - 'Nearest', 'New_Line', 'NInt', 'Norm2', 'Not', 'Null', 'Num_Images', - 'Or', 'Pack', 'Parity', 'PError', 'Precision', 'Present', 'Product', - 'Radix', 'Rand', 'Random_Number', 'Random_Seed', 'Range', 'Real', - 'RealPart', 'Rename', 'Repeat', 'Reshape', 'RRSpacing', 'RShift', - 'Same_Type_As', 'Scale', 'Scan', 'Second', 'Selected_Char_Kind', - 'Selected_Int_Kind', 'Selected_Real_Kind', 'Set_Exponent', 'Shape', - 'ShiftA', 'ShiftL', 'ShiftR', 'Short', 'Sign', 'Signal', 'SinH', - 'Sin', 'Sleep', 'Sngl', 'Spacing', 'Spread', 'SqRt', 'SRand', - 'Stat', 'Storage_Size', 'Sum', 'SymLnk', 'System', 'System_Clock', - 'Tan', 'TanH', 'Time', 'This_Image', 'Tiny', 'TrailZ', 'Transfer', - 'Transpose', 'Trim', 'TtyNam', 'UBound', 'UCoBound', 'UMask', - 'Unlink', 'Unpack', 'Verify', 'XOr', 'ZAbs', 'ZCos', 'ZExp', - 'ZLog', 'ZSin', 'ZSqRt'), prefix=r'\b', suffix=r'\s*\b'), - Name.Builtin), - - # Booleans - (r'\.(true|false)\.', Name.Builtin), - # Comparing Operators - (r'\.(eq|ne|lt|le|gt|ge|not|and|or|eqv|neqv)\.', Operator.Word), - ], - - 'strings': [ - (r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double), - (r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single), - ], - - 'nums': [ - (r'\d+(?![.e])(_[a-z]\w+)?', Number.Integer), - (r'[+-]?\d*\.\d+(e[-+]?\d+)?(_[a-z]\w+)?', Number.Float), - (r'[+-]?\d+\.\d*(e[-+]?\d+)?(_[a-z]\w+)?', Number.Float), - ], - } - - -class GLShaderLexer(RegexLexer): - """ - GLSL (OpenGL Shader) lexer. - - .. 
versionadded:: 1.1 - """ - name = 'GLSL' - aliases = ['glsl'] - filenames = ['*.vert', '*.frag', '*.geo'] - mimetypes = ['text/x-glslsrc'] - - tokens = { - 'root': [ - (r'^#.*', Comment.Preproc), - (r'//.*', Comment.Single), - (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline), - (r'\+|-|~|!=?|\*|/|%|<<|>>|<=?|>=?|==?|&&?|\^|\|\|?', - Operator), - (r'[?:]', Operator), # quick hack for ternary - (r'\bdefined\b', Operator), - (r'[;{}(),\[\]]', Punctuation), - # FIXME when e is present, no decimal point needed - (r'[+-]?\d*\.\d+([eE][-+]?\d+)?', Number.Float), - (r'[+-]?\d+\.\d*([eE][-+]?\d+)?', Number.Float), - (r'0[xX][0-9a-fA-F]*', Number.Hex), - (r'0[0-7]*', Number.Oct), - (r'[1-9][0-9]*', Number.Integer), - (words(( - 'attribute', 'const', 'uniform', 'varying', 'centroid', 'break', - 'continue', 'do', 'for', 'while', 'if', 'else', 'in', 'out', - 'inout', 'float', 'int', 'void', 'bool', 'true', 'false', - 'invariant', 'discard', 'return', 'mat2', 'mat3' 'mat4', - 'mat2x2', 'mat3x2', 'mat4x2', 'mat2x3', 'mat3x3', 'mat4x3', - 'mat2x4', 'mat3x4', 'mat4x4', 'vec2', 'vec3', 'vec4', - 'ivec2', 'ivec3', 'ivec4', 'bvec2', 'bvec3', 'bvec4', - 'sampler1D', 'sampler2D', 'sampler3D' 'samplerCube', - 'sampler1DShadow', 'sampler2DShadow', 'struct'), - prefix=r'\b', suffix=r'\b'), - Keyword), - (words(( - 'asm', 'class', 'union', 'enum', 'typedef', 'template', 'this', - 'packed', 'goto', 'switch', 'default', 'inline', 'noinline', - 'volatile', 'public', 'static', 'extern', 'external', 'interface', - 'long', 'short', 'double', 'half', 'fixed', 'unsigned', 'lowp', - 'mediump', 'highp', 'precision', 'input', 'output', - 'hvec2', 'hvec3', 'hvec4', 'dvec2', 'dvec3', 'dvec4', - 'fvec2', 'fvec3', 'fvec4', 'sampler2DRect', 'sampler3DRect', - 'sampler2DRectShadow', 'sizeof', 'cast', 'namespace', 'using'), - prefix=r'\b', suffix=r'\b'), - Keyword), # future use - (r'[a-zA-Z_][a-zA-Z_0-9]*', Name), - (r'\.', Punctuation), - (r'\s+', Text), - ], - } - - -class PrologLexer(RegexLexer): - """ - Lexer for Prolog files. - """ - name = 'Prolog' - aliases = ['prolog'] - filenames = ['*.prolog', '*.pro', '*.pl'] - mimetypes = ['text/x-prolog'] - - flags = re.UNICODE - - tokens = { - 'root': [ - (r'^#.*', Comment.Single), - (r'/\*', Comment.Multiline, 'nested-comment'), - (r'%.*', Comment.Single), - # character literal - (r'0\'.', String.Char), - (r'0b[01]+', Number.Bin), - (r'0o[0-7]+', Number.Oct), - (r'0x[0-9a-fA-F]+', Number.Hex), - # literal with prepended base - (r'\d\d?\'[a-zA-Z0-9]+', Number.Integer), - (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float), - (r'\d+', Number.Integer), - (r'[\[\](){}|.,;!]', Punctuation), - (r':-|-->', Punctuation), - (r'"(?:\\x[0-9a-fA-F]+\\|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|' - r'\\[0-7]+\\|\\["\nabcefnrstv]|[^\\"])*"', String.Double), - (r"'(?:''|[^'])*'", String.Atom), # quoted atom - # Needs to not be followed by an atom. 
- # (r'=(?=\s|[a-zA-Z\[])', Operator), - (r'is\b', Operator), - (r'(<|>|=<|>=|==|=:=|=|/|//|\*|\+|-)(?=\s|[a-zA-Z0-9\[])', - Operator), - (r'(mod|div|not)\b', Operator), - (r'_', Keyword), # The don't-care variable - (r'([a-z]+)(:)', bygroups(Name.Namespace, Punctuation)), - (u'([a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]' - u'[\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*)' - u'(\\s*)(:-|-->)', - bygroups(Name.Function, Text, Operator)), # function defn - (u'([a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]' - u'[\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*)' - u'(\\s*)(\\()', - bygroups(Name.Function, Text, Punctuation)), - (u'[a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]' - u'[\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*', - String.Atom), # atom, characters - # This one includes ! - (u'[#&*+\\-./:<=>?@\\\\^~\u00a1-\u00bf\u2010-\u303f]+', - String.Atom), # atom, graphics - (r'[A-Z_]\w*', Name.Variable), - (u'\\s+|[\u2000-\u200f\ufff0-\ufffe\uffef]', Text), - ], - 'nested-comment': [ - (r'\*/', Comment.Multiline, '#pop'), - (r'/\*', Comment.Multiline, '#push'), - (r'[^*/]+', Comment.Multiline), - (r'[*/]', Comment.Multiline), - ], - } - - def analyse_text(text): - return ':-' in text - - -class CythonLexer(RegexLexer): - """ - For Pyrex and `Cython `_ source code. - - .. versionadded:: 1.1 - """ - - name = 'Cython' - aliases = ['cython', 'pyx', 'pyrex'] - filenames = ['*.pyx', '*.pxd', '*.pxi'] - mimetypes = ['text/x-cython', 'application/x-cython'] - - tokens = { - 'root': [ - (r'\n', Text), - (r'^(\s*)("""(?:.|\n)*?""")', bygroups(Text, String.Doc)), - (r"^(\s*)('''(?:.|\n)*?''')", bygroups(Text, String.Doc)), - (r'[^\S\n]+', Text), - (r'#.*$', Comment), - (r'[]{}:(),;[]', Punctuation), - (r'\\\n', Text), - (r'\\', Text), - (r'(in|is|and|or|not)\b', Operator.Word), - (r'(<)([a-zA-Z0-9.?]+)(>)', - bygroups(Punctuation, Keyword.Type, Punctuation)), - (r'!=|==|<<|>>|[-~+/*%=<>&^|.?]', Operator), - (r'(from)(\d+)(<=)(\s+)(<)(\d+)(:)', - bygroups(Keyword, Number.Integer, Operator, Name, Operator, - Name, Punctuation)), - include('keywords'), - (r'(def|property)(\s+)', bygroups(Keyword, Text), 'funcname'), - (r'(cp?def)(\s+)', bygroups(Keyword, Text), 'cdef'), - (r'(class|struct)(\s+)', bygroups(Keyword, Text), 'classname'), - (r'(from)(\s+)', bygroups(Keyword, Text), 'fromimport'), - (r'(c?import)(\s+)', bygroups(Keyword, Text), 'import'), - include('builtins'), - include('backtick'), - ('(?:[rR]|[uU][rR]|[rR][uU])"""', String, 'tdqs'), - ("(?:[rR]|[uU][rR]|[rR][uU])'''", String, 'tsqs'), - ('(?:[rR]|[uU][rR]|[rR][uU])"', String, 'dqs'), - ("(?:[rR]|[uU][rR]|[rR][uU])'", String, 'sqs'), - ('[uU]?"""', String, combined('stringescape', 'tdqs')), - ("[uU]?'''", String, combined('stringescape', 'tsqs')), - ('[uU]?"', String, combined('stringescape', 'dqs')), - ("[uU]?'", String, combined('stringescape', 'sqs')), - include('name'), - include('numbers'), - ], - 'keywords': [ - (words(( - 'assert', 'break', 'by', 'continue', 'ctypedef', 'del', 'elif', - 'else', 'except', 'except?', 'exec', 'finally', 'for', 'gil', - 'global', 'if', 'include', 'lambda', 'nogil', 'pass', 'print', - 'raise', 'return', 'try', 'while', 'yield', 'as', 'with'), suffix=r'\b'), - Keyword), - (r'(DEF|IF|ELIF|ELSE)\b', Comment.Preproc), - ], - 'builtins': [ - (words(( - '__import__', 'abs', 'all', 'any', 'apply', 'basestring', 'bin', - 'bool', 'buffer', 'bytearray', 'bytes', 'callable', 'chr', - 'classmethod', 'cmp', 'coerce', 'compile', 'complex', 'delattr', - 'dict', 'dir', 'divmod', 'enumerate', 'eval', 'execfile', 'exit', - 
'file', 'filter', 'float', 'frozenset', 'getattr', 'globals', - 'hasattr', 'hash', 'hex', 'id', 'input', 'int', 'intern', 'isinstance', - 'issubclass', 'iter', 'len', 'list', 'locals', 'long', 'map', 'max', - 'min', 'next', 'object', 'oct', 'open', 'ord', 'pow', 'property', - 'range', 'raw_input', 'reduce', 'reload', 'repr', 'reversed', - 'round', 'set', 'setattr', 'slice', 'sorted', 'staticmethod', - 'str', 'sum', 'super', 'tuple', 'type', 'unichr', 'unicode', - 'vars', 'xrange', 'zip'), prefix=r'(?/-]', Operator), - (r'(\[)(Compact|Immutable|(?:Boolean|Simple)Type)(\])', - bygroups(Punctuation, Name.Decorator, Punctuation)), - # TODO: "correctly" parse complex code attributes - (r'(\[)(CCode|(?:Integer|Floating)Type)', - bygroups(Punctuation, Name.Decorator)), - (r'[()\[\],.]', Punctuation), - (words(( - 'as', 'base', 'break', 'case', 'catch', 'construct', 'continue', - 'default', 'delete', 'do', 'else', 'enum', 'finally', 'for', - 'foreach', 'get', 'if', 'in', 'is', 'lock', 'new', 'out', 'params', - 'return', 'set', 'sizeof', 'switch', 'this', 'throw', 'try', - 'typeof', 'while', 'yield'), suffix=r'\b'), - Keyword), - (words(( - 'abstract', 'const', 'delegate', 'dynamic', 'ensures', 'extern', - 'inline', 'internal', 'override', 'owned', 'private', 'protected', - 'public', 'ref', 'requires', 'signal', 'static', 'throws', 'unowned', - 'var', 'virtual', 'volatile', 'weak', 'yields'), suffix=r'\b'), - Keyword.Declaration), - (r'(namespace|using)(\s+)', bygroups(Keyword.Namespace, Text), - 'namespace'), - (r'(class|errordomain|interface|struct)(\s+)', - bygroups(Keyword.Declaration, Text), 'class'), - (r'(\.)([a-zA-Z_]\w*)', - bygroups(Operator, Name.Attribute)), - # void is an actual keyword, others are in glib-2.0.vapi - (words(( - 'void', 'bool', 'char', 'double', 'float', 'int', 'int8', 'int16', - 'int32', 'int64', 'long', 'short', 'size_t', 'ssize_t', 'string', - 'time_t', 'uchar', 'uint', 'uint8', 'uint16', 'uint32', 'uint64', - 'ulong', 'unichar', 'ushort'), suffix=r'\b'), - Keyword.Type), - (r'(true|false|null)\b', Name.Builtin), - ('[a-zA-Z_]\w*', Name), - ], - 'root': [ - include('whitespace'), - ('', Text, 'statement'), - ], - 'statement': [ - include('whitespace'), - include('statements'), - ('[{}]', Punctuation), - (';', Punctuation, '#pop'), - ], - 'string': [ - (r'"', String, '#pop'), - (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape), - (r'[^\\"\n]+', String), # all other characters - (r'\\\n', String), # line continuation - (r'\\', String), # stray backslash - ], - 'if0': [ - (r'^\s*#if.*?(?`_ source code - - .. 
versionadded:: 1.2 - """ - name = 'Ooc' - aliases = ['ooc'] - filenames = ['*.ooc'] - mimetypes = ['text/x-ooc'] - - tokens = { - 'root': [ - (words(( - 'class', 'interface', 'implement', 'abstract', 'extends', 'from', - 'this', 'super', 'new', 'const', 'final', 'static', 'import', - 'use', 'extern', 'inline', 'proto', 'break', 'continue', - 'fallthrough', 'operator', 'if', 'else', 'for', 'while', 'do', - 'switch', 'case', 'as', 'in', 'version', 'return', 'true', - 'false', 'null'), prefix=r'\b', suffix=r'\b'), - Keyword), - (r'include\b', Keyword, 'include'), - (r'(cover)([ \t]+)(from)([ \t]+)(\w+[*@]?)', - bygroups(Keyword, Text, Keyword, Text, Name.Class)), - (r'(func)((?:[ \t]|\\\n)+)(~[a-z_]\w*)', - bygroups(Keyword, Text, Name.Function)), - (r'\bfunc\b', Keyword), - # Note: %= and ^= not listed on http://ooc-lang.org/syntax - (r'//.*', Comment), - (r'(?s)/\*.*?\*/', Comment.Multiline), - (r'(==?|\+=?|-[=>]?|\*=?|/=?|:=|!=?|%=?|\?|>{1,3}=?|<{1,3}=?|\.\.|' - r'&&?|\|\|?|\^=?)', Operator), - (r'(\.)([ \t]*)([a-z]\w*)', bygroups(Operator, Text, - Name.Function)), - (r'[A-Z][A-Z0-9_]+', Name.Constant), - (r'[A-Z]\w*([@*]|\[[ \t]*\])?', Name.Class), - - (r'([a-z]\w*(?:~[a-z]\w*)?)((?:[ \t]|\\\n)*)(?=\()', - bygroups(Name.Function, Text)), - (r'[a-z]\w*', Name.Variable), - - # : introduces types - (r'[:(){}\[\];,]', Punctuation), - - (r'0x[0-9a-fA-F]+', Number.Hex), - (r'0c[0-9]+', Number.Oct), - (r'0b[01]+', Number.Bin), - (r'[0-9_]\.[0-9_]*(?!\.)', Number.Float), - (r'[0-9_]+', Number.Decimal), - - (r'"(?:\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\"])*"', - String.Double), - (r"'(?:\\.|\\[0-9]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", - String.Char), - (r'@', Punctuation), # pointer dereference - (r'\.', Punctuation), # imports or chain operator - - (r'\\[ \t\n]', Text), - (r'[ \t]+', Text), - ], - 'include': [ - (r'[\w/]+', Name), - (r',', Punctuation), - (r'[ \t]', Text), - (r'[;\n]', Text, '#pop'), - ], - } - - -class GoLexer(RegexLexer): - """ - For `Go `_ source. - """ - name = 'Go' - filenames = ['*.go'] - aliases = ['go'] - mimetypes = ['text/x-gosrc'] - - flags = re.MULTILINE | re.UNICODE - - tokens = { - 'root': [ - (r'\n', Text), - (r'\s+', Text), - (r'\\\n', Text), # line continuations - (r'//(.*?)\n', Comment.Single), - (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline), - (r'(import|package)\b', Keyword.Namespace), - (r'(var|func|struct|map|chan|type|interface|const)\b', - Keyword.Declaration), - (words(( - 'break', 'default', 'select', 'case', 'defer', 'go', - 'else', 'goto', 'switch', 'fallthrough', 'if', 'range', - 'continue', 'for', 'return'), suffix=r'\b'), - Keyword), - (r'(true|false|iota|nil)\b', Keyword.Constant), - # It seems the builtin types aren't actually keywords, but - # can be used as functions. So we need two declarations. 
- (words(( - 'uint', 'uint8', 'uint16', 'uint32', 'uint64', - 'int', 'int8', 'int16', 'int32', 'int64', - 'float', 'float32', 'float64', - 'complex64', 'complex128', 'byte', 'rune', - 'string', 'bool', 'error', 'uintptr', - 'print', 'println', 'panic', 'recover', 'close', 'complex', - 'real', 'imag', 'len', 'cap', 'append', 'copy', 'delete', - 'new', 'make'), suffix=r'\b(\()'), - bygroups(Name.Builtin, Punctuation)), - (words(( - 'uint', 'uint8', 'uint16', 'uint32', 'uint64', - 'int', 'int8', 'int16', 'int32', 'int64', - 'float', 'float32', 'float64', - 'complex64', 'complex128', 'byte', 'rune', - 'string', 'bool', 'error', 'uintptr'), suffix=r'\b'), - Keyword.Type), - # imaginary_lit - (r'\d+i', Number), - (r'\d+\.\d*([Ee][-+]\d+)?i', Number), - (r'\.\d+([Ee][-+]\d+)?i', Number), - (r'\d+[Ee][-+]\d+i', Number), - # float_lit - (r'\d+(\.\d+[eE][+\-]?\d+|' - r'\.\d*|[eE][+\-]?\d+)', Number.Float), - (r'\.\d+([eE][+\-]?\d+)?', Number.Float), - # int_lit - # -- octal_lit - (r'0[0-7]+', Number.Oct), - # -- hex_lit - (r'0[xX][0-9a-fA-F]+', Number.Hex), - # -- decimal_lit - (r'(0|[1-9][0-9]*)', Number.Integer), - # char_lit - (r"""'(\\['"\\abfnrtv]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}""" - r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|[^\\])'""", - String.Char), - # StringLiteral - # -- raw_string_lit - (r'`[^`]*`', String), - # -- interpreted_string_lit - (r'"(\\\\|\\"|[^"])*"', String), - # Tokens - (r'(<<=|>>=|<<|>>|<=|>=|&\^=|&\^|\+=|-=|\*=|/=|%=|&=|\|=|&&|\|\|' - r'|<-|\+\+|--|==|!=|:=|\.\.\.|[+\-*/%&])', Operator), - (r'[|^<>=!()\[\]{}.,;:]', Punctuation), - # identifier - (r'[^\W\d]\w*', Name.Other), - ] - } - - -class FelixLexer(RegexLexer): - """ - For `Felix `_ source code. - - .. versionadded:: 1.2 - """ - - name = 'Felix' - aliases = ['felix', 'flx'] - filenames = ['*.flx', '*.flxh'] - mimetypes = ['text/x-felix'] - - preproc = ( - 'elif', 'else', 'endif', 'if', 'ifdef', 'ifndef', - ) - - keywords = ( - '_', '_deref', 'all', 'as', - 'assert', 'attempt', 'call', 'callback', 'case', 'caseno', 'cclass', - 'code', 'compound', 'ctypes', 'do', 'done', 'downto', 'elif', 'else', - 'endattempt', 'endcase', 'endif', 'endmatch', 'enum', 'except', - 'exceptions', 'expect', 'finally', 'for', 'forall', 'forget', 'fork', - 'functor', 'goto', 'ident', 'if', 'incomplete', 'inherit', 'instance', - 'interface', 'jump', 'lambda', 'loop', 'match', 'module', 'namespace', - 'new', 'noexpand', 'nonterm', 'obj', 'of', 'open', 'parse', 'raise', - 'regexp', 'reglex', 'regmatch', 'rename', 'return', 'the', 'then', - 'to', 'type', 'typecase', 'typedef', 'typematch', 'typeof', 'upto', - 'when', 'whilst', 'with', 'yield', - ) - - keyword_directives = ( - '_gc_pointer', '_gc_type', 'body', 'comment', 'const', 'export', - 'header', 'inline', 'lval', 'macro', 'noinline', 'noreturn', - 'package', 'private', 'pod', 'property', 'public', 'publish', - 'requires', 'todo', 'virtual', 'use', - ) - - keyword_declarations = ( - 'def', 'let', 'ref', 'val', 'var', - ) - - keyword_types = ( - 'unit', 'void', 'any', 'bool', - 'byte', 'offset', - 'address', 'caddress', 'cvaddress', 'vaddress', - 'tiny', 'short', 'int', 'long', 'vlong', - 'utiny', 'ushort', 'vshort', 'uint', 'ulong', 'uvlong', - 'int8', 'int16', 'int32', 'int64', - 'uint8', 'uint16', 'uint32', 'uint64', - 'float', 'double', 'ldouble', - 'complex', 'dcomplex', 'lcomplex', - 'imaginary', 'dimaginary', 'limaginary', - 'char', 'wchar', 'uchar', - 'charp', 'charcp', 'ucharp', 'ucharcp', - 'string', 'wstring', 'ustring', - 'cont', - 'array', 'varray', 'list', - 'lvalue', 'opt', 
'slice', - ) - - keyword_constants = ( - 'false', 'true', - ) - - operator_words = ( - 'and', 'not', 'in', 'is', 'isin', 'or', 'xor', - ) - - name_builtins = ( - '_svc', 'while', - ) - - name_pseudo = ( - 'root', 'self', 'this', - ) - - decimal_suffixes = '([tTsSiIlLvV]|ll|LL|([iIuU])(8|16|32|64))?' - - tokens = { - 'root': [ - include('whitespace'), - - # Keywords - (words(('axiom', 'ctor', 'fun', 'gen', 'proc', 'reduce', - 'union'), suffix=r'\b'), - Keyword, 'funcname'), - (words(('class', 'cclass', 'cstruct', 'obj', 'struct'), suffix=r'\b'), - Keyword, 'classname'), - (r'(instance|module|typeclass)\b', Keyword, 'modulename'), - - (words(keywords, suffix=r'\b'), Keyword), - (words(keyword_directives, suffix=r'\b'), Name.Decorator), - (words(keyword_declarations, suffix=r'\b'), Keyword.Declaration), - (words(keyword_types, suffix=r'\b'), Keyword.Type), - (words(keyword_constants, suffix=r'\b'), Keyword.Constant), - - # Operators - include('operators'), - - # Float Literal - # -- Hex Float - (r'0[xX]([0-9a-fA-F_]*\.[0-9a-fA-F_]+|[0-9a-fA-F_]+)' - r'[pP][+\-]?[0-9_]+[lLfFdD]?', Number.Float), - # -- DecimalFloat - (r'[0-9_]+(\.[0-9_]+[eE][+\-]?[0-9_]+|' - r'\.[0-9_]*|[eE][+\-]?[0-9_]+)[lLfFdD]?', Number.Float), - (r'\.(0|[1-9][0-9_]*)([eE][+\-]?[0-9_]+)?[lLfFdD]?', - Number.Float), - - # IntegerLiteral - # -- Binary - (r'0[Bb][01_]+%s' % decimal_suffixes, Number.Bin), - # -- Octal - (r'0[0-7_]+%s' % decimal_suffixes, Number.Oct), - # -- Hexadecimal - (r'0[xX][0-9a-fA-F_]+%s' % decimal_suffixes, Number.Hex), - # -- Decimal - (r'(0|[1-9][0-9_]*)%s' % decimal_suffixes, Number.Integer), - - # Strings - ('([rR][cC]?|[cC][rR])"""', String, 'tdqs'), - ("([rR][cC]?|[cC][rR])'''", String, 'tsqs'), - ('([rR][cC]?|[cC][rR])"', String, 'dqs'), - ("([rR][cC]?|[cC][rR])'", String, 'sqs'), - ('[cCfFqQwWuU]?"""', String, combined('stringescape', 'tdqs')), - ("[cCfFqQwWuU]?'''", String, combined('stringescape', 'tsqs')), - ('[cCfFqQwWuU]?"', String, combined('stringescape', 'dqs')), - ("[cCfFqQwWuU]?'", String, combined('stringescape', 'sqs')), - - # Punctuation - (r'[\[\]{}:(),;?]', Punctuation), - - # Labels - (r'[a-zA-Z_]\w*:>', Name.Label), - - # Identifiers - (r'(%s)\b' % '|'.join(name_builtins), Name.Builtin), - (r'(%s)\b' % '|'.join(name_pseudo), Name.Builtin.Pseudo), - (r'[a-zA-Z_]\w*', Name), - ], - 'whitespace': [ - (r'\n', Text), - (r'\s+', Text), - - include('comment'), - - # Preprocessor - (r'#\s*if\s+0', Comment.Preproc, 'if0'), - (r'#', Comment.Preproc, 'macro'), - ], - 'operators': [ - (r'(%s)\b' % '|'.join(operator_words), Operator.Word), - (r'!=|==|<<|>>|\|\||&&|[-~+/*%=<>&^|.$]', Operator), - ], - 'comment': [ - (r'//(.*?)\n', Comment.Single), - (r'/[*]', Comment.Multiline, 'comment2'), - ], - 'comment2': [ - (r'[^\/*]', Comment.Multiline), - (r'/[*]', Comment.Multiline, '#push'), - (r'[*]/', Comment.Multiline, '#pop'), - (r'[\/*]', Comment.Multiline), - ], - 'if0': [ - (r'^\s*#if.*?(?]*?>)', - bygroups(Comment.Preproc, Text, String), '#pop'), - (r'(import|include)(\s+)("[^"]*?")', - bygroups(Comment.Preproc, Text, String), '#pop'), - (r"(import|include)(\s+)('[^']*?')", - bygroups(Comment.Preproc, Text, String), '#pop'), - (r'[^/\n]+', Comment.Preproc), - # (r'/[*](.|\n)*?[*]/', Comment), - # (r'//.*?\n', Comment, '#pop'), - (r'/', Comment.Preproc), - (r'(?<=\\)\n', Comment.Preproc), - (r'\n', Comment.Preproc, '#pop'), - ], - 'funcname': [ - include('whitespace'), - (r'[a-zA-Z_]\w*', Name.Function, '#pop'), - # anonymous functions - (r'(?=\()', Text, '#pop'), - ], - 'classname': [ - 
include('whitespace'), - (r'[a-zA-Z_]\w*', Name.Class, '#pop'), - # anonymous classes - (r'(?=\{)', Text, '#pop'), - ], - 'modulename': [ - include('whitespace'), - (r'\[', Punctuation, ('modulename2', 'tvarlist')), - default('modulename2'), - ], - 'modulename2': [ - include('whitespace'), - (r'([a-zA-Z_]\w*)', Name.Namespace, '#pop:2'), - ], - 'tvarlist': [ - include('whitespace'), - include('operators'), - (r'\[', Punctuation, '#push'), - (r'\]', Punctuation, '#pop'), - (r',', Punctuation), - (r'(with|where)\b', Keyword), - (r'[a-zA-Z_]\w*', Name), - ], - 'stringescape': [ - (r'\\([\\abfnrtv"\']|\n|N{.*?}|u[a-fA-F0-9]{4}|' - r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape) - ], - 'strings': [ - (r'%(\([a-zA-Z0-9]+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?' - '[hlL]?[diouxXeEfFgGcrs%]', String.Interpol), - (r'[^\\\'"%\n]+', String), - # quotes, percents and backslashes must be parsed one at a time - (r'[\'"\\]', String), - # unhandled string formatting sign - (r'%', String) - # newlines are an error (use "nl" state) - ], - 'nl': [ - (r'\n', String) - ], - 'dqs': [ - (r'"', String, '#pop'), - # included here again for raw strings - (r'\\\\|\\"|\\\n', String.Escape), - include('strings') - ], - 'sqs': [ - (r"'", String, '#pop'), - # included here again for raw strings - (r"\\\\|\\'|\\\n", String.Escape), - include('strings') - ], - 'tdqs': [ - (r'"""', String, '#pop'), - include('strings'), - include('nl') - ], - 'tsqs': [ - (r"'''", String, '#pop'), - include('strings'), - include('nl') - ], - } - - -class AdaLexer(RegexLexer): - """ - For Ada source code. - - .. versionadded:: 1.3 - """ - - name = 'Ada' - aliases = ['ada', 'ada95', 'ada2005'] - filenames = ['*.adb', '*.ads', '*.ada'] - mimetypes = ['text/x-ada'] - - flags = re.MULTILINE | re.I # Ignore case - - tokens = { - 'root': [ - (r'[^\S\n]+', Text), - (r'--.*?\n', Comment.Single), - (r'[^\S\n]+', Text), - (r'function|procedure|entry', Keyword.Declaration, 'subprogram'), - (r'(subtype|type)(\s+)([a-z0-9_]+)', - bygroups(Keyword.Declaration, Text, Keyword.Type), 'type_def'), - (r'task|protected', Keyword.Declaration), - (r'(subtype)(\s+)', bygroups(Keyword.Declaration, Text)), - (r'(end)(\s+)', bygroups(Keyword.Reserved, Text), 'end'), - (r'(pragma)(\s+)(\w+)', bygroups(Keyword.Reserved, Text, - Comment.Preproc)), - (r'(true|false|null)\b', Keyword.Constant), - (words(( - 'Address', 'Byte', 'Boolean', 'Character', 'Controlled', 'Count', 'Cursor', - 'Duration', 'File_Mode', 'File_Type', 'Float', 'Generator', 'Integer', 'Long_Float', - 'Long_Integer', 'Long_Long_Float', 'Long_Long_Integer', 'Natural', 'Positive', - 'Reference_Type', 'Short_Float', 'Short_Integer', 'Short_Short_Float', - 'Short_Short_Integer', 'String', 'Wide_Character', 'Wide_String'), suffix=r'\b'), - Keyword.Type), - (r'(and(\s+then)?|in|mod|not|or(\s+else)|rem)\b', Operator.Word), - (r'generic|private', Keyword.Declaration), - (r'package', Keyword.Declaration, 'package'), - (r'array\b', Keyword.Reserved, 'array_def'), - (r'(with|use)(\s+)', bygroups(Keyword.Namespace, Text), 'import'), - (r'([a-z0-9_]+)(\s*)(:)(\s*)(constant)', - bygroups(Name.Constant, Text, Punctuation, Text, - Keyword.Reserved)), - (r'<<[a-z0-9_]+>>', Name.Label), - (r'([a-z0-9_]+)(\s*)(:)(\s*)(declare|begin|loop|for|while)', - bygroups(Name.Label, Text, Punctuation, Text, Keyword.Reserved)), - (words(( - 'abort', 'abs', 'abstract', 'accept', 'access', 'aliased', 'all', - 'array', 'at', 'begin', 'body', 'case', 'constant', 'declare', - 'delay', 'delta', 'digits', 'do', 'else', 
'elsif', 'end', 'entry', - 'exception', 'exit', 'interface', 'for', 'goto', 'if', 'is', 'limited', - 'loop', 'new', 'null', 'of', 'or', 'others', 'out', 'overriding', - 'pragma', 'protected', 'raise', 'range', 'record', 'renames', 'requeue', - 'return', 'reverse', 'select', 'separate', 'subtype', 'synchronized', - 'task', 'tagged', 'terminate', 'then', 'type', 'until', 'when', - 'while', 'xor'), prefix=r'\b', suffix=r'\b'), - Keyword.Reserved), - (r'"[^"]*"', String), - include('attribute'), - include('numbers'), - (r"'[^']'", String.Character), - (r'([a-z0-9_]+)(\s*|[(,])', bygroups(Name, using(this))), - (r"(<>|=>|:=|[()|:;,.'])", Punctuation), - (r'[*<>+=/&-]', Operator), - (r'\n+', Text), - ], - 'numbers': [ - (r'[0-9_]+#[0-9a-f]+#', Number.Hex), - (r'[0-9_]+\.[0-9_]*', Number.Float), - (r'[0-9_]+', Number.Integer), - ], - 'attribute': [ - (r"(')(\w+)", bygroups(Punctuation, Name.Attribute)), - ], - 'subprogram': [ - (r'\(', Punctuation, ('#pop', 'formal_part')), - (r';', Punctuation, '#pop'), - (r'is\b', Keyword.Reserved, '#pop'), - (r'"[^"]+"|[a-z0-9_]+', Name.Function), - include('root'), - ], - 'end': [ - ('(if|case|record|loop|select)', Keyword.Reserved), - ('"[^"]+"|[\w.]+', Name.Function), - ('\s+', Text), - (';', Punctuation, '#pop'), - ], - 'type_def': [ - (r';', Punctuation, '#pop'), - (r'\(', Punctuation, 'formal_part'), - (r'with|and|use', Keyword.Reserved), - (r'array\b', Keyword.Reserved, ('#pop', 'array_def')), - (r'record\b', Keyword.Reserved, ('record_def')), - (r'(null record)(;)', bygroups(Keyword.Reserved, Punctuation), '#pop'), - include('root'), - ], - 'array_def': [ - (r';', Punctuation, '#pop'), - (r'([a-z0-9_]+)(\s+)(range)', bygroups(Keyword.Type, Text, - Keyword.Reserved)), - include('root'), - ], - 'record_def': [ - (r'end record', Keyword.Reserved, '#pop'), - include('root'), - ], - 'import': [ - (r'[a-z0-9_.]+', Name.Namespace, '#pop'), - default('#pop'), - ], - 'formal_part': [ - (r'\)', Punctuation, '#pop'), - (r'[a-z0-9_]+', Name.Variable), - (r',|:[^=]', Punctuation), - (r'(in|not|null|out|access)\b', Keyword.Reserved), - include('root'), - ], - 'package': [ - ('body', Keyword.Declaration), - ('is\s+new|renames', Keyword.Reserved), - ('is', Keyword.Reserved, '#pop'), - (';', Punctuation, '#pop'), - ('\(', Punctuation, 'package_instantiation'), - ('([\w.]+)', Name.Class), - include('root'), - ], - 'package_instantiation': [ - (r'("[^"]+"|[a-z0-9_]+)(\s+)(=>)', bygroups(Name.Variable, - Text, Punctuation)), - (r'[a-z0-9._\'"]', Text), - (r'\)', Punctuation, '#pop'), - include('root'), - ], - } - - -class Modula2Lexer(RegexLexer): - """ - For `Modula-2 `_ source code. - - Additional options that determine which keywords are highlighted: - - `pim` - Select PIM Modula-2 dialect (default: True). - `iso` - Select ISO Modula-2 dialect (default: False). - `objm2` - Select Objective Modula-2 dialect (default: False). - `gm2ext` - Also highlight GNU extensions (default: False). - - .. 
versionadded:: 1.3 - """ - name = 'Modula-2' - aliases = ['modula2', 'm2'] - filenames = ['*.def', '*.mod'] - mimetypes = ['text/x-modula2'] - - flags = re.MULTILINE | re.DOTALL - - tokens = { - 'whitespace': [ - (r'\n+', Text), # blank lines - (r'\s+', Text), # whitespace - ], - 'identifiers': [ - (r'([a-zA-Z_\$][\w\$]*)', Name), - ], - 'numliterals': [ - (r'[01]+B', Number.Bin), # binary number (ObjM2) - (r'[0-7]+B', Number.Oct), # octal number (PIM + ISO) - (r'[0-7]+C', Number.Oct), # char code (PIM + ISO) - (r'[0-9A-F]+C', Number.Hex), # char code (ObjM2) - (r'[0-9A-F]+H', Number.Hex), # hexadecimal number - (r'[0-9]+\.[0-9]+E[+-][0-9]+', Number.Float), # real number - (r'[0-9]+\.[0-9]+', Number.Float), # real number - (r'[0-9]+', Number.Integer), # decimal whole number - ], - 'strings': [ - (r"'(\\\\|\\'|[^'])*'", String), # single quoted string - (r'"(\\\\|\\"|[^"])*"', String), # double quoted string - ], - 'operators': [ - (r'[*/+=#~&<>\^-]', Operator), - (r':=', Operator), # assignment - (r'@', Operator), # pointer deref (ISO) - (r'\.\.', Operator), # ellipsis or range - (r'`', Operator), # Smalltalk message (ObjM2) - (r'::', Operator), # type conversion (ObjM2) - ], - 'punctuation': [ - (r'[\(\)\[\]{},.:;|]', Punctuation), - ], - 'comments': [ - (r'//.*?\n', Comment.Single), # ObjM2 - (r'/\*(.*?)\*/', Comment.Multiline), # ObjM2 - (r'\(\*([^\$].*?)\*\)', Comment.Multiline), - # TO DO: nesting of (* ... *) comments - ], - 'pragmas': [ - (r'\(\*\$(.*?)\*\)', Comment.Preproc), # PIM - (r'<\*(.*?)\*>', Comment.Preproc), # ISO + ObjM2 - ], - 'root': [ - include('whitespace'), - include('comments'), - include('pragmas'), - include('identifiers'), - include('numliterals'), - include('strings'), - include('operators'), - include('punctuation'), - ] - } - - pim_reserved_words = [ - # 40 reserved words - 'AND', 'ARRAY', 'BEGIN', 'BY', 'CASE', 'CONST', 'DEFINITION', - 'DIV', 'DO', 'ELSE', 'ELSIF', 'END', 'EXIT', 'EXPORT', 'FOR', - 'FROM', 'IF', 'IMPLEMENTATION', 'IMPORT', 'IN', 'LOOP', 'MOD', - 'MODULE', 'NOT', 'OF', 'OR', 'POINTER', 'PROCEDURE', 'QUALIFIED', - 'RECORD', 'REPEAT', 'RETURN', 'SET', 'THEN', 'TO', 'TYPE', - 'UNTIL', 'VAR', 'WHILE', 'WITH', - ] - - pim_pervasives = [ - # 31 pervasives - 'ABS', 'BITSET', 'BOOLEAN', 'CAP', 'CARDINAL', 'CHAR', 'CHR', 'DEC', - 'DISPOSE', 'EXCL', 'FALSE', 'FLOAT', 'HALT', 'HIGH', 'INC', 'INCL', - 'INTEGER', 'LONGINT', 'LONGREAL', 'MAX', 'MIN', 'NEW', 'NIL', 'ODD', - 'ORD', 'PROC', 'REAL', 'SIZE', 'TRUE', 'TRUNC', 'VAL', - ] - - iso_reserved_words = [ - # 46 reserved words - 'AND', 'ARRAY', 'BEGIN', 'BY', 'CASE', 'CONST', 'DEFINITION', 'DIV', - 'DO', 'ELSE', 'ELSIF', 'END', 'EXCEPT', 'EXIT', 'EXPORT', 'FINALLY', - 'FOR', 'FORWARD', 'FROM', 'IF', 'IMPLEMENTATION', 'IMPORT', 'IN', - 'LOOP', 'MOD', 'MODULE', 'NOT', 'OF', 'OR', 'PACKEDSET', 'POINTER', - 'PROCEDURE', 'QUALIFIED', 'RECORD', 'REPEAT', 'REM', 'RETRY', - 'RETURN', 'SET', 'THEN', 'TO', 'TYPE', 'UNTIL', 'VAR', 'WHILE', - 'WITH', - ] - - iso_pervasives = [ - # 42 pervasives - 'ABS', 'BITSET', 'BOOLEAN', 'CAP', 'CARDINAL', 'CHAR', 'CHR', 'CMPLX', - 'COMPLEX', 'DEC', 'DISPOSE', 'EXCL', 'FALSE', 'FLOAT', 'HALT', 'HIGH', - 'IM', 'INC', 'INCL', 'INT', 'INTEGER', 'INTERRUPTIBLE', 'LENGTH', - 'LFLOAT', 'LONGCOMPLEX', 'LONGINT', 'LONGREAL', 'MAX', 'MIN', 'NEW', - 'NIL', 'ODD', 'ORD', 'PROC', 'PROTECTION', 'RE', 'REAL', 'SIZE', - 'TRUE', 'TRUNC', 'UNINTERRUBTIBLE', 'VAL', - ] - - objm2_reserved_words = [ - # base language, 42 reserved words - 'AND', 'ARRAY', 'BEGIN', 'BY', 'CASE', 'CONST', 'DEFINITION', 
'DIV', - 'DO', 'ELSE', 'ELSIF', 'END', 'ENUM', 'EXIT', 'FOR', 'FROM', 'IF', - 'IMMUTABLE', 'IMPLEMENTATION', 'IMPORT', 'IN', 'IS', 'LOOP', 'MOD', - 'MODULE', 'NOT', 'OF', 'OPAQUE', 'OR', 'POINTER', 'PROCEDURE', - 'RECORD', 'REPEAT', 'RETURN', 'SET', 'THEN', 'TO', 'TYPE', - 'UNTIL', 'VAR', 'VARIADIC', 'WHILE', - # OO extensions, 16 reserved words - 'BYCOPY', 'BYREF', 'CLASS', 'CONTINUE', 'CRITICAL', 'INOUT', 'METHOD', - 'ON', 'OPTIONAL', 'OUT', 'PRIVATE', 'PROTECTED', 'PROTOCOL', 'PUBLIC', - 'SUPER', 'TRY', - ] - - objm2_pervasives = [ - # base language, 38 pervasives - 'ABS', 'BITSET', 'BOOLEAN', 'CARDINAL', 'CHAR', 'CHR', 'DISPOSE', - 'FALSE', 'HALT', 'HIGH', 'INTEGER', 'INRANGE', 'LENGTH', 'LONGCARD', - 'LONGINT', 'LONGREAL', 'MAX', 'MIN', 'NEG', 'NEW', 'NEXTV', 'NIL', - 'OCTET', 'ODD', 'ORD', 'PRED', 'PROC', 'READ', 'REAL', 'SUCC', 'TMAX', - 'TMIN', 'TRUE', 'TSIZE', 'UNICHAR', 'VAL', 'WRITE', 'WRITEF', - # OO extensions, 3 pervasives - 'OBJECT', 'NO', 'YES', - ] - - gnu_reserved_words = [ - # 10 additional reserved words - 'ASM', '__ATTRIBUTE__', '__BUILTIN__', '__COLUMN__', '__DATE__', - '__FILE__', '__FUNCTION__', '__LINE__', '__MODULE__', 'VOLATILE', - ] - - gnu_pervasives = [ - # 21 identifiers, actually from pseudo-module SYSTEM - # but we will highlight them as if they were pervasives - 'BITSET8', 'BITSET16', 'BITSET32', 'CARDINAL8', 'CARDINAL16', - 'CARDINAL32', 'CARDINAL64', 'COMPLEX32', 'COMPLEX64', 'COMPLEX96', - 'COMPLEX128', 'INTEGER8', 'INTEGER16', 'INTEGER32', 'INTEGER64', - 'REAL8', 'REAL16', 'REAL32', 'REAL96', 'REAL128', 'THROW', - ] - - def __init__(self, **options): - self.reserved_words = set() - self.pervasives = set() - # ISO Modula-2 - if get_bool_opt(options, 'iso', False): - self.reserved_words.update(self.iso_reserved_words) - self.pervasives.update(self.iso_pervasives) - # Objective Modula-2 - elif get_bool_opt(options, 'objm2', False): - self.reserved_words.update(self.objm2_reserved_words) - self.pervasives.update(self.objm2_pervasives) - # PIM Modula-2 (DEFAULT) - else: - self.reserved_words.update(self.pim_reserved_words) - self.pervasives.update(self.pim_pervasives) - # GNU extensions - if get_bool_opt(options, 'gm2ext', False): - self.reserved_words.update(self.gnu_reserved_words) - self.pervasives.update(self.gnu_pervasives) - # initialise - RegexLexer.__init__(self, **options) - - def get_tokens_unprocessed(self, text): - for index, token, value in RegexLexer.get_tokens_unprocessed(self, text): - # check for reserved words and pervasives - if token is Name: - if value in self.reserved_words: - token = Keyword.Reserved - elif value in self.pervasives: - token = Keyword.Pervasive - # return result - yield index, token, value - - -class BlitzMaxLexer(RegexLexer): - """ - For `BlitzMax `_ source code. - - .. 
versionadded:: 1.4 - """ - - name = 'BlitzMax' - aliases = ['blitzmax', 'bmax'] - filenames = ['*.bmx'] - mimetypes = ['text/x-bmx'] - - bmax_vopwords = r'\b(Shl|Shr|Sar|Mod)\b' - bmax_sktypes = r'@{1,2}|[!#$%]' - bmax_lktypes = r'\b(Int|Byte|Short|Float|Double|Long)\b' - bmax_name = r'[a-z_]\w*' - bmax_var = (r'(%s)(?:(?:([ \t]*)(%s)|([ \t]*:[ \t]*\b(?:Shl|Shr|Sar|Mod)\b)' - r'|([ \t]*)(:)([ \t]*)(?:%s|(%s)))(?:([ \t]*)(Ptr))?)') % \ - (bmax_name, bmax_sktypes, bmax_lktypes, bmax_name) - bmax_func = bmax_var + r'?((?:[ \t]|\.\.\n)*)([(])' - - flags = re.MULTILINE | re.IGNORECASE - tokens = { - 'root': [ - # Text - (r'[ \t]+', Text), - (r'\.\.\n', Text), # Line continuation - # Comments - (r"'.*?\n", Comment.Single), - (r'([ \t]*)\bRem\n(\n|.)*?\s*\bEnd([ \t]*)Rem', Comment.Multiline), - # Data types - ('"', String.Double, 'string'), - # Numbers - (r'[0-9]+\.[0-9]*(?!\.)', Number.Float), - (r'\.[0-9]*(?!\.)', Number.Float), - (r'[0-9]+', Number.Integer), - (r'\$[0-9a-f]+', Number.Hex), - (r'\%[10]+', Number.Bin), - # Other - (r'(?:(?:(:)?([ \t]*)(:?%s|([+\-*/&|~]))|Or|And|Not|[=<>^]))' % - (bmax_vopwords), Operator), - (r'[(),.:\[\]]', Punctuation), - (r'(?:#[\w \t]*)', Name.Label), - (r'(?:\?[\w \t]*)', Comment.Preproc), - # Identifiers - (r'\b(New)\b([ \t]?)([(]?)(%s)' % (bmax_name), - bygroups(Keyword.Reserved, Text, Punctuation, Name.Class)), - (r'\b(Import|Framework|Module)([ \t]+)(%s\.%s)' % - (bmax_name, bmax_name), - bygroups(Keyword.Reserved, Text, Keyword.Namespace)), - (bmax_func, bygroups(Name.Function, Text, Keyword.Type, - Operator, Text, Punctuation, Text, - Keyword.Type, Name.Class, Text, - Keyword.Type, Text, Punctuation)), - (bmax_var, bygroups(Name.Variable, Text, Keyword.Type, Operator, - Text, Punctuation, Text, Keyword.Type, - Name.Class, Text, Keyword.Type)), - (r'\b(Type|Extends)([ \t]+)(%s)' % (bmax_name), - bygroups(Keyword.Reserved, Text, Name.Class)), - # Keywords - (r'\b(Ptr)\b', Keyword.Type), - (r'\b(Pi|True|False|Null|Self|Super)\b', Keyword.Constant), - (r'\b(Local|Global|Const|Field)\b', Keyword.Declaration), - (words(( - 'TNullMethodException', 'TNullFunctionException', - 'TNullObjectException', 'TArrayBoundsException', - 'TRuntimeException'), prefix=r'\b', suffix=r'\b'), Name.Exception), - (words(( - 'Strict', 'SuperStrict', 'Module', 'ModuleInfo', - 'End', 'Return', 'Continue', 'Exit', 'Public', 'Private', - 'Var', 'VarPtr', 'Chr', 'Len', 'Asc', 'SizeOf', 'Sgn', 'Abs', 'Min', 'Max', - 'New', 'Release', 'Delete', 'Incbin', 'IncbinPtr', 'IncbinLen', - 'Framework', 'Include', 'Import', 'Extern', 'EndExtern', - 'Function', 'EndFunction', 'Type', 'EndType', 'Extends', 'Method', 'EndMethod', - 'Abstract', 'Final', 'If', 'Then', 'Else', 'ElseIf', 'EndIf', - 'For', 'To', 'Next', 'Step', 'EachIn', 'While', 'Wend', 'EndWhile', - 'Repeat', 'Until', 'Forever', 'Select', 'Case', 'Default', 'EndSelect', - 'Try', 'Catch', 'EndTry', 'Throw', 'Assert', 'Goto', 'DefData', 'ReadData', - 'RestoreData'), prefix=r'\b', suffix=r'\b'), - Keyword.Reserved), - # Final resolve (for variable names and such) - (r'(%s)' % (bmax_name), Name.Variable), - ], - 'string': [ - (r'""', String.Double), - (r'"C?', String.Double, '#pop'), - (r'[^"]+', String.Double), - ], - } - - -class BlitzBasicLexer(RegexLexer): - """ - For `BlitzBasic `_ source code. - - .. 
versionadded:: 2.0 - """ - - name = 'BlitzBasic' - aliases = ['blitzbasic', 'b3d', 'bplus'] - filenames = ['*.bb', '*.decls'] - mimetypes = ['text/x-bb'] - - bb_sktypes = r'@{1,2}|[#$%]' - bb_name = r'[a-z]\w*' - bb_var = (r'(%s)(?:([ \t]*)(%s)|([ \t]*)([.])([ \t]*)(?:(%s)))?') % \ - (bb_name, bb_sktypes, bb_name) - - flags = re.MULTILINE | re.IGNORECASE - tokens = { - 'root': [ - # Text - (r'[ \t]+', Text), - # Comments - (r";.*?\n", Comment.Single), - # Data types - ('"', String.Double, 'string'), - # Numbers - (r'[0-9]+\.[0-9]*(?!\.)', Number.Float), - (r'\.[0-9]+(?!\.)', Number.Float), - (r'[0-9]+', Number.Integer), - (r'\$[0-9a-f]+', Number.Hex), - (r'\%[10]+', Number.Bin), - # Other - (words(('Shl', 'Shr', 'Sar', 'Mod', 'Or', 'And', 'Not', - 'Abs', 'Sgn', 'Handle', 'Int', 'Float', 'Str', - 'First', 'Last', 'Before', 'After'), - prefix=r'\b', suffix=r'\b'), - Operator), - (r'([+\-*/~=<>^])', Operator), - (r'[(),:\[\]\\]', Punctuation), - (r'\.([ \t]*)(%s)' % bb_name, Name.Label), - # Identifiers - (r'\b(New)\b([ \t]+)(%s)' % (bb_name), - bygroups(Keyword.Reserved, Text, Name.Class)), - (r'\b(Gosub|Goto)\b([ \t]+)(%s)' % (bb_name), - bygroups(Keyword.Reserved, Text, Name.Label)), - (r'\b(Object)\b([ \t]*)([.])([ \t]*)(%s)\b' % (bb_name), - bygroups(Operator, Text, Punctuation, Text, Name.Class)), - (r'\b%s\b([ \t]*)(\()' % bb_var, - bygroups(Name.Function, Text, Keyword.Type, Text, Punctuation, - Text, Name.Class, Text, Punctuation)), - (r'\b(Function)\b([ \t]+)%s' % bb_var, - bygroups(Keyword.Reserved, Text, Name.Function, Text, Keyword.Type, - Text, Punctuation, Text, Name.Class)), - (r'\b(Type)([ \t]+)(%s)' % (bb_name), - bygroups(Keyword.Reserved, Text, Name.Class)), - # Keywords - (r'\b(Pi|True|False|Null)\b', Keyword.Constant), - (r'\b(Local|Global|Const|Field|Dim)\b', Keyword.Declaration), - (words(( - 'End', 'Return', 'Exit', 'Chr', 'Len', 'Asc', 'New', 'Delete', 'Insert', - 'Include', 'Function', 'Type', 'If', 'Then', 'Else', 'ElseIf', 'EndIf', - 'For', 'To', 'Next', 'Step', 'Each', 'While', 'Wend', - 'Repeat', 'Until', 'Forever', 'Select', 'Case', 'Default', - 'Goto', 'Gosub', 'Data', 'Read', 'Restore'), prefix=r'\b', suffix=r'\b'), - Keyword.Reserved), - # Final resolve (for variable names and such) - # (r'(%s)' % (bb_name), Name.Variable), - (bb_var, bygroups(Name.Variable, Text, Keyword.Type, - Text, Punctuation, Text, Name.Class)), - ], - 'string': [ - (r'""', String.Double), - (r'"C?', String.Double, '#pop'), - (r'[^"]+', String.Double), - ], - } - - -class NimrodLexer(RegexLexer): - """ - For `Nimrod `_ source code. - - .. 
versionadded:: 1.5 - """ - - name = 'Nimrod' - aliases = ['nimrod', 'nim'] - filenames = ['*.nim', '*.nimrod'] - mimetypes = ['text/x-nimrod'] - - flags = re.MULTILINE | re.IGNORECASE | re.UNICODE - - def underscorize(words): - newWords = [] - new = "" - for word in words: - for ch in word: - new += (ch + "_?") - newWords.append(new) - new = "" - return "|".join(newWords) - - keywords = [ - 'addr', 'and', 'as', 'asm', 'atomic', 'bind', 'block', 'break', - 'case', 'cast', 'const', 'continue', 'converter', 'discard', - 'distinct', 'div', 'elif', 'else', 'end', 'enum', 'except', 'finally', - 'for', 'generic', 'if', 'implies', 'in', 'yield', - 'is', 'isnot', 'iterator', 'lambda', 'let', 'macro', 'method', - 'mod', 'not', 'notin', 'object', 'of', 'or', 'out', 'proc', - 'ptr', 'raise', 'ref', 'return', 'shl', 'shr', 'template', 'try', - 'tuple', 'type', 'when', 'while', 'with', 'without', 'xor' - ] - - keywordsPseudo = [ - 'nil', 'true', 'false' - ] - - opWords = [ - 'and', 'or', 'not', 'xor', 'shl', 'shr', 'div', 'mod', 'in', - 'notin', 'is', 'isnot' - ] - - types = [ - 'int', 'int8', 'int16', 'int32', 'int64', 'float', 'float32', 'float64', - 'bool', 'char', 'range', 'array', 'seq', 'set', 'string' - ] - - tokens = { - 'root': [ - (r'##.*$', String.Doc), - (r'#.*$', Comment), - (r'\*|=|>|<|\+|-|/|@|\$|~|&|%|\!|\?|\||\\|\[|\]', Operator), - (r'\.\.|\.|,|\[\.|\.\]|{\.|\.}|\(\.|\.\)|{|}|\(|\)|:|\^|`|;', - Punctuation), - - # Strings - (r'(?:[\w]+)"', String, 'rdqs'), - (r'"""', String, 'tdqs'), - ('"', String, 'dqs'), - - # Char - ("'", String.Char, 'chars'), - - # Keywords - (r'(%s)\b' % underscorize(opWords), Operator.Word), - (r'(p_?r_?o_?c_?\s)(?![\(\[\]])', Keyword, 'funcname'), - (r'(%s)\b' % underscorize(keywords), Keyword), - (r'(%s)\b' % underscorize(['from', 'import', 'include']), - Keyword.Namespace), - (r'(v_?a_?r)\b', Keyword.Declaration), - (r'(%s)\b' % underscorize(types), Keyword.Type), - (r'(%s)\b' % underscorize(keywordsPseudo), Keyword.Pseudo), - # Identifiers - (r'\b((?![_\d])\w)(((?!_)\w)|(_(?!_)\w))*', Name), - # Numbers - (r'[0-9][0-9_]*(?=([eE.]|\'[fF](32|64)))', - Number.Float, ('float-suffix', 'float-number')), - (r'0[xX][a-f0-9][a-f0-9_]*', Number.Hex, 'int-suffix'), - (r'0[bB][01][01_]*', Number.Bin, 'int-suffix'), - (r'0o[0-7][0-7_]*', Number.Oct, 'int-suffix'), - (r'[0-9][0-9_]*', Number.Integer, 'int-suffix'), - # Whitespace - (r'\s+', Text), - (r'.+$', Error), - ], - 'chars': [ - (r'\\([\\abcefnrtvl"\']|x[a-f0-9]{2}|[0-9]{1,3})', String.Escape), - (r"'", String.Char, '#pop'), - (r".", String.Char) - ], - 'strings': [ - (r'(?\?]*?', - ) - ) - - tokens = { - 'comments': [ - (r'(?s)/\*.*?\*/', Comment.Multiline), # Multiline - (r'//.*?\n', Comment.Single), # Single line - # TODO: highlight references in fandocs - (r'\*\*.*?\n', Comment.Special), # Fandoc - (r'#.*\n', Comment.Single) # Shell-style - ], - 'literals': [ - (r'\b-?[\d_]+(ns|ms|sec|min|hr|day)', Number), # Duration - (r'\b-?[\d_]*\.[\d_]+(ns|ms|sec|min|hr|day)', Number), - # Duration with dot - (r'\b-?(\d+)?\.\d+(f|F|d|D)?', Number.Float), # Float/Decimal - (r'\b-?0x[0-9a-fA-F_]+', Number.Hex), # Hex - (r'\b-?[\d_]+', Number.Integer), # Int - (r"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char), # Char - (r'"', Punctuation, 'insideStr'), # Opening quote - (r'`', Punctuation, 'insideUri'), # Opening accent - (r'\b(true|false|null)\b', Keyword.Constant), # Bool & null - (r'(?:(\w+)(::))?(\w+)(<\|)(.*?)(\|>)', # DSL - bygroups(Name.Namespace, Punctuation, Name.Class, - Punctuation, String, Punctuation)), - 
(r'(?:(\w+)(::))?(\w+)?(#)(\w+)?', # Type/slot literal - bygroups(Name.Namespace, Punctuation, Name.Class, - Punctuation, Name.Function)), - (r'\[,\]', Literal), # Empty list - (s(r'($type)(\[,\])'), # Typed empty list - bygroups(using(this, state='inType'), Literal)), - (r'\[:\]', Literal), # Empty Map - (s(r'($type)(\[:\])'), - bygroups(using(this, state='inType'), Literal)), - ], - 'insideStr': [ - (r'\\\\', String.Escape), # Escaped backslash - (r'\\"', String.Escape), # Escaped " - (r'\\`', String.Escape), # Escaped ` - (r'\$\w+', String.Interpol), # Subst var - (r'\${.*?}', String.Interpol), # Subst expr - (r'"', Punctuation, '#pop'), # Closing quot - (r'.', String) # String content - ], - 'insideUri': [ # TODO: remove copy/paste str/uri - (r'\\\\', String.Escape), # Escaped backslash - (r'\\"', String.Escape), # Escaped " - (r'\\`', String.Escape), # Escaped ` - (r'\$\w+', String.Interpol), # Subst var - (r'\${.*?}', String.Interpol), # Subst expr - (r'`', Punctuation, '#pop'), # Closing tick - (r'.', String.Backtick) # URI content - ], - 'protectionKeywords': [ - (r'\b(public|protected|private|internal)\b', Keyword), - ], - 'typeKeywords': [ - (r'\b(abstract|final|const|native|facet|enum)\b', Keyword), - ], - 'methodKeywords': [ - (r'\b(abstract|native|once|override|static|virtual|final)\b', - Keyword), - ], - 'fieldKeywords': [ - (r'\b(abstract|const|final|native|override|static|virtual|' - r'readonly)\b', Keyword) - ], - 'otherKeywords': [ - (words(( - 'try', 'catch', 'throw', 'finally', 'for', 'if', 'else', 'while', - 'as', 'is', 'isnot', 'switch', 'case', 'default', 'continue', - 'break', 'do', 'return', 'get', 'set'), prefix=r'\b', suffix=r'\b'), - Keyword), - (r'\b(it|this|super)\b', Name.Builtin.Pseudo), - ], - 'operators': [ - (r'\+\+|\-\-|\+|\-|\*|/|\|\||&&|<=>|<=|<|>=|>|=|!|\[|\]', Operator) - ], - 'inType': [ - (r'[\[\]\|\->:\?]', Punctuation), - (s(r'$id'), Name.Class), - default('#pop'), - - ], - 'root': [ - include('comments'), - include('protectionKeywords'), - include('typeKeywords'), - include('methodKeywords'), - include('fieldKeywords'), - include('literals'), - include('otherKeywords'), - include('operators'), - (r'using\b', Keyword.Namespace, 'using'), # Using stmt - (r'@\w+', Name.Decorator, 'facet'), # Symbol - (r'(class|mixin)(\s+)(\w+)', bygroups(Keyword, Text, Name.Class), - 'inheritance'), # Inheritance list - - ### Type var := val - (s(r'($type)([ \t]+)($id)(\s*)(:=)'), - bygroups(using(this, state = 'inType'), Text, - Name.Variable, Text, Operator)), - - ### var := val - (s(r'($id)(\s*)(:=)'), - bygroups(Name.Variable, Text, Operator)), - - ### .someId( or ->someId( ### - (s(r'(\.|(?:\->))($id)(\s*)(\()'), - bygroups(Operator, Name.Function, Text, Punctuation), - 'insideParen'), - - ### .someId or ->someId - (s(r'(\.|(?:\->))($id)'), - bygroups(Operator, Name.Function)), - - ### new makeXXX ( #### - (r'(new)(\s+)(make\w*)(\s*)(\()', - bygroups(Keyword, Text, Name.Function, Text, Punctuation), - 'insideMethodDeclArgs'), - - ### Type name ( #### - (s(r'($type)([ \t]+)' # Return type and whitespace - r'($id)(\s*)(\()'), # method name + open brace - bygroups(using(this, state = 'inType'), Text, - Name.Function, Text, Punctuation), - 'insideMethodDeclArgs'), - - ### ArgType argName, ##### - (s(r'($type)(\s+)($id)(\s*)(,)'), - bygroups(using(this, state='inType'), Text, Name.Variable, - Text, Punctuation)), - - #### ArgType argName) #### - ## Covered in 'insideParen' state - - ### ArgType argName -> ArgType| ### - 
(s(r'($type)(\s+)($id)(\s*)(\->)(\s*)($type)(\|)'), - bygroups(using(this, state='inType'), Text, Name.Variable, - Text, Punctuation, Text, using(this, state = 'inType'), - Punctuation)), - - ### ArgType argName| ### - (s(r'($type)(\s+)($id)(\s*)(\|)'), - bygroups(using(this, state='inType'), Text, Name.Variable, - Text, Punctuation)), - - ### Type var - (s(r'($type)([ \t]+)($id)'), - bygroups(using(this, state='inType'), Text, - Name.Variable)), - - (r'\(', Punctuation, 'insideParen'), - (r'\{', Punctuation, 'insideBrace'), - (r'.', Text) - ], - 'insideParen': [ - (r'\)', Punctuation, '#pop'), - include('root'), - ], - 'insideMethodDeclArgs': [ - (r'\)', Punctuation, '#pop'), - (s(r'($type)(\s+)($id)(\s*)(\))'), - bygroups(using(this, state='inType'), Text, Name.Variable, - Text, Punctuation), '#pop'), - include('root'), - ], - 'insideBrace': [ - (r'\}', Punctuation, '#pop'), - include('root'), - ], - 'inheritance': [ - (r'\s+', Text), # Whitespace - (r':|,', Punctuation), - (r'(?:(\w+)(::))?(\w+)', - bygroups(Name.Namespace, Punctuation, Name.Class)), - (r'{', Punctuation, '#pop') - ], - 'using': [ - (r'[ \t]+', Text), # consume whitespaces - (r'(\[)(\w+)(\])', - bygroups(Punctuation, Comment.Special, Punctuation)), # ffi - (r'(\")?([\w\.]+)(\")?', - bygroups(Punctuation, Name.Namespace, Punctuation)), # podname - (r'::', Punctuation, 'usingClass'), - default('#pop') - ], - 'usingClass': [ - (r'[ \t]+', Text), # consume whitespaces - (r'(as)(\s+)(\w+)', - bygroups(Keyword.Declaration, Text, Name.Class), '#pop:2'), - (r'[\w\$]+', Name.Class), - default('#pop:2') # jump out to root state - ], - 'facet': [ - (r'\s+', Text), - (r'{', Punctuation, 'facetFields'), - default('#pop') - ], - 'facetFields': [ - include('comments'), - include('literals'), - include('operators'), - (r'\s+', Text), - (r'(\s*)(\w+)(\s*)(=)', bygroups(Text, Name, Text, Operator)), - (r'}', Punctuation, '#pop'), - (r'.', Text) - ], - } - - -class RustLexer(RegexLexer): - """ - Lexer for the Rust programming language (version 0.9). - - .. 
versionadded:: 1.6 - """ - name = 'Rust' - filenames = ['*.rs'] - aliases = ['rust'] - mimetypes = ['text/x-rustsrc'] - - tokens = { - 'root': [ - # Whitespace and Comments - (r'\n', Text), - (r'\s+', Text), - (r'//[/!](.*?)\n', Comment.Doc), - (r'//(.*?)\n', Comment.Single), - (r'/\*', Comment.Multiline, 'comment'), - - # Keywords - (words(( - 'as', 'box', 'break', 'continue', 'do', 'else', 'enum', 'extern', - 'fn', 'for', 'if', 'impl', 'in', 'loop', 'match', 'mut', 'priv', - 'proc', 'pub', 'ref', 'return', 'static', '\'static', 'struct', - 'trait', 'true', 'type', 'unsafe', 'while'), suffix=r'\b'), - Keyword), - (words(('alignof', 'be', 'const', 'offsetof', 'pure', 'sizeof', - 'typeof', 'once', 'unsized', 'yield'), suffix=r'\b'), - Keyword.Reserved), - (r'(mod|use)\b', Keyword.Namespace), - (r'(true|false)\b', Keyword.Constant), - (r'let\b', Keyword.Declaration), - (words(('u8', 'u16', 'u32', 'u64', 'i8', 'i16', 'i32', 'i64', 'uint', - 'int', 'f32', 'f64', 'str', 'bool'), suffix=r'\b'), - Keyword.Type), - (r'self\b', Name.Builtin.Pseudo), - # Prelude - (words(( - 'Freeze', 'Pod', 'Send', 'Sized', 'Add', 'Sub', 'Mul', 'Div', 'Rem', 'Neg', 'Not', 'BitAnd', - 'BitOr', 'BitXor', 'Drop', 'Shl', 'Shr', 'Index', 'Option', 'Some', 'None', 'Result', - 'Ok', 'Err', 'from_str', 'range', 'print', 'println', 'Any', 'AnyOwnExt', 'AnyRefExt', - 'AnyMutRefExt', 'Ascii', 'AsciiCast', 'OnwedAsciiCast', 'AsciiStr', - 'IntoBytes', 'Bool', 'ToCStr', 'Char', 'Clone', 'DeepClone', 'Eq', 'ApproxEq', - 'Ord', 'TotalEq', 'Ordering', 'Less', 'Equal', 'Greater', 'Equiv', 'Container', - 'Mutable', 'Map', 'MutableMap', 'Set', 'MutableSet', 'Default', 'FromStr', - 'Hash', 'FromIterator', 'Extendable', 'Iterator', 'DoubleEndedIterator', - 'RandomAccessIterator', 'CloneableIterator', 'OrdIterator', - 'MutableDoubleEndedIterator', 'ExactSize', 'Times', 'Algebraic', - 'Trigonometric', 'Exponential', 'Hyperbolic', 'Bitwise', 'BitCount', - 'Bounded', 'Integer', 'Fractional', 'Real', 'RealExt', 'Num', 'NumCast', - 'CheckedAdd', 'CheckedSub', 'CheckedMul', 'Orderable', 'Signed', - 'Unsigned', 'Round', 'Primitive', 'Int', 'Float', 'ToStrRadix', - 'ToPrimitive', 'FromPrimitive', 'GenericPath', 'Path', 'PosixPath', - 'WindowsPath', 'RawPtr', 'Buffer', 'Writer', 'Reader', 'Seek', - 'SendStr', 'SendStrOwned', 'SendStrStatic', 'IntoSendStr', 'Str', - 'StrVector', 'StrSlice', 'OwnedStr', 'IterBytes', 'ToStr', 'IntoStr', - 'CopyableTuple', 'ImmutableTuple', 'ImmutableEqVector', 'ImmutableTotalOrdVector', - 'ImmutableCopyableVector', 'OwnedVector', 'OwnedCopyableVector', - 'OwnedEqVector', 'MutableVector', 'MutableTotalOrdVector', - 'Vector', 'VectorVector', 'CopyableVector', 'ImmutableVector', - 'Port', 'Chan', 'SharedChan', 'spawn', 'drop'), suffix=r'\b'), - Name.Builtin), - (r'(ImmutableTuple\d+|Tuple\d+)\b', Name.Builtin), - # Borrowed pointer - (r'(&)(\'[A-Za-z_]\w*)?', bygroups(Operator, Name)), - # Labels - (r'\'[A-Za-z_]\w*:', Name.Label), - # Character Literal - (r"""'(\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}""" - r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|.)'""", - String.Char), - # Lifetime - (r"""'[a-zA-Z_]\w*""", Name.Label), - # Binary Literal - (r'0b[01_]+', Number.Bin, 'number_lit'), - # Octal Literal - (r'0o[0-7_]+', Number.Oct, 'number_lit'), - # Hexadecimal Literal - (r'0[xX][0-9a-fA-F_]+', Number.Hex, 'number_lit'), - # Decimal Literal - (r'[0-9][0-9_]*(\.[0-9_]+[eE][+\-]?[0-9_]+|' - r'\.[0-9_]*|[eE][+\-]?[0-9_]+)', Number.Float, 'number_lit'), - (r'[0-9][0-9_]*', Number.Integer, 'number_lit'), - # String Literal - 
(r'"', String, 'string'), - (r'r(#*)".*?"\1', String.Raw), - - # Operators and Punctuation - (r'[{}()\[\],.;]', Punctuation), - (r'[+\-*/%&|<>^!~@=:?]', Operator), - - # Identifier - (r'[a-zA-Z_]\w*', Name), - - # Attributes - (r'#\[', Comment.Preproc, 'attribute['), - # Macros - (r'([A-Za-z_]\w*)!\s*([A-Za-z_]\w*)?\s*\{', - bygroups(Comment.Preproc, Name), 'macro{'), - (r'([A-Za-z_]\w*)!\s*([A-Za-z_]\w*)?\(', - bygroups(Comment.Preproc, Name), 'macro('), - ], - 'comment': [ - (r'[^*/]+', Comment.Multiline), - (r'/\*', Comment.Multiline, '#push'), - (r'\*/', Comment.Multiline, '#pop'), - (r'[*/]', Comment.Multiline), - ], - 'number_lit': [ - (r'(([ui](8|16|32|64)?)|(f(32|64)?))?', Keyword, '#pop'), - ], - 'string': [ - (r'"', String, '#pop'), - (r"""\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}""" - r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}""", String.Escape), - (r'[^\\"]+', String), - (r'\\', String), - ], - 'macro{': [ - (r'\{', Operator, '#push'), - (r'\}', Operator, '#pop'), - ], - 'macro(': [ - (r'\(', Operator, '#push'), - (r'\)', Operator, '#pop'), - ], - 'attribute_common': [ - (r'"', String, 'string'), - (r'\[', Comment.Preproc, 'attribute['), - (r'\(', Comment.Preproc, 'attribute('), - ], - 'attribute[': [ - include('attribute_common'), - (r'\];?', Comment.Preproc, '#pop'), - (r'[^"\]]+', Comment.Preproc), - ], - 'attribute(': [ - include('attribute_common'), - (r'\);?', Comment.Preproc, '#pop'), - (r'[^"\)]+', Comment.Preproc), - ], - } - - -class CudaLexer(CLexer): - """ - For NVIDIA `CUDA™ `_ - source. - - .. versionadded:: 1.6 - """ - name = 'CUDA' - filenames = ['*.cu', '*.cuh'] - aliases = ['cuda', 'cu'] - mimetypes = ['text/x-cuda'] - - function_qualifiers = set(('__device__', '__global__', '__host__', - '__noinline__', '__forceinline__')) - variable_qualifiers = set(('__device__', '__constant__', '__shared__', - '__restrict__')) - vector_types = set(('char1', 'uchar1', 'char2', 'uchar2', 'char3', 'uchar3', - 'char4', 'uchar4', 'short1', 'ushort1', 'short2', 'ushort2', - 'short3', 'ushort3', 'short4', 'ushort4', 'int1', 'uint1', - 'int2', 'uint2', 'int3', 'uint3', 'int4', 'uint4', 'long1', - 'ulong1', 'long2', 'ulong2', 'long3', 'ulong3', 'long4', - 'ulong4', 'longlong1', 'ulonglong1', 'longlong2', - 'ulonglong2', 'float1', 'float2', 'float3', 'float4', - 'double1', 'double2', 'dim3')) - variables = set(('gridDim', 'blockIdx', 'blockDim', 'threadIdx', 'warpSize')) - functions = set(('__threadfence_block', '__threadfence', '__threadfence_system', - '__syncthreads', '__syncthreads_count', '__syncthreads_and', - '__syncthreads_or')) - execution_confs = set(('<<<', '>>>')) - - def get_tokens_unprocessed(self, text): - for index, token, value in CLexer.get_tokens_unprocessed(self, text): - if token is Name: - if value in self.variable_qualifiers: - token = Keyword.Type - elif value in self.vector_types: - token = Keyword.Type - elif value in self.variables: - token = Name.Builtin - elif value in self.execution_confs: - token = Keyword.Pseudo - elif value in self.function_qualifiers: - token = Keyword.Reserved - elif value in self.functions: - token = Name.Function - yield index, token, value - - -class MonkeyLexer(RegexLexer): - """ - For - `Monkey `_ - source code. - - .. 
versionadded:: 1.6 - """ - - name = 'Monkey' - aliases = ['monkey'] - filenames = ['*.monkey'] - mimetypes = ['text/x-monkey'] - - name_variable = r'[a-z_]\w*' - name_function = r'[A-Z]\w*' - name_constant = r'[A-Z_][A-Z0-9_]*' - name_class = r'[A-Z]\w*' - name_module = r'[a-z0-9_]*' - - keyword_type = r'(?:Int|Float|String|Bool|Object|Array|Void)' - # ? == Bool // % == Int // # == Float // $ == String - keyword_type_special = r'[?%#$]' - - flags = re.MULTILINE - - tokens = { - 'root': [ - # Text - (r'\s+', Text), - # Comments - (r"'.*", Comment), - (r'(?i)^#rem\b', Comment.Multiline, 'comment'), - # preprocessor directives - (r'(?i)^(?:#If|#ElseIf|#Else|#EndIf|#End|#Print|#Error)\b', Comment.Preproc), - # preprocessor variable (any line starting with '#' that is not a directive) - (r'^#', Comment.Preproc, 'variables'), - # String - ('"', String.Double, 'string'), - # Numbers - (r'[0-9]+\.[0-9]*(?!\.)', Number.Float), - (r'\.[0-9]+(?!\.)', Number.Float), - (r'[0-9]+', Number.Integer), - (r'\$[0-9a-fA-Z]+', Number.Hex), - (r'\%[10]+', Number.Bin), - # Native data types - (r'\b%s\b' % keyword_type, Keyword.Type), - # Exception handling - (r'(?i)\b(?:Try|Catch|Throw)\b', Keyword.Reserved), - (r'Throwable', Name.Exception), - # Builtins - (r'(?i)\b(?:Null|True|False)\b', Name.Builtin), - (r'(?i)\b(?:Self|Super)\b', Name.Builtin.Pseudo), - (r'\b(?:HOST|LANG|TARGET|CONFIG)\b', Name.Constant), - # Keywords - (r'(?i)^(Import)(\s+)(.*)(\n)', - bygroups(Keyword.Namespace, Text, Name.Namespace, Text)), - (r'(?i)^Strict\b.*\n', Keyword.Reserved), - (r'(?i)(Const|Local|Global|Field)(\s+)', - bygroups(Keyword.Declaration, Text), 'variables'), - (r'(?i)(New|Class|Interface|Extends|Implements)(\s+)', - bygroups(Keyword.Reserved, Text), 'classname'), - (r'(?i)(Function|Method)(\s+)', - bygroups(Keyword.Reserved, Text), 'funcname'), - (r'(?i)(?:End|Return|Public|Private|Extern|Property|' - r'Final|Abstract)\b', Keyword.Reserved), - # Flow Control stuff - (r'(?i)(?:If|Then|Else|ElseIf|EndIf|' - r'Select|Case|Default|' - r'While|Wend|' - r'Repeat|Until|Forever|' - r'For|To|Until|Step|EachIn|Next|' - r'Exit|Continue)\s+', Keyword.Reserved), - # not used yet - (r'(?i)\b(?:Module|Inline)\b', Keyword.Reserved), - # Array - (r'[\[\]]', Punctuation), - # Other - (r'<=|>=|<>|\*=|/=|\+=|-=|&=|~=|\|=|[-&*/^+=<>|~]', Operator), - (r'(?i)(?:Not|Mod|Shl|Shr|And|Or)', Operator.Word), - (r'[\(\){}!#,.:]', Punctuation), - # catch the rest - (r'%s\b' % name_constant, Name.Constant), - (r'%s\b' % name_function, Name.Function), - (r'%s\b' % name_variable, Name.Variable), - ], - 'funcname': [ - (r'(?i)%s\b' % name_function, Name.Function), - (r':', Punctuation, 'classname'), - (r'\s+', Text), - (r'\(', Punctuation, 'variables'), - (r'\)', Punctuation, '#pop') - ], - 'classname': [ - (r'%s\.' 
% name_module, Name.Namespace), - (r'%s\b' % keyword_type, Keyword.Type), - (r'%s\b' % name_class, Name.Class), - # array (of given size) - (r'(\[)(\s*)(\d*)(\s*)(\])', - bygroups(Punctuation, Text, Number.Integer, Text, Punctuation)), - # generics - (r'\s+(?!<)', Text, '#pop'), - (r'<', Punctuation, '#push'), - (r'>', Punctuation, '#pop'), - (r'\n', Text, '#pop'), - default('#pop') - ], - 'variables': [ - (r'%s\b' % name_constant, Name.Constant), - (r'%s\b' % name_variable, Name.Variable), - (r'%s' % keyword_type_special, Keyword.Type), - (r'\s+', Text), - (r':', Punctuation, 'classname'), - (r',', Punctuation, '#push'), - default('#pop') - ], - 'string': [ - (r'[^"~]+', String.Double), - (r'~q|~n|~r|~t|~z|~~', String.Escape), - (r'"', String.Double, '#pop'), - ], - 'comment': [ - (r'(?i)^#rem.*?', Comment.Multiline, "#push"), - (r'(?i)^#end.*?', Comment.Multiline, "#pop"), - (r'\n', Comment.Multiline), - (r'.+', Comment.Multiline), - ], - } - - -class CobolLexer(RegexLexer): - """ - Lexer for OpenCOBOL code. - - .. versionadded:: 1.6 - """ - name = 'COBOL' - aliases = ['cobol'] - filenames = ['*.cob', '*.COB', '*.cpy', '*.CPY'] - mimetypes = ['text/x-cobol'] - flags = re.IGNORECASE | re.MULTILINE - - # Data Types: by PICTURE and USAGE - # Operators: **, *, +, -, /, <, >, <=, >=, =, <> - # Logical (?): NOT, AND, OR - - # Reserved words: - # http://opencobol.add1tocobol.com/#reserved-words - # Intrinsics: - # http://opencobol.add1tocobol.com/#does-opencobol-implement-any-intrinsic-functions - - tokens = { - 'root': [ - include('comment'), - include('strings'), - include('core'), - include('nums'), - (r'[a-z0-9]([_a-z0-9\-]*[a-z0-9]+)?', Name.Variable), - # (r'[\s]+', Text), - (r'[ \t]+', Text), - ], - 'comment': [ - (r'(^.{6}[*/].*\n|^.{6}|\*>.*\n)', Comment), - ], - 'core': [ - # Figurative constants - (r'(^|(?<=[^0-9a-z_\-]))(ALL\s+)?' 
- r'((ZEROES)|(HIGH-VALUE|LOW-VALUE|QUOTE|SPACE|ZERO)(S)?)' - r'\s*($|(?=[^0-9a-z_\-]))', - Name.Constant), - - # Reserved words STATEMENTS and other bolds - (r'(^|(?<=[^0-9a-z_\-]))' - r'(ACCEPT|ADD|ALLOCATE|CALL|CANCEL|CLOSE|COMPUTE|' - r'CONFIGURATION|CONTINUE|' - r'DATA|DELETE|DISPLAY|DIVIDE|DIVISION|ELSE|END|END-ACCEPT|' - r'END-ADD|END-CALL|END-COMPUTE|END-DELETE|END-DISPLAY|' - r'END-DIVIDE|END-EVALUATE|END-IF|END-MULTIPLY|END-OF-PAGE|' - r'END-PERFORM|END-READ|END-RETURN|END-REWRITE|END-SEARCH|' - r'END-START|END-STRING|END-SUBTRACT|END-UNSTRING|END-WRITE|' - r'ENVIRONMENT|EVALUATE|EXIT|FD|FILE|FILE-CONTROL|FOREVER|' - r'FREE|GENERATE|GO|GOBACK|' - r'IDENTIFICATION|IF|INITIALIZE|' - r'INITIATE|INPUT-OUTPUT|INSPECT|INVOKE|I-O-CONTROL|LINKAGE|' - r'LOCAL-STORAGE|MERGE|MOVE|MULTIPLY|OPEN|' - r'PERFORM|PROCEDURE|PROGRAM-ID|RAISE|READ|RELEASE|RESUME|' - r'RETURN|REWRITE|SCREEN|' - r'SD|SEARCH|SECTION|SET|SORT|START|STOP|STRING|SUBTRACT|' - r'SUPPRESS|TERMINATE|THEN|UNLOCK|UNSTRING|USE|VALIDATE|' - r'WORKING-STORAGE|WRITE)' - r'\s*($|(?=[^0-9a-z_\-]))', Keyword.Reserved), - - # Reserved words - (r'(^|(?<=[^0-9a-z_\-]))' - r'(ACCESS|ADDRESS|ADVANCING|AFTER|ALL|' - r'ALPHABET|ALPHABETIC|ALPHABETIC-LOWER|ALPHABETIC-UPPER|' - r'ALPHANUMERIC|ALPHANUMERIC-EDITED|ALSO|ALTER|ALTERNATE' - r'ANY|ARE|AREA|AREAS|ARGUMENT-NUMBER|ARGUMENT-VALUE|AS|' - r'ASCENDING|ASSIGN|AT|AUTO|AUTO-SKIP|AUTOMATIC|AUTOTERMINATE|' - r'BACKGROUND-COLOR|BASED|BEEP|BEFORE|BELL|' - r'BLANK|' - r'BLINK|BLOCK|BOTTOM|BY|BYTE-LENGTH|CHAINING|' - r'CHARACTER|CHARACTERS|CLASS|CODE|CODE-SET|COL|COLLATING|' - r'COLS|COLUMN|COLUMNS|COMMA|COMMAND-LINE|COMMIT|COMMON|' - r'CONSTANT|CONTAINS|CONTENT|CONTROL|' - r'CONTROLS|CONVERTING|COPY|CORR|CORRESPONDING|COUNT|CRT|' - r'CURRENCY|CURSOR|CYCLE|DATE|DAY|DAY-OF-WEEK|DE|DEBUGGING|' - r'DECIMAL-POINT|DECLARATIVES|DEFAULT|DELIMITED|' - r'DELIMITER|DEPENDING|DESCENDING|DETAIL|DISK|' - r'DOWN|DUPLICATES|DYNAMIC|EBCDIC|' - r'ENTRY|ENVIRONMENT-NAME|ENVIRONMENT-VALUE|EOL|EOP|' - r'EOS|ERASE|ERROR|ESCAPE|EXCEPTION|' - r'EXCLUSIVE|EXTEND|EXTERNAL|' - r'FILE-ID|FILLER|FINAL|FIRST|FIXED|FLOAT-LONG|FLOAT-SHORT|' - r'FOOTING|FOR|FOREGROUND-COLOR|FORMAT|FROM|FULL|FUNCTION|' - r'FUNCTION-ID|GIVING|GLOBAL|GROUP|' - r'HEADING|HIGHLIGHT|I-O|ID|' - r'IGNORE|IGNORING|IN|INDEX|INDEXED|INDICATE|' - r'INITIAL|INITIALIZED|INPUT|' - r'INTO|INTRINSIC|INVALID|IS|JUST|JUSTIFIED|KEY|LABEL|' - r'LAST|LEADING|LEFT|LENGTH|LIMIT|LIMITS|LINAGE|' - r'LINAGE-COUNTER|LINE|LINES|LOCALE|LOCK|' - r'LOWLIGHT|MANUAL|MEMORY|MINUS|MODE|' - r'MULTIPLE|NATIONAL|NATIONAL-EDITED|NATIVE|' - r'NEGATIVE|NEXT|NO|NULL|NULLS|NUMBER|NUMBERS|NUMERIC|' - r'NUMERIC-EDITED|OBJECT-COMPUTER|OCCURS|OF|OFF|OMITTED|ON|ONLY|' - r'OPTIONAL|ORDER|ORGANIZATION|OTHER|OUTPUT|OVERFLOW|' - r'OVERLINE|PACKED-DECIMAL|PADDING|PAGE|PARAGRAPH|' - r'PLUS|POINTER|POSITION|POSITIVE|PRESENT|PREVIOUS|' - r'PRINTER|PRINTING|PROCEDURE-POINTER|PROCEDURES|' - r'PROCEED|PROGRAM|PROGRAM-POINTER|PROMPT|QUOTE|' - r'QUOTES|RANDOM|RD|RECORD|RECORDING|RECORDS|RECURSIVE|' - r'REDEFINES|REEL|REFERENCE|RELATIVE|REMAINDER|REMOVAL|' - r'RENAMES|REPLACING|REPORT|REPORTING|REPORTS|REPOSITORY|' - r'REQUIRED|RESERVE|RETURNING|REVERSE-VIDEO|REWIND|' - r'RIGHT|ROLLBACK|ROUNDED|RUN|SAME|SCROLL|' - r'SECURE|SEGMENT-LIMIT|SELECT|SENTENCE|SEPARATE|' - r'SEQUENCE|SEQUENTIAL|SHARING|SIGN|SIGNED|SIGNED-INT|' - r'SIGNED-LONG|SIGNED-SHORT|SIZE|SORT-MERGE|SOURCE|' - r'SOURCE-COMPUTER|SPECIAL-NAMES|STANDARD|' - r'STANDARD-1|STANDARD-2|STATUS|SUM|' - r'SYMBOLIC|SYNC|SYNCHRONIZED|TALLYING|TAPE|' - 
r'TEST|THROUGH|THRU|TIME|TIMES|TO|TOP|TRAILING|' - r'TRANSFORM|TYPE|UNDERLINE|UNIT|UNSIGNED|' - r'UNSIGNED-INT|UNSIGNED-LONG|UNSIGNED-SHORT|UNTIL|UP|' - r'UPDATE|UPON|USAGE|USING|VALUE|VALUES|VARYING|WAIT|WHEN|' - r'WITH|WORDS|YYYYDDD|YYYYMMDD)' - r'\s*($|(?=[^0-9a-z_\-]))', Keyword.Pseudo), - - # inactive reserved words - (r'(^|(?<=[^0-9a-z_\-]))' - r'(ACTIVE-CLASS|ALIGNED|ANYCASE|ARITHMETIC|ATTRIBUTE|B-AND|' - r'B-NOT|B-OR|B-XOR|BIT|BOOLEAN|CD|CENTER|CF|CH|CHAIN|CLASS-ID|' - r'CLASSIFICATION|COMMUNICATION|CONDITION|DATA-POINTER|' - r'DESTINATION|DISABLE|EC|EGI|EMI|ENABLE|END-RECEIVE|' - r'ENTRY-CONVENTION|EO|ESI|EXCEPTION-OBJECT|EXPANDS|FACTORY|' - r'FLOAT-BINARY-16|FLOAT-BINARY-34|FLOAT-BINARY-7|' - r'FLOAT-DECIMAL-16|FLOAT-DECIMAL-34|FLOAT-EXTENDED|FORMAT|' - r'FUNCTION-POINTER|GET|GROUP-USAGE|IMPLEMENTS|INFINITY|' - r'INHERITS|INTERFACE|INTERFACE-ID|INVOKE|LC_ALL|LC_COLLATE|' - r'LC_CTYPE|LC_MESSAGES|LC_MONETARY|LC_NUMERIC|LC_TIME|' - r'LINE-COUNTER|MESSAGE|METHOD|METHOD-ID|NESTED|NONE|NORMAL|' - r'OBJECT|OBJECT-REFERENCE|OPTIONS|OVERRIDE|PAGE-COUNTER|PF|PH|' - r'PROPERTY|PROTOTYPE|PURGE|QUEUE|RAISE|RAISING|RECEIVE|' - r'RELATION|REPLACE|REPRESENTS-NOT-A-NUMBER|RESET|RESUME|RETRY|' - r'RF|RH|SECONDS|SEGMENT|SELF|SEND|SOURCES|STATEMENT|STEP|' - r'STRONG|SUB-QUEUE-1|SUB-QUEUE-2|SUB-QUEUE-3|SUPER|SYMBOL|' - r'SYSTEM-DEFAULT|TABLE|TERMINAL|TEXT|TYPEDEF|UCS-4|UNIVERSAL|' - r'USER-DEFAULT|UTF-16|UTF-8|VAL-STATUS|VALID|VALIDATE|' - r'VALIDATE-STATUS)\s*($|(?=[^0-9a-z_\-]))', Error), - - # Data Types - (r'(^|(?<=[^0-9a-z_\-]))' - r'(PIC\s+.+?(?=(\s|\.\s))|PICTURE\s+.+?(?=(\s|\.\s))|' - r'(COMPUTATIONAL)(-[1-5X])?|(COMP)(-[1-5X])?|' - r'BINARY-C-LONG|' - r'BINARY-CHAR|BINARY-DOUBLE|BINARY-LONG|BINARY-SHORT|' - r'BINARY)\s*($|(?=[^0-9a-z_\-]))', Keyword.Type), - - # Operators - (r'(\*\*|\*|\+|-|/|<=|>=|<|>|==|/=|=)', Operator), - - # (r'(::)', Keyword.Declaration), - - (r'([(),;:&%.])', Punctuation), - - # Intrinsics - (r'(^|(?<=[^0-9a-z_\-]))(ABS|ACOS|ANNUITY|ASIN|ATAN|BYTE-LENGTH|' - r'CHAR|COMBINED-DATETIME|CONCATENATE|COS|CURRENT-DATE|' - r'DATE-OF-INTEGER|DATE-TO-YYYYMMDD|DAY-OF-INTEGER|DAY-TO-YYYYDDD|' - r'EXCEPTION-(?:FILE|LOCATION|STATEMENT|STATUS)|EXP10|EXP|E|' - r'FACTORIAL|FRACTION-PART|INTEGER-OF-(?:DATE|DAY|PART)|INTEGER|' - r'LENGTH|LOCALE-(?:DATE|TIME(?:-FROM-SECONDS)?)|LOG(?:10)?|' - r'LOWER-CASE|MAX|MEAN|MEDIAN|MIDRANGE|MIN|MOD|NUMVAL(?:-C)?|' - r'ORD(?:-MAX|-MIN)?|PI|PRESENT-VALUE|RANDOM|RANGE|REM|REVERSE|' - r'SECONDS-FROM-FORMATTED-TIME|SECONDS-PAST-MIDNIGHT|SIGN|SIN|SQRT|' - r'STANDARD-DEVIATION|STORED-CHAR-LENGTH|SUBSTITUTE(?:-CASE)?|' - r'SUM|TAN|TEST-DATE-YYYYMMDD|TEST-DAY-YYYYDDD|TRIM|' - r'UPPER-CASE|VARIANCE|WHEN-COMPILED|YEAR-TO-YYYY)\s*' - r'($|(?=[^0-9a-z_\-]))', Name.Function), - - # Booleans - (r'(^|(?<=[^0-9a-z_\-]))(true|false)\s*($|(?=[^0-9a-z_\-]))', Name.Builtin), - # Comparing Operators - (r'(^|(?<=[^0-9a-z_\-]))(equal|equals|ne|lt|le|gt|ge|' - r'greater|less|than|not|and|or)\s*($|(?=[^0-9a-z_\-]))', Operator.Word), - ], - - # \"[^\"\n]*\"|\'[^\'\n]*\' - 'strings': [ - # apparently strings can be delimited by EOL if they are continued - # in the next line - (r'"[^"\n]*("|\n)', String.Double), - (r"'[^'\n]*('|\n)", String.Single), - ], - - 'nums': [ - (r'\d+(\s*|\.$|$)', Number.Integer), - (r'[+-]?\d*\.\d+([eE][-+]?\d+)?', Number.Float), - (r'[+-]?\d+\.\d*([eE][-+]?\d+)?', Number.Float), - ], - } - - -class CobolFreeformatLexer(CobolLexer): - """ - Lexer for Free format OpenCOBOL code. - - .. 
versionadded:: 1.6 - """ - name = 'COBOLFree' - aliases = ['cobolfree'] - filenames = ['*.cbl', '*.CBL'] - mimetypes = [] - flags = re.IGNORECASE | re.MULTILINE - - tokens = { - 'comment': [ - (r'(\*>.*\n|^\w*\*.*$)', Comment), - ], - } - - -class LogosLexer(ObjectiveCppLexer): - """ - For Logos + Objective-C source code with preprocessor directives. - - .. versionadded:: 1.6 - """ - - name = 'Logos' - aliases = ['logos'] - filenames = ['*.x', '*.xi', '*.xm', '*.xmi'] - mimetypes = ['text/x-logos'] - priority = 0.25 - - tokens = { - 'statements': [ - (r'(%orig|%log)\b', Keyword), - (r'(%c)\b(\()(\s*)([a-zA-Z$_][\w$]*)(\s*)(\))', - bygroups(Keyword, Punctuation, Text, Name.Class, Text, Punctuation)), - (r'(%init)\b(\()', - bygroups(Keyword, Punctuation), 'logos_init_directive'), - (r'(%init)(?=\s*;)', bygroups(Keyword)), - (r'(%hook|%group)(\s+)([a-zA-Z$_][\w$]+)', - bygroups(Keyword, Text, Name.Class), '#pop'), - (r'(%subclass)(\s+)', bygroups(Keyword, Text), - ('#pop', 'logos_classname')), - inherit, - ], - 'logos_init_directive': [ - ('\s+', Text), - (',', Punctuation, ('logos_init_directive', '#pop')), - ('([a-zA-Z$_][\w$]*)(\s*)(=)(\s*)([^);]*)', - bygroups(Name.Class, Text, Punctuation, Text, Text)), - ('([a-zA-Z$_][\w$]*)', Name.Class), - ('\)', Punctuation, '#pop'), - ], - 'logos_classname': [ - ('([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?', - bygroups(Name.Class, Text, Name.Class), '#pop'), - ('([a-zA-Z$_][\w$]*)', Name.Class, '#pop') - ], - 'root': [ - (r'(%subclass)(\s+)', bygroups(Keyword, Text), - 'logos_classname'), - (r'(%hook|%group)(\s+)([a-zA-Z$_][\w$]+)', - bygroups(Keyword, Text, Name.Class)), - (r'(%config)(\s*\(\s*)(\w+)(\s*=\s*)(.*?)(\s*\)\s*)', - bygroups(Keyword, Text, Name.Variable, Text, String, Text)), - (r'(%ctor)(\s*)({)', bygroups(Keyword, Text, Punctuation), - 'function'), - (r'(%new)(\s*)(\()(\s*.*?\s*)(\))', - bygroups(Keyword, Text, Keyword, String, Keyword)), - (r'(\s*)(%end)(\s*)', bygroups(Text, Keyword, Text)), - inherit, - ], - } - - _logos_keywords = re.compile(r'%(?:hook|ctor|init|c\()') - - def analyse_text(text): - if LogosLexer._logos_keywords.search(text): - return 1.0 - return 0 - - -class ChapelLexer(RegexLexer): - """ - For `Chapel `_ source. - - .. 
versionadded:: 2.0 - """ - name = 'Chapel' - filenames = ['*.chpl'] - aliases = ['chapel', 'chpl'] - # mimetypes = ['text/x-chapel'] - - tokens = { - 'root': [ - (r'\n', Text), - (r'\s+', Text), - (r'\\\n', Text), - - (r'//(.*?)\n', Comment.Single), - (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline), - - (r'(config|const|in|inout|out|param|ref|type|var)\b', - Keyword.Declaration), - (r'(false|nil|true)\b', Keyword.Constant), - (r'(bool|complex|imag|int|opaque|range|real|string|uint)\b', - Keyword.Type), - (words(( - 'align', 'atomic', 'begin', 'break', 'by', 'cobegin', 'coforall', - 'continue', 'delete', 'dmapped', 'do', 'domain', 'else', 'enum', - 'export', 'extern', 'for', 'forall', 'if', 'index', 'inline', - 'iter', 'label', 'lambda', 'let', 'local', 'new', 'noinit', 'on', - 'otherwise', 'pragma', 'reduce', 'return', 'scan', 'select', - 'serial', 'single', 'sparse', 'subdomain', 'sync', 'then', 'use', - 'when', 'where', 'while', 'with', 'yield', 'zip'), suffix=r'\b'), - Keyword), - (r'(proc)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'procname'), - (r'(class|module|record|union)(\s+)', bygroups(Keyword, Text), - 'classname'), - - # imaginary integers - (r'\d+i', Number), - (r'\d+\.\d*([Ee][-+]\d+)?i', Number), - (r'\.\d+([Ee][-+]\d+)?i', Number), - (r'\d+[Ee][-+]\d+i', Number), - - # reals cannot end with a period due to lexical ambiguity with - # .. operator. See reference for rationale. - (r'(\d*\.\d+)([eE][+-]?[0-9]+)?i?', Number.Float), - (r'\d+[eE][+-]?[0-9]+i?', Number.Float), - - # integer literals - # -- binary - (r'0[bB][0-1]+', Number.Bin), - # -- hex - (r'0[xX][0-9a-fA-F]+', Number.Hex), - # -- octal - (r'0[oO][0-7]+', Number.Oct), - # -- decimal - (r'[0-9]+', Number.Integer), - - # strings - (r'["\'](\\\\|\\"|[^"\'])*["\']', String), - - # tokens - (r'(=|\+=|-=|\*=|/=|\*\*=|%=|&=|\|=|\^=|&&=|\|\|=|<<=|>>=|' - r'<=>|<~>|\.\.|by|#|\.\.\.|' - r'&&|\|\||!|&|\||\^|~|<<|>>|' - r'==|!=|<=|>=|<|>|' - r'[+\-*/%]|\*\*)', Operator), - (r'[:;,.?()\[\]{}]', Punctuation), - - # identifiers - (r'[a-zA-Z_][\w$]*', Name.Other), - ], - 'classname': [ - (r'[a-zA-Z_][\w$]*', Name.Class, '#pop'), - ], - 'procname': [ - (r'[a-zA-Z_][\w$]*', Name.Function, '#pop'), - ], - } - - -class EiffelLexer(RegexLexer): - """ - For `Eiffel `_ source code. - - .. versionadded:: 2.0 - """ - name = 'Eiffel' - aliases = ['eiffel'] - filenames = ['*.e'] - mimetypes = ['text/x-eiffel'] - - tokens = { - 'root': [ - (r'[^\S\n]+', Text), - (r'--.*?\n', Comment.Single), - (r'[^\S\n]+', Text), - # Please note that keyword and operator are case insensitive. 
- (r'(?i)(true|false|void|current|result|precursor)\b', Keyword.Constant), - (r'(?i)(and(\s+then)?|not|xor|implies|or(\s+else)?)\b', Operator.Word), - (words(( - 'across', 'agent', 'alias', 'all', 'as', 'assign', 'attached', - 'attribute', 'check', 'class', 'convert', 'create', 'debug', - 'deferred', 'detachable', 'do', 'else', 'elseif', 'end', 'ensure', - 'expanded', 'export', 'external', 'feature', 'from', 'frozen', 'if', - 'inherit', 'inspect', 'invariant', 'like', 'local', 'loop', 'none', - 'note', 'obsolete', 'old', 'once', 'only', 'redefine', 'rename', - 'require', 'rescue', 'retry', 'select', 'separate', 'then', - 'undefine', 'until', 'variant', 'when'), prefix=r'(?i)\b', suffix=r'\b'), - Keyword.Reserved), - (r'"\[(([^\]%]|\n)|%(.|\n)|\][^"])*?\]"', String), - (r'"([^"%\n]|%.)*?"', String), - include('numbers'), - (r"'([^'%]|%'|%%)'", String.Char), - (r"(//|\\\\|>=|<=|:=|/=|~|/~|[\\\?!#%&@|+/\-=\>\*$<|^\[\]])", Operator), - (r"([{}():;,.])", Punctuation), - (r'([a-z]\w*)|([A-Z][A-Z0-9_]*[a-z]\w*)', Name), - (r'([A-Z][A-Z0-9_]*)', Name.Class), - (r'\n+', Text), - ], - 'numbers': [ - (r'0[xX][a-fA-F0-9]+', Number.Hex), - (r'0[bB][0-1]+', Number.Bin), - (r'0[cC][0-7]+', Number.Oct), - (r'([0-9]+\.[0-9]*)|([0-9]*\.[0-9]+)', Number.Float), - (r'[0-9]+', Number.Integer), - ], - } - - -class Inform6Lexer(RegexLexer): - """ - For `Inform 6 `_ source code. - - .. versionadded:: 2.0 - """ - - name = 'Inform 6' - aliases = ['inform6', 'i6'] - filenames = ['*.inf'] - - flags = re.MULTILINE | re.DOTALL | re.UNICODE - - _name = r'[a-zA-Z_][a-zA-Z_0-9]*' - - # Inform 7 maps these four character classes to their ASCII - # equivalents. To support Inform 6 inclusions within Inform 7, - # Inform6Lexer maps them too. - _dash = u'\\-\u2010-\u2014' - _dquote = u'"\u201c\u201d' - _squote = u"'\u2018\u2019" - _newline = u'\\n\u0085\u2028\u2029' - - tokens = { - 'root': [ - (r'(\A(!%%[^%s]*[%s])+)?' % (_newline, _newline), Comment.Preproc, - 'directive') - ], - '_whitespace': [ - (r'\s+', Text), - (r'![^%s]*' % _newline, Comment.Single) - ], - 'default': [ - include('_whitespace'), - (r'\[', Punctuation, 'many-values'), # Array initialization - (r':|(?=;)', Punctuation, '#pop'), - (r'<', Punctuation), # Second angle bracket in an action statement - default(('expression', '_expression')) - ], - - # Expressions - '_expression': [ - include('_whitespace'), - (r'(?=sp\b)', Text, '#pop'), - (r'(?=[%s%s$0-9#a-zA-Z_])' % (_dquote, _squote), Text, - ('#pop', 'value')), - (r'\+\+|[%s]{1,2}(?!>)|~~?' 
% _dash, Operator), - (r'(?=[()\[%s,?@{:;])' % _dash, Text, '#pop') - ], - 'expression': [ - include('_whitespace'), - (r'\(', Punctuation, ('expression', '_expression')), - (r'\)', Punctuation, '#pop'), - (r'\[', Punctuation, ('#pop', 'statements', 'locals')), - (r'>(?=(\s+|(![^%s]*))*[>;])' % _newline, Punctuation), - (r'\+\+|[%s]{2}(?!>)' % _dash, Operator), - (r',', Punctuation, '_expression'), - (r'&&?|\|\|?|[=~><]?=|[%s]{1,2}>?|\.\.?[&#]?|::|[<>+*/%%]' % _dash, - Operator, '_expression'), - (r'(has|hasnt|in|notin|ofclass|or|provides)\b', Operator.Word, - '_expression'), - (r'sp\b', Name), - (r'\?~?', Name.Label, 'label?'), - (r'[@{]', Error), - default('#pop') - ], - '_assembly-expression': [ - (r'\(', Punctuation, ('#push', '_expression')), - (r'[\[\]]', Punctuation), - (r'[%s]>' % _dash, Punctuation, '_expression'), - (r'sp\b', Keyword.Pseudo), - (r';', Punctuation, '#pop:3'), - include('expression') - ], - '_for-expression': [ - (r'\)', Punctuation, '#pop:2'), - (r':', Punctuation, '#pop'), - include('expression') - ], - '_keyword-expression': [ - (r'(from|near|to)\b', Keyword, '_expression'), - include('expression') - ], - '_list-expression': [ - (r',', Punctuation, '#pop'), - include('expression') - ], - '_object-expression': [ - (r'has\b', Keyword.Declaration, '#pop'), - include('_list-expression') - ], - - # Values - 'value': [ - include('_whitespace'), - # Strings - (r'[%s][^@][%s]' % (_squote, _squote), String.Char, '#pop'), - (r'([%s])(@{[0-9a-fA-F]{1,4}})([%s])' % (_squote, _squote), - bygroups(String.Char, String.Escape, String.Char), '#pop'), - (r'([%s])(@..)([%s])' % (_squote, _squote), - bygroups(String.Char, String.Escape, String.Char), '#pop'), - (r'[%s]' % _squote, String.Single, ('#pop', 'dictionary-word')), - (r'[%s]' % _dquote, String.Double, ('#pop', 'string')), - # Numbers - (r'\$[+%s][0-9]*\.?[0-9]*([eE][+%s]?[0-9]+)?' 
% (_dash, _dash), - Number.Float, '#pop'), - (r'\$[0-9a-fA-F]+', Number.Hex, '#pop'), - (r'\$\$[01]+', Number.Bin, '#pop'), - (r'[0-9]+', Number.Integer, '#pop'), - # Values prefixed by hashes - (r'(##|#a\$)(%s)' % _name, bygroups(Operator, Name), '#pop'), - (r'(#g\$)(%s)' % _name, - bygroups(Operator, Name.Variable.Global), '#pop'), - (r'#[nw]\$', Operator, ('#pop', 'obsolete-dictionary-word')), - (r'(#r\$)(%s)' % _name, bygroups(Operator, Name.Function), '#pop'), - (r'#', Name.Builtin, ('#pop', 'system-constant')), - # System functions - (words(( - 'child', 'children', 'elder', 'eldest', 'glk', 'indirect', 'metaclass', - 'parent', 'random', 'sibling', 'younger', 'youngest'), suffix=r'\b'), - Name.Builtin, '#pop'), - # Metaclasses - (r'(?i)(Class|Object|Routine|String)\b', Name.Builtin, '#pop'), - # Veneer routines - (words(( - 'Box__Routine', 'CA__Pr', 'CDefArt', 'CInDefArt', 'Cl__Ms', - 'Copy__Primitive', 'CP__Tab', 'DA__Pr', 'DB__Pr', 'DefArt', 'Dynam__String', - 'EnglishNumber', 'Glk__Wrap', 'IA__Pr', 'IB__Pr', 'InDefArt', 'Main__', - 'Meta__class', 'OB__Move', 'OB__Remove', 'OC__Cl', 'OP__Pr', 'Print__Addr', - 'Print__PName', 'PrintShortName', 'RA__Pr', 'RA__Sc', 'RL__Pr', 'R_Process', - 'RT__ChG', 'RT__ChGt', 'RT__ChLDB', 'RT__ChLDW', 'RT__ChPR', 'RT__ChPrintA', - 'RT__ChPrintC', 'RT__ChPrintO', 'RT__ChPrintS', 'RT__ChPS', 'RT__ChR', - 'RT__ChSTB', 'RT__ChSTW', 'RT__ChT', 'RT__Err', 'RT__TrPS', 'RV__Pr', - 'Symb__Tab', 'Unsigned__Compare', 'WV__Pr', 'Z__Region'), - prefix='(?i)', suffix=r'\b'), - Name.Builtin, '#pop'), - # Other built-in symbols - (words(( - 'call', 'copy', 'create', 'DEBUG', 'destroy', 'DICT_CHAR_SIZE', - 'DICT_ENTRY_BYTES', 'DICT_IS_UNICODE', 'DICT_WORD_SIZE', 'false', - 'FLOAT_INFINITY', 'FLOAT_NAN', 'FLOAT_NINFINITY', 'GOBJFIELD_CHAIN', - 'GOBJFIELD_CHILD', 'GOBJFIELD_NAME', 'GOBJFIELD_PARENT', - 'GOBJFIELD_PROPTAB', 'GOBJFIELD_SIBLING', 'GOBJ_EXT_START', - 'GOBJ_TOTAL_LENGTH', 'Grammar__Version', 'INDIV_PROP_START', 'INFIX', - 'infix__watching', 'MODULE_MODE', 'name', 'nothing', 'NUM_ATTR_BYTES', 'print', - 'print_to_array', 'recreate', 'remaining', 'self', 'sender', 'STRICT_MODE', - 'sw__var', 'sys__glob0', 'sys__glob1', 'sys__glob2', 'sys_statusline_flag', - 'TARGET_GLULX', 'TARGET_ZCODE', 'temp__global2', 'temp__global3', - 'temp__global4', 'temp_global', 'true', 'USE_MODULES', 'WORDSIZE'), - prefix='(?i)', suffix=r'\b'), - Name.Builtin, '#pop'), - # Other values - (_name, Name, '#pop') - ], - # Strings - 'dictionary-word': [ - (r'[~^]+', String.Escape), - (r'[^~^\\@({%s]+' % _squote, String.Single), - (r'[({]', String.Single), - (r'@{[0-9a-fA-F]{,4}}', String.Escape), - (r'@..', String.Escape), - (r'[%s]' % _squote, String.Single, '#pop') - ], - 'string': [ - (r'[~^]+', String.Escape), - (r'[^~^\\@({%s]+' % _dquote, String.Double), - (r'[({]', String.Double), - (r'\\', String.Escape), - (r'@(\\\s*[%s]\s*)*@((\\\s*[%s]\s*)*[0-9])*' % - (_newline, _newline), String.Escape), - (r'@(\\\s*[%s]\s*)*{((\\\s*[%s]\s*)*[0-9a-fA-F]){,4}' - r'(\\\s*[%s]\s*)*}' % (_newline, _newline, _newline), - String.Escape), - (r'@(\\\s*[%s]\s*)*.(\\\s*[%s]\s*)*.' 
% (_newline, _newline), - String.Escape), - (r'[%s]' % _dquote, String.Double, '#pop') - ], - 'plain-string': [ - (r'[^~^\\({\[\]%s]+' % _dquote, String.Double), - (r'[~^({\[\]]', String.Double), - (r'\\', String.Escape), - (r'[%s]' % _dquote, String.Double, '#pop') - ], - # Names - '_constant': [ - include('_whitespace'), - (_name, Name.Constant, '#pop'), - include('value') - ], - '_global': [ - include('_whitespace'), - (_name, Name.Variable.Global, '#pop'), - include('value') - ], - 'label?': [ - include('_whitespace'), - (r'(%s)?' % _name, Name.Label, '#pop') - ], - 'variable?': [ - include('_whitespace'), - (r'(%s)?' % _name, Name.Variable, '#pop') - ], - # Values after hashes - 'obsolete-dictionary-word': [ - (r'\S[a-zA-Z_0-9]*', String.Other, '#pop') - ], - 'system-constant': [ - include('_whitespace'), - (_name, Name.Builtin, '#pop') - ], - - # Directives - 'directive': [ - include('_whitespace'), - (r'#', Punctuation), - (r';', Punctuation, '#pop'), - (r'\[', Punctuation, - ('default', 'statements', 'locals', 'routine-name?')), - (words(( - 'abbreviate', 'endif', 'dictionary', 'ifdef', 'iffalse', 'ifndef', 'ifnot', - 'iftrue', 'ifv3', 'ifv5', 'release', 'serial', 'switches', 'system_file', - 'version'), prefix='(?i)', suffix=r'\b'), - Keyword, 'default'), - (r'(?i)(array|global)\b', Keyword, - ('default', 'directive-keyword?', '_global')), - (r'(?i)attribute\b', Keyword, ('default', 'alias?', '_constant')), - (r'(?i)class\b', Keyword, - ('object-body', 'duplicates', 'class-name')), - (r'(?i)(constant|default)\b', Keyword, - ('default', 'expression', '_constant')), - (r'(?i)(end\b)(.*)', bygroups(Keyword, Text)), - (r'(?i)(extend|verb)\b', Keyword, 'grammar'), - (r'(?i)fake_action\b', Keyword, ('default', '_constant')), - (r'(?i)import\b', Keyword, 'manifest'), - (r'(?i)(include|link)\b', Keyword, - ('default', 'before-plain-string')), - (r'(?i)(lowstring|undef)\b', Keyword, ('default', '_constant')), - (r'(?i)message\b', Keyword, ('default', 'diagnostic')), - (r'(?i)(nearby|object)\b', Keyword, - ('object-body', '_object-head')), - (r'(?i)property\b', Keyword, - ('default', 'alias?', '_constant', 'property-keyword*')), - (r'(?i)replace\b', Keyword, - ('default', 'routine-name?', 'routine-name?')), - (r'(?i)statusline\b', Keyword, ('default', 'directive-keyword?')), - (r'(?i)stub\b', Keyword, ('default', 'routine-name?')), - (r'(?i)trace\b', Keyword, - ('default', 'trace-keyword?', 'trace-keyword?')), - (r'(?i)zcharacter\b', Keyword, - ('default', 'directive-keyword?', 'directive-keyword?')), - (_name, Name.Class, ('object-body', '_object-head')) - ], - # [, Replace, Stub - 'routine-name?': [ - include('_whitespace'), - (r'(%s)?' 
% _name, Name.Function, '#pop') - ], - 'locals': [ - include('_whitespace'), - (r';', Punctuation, '#pop'), - (r'\*', Punctuation), - (_name, Name.Variable) - ], - # Array - 'many-values': [ - include('_whitespace'), - (r';', Punctuation), - (r'\]', Punctuation, '#pop'), - (r':', Error), - default(('expression', '_expression')) - ], - # Attribute, Property - 'alias?': [ - include('_whitespace'), - (r'alias\b', Keyword, ('#pop', '_constant')), - default('#pop') - ], - # Class, Object, Nearby - 'class-name': [ - include('_whitespace'), - (r'(?=[,;]|(class|has|private|with)\b)', Text, '#pop'), - (_name, Name.Class, '#pop') - ], - 'duplicates': [ - include('_whitespace'), - (r'\(', Punctuation, ('#pop', 'expression', '_expression')), - default('#pop') - ], - '_object-head': [ - (r'[%s]>' % _dash, Punctuation), - (r'(class|has|private|with)\b', Keyword.Declaration, '#pop'), - include('_global') - ], - 'object-body': [ - include('_whitespace'), - (r';', Punctuation, '#pop:2'), - (r',', Punctuation), - (r'class\b', Keyword.Declaration, 'class-segment'), - (r'(has|private|with)\b', Keyword.Declaration), - (r':', Error), - default(('_object-expression', '_expression')) - ], - 'class-segment': [ - include('_whitespace'), - (r'(?=[,;]|(class|has|private|with)\b)', Text, '#pop'), - (_name, Name.Class), - default('value') - ], - # Extend, Verb - 'grammar': [ - include('_whitespace'), - (r'=', Punctuation, ('#pop', 'default')), - (r'\*', Punctuation, ('#pop', 'grammar-line')), - default('_directive-keyword') - ], - 'grammar-line': [ - include('_whitespace'), - (r';', Punctuation, '#pop'), - (r'[/*]', Punctuation), - (r'[%s]>' % _dash, Punctuation, 'value'), - (r'(noun|scope)\b', Keyword, '=routine'), - default('_directive-keyword') - ], - '=routine': [ - include('_whitespace'), - (r'=', Punctuation, 'routine-name?'), - default('#pop') - ], - # Import - 'manifest': [ - include('_whitespace'), - (r';', Punctuation, '#pop'), - (r',', Punctuation), - (r'(?i)(global\b)?', Keyword, '_global') - ], - # Include, Link, Message - 'diagnostic': [ - include('_whitespace'), - (r'[%s]' % _dquote, String.Double, ('#pop', 'message-string')), - default(('#pop', 'before-plain-string', 'directive-keyword?')) - ], - 'before-plain-string': [ - include('_whitespace'), - (r'[%s]' % _dquote, String.Double, ('#pop', 'plain-string')) - ], - 'message-string': [ - (r'[~^]+', String.Escape), - include('plain-string') - ], - - # Keywords used in directives - '_directive-keyword!': [ - include('_whitespace'), - (words(( - 'additive', 'alias', 'buffer', 'class', 'creature', 'data', 'error', 'fatalerror', - 'first', 'has', 'held', 'initial', 'initstr', 'last', 'long', 'meta', 'multi', - 'multiexcept', 'multiheld', 'multiinside', 'noun', 'number', 'only', 'private', - 'replace', 'reverse', 'scope', 'score', 'special', 'string', 'table', 'terminating', - 'time', 'topic', 'warning', 'with'), suffix=r'\b'), - Keyword, '#pop'), - (r'[%s]{1,2}>|[+=]' % _dash, Punctuation, '#pop') - ], - '_directive-keyword': [ - include('_directive-keyword!'), - include('value') - ], - 'directive-keyword?': [ - include('_directive-keyword!'), - default('#pop') - ], - 'property-keyword*': [ - include('_whitespace'), - (r'(additive|long)\b', Keyword), - default('#pop') - ], - 'trace-keyword?': [ - include('_whitespace'), - (words(( - 'assembly', 'dictionary', 'expressions', 'lines', 'linker', - 'objects', 'off', 'on', 'symbols', 'tokens', 'verbs'), suffix=r'\b'), - Keyword, '#pop'), - default('#pop') - ], - - # Statements - 'statements': [ - 
include('_whitespace'), - (r'\]', Punctuation, '#pop'), - (r'[;{}]', Punctuation), - (words(( - 'box', 'break', 'continue', 'default', 'give', 'inversion', - 'new_line', 'quit', 'read', 'remove', 'return', 'rfalse', 'rtrue', - 'spaces', 'string', 'until'), suffix=r'\b'), - Keyword, 'default'), - (r'(do|else)\b', Keyword), - (r'(font|style)\b', Keyword, - ('default', 'miscellaneous-keyword?')), - (r'for\b', Keyword, ('for', '(?')), - (r'(if|switch|while)', Keyword, - ('expression', '_expression', '(?')), - (r'(jump|save|restore)\b', Keyword, ('default', 'label?')), - (r'objectloop\b', Keyword, - ('_keyword-expression', 'variable?', '(?')), - (r'print(_ret)?\b|(?=[%s])' % _dquote, Keyword, 'print-list'), - (r'\.', Name.Label, 'label?'), - (r'@', Keyword, 'opcode'), - (r'#(?![agrnw]\$|#)', Punctuation, 'directive'), - (r'<', Punctuation, 'default'), - (r'(move\b)?', Keyword, - ('default', '_keyword-expression', '_expression')) - ], - 'miscellaneous-keyword?': [ - include('_whitespace'), - (r'(bold|fixed|from|near|off|on|reverse|roman|to|underline)\b', - Keyword, '#pop'), - (r'(a|A|an|address|char|name|number|object|property|string|the|' - r'The)\b(?=(\s+|(![^%s]*))*\))' % _newline, Keyword.Pseudo, - '#pop'), - (r'%s(?=(\s+|(![^%s]*))*\))' % (_name, _newline), Name.Function, - '#pop'), - default('#pop') - ], - '(?': [ - include('_whitespace'), - (r'\(?', Punctuation, '#pop') - ], - 'for': [ - include('_whitespace'), - (r';?', Punctuation, ('_for-expression', '_expression')) - ], - 'print-list': [ - include('_whitespace'), - (r';', Punctuation, '#pop'), - (r':', Error), - default(('_list-expression', '_expression', '_list-expression', 'form')) - ], - 'form': [ - include('_whitespace'), - (r'\(', Punctuation, ('#pop', 'miscellaneous-keyword?')), - default('#pop') - ], - - # Assembly - 'opcode': [ - include('_whitespace'), - (r'[%s]' % _dquote, String.Double, ('operands', 'plain-string')), - (_name, Keyword, 'operands') - ], - 'operands': [ - (r':', Error), - default(('_assembly-expression', '_expression')) - ] - } - - def get_tokens_unprocessed(self, text): - # 'in' is either a keyword or an operator. - # If the token two tokens after 'in' is ')', 'in' is a keyword: - # objectloop(a in b) - # Otherwise, it is an operator: - # objectloop(a in b && true) - objectloop_queue = [] - objectloop_token_count = -1 - previous_token = None - for index, token, value in RegexLexer.get_tokens_unprocessed(self, - text): - if previous_token is Name.Variable and value == 'in': - objectloop_queue = [[index, token, value]] - objectloop_token_count = 2 - elif objectloop_token_count > 0: - if token not in Comment and token not in Text: - objectloop_token_count -= 1 - objectloop_queue.append((index, token, value)) - else: - if objectloop_token_count == 0: - if objectloop_queue[-1][2] == ')': - objectloop_queue[0][1] = Keyword - while objectloop_queue: - yield objectloop_queue.pop(0) - objectloop_token_count = -1 - yield index, token, value - if token not in Comment and token not in Text: - previous_token = token - while objectloop_queue: - yield objectloop_queue.pop(0) - - -class Inform7Lexer(RegexLexer): - """ - For `Inform 7 `_ source code. - - .. 
versionadded:: 2.0 - """ - - name = 'Inform 7' - aliases = ['inform7', 'i7'] - filenames = ['*.ni', '*.i7x'] - - flags = re.MULTILINE | re.DOTALL | re.UNICODE - - _dash = Inform6Lexer._dash - _dquote = Inform6Lexer._dquote - _newline = Inform6Lexer._newline - _start = r'\A|(?<=[%s])' % _newline - - # There are three variants of Inform 7, differing in how to - # interpret at signs and braces in I6T. In top-level inclusions, at - # signs in the first column are inweb syntax. In phrase definitions - # and use options, tokens in braces are treated as I7. Use options - # also interpret "{N}". - tokens = {} - token_variants = ['+i6t-not-inline', '+i6t-inline', '+i6t-use-option'] - - for level in token_variants: - tokens[level] = { - '+i6-root': list(Inform6Lexer.tokens['root']), - '+i6t-root': [ # For Inform6TemplateLexer - (r'[^%s]*' % Inform6Lexer._newline, Comment.Preproc, - ('directive', '+p')) - ], - 'root': [ - (r'(\|?\s)+', Text), - (r'\[', Comment.Multiline, '+comment'), - (r'[%s]' % _dquote, Generic.Heading, - ('+main', '+titling', '+titling-string')), - default(('+main', '+heading?')) - ], - '+titling-string': [ - (r'[^%s]+' % _dquote, Generic.Heading), - (r'[%s]' % _dquote, Generic.Heading, '#pop') - ], - '+titling': [ - (r'\[', Comment.Multiline, '+comment'), - (r'[^%s.;:|%s]+' % (_dquote, _newline), Generic.Heading), - (r'[%s]' % _dquote, Generic.Heading, '+titling-string'), - (r'[%s]{2}|(?<=[\s%s])\|[\s%s]' % (_newline, _dquote, _dquote), - Text, ('#pop', '+heading?')), - (r'[.;:]|(?<=[\s%s])\|' % _dquote, Text, '#pop'), - (r'[|%s]' % _newline, Generic.Heading) - ], - '+main': [ - (r'(?i)[^%s:a\[(|%s]+' % (_dquote, _newline), Text), - (r'[%s]' % _dquote, String.Double, '+text'), - (r':', Text, '+phrase-definition'), - (r'(?i)\bas\b', Text, '+use-option'), - (r'\[', Comment.Multiline, '+comment'), - (r'(\([%s])(.*?)([%s]\))' % (_dash, _dash), - bygroups(Punctuation, - using(this, state=('+i6-root', 'directive'), - i6t='+i6t-not-inline'), Punctuation)), - (r'(%s|(?<=[\s;:.%s]))\|\s|[%s]{2,}' % - (_start, _dquote, _newline), Text, '+heading?'), - (r'(?i)[a(|%s]' % _newline, Text) - ], - '+phrase-definition': [ - (r'\s+', Text), - (r'\[', Comment.Multiline, '+comment'), - (r'(\([%s])(.*?)([%s]\))' % (_dash, _dash), - bygroups(Punctuation, - using(this, state=('+i6-root', 'directive', - 'default', 'statements'), - i6t='+i6t-inline'), Punctuation), '#pop'), - default('#pop') - ], - '+use-option': [ - (r'\s+', Text), - (r'\[', Comment.Multiline, '+comment'), - (r'(\([%s])(.*?)([%s]\))' % (_dash, _dash), - bygroups(Punctuation, - using(this, state=('+i6-root', 'directive'), - i6t='+i6t-use-option'), Punctuation), '#pop'), - default('#pop') - ], - '+comment': [ - (r'[^\[\]]+', Comment.Multiline), - (r'\[', Comment.Multiline, '#push'), - (r'\]', Comment.Multiline, '#pop') - ], - '+text': [ - (r'[^\[%s]+' % _dquote, String.Double), - (r'\[.*?\]', String.Interpol), - (r'[%s]' % _dquote, String.Double, '#pop') - ], - '+heading?': [ - (r'(\|?\s)+', Text), - (r'\[', Comment.Multiline, '+comment'), - (r'[%s]{4}\s+' % _dash, Text, '+documentation-heading'), - (r'[%s]{1,3}' % _dash, Text), - (r'(?i)(volume|book|part|chapter|section)\b[^%s]*' % _newline, - Generic.Heading, '#pop'), - default('#pop') - ], - '+documentation-heading': [ - (r'\s+', Text), - (r'\[', Comment.Multiline, '+comment'), - (r'(?i)documentation\s+', Text, '+documentation-heading2'), - default('#pop') - ], - '+documentation-heading2': [ - (r'\s+', Text), - (r'\[', Comment.Multiline, '+comment'), - (r'[%s]{4}\s' % _dash, Text, 
'+documentation'), - default('#pop:2') - ], - '+documentation': [ - (r'(?i)(%s)\s*(chapter|example)\s*:[^%s]*' % - (_start, _newline), Generic.Heading), - (r'(?i)(%s)\s*section\s*:[^%s]*' % (_start, _newline), - Generic.Subheading), - (r'((%s)\t.*?[%s])+' % (_start, _newline), - using(this, state='+main')), - (r'[^%s\[]+|[%s\[]' % (_newline, _newline), Text), - (r'\[', Comment.Multiline, '+comment'), - ], - '+i6t-not-inline': [ - (r'(%s)@c( .*?)?([%s]|\Z)' % (_start, _newline), - Comment.Preproc), - (r'(%s)@([%s]+|Purpose:)[^%s]*' % (_start, _dash, _newline), - Comment.Preproc), - (r'(%s)@p( .*?)?([%s]|\Z)' % (_start, _newline), - Generic.Heading, '+p') - ], - '+i6t-use-option': [ - include('+i6t-not-inline'), - (r'({)(N)(})', bygroups(Punctuation, Text, Punctuation)) - ], - '+i6t-inline': [ - (r'({)(\S[^}]*)?(})', - bygroups(Punctuation, using(this, state='+main'), - Punctuation)) - ], - '+i6t': [ - (r'({[%s])(![^}]*)(}?)' % _dash, - bygroups(Punctuation, Comment.Single, Punctuation)), - (r'({[%s])(lines)(:)([^}]*)(}?)' % _dash, - bygroups(Punctuation, Keyword, Punctuation, Text, - Punctuation), '+lines'), - (r'({[%s])([^:}]*)(:?)([^}]*)(}?)' % _dash, - bygroups(Punctuation, Keyword, Punctuation, Text, - Punctuation)), - (r'(\(\+)(.*?)(\+\)|\Z)', - bygroups(Punctuation, using(this, state='+main'), - Punctuation)) - ], - '+p': [ - (r'[^@]+', Comment.Preproc), - (r'(%s)@c( .*?)?([%s]|\Z)' % (_start, _newline), - Comment.Preproc, '#pop'), - (r'(%s)@([%s]|Purpose:)' % (_start, _dash), Comment.Preproc), - (r'(%s)@p( .*?)?([%s]|\Z)' % (_start, _newline), - Generic.Heading), - (r'@', Comment.Preproc) - ], - '+lines': [ - (r'(%s)@c( .*?)?([%s]|\Z)' % (_start, _newline), - Comment.Preproc), - (r'(%s)@([%s]|Purpose:)[^%s]*' % (_start, _dash, _newline), - Comment.Preproc), - (r'(%s)@p( .*?)?([%s]|\Z)' % (_start, _newline), - Generic.Heading, '+p'), - (r'(%s)@[a-zA-Z_0-9]*[ %s]' % (_start, _newline), Keyword), - (r'![^%s]*' % _newline, Comment.Single), - (r'({)([%s]endlines)(})' % _dash, - bygroups(Punctuation, Keyword, Punctuation), '#pop'), - (r'[^@!{]+?([%s]|\Z)|.' % _newline, Text) - ] - } - # Inform 7 can include snippets of Inform 6 template language, - # so all of Inform6Lexer's states are copied here, with - # modifications to account for template syntax. Inform7Lexer's - # own states begin with '+' to avoid name conflicts. Some of - # Inform6Lexer's states begin with '_': these are not modified. - # They deal with template syntax either by including modified - # states, or by matching r'' then pushing to modified states. - for token in Inform6Lexer.tokens: - if token == 'root': - continue - tokens[level][token] = list(Inform6Lexer.tokens[token]) - if not token.startswith('_'): - tokens[level][token][:0] = [include('+i6t'), include(level)] - - def __init__(self, **options): - level = options.get('i6t', '+i6t-not-inline') - if level not in self._all_tokens: - self._tokens = self.__class__.process_tokendef(level) - else: - self._tokens = self._all_tokens[level] - RegexLexer.__init__(self, **options) - - -class Inform6TemplateLexer(Inform7Lexer): - """ - For `Inform 6 template - `_ code. - - .. versionadded:: 2.0 - """ - - name = 'Inform 6 template' - aliases = ['i6t'] - filenames = ['*.i6t'] - - def get_tokens_unprocessed(self, text, stack=('+i6t-root',)): - return Inform7Lexer.get_tokens_unprocessed(self, text, stack) - - -class MqlLexer(CppLexer): - """ - For `MQL4 `_ and - `MQL5 `_ source code. - - .. 
versionadded:: 2.0 - """ - name = 'MQL' - aliases = ['mql', 'mq4', 'mq5', 'mql4', 'mql5'] - filenames = ['*.mq4', '*.mq5', '*.mqh'] - mimetypes = ['text/x-mql'] - - tokens = { - 'statements': [ - (words(( - 'input', '_Digits', '_Point', '_LastError', '_Period', '_RandomSeed', - '_StopFlag', '_Symbol', '_UninitReason', 'Ask', 'Bars', 'Bid', - 'Close', 'Digits', 'High', 'Low', 'Open', 'Point', 'Time', - 'Volume'), suffix=r'\b'), - Keyword), - (words(( - 'void', 'char', 'uchar', 'bool', 'short', 'ushort', 'int', 'uint', - 'color', 'long', 'ulong', 'datetime', 'float', 'double', - 'string'), suffix=r'\b'), - Keyword.Type), - (words(( - 'Alert', 'CheckPointer', 'Comment', 'DebugBreak', 'ExpertRemove', - 'GetPointer', 'GetTickCount', 'MessageBox', 'PeriodSeconds', 'PlaySound', - 'Print', 'PrintFormat', 'ResetLastError', 'ResourceCreate', 'ResourceFree', - 'ResourceReadImage', 'ResourceSave', 'SendFTP', 'SendMail', 'SendNotification', - 'Sleep', 'TerminalClose', 'TesterStatistics', 'ZeroMemory', - 'ArrayBsearch', 'ArrayCopy', 'ArrayCompare', 'ArrayFree', 'ArrayGetAsSeries', - 'ArrayInitialize', 'ArrayFill', 'ArrayIsSeries', 'ArrayIsDynamic', - 'ArrayMaximum', 'ArrayMinimum', 'ArrayRange', 'ArrayResize', - 'ArraySetAsSeries', 'ArraySize', 'ArraySort', 'ArrayCopyRates', - 'ArrayCopySeries', 'ArrayDimension', - 'CharToString', 'DoubleToString', 'EnumToString', 'NormalizeDouble', - 'StringToDouble', 'StringToInteger', 'StringToTime', 'TimeToString', - 'IntegerToString', 'ShortToString', 'ShortArrayToString', - 'StringToShortArray', 'CharArrayToString', 'StringToCharArray', - 'ColorToARGB', 'ColorToString', 'StringToColor', 'StringFormat', - 'CharToStr', 'DoubleToStr', 'StrToDouble', 'StrToInteger', 'StrToTime', 'TimeToStr', - 'MathAbs', 'MathArccos', 'MathArcsin', 'MathArctan', 'MathCeil', 'MathCos', 'MathExp', - 'MathFloor', 'MathLog', 'MathMax', 'MathMin', 'MathMod', 'MathPow', 'MathRand', - 'MathRound', 'MathSin', 'MathSqrt', 'MathSrand', 'MathTan', 'MathIsValidNumber', - 'StringAdd', 'StringBufferLen', 'StringCompare', 'StringConcatenate', 'StringFill', - 'StringFind', 'StringGetCharacter', 'StringInit', 'StringLen', 'StringReplace', - 'StringSetCharacter', 'StringSplit', 'StringSubstr', 'StringToLower', 'StringToUpper', - 'StringTrimLeft', 'StringTrimRight', 'StringGetChar', 'StringSetChar', - 'TimeCurrent', 'TimeTradeServer', 'TimeLocal', 'TimeGMT', 'TimeDaylightSavings', - 'TimeGMTOffset', 'TimeToStruct', 'StructToTime', 'Day', 'DayOfWeek', 'DayOfYear', - 'Hour', 'Minute', 'Month', 'Seconds', 'TimeDay', 'TimeDayOfWeek', 'TimeDayOfYear', 'TimeHour', - 'TimeMinute', 'TimeMonth', 'TimeSeconds', 'TimeYear', 'Year', - 'AccountInfoDouble', 'AccountInfoInteger', 'AccountInfoString', 'AccountBalance', - 'AccountCredit', 'AccountCompany', 'AccountCurrency', 'AccountEquity', - 'AccountFreeMargin', 'AccountFreeMarginCheck', 'AccountFreeMarginMode', - 'AccountLeverage', 'AccountMargin', 'AccountName', 'AccountNumber', 'AccountProfit', - 'AccountServer', 'AccountStopoutLevel', 'AccountStopoutMode', - 'GetLastError', 'IsStopped', 'UninitializeReason', 'MQLInfoInteger', 'MQLInfoString', - 'Symbol', 'Period', 'Digits', 'Point', 'IsConnected', 'IsDemo', 'IsDllsAllowed', - 'IsExpertEnabled', 'IsLibrariesAllowed', 'IsOptimization', 'IsTesting', - 'IsTradeAllowed', - 'IsTradeContextBusy', 'IsVisualMode', 'TerminalCompany', 'TerminalName', - 'TerminalPath', - 'SymbolsTotal', 'SymbolName', 'SymbolSelect', 'SymbolIsSynchronized', - 'SymbolInfoDouble', - 'SymbolInfoInteger', 'SymbolInfoString', 'SymbolInfoTick', - 
'SymbolInfoSessionQuote', - 'SymbolInfoSessionTrade', 'MarketInfo', - 'SeriesInfoInteger', 'CopyRates', 'CopyTime', 'CopyOpen', - 'CopyHigh', 'CopyLow', 'CopyClose', - 'CopyTickVolume', 'CopyRealVolume', 'CopySpread', 'iBars', 'iBarShift', 'iClose', - 'iHigh', 'iHighest', 'iLow', 'iLowest', 'iOpen', 'iTime', 'iVolume', - 'HideTestIndicators', 'Period', 'RefreshRates', 'Symbol', 'WindowBarsPerChart', - 'WindowExpertName', 'WindowFind', 'WindowFirstVisibleBar', 'WindowHandle', - 'WindowIsVisible', 'WindowOnDropped', 'WindowPriceMax', 'WindowPriceMin', - 'WindowPriceOnDropped', 'WindowRedraw', 'WindowScreenShot', - 'WindowTimeOnDropped', 'WindowsTotal', 'WindowXOnDropped', 'WindowYOnDropped', - 'OrderClose', 'OrderCloseBy', 'OrderClosePrice', 'OrderCloseTime', 'OrderComment', - 'OrderCommission', 'OrderDelete', 'OrderExpiration', 'OrderLots', 'OrderMagicNumber', - 'OrderModify', 'OrderOpenPrice', 'OrderOpenTime', 'OrderPrint', 'OrderProfit', - 'OrderSelect', 'OrderSend', 'OrdersHistoryTotal', 'OrderStopLoss', 'OrdersTotal', - 'OrderSwap', 'OrderSymbol', 'OrderTakeProfit', 'OrderTicket', 'OrderType', - 'GlobalVariableCheck', 'GlobalVariableTime', - 'GlobalVariableDel', 'GlobalVariableGet', 'GlobalVariableName', - 'GlobalVariableSet', 'GlobalVariablesFlush', 'GlobalVariableTemp', - 'GlobalVariableSetOnCondition', 'GlobalVariablesDeleteAll', - 'GlobalVariablesTotal', 'GlobalVariableCheck', 'GlobalVariableTime', - 'GlobalVariableDel', 'GlobalVariableGet', - 'GlobalVariableName', 'GlobalVariableSet', 'GlobalVariablesFlush', - 'GlobalVariableTemp', 'GlobalVariableSetOnCondition', - 'GlobalVariablesDeleteAll', 'GlobalVariablesTotal', - 'GlobalVariableCheck', 'GlobalVariableTime', 'GlobalVariableDel', - 'GlobalVariableGet', 'GlobalVariableName', 'GlobalVariableSet', - 'GlobalVariablesFlush', 'GlobalVariableTemp', - 'GlobalVariableSetOnCondition', 'GlobalVariablesDeleteAll', - 'GlobalVariablesTotal', - 'FileFindFirst', 'FileFindNext', 'FileFindClose', 'FileOpen', 'FileDelete', - 'FileFlush', 'FileGetInteger', 'FileIsEnding', 'FileIsLineEnding', - 'FileClose', 'FileIsExist', 'FileCopy', 'FileMove', 'FileReadArray', - 'FileReadBool', 'FileReadDatetime', 'FileReadDouble', 'FileReadFloat', - 'FileReadInteger', 'FileReadLong', 'FileReadNumber', 'FileReadString', - 'FileReadStruct', 'FileSeek', 'FileSize', 'FileTell', 'FileWrite', - 'FileWriteArray', 'FileWriteDouble', 'FileWriteFloat', 'FileWriteInteger', - 'FileWriteLong', 'FileWriteString', 'FileWriteStruct', 'FolderCreate', - 'FolderDelete', 'FolderClean', 'FileOpenHistory', - 'IndicatorSetDouble', 'IndicatorSetInteger', 'IndicatorSetString', - 'SetIndexBuffer', 'IndicatorBuffers', 'IndicatorCounted', 'IndicatorDigits', - 'IndicatorShortName', 'SetIndexArrow', 'SetIndexDrawBegin', - 'SetIndexEmptyValue', 'SetIndexLabel', 'SetIndexShift', - 'SetIndexStyle', 'SetLevelStyle', 'SetLevelValue', - 'ObjectCreate', 'ObjectName', 'ObjectDelete', 'ObjectsDeleteAll', - 'ObjectFind', 'ObjectGetTimeByValue', 'ObjectGetValueByTime', - 'ObjectMove', 'ObjectsTotal', 'ObjectGetDouble', 'ObjectGetInteger', - 'ObjectGetString', 'ObjectSetDouble', 'ObjectSetInteger', - 'ObjectSetString', 'TextSetFont', 'TextOut', 'TextGetSize', - 'ObjectDescription', 'ObjectGet', 'ObjectGetFiboDescription', - 'ObjectGetShiftByValue', 'ObjectGetValueByShift', 'ObjectSet', - 'ObjectSetFiboDescription', 'ObjectSetText', 'ObjectType', - 'iAC', 'iAD', 'iADX', 'iAlligator', 'iAO', 'iATR', 'iBearsPower', - 'iBands', 'iBandsOnArray', 'iBullsPower', 'iCCI', 'iCCIOnArray', - 'iCustom', 'iDeMarker', 
'iEnvelopes', 'iEnvelopesOnArray', - 'iForce', 'iFractals', 'iGator', 'iIchimoku', 'iBWMFI', 'iMomentum', - 'iMomentumOnArray', 'iMFI', 'iMA', 'iMAOnArray', 'iOsMA', 'iMACD', - 'iOBV', 'iSAR', 'iRSI', 'iRSIOnArray', 'iRVI', 'iStdDev', 'iStdDevOnArray', - 'iStochastic', 'iWPR', - 'EventSetMillisecondTimer', 'EventSetTimer', - 'EventKillTimer', 'EventChartCustom'), suffix=r'\b'), - Name.Function), - (words(( - 'CHARTEVENT_KEYDOWN', 'CHARTEVENT_MOUSE_MOVE', - 'CHARTEVENT_OBJECT_CREATE', - 'CHARTEVENT_OBJECT_CHANGE', 'CHARTEVENT_OBJECT_DELETE', - 'CHARTEVENT_CLICK', - 'CHARTEVENT_OBJECT_CLICK', 'CHARTEVENT_OBJECT_DRAG', - 'CHARTEVENT_OBJECT_ENDEDIT', - 'CHARTEVENT_CHART_CHANGE', 'CHARTEVENT_CUSTOM', - 'CHARTEVENT_CUSTOM_LAST', - 'PERIOD_CURRENT', 'PERIOD_M1', 'PERIOD_M2', 'PERIOD_M3', - 'PERIOD_M4', 'PERIOD_M5', - 'PERIOD_M6', 'PERIOD_M10', 'PERIOD_M12', 'PERIOD_M15', - 'PERIOD_M20', 'PERIOD_M30', - 'PERIOD_H1', 'PERIOD_H2', 'PERIOD_H3', 'PERIOD_H4', - 'PERIOD_H6', 'PERIOD_H8', - 'PERIOD_H12', 'PERIOD_D1', 'PERIOD_W1', 'PERIOD_MN1', - 'CHART_IS_OBJECT', 'CHART_BRING_TO_TOP', - 'CHART_MOUSE_SCROLL', 'CHART_EVENT_MOUSE_MOVE', - 'CHART_EVENT_OBJECT_CREATE', - 'CHART_EVENT_OBJECT_DELETE', 'CHART_MODE', 'CHART_FOREGROUND', - 'CHART_SHIFT', - 'CHART_AUTOSCROLL', 'CHART_SCALE', 'CHART_SCALEFIX', - 'CHART_SCALEFIX_11', - 'CHART_SCALE_PT_PER_BAR', 'CHART_SHOW_OHLC', - 'CHART_SHOW_BID_LINE', - 'CHART_SHOW_ASK_LINE', 'CHART_SHOW_LAST_LINE', - 'CHART_SHOW_PERIOD_SEP', - 'CHART_SHOW_GRID', 'CHART_SHOW_VOLUMES', - 'CHART_SHOW_OBJECT_DESCR', - 'CHART_VISIBLE_BARS', 'CHART_WINDOWS_TOTAL', - 'CHART_WINDOW_IS_VISIBLE', - 'CHART_WINDOW_HANDLE', 'CHART_WINDOW_YDISTANCE', - 'CHART_FIRST_VISIBLE_BAR', - 'CHART_WIDTH_IN_BARS', 'CHART_WIDTH_IN_PIXELS', - 'CHART_HEIGHT_IN_PIXELS', - 'CHART_COLOR_BACKGROUND', 'CHART_COLOR_FOREGROUND', - 'CHART_COLOR_GRID', - 'CHART_COLOR_VOLUME', 'CHART_COLOR_CHART_UP', - 'CHART_COLOR_CHART_DOWN', - 'CHART_COLOR_CHART_LINE', 'CHART_COLOR_CANDLE_BULL', - 'CHART_COLOR_CANDLE_BEAR', - 'CHART_COLOR_BID', 'CHART_COLOR_ASK', 'CHART_COLOR_LAST', - 'CHART_COLOR_STOP_LEVEL', - 'CHART_SHOW_TRADE_LEVELS', 'CHART_DRAG_TRADE_LEVELS', - 'CHART_SHOW_DATE_SCALE', - 'CHART_SHOW_PRICE_SCALE', 'CHART_SHIFT_SIZE', - 'CHART_FIXED_POSITION', - 'CHART_FIXED_MAX', 'CHART_FIXED_MIN', 'CHART_POINTS_PER_BAR', - 'CHART_PRICE_MIN', - 'CHART_PRICE_MAX', 'CHART_COMMENT', 'CHART_BEGIN', - 'CHART_CURRENT_POS', 'CHART_END', - 'CHART_BARS', 'CHART_CANDLES', 'CHART_LINE', 'CHART_VOLUME_HIDE', - 'CHART_VOLUME_TICK', 'CHART_VOLUME_REAL', - 'OBJ_VLINE', 'OBJ_HLINE', 'OBJ_TREND', 'OBJ_TRENDBYANGLE', 'OBJ_CYCLES', - 'OBJ_CHANNEL', 'OBJ_STDDEVCHANNEL', 'OBJ_REGRESSION', 'OBJ_PITCHFORK', - 'OBJ_GANNLINE', 'OBJ_GANNFAN', 'OBJ_GANNGRID', 'OBJ_FIBO', - 'OBJ_FIBOTIMES', 'OBJ_FIBOFAN', 'OBJ_FIBOARC', 'OBJ_FIBOCHANNEL', - 'OBJ_EXPANSION', 'OBJ_RECTANGLE', 'OBJ_TRIANGLE', 'OBJ_ELLIPSE', - 'OBJ_ARROW_THUMB_UP', 'OBJ_ARROW_THUMB_DOWN', - 'OBJ_ARROW_UP', 'OBJ_ARROW_DOWN', - 'OBJ_ARROW_STOP', 'OBJ_ARROW_CHECK', 'OBJ_ARROW_LEFT_PRICE', - 'OBJ_ARROW_RIGHT_PRICE', 'OBJ_ARROW_BUY', 'OBJ_ARROW_SELL', - 'OBJ_ARROW', - 'OBJ_TEXT', 'OBJ_LABEL', 'OBJ_BUTTON', 'OBJ_BITMAP', - 'OBJ_BITMAP_LABEL', - 'OBJ_EDIT', 'OBJ_EVENT', 'OBJ_RECTANGLE_LABEL', - 'OBJPROP_TIME1', 'OBJPROP_PRICE1', 'OBJPROP_TIME2', - 'OBJPROP_PRICE2', 'OBJPROP_TIME3', - 'OBJPROP_PRICE3', 'OBJPROP_COLOR', 'OBJPROP_STYLE', - 'OBJPROP_WIDTH', - 'OBJPROP_BACK', 'OBJPROP_RAY', 'OBJPROP_ELLIPSE', - 'OBJPROP_SCALE', - 'OBJPROP_ANGLE', 'OBJPROP_ARROWCODE', 'OBJPROP_TIMEFRAMES', - 
'OBJPROP_DEVIATION', 'OBJPROP_FONTSIZE', 'OBJPROP_CORNER', - 'OBJPROP_XDISTANCE', 'OBJPROP_YDISTANCE', 'OBJPROP_FIBOLEVELS', - 'OBJPROP_LEVELCOLOR', 'OBJPROP_LEVELSTYLE', 'OBJPROP_LEVELWIDTH', - 'OBJPROP_FIRSTLEVEL', 'OBJPROP_COLOR', 'OBJPROP_STYLE', 'OBJPROP_WIDTH', - 'OBJPROP_BACK', 'OBJPROP_ZORDER', 'OBJPROP_FILL', 'OBJPROP_HIDDEN', - 'OBJPROP_SELECTED', 'OBJPROP_READONLY', 'OBJPROP_TYPE', 'OBJPROP_TIME', - 'OBJPROP_SELECTABLE', 'OBJPROP_CREATETIME', 'OBJPROP_LEVELS', - 'OBJPROP_LEVELCOLOR', 'OBJPROP_LEVELSTYLE', 'OBJPROP_LEVELWIDTH', - 'OBJPROP_ALIGN', 'OBJPROP_FONTSIZE', 'OBJPROP_RAY_RIGHT', 'OBJPROP_RAY', - 'OBJPROP_ELLIPSE', 'OBJPROP_ARROWCODE', 'OBJPROP_TIMEFRAMES', 'OBJPROP_ANCHOR', - 'OBJPROP_XDISTANCE', 'OBJPROP_YDISTANCE', 'OBJPROP_DRAWLINES', 'OBJPROP_STATE', - 'OBJPROP_CHART_ID', 'OBJPROP_XSIZE', 'OBJPROP_YSIZE', 'OBJPROP_XOFFSET', - 'OBJPROP_YOFFSET', 'OBJPROP_PERIOD', 'OBJPROP_DATE_SCALE', 'OBJPROP_PRICE_SCALE', - 'OBJPROP_CHART_SCALE', 'OBJPROP_BGCOLOR', 'OBJPROP_CORNER', 'OBJPROP_BORDER_TYPE', - 'OBJPROP_BORDER_COLOR', 'OBJPROP_PRICE', 'OBJPROP_LEVELVALUE', 'OBJPROP_SCALE', - 'OBJPROP_ANGLE', 'OBJPROP_DEVIATION', - 'OBJPROP_NAME', 'OBJPROP_TEXT', 'OBJPROP_TOOLTIP', 'OBJPROP_LEVELTEXT', - 'OBJPROP_FONT', 'OBJPROP_BMPFILE', 'OBJPROP_SYMBOL', - 'BORDER_FLAT', 'BORDER_RAISED', 'BORDER_SUNKEN', 'ALIGN_LEFT', 'ALIGN_CENTER', - 'ALIGN_RIGHT', 'ANCHOR_LEFT_UPPER', 'ANCHOR_LEFT', 'ANCHOR_LEFT_LOWER', - 'ANCHOR_LOWER', 'ANCHOR_RIGHT_LOWER', 'ANCHOR_RIGHT', 'ANCHOR_RIGHT_UPPER', - 'ANCHOR_UPPER', 'ANCHOR_CENTER', 'ANCHOR_TOP', 'ANCHOR_BOTTOM', - 'CORNER_LEFT_UPPER', 'CORNER_LEFT_LOWER', 'CORNER_RIGHT_LOWER', - 'CORNER_RIGHT_UPPER', - 'OBJ_NO_PERIODS', 'EMPTY', 'OBJ_PERIOD_M1', 'OBJ_PERIOD_M5', 'OBJ_PERIOD_M15', - 'OBJ_PERIOD_M30', 'OBJ_PERIOD_H1', 'OBJ_PERIOD_H4', 'OBJ_PERIOD_D1', - 'OBJ_PERIOD_W1', 'OBJ_PERIOD_MN1', 'OBJ_ALL_PERIODS', - 'GANN_UP_TREND', 'GANN_DOWN_TREND', - 'SYMBOL_THUMBSUP', 'SYMBOL_THUMBSDOWN', - 'SYMBOL_ARROWUP', 'SYMBOL_ARROWDOWN', - 'SYMBOL_STOPSIGN', 'SYMBOL_CHECKSIGN', - 'SYMBOL_LEFTPRICE', 'SYMBOL_RIGHTPRICE', - 'PRICE_CLOSE', 'PRICE_OPEN', 'PRICE_HIGH', 'PRICE_LOW', - 'PRICE_MEDIAN', 'PRICE_TYPICAL', 'PRICE_WEIGHTED', - 'VOLUME_TICK', 'VOLUME_REAL', - 'STO_LOWHIGH', 'STO_CLOSECLOSE', - 'MODE_OPEN', 'MODE_LOW', 'MODE_HIGH', 'MODE_CLOSE', 'MODE_VOLUME', 'MODE_TIME', - 'MODE_SMA', 'MODE_EMA', 'MODE_SMMA', 'MODE_LWMA', - 'MODE_MAIN', 'MODE_SIGNAL', 'MODE_MAIN', - 'MODE_PLUSDI', 'MODE_MINUSDI', 'MODE_UPPER', - 'MODE_LOWER', 'MODE_GATORJAW', 'MODE_GATORTEETH', - 'MODE_GATORLIPS', 'MODE_TENKANSEN', - 'MODE_KIJUNSEN', 'MODE_SENKOUSPANA', - 'MODE_SENKOUSPANB', 'MODE_CHINKOUSPAN', - 'DRAW_LINE', 'DRAW_SECTION', 'DRAW_HISTOGRAM', - 'DRAW_ARROW', 'DRAW_ZIGZAG', 'DRAW_NONE', - 'STYLE_SOLID', 'STYLE_DASH', 'STYLE_DOT', - 'STYLE_DASHDOT', 'STYLE_DASHDOTDOT', - 'DRAW_NONE', 'DRAW_LINE', 'DRAW_SECTION', 'DRAW_HISTOGRAM', - 'DRAW_ARROW', 'DRAW_ZIGZAG', 'DRAW_FILLING', - 'INDICATOR_DATA', 'INDICATOR_COLOR_INDEX', - 'INDICATOR_CALCULATIONS', 'INDICATOR_DIGITS', - 'INDICATOR_HEIGHT', 'INDICATOR_LEVELS', - 'INDICATOR_LEVELCOLOR', 'INDICATOR_LEVELSTYLE', - 'INDICATOR_LEVELWIDTH', 'INDICATOR_MINIMUM', - 'INDICATOR_MAXIMUM', 'INDICATOR_LEVELVALUE', - 'INDICATOR_SHORTNAME', 'INDICATOR_LEVELTEXT', - 'TERMINAL_BUILD', 'TERMINAL_CONNECTED', - 'TERMINAL_DLLS_ALLOWED', 'TERMINAL_TRADE_ALLOWED', - 'TERMINAL_EMAIL_ENABLED', - 'TERMINAL_FTP_ENABLED', 'TERMINAL_MAXBARS', - 'TERMINAL_CODEPAGE', 'TERMINAL_CPU_CORES', - 'TERMINAL_DISK_SPACE', 'TERMINAL_MEMORY_PHYSICAL', - 
'TERMINAL_MEMORY_TOTAL', - 'TERMINAL_MEMORY_AVAILABLE', 'TERMINAL_MEMORY_USED', - 'TERMINAL_X64', - 'TERMINAL_OPENCL_SUPPORT', 'TERMINAL_LANGUAGE', - 'TERMINAL_COMPANY', 'TERMINAL_NAME', - 'TERMINAL_PATH', 'TERMINAL_DATA_PATH', - 'TERMINAL_COMMONDATA_PATH', - 'MQL_PROGRAM_TYPE', 'MQL_DLLS_ALLOWED', - 'MQL_TRADE_ALLOWED', 'MQL_DEBUG', - 'MQL_PROFILER', 'MQL_TESTER', 'MQL_OPTIMIZATION', - 'MQL_VISUAL_MODE', - 'MQL_FRAME_MODE', 'MQL_LICENSE_TYPE', 'MQL_PROGRAM_NAME', - 'MQL_PROGRAM_PATH', - 'PROGRAM_SCRIPT', 'PROGRAM_EXPERT', - 'PROGRAM_INDICATOR', 'LICENSE_FREE', - 'LICENSE_DEMO', 'LICENSE_FULL', 'LICENSE_TIME', - 'MODE_LOW', 'MODE_HIGH', 'MODE_TIME', 'MODE_BID', - 'MODE_ASK', 'MODE_POINT', - 'MODE_DIGITS', 'MODE_SPREAD', 'MODE_STOPLEVEL', - 'MODE_LOTSIZE', 'MODE_TICKVALUE', - 'MODE_TICKSIZE', 'MODE_SWAPLONG', - 'MODE_SWAPSHORT', 'MODE_STARTING', - 'MODE_EXPIRATION', 'MODE_TRADEALLOWED', - 'MODE_MINLOT', 'MODE_LOTSTEP', 'MODE_MAXLOT', - 'MODE_SWAPTYPE', 'MODE_PROFITCALCMODE', - 'MODE_MARGINCALCMODE', 'MODE_MARGININIT', - 'MODE_MARGINMAINTENANCE', 'MODE_MARGINHEDGED', - 'MODE_MARGINREQUIRED', 'MODE_FREEZELEVEL', - 'SUNDAY', 'MONDAY', 'TUESDAY', 'WEDNESDAY', 'THURSDAY', - 'FRIDAY', 'SATURDAY', - 'ACCOUNT_LOGIN', 'ACCOUNT_TRADE_MODE', - 'ACCOUNT_LEVERAGE', - 'ACCOUNT_LIMIT_ORDERS', 'ACCOUNT_MARGIN_SO_MODE', - 'ACCOUNT_TRADE_ALLOWED', 'ACCOUNT_TRADE_EXPERT', - 'ACCOUNT_BALANCE', - 'ACCOUNT_CREDIT', 'ACCOUNT_PROFIT', 'ACCOUNT_EQUITY', - 'ACCOUNT_MARGIN', - 'ACCOUNT_FREEMARGIN', 'ACCOUNT_MARGIN_LEVEL', - 'ACCOUNT_MARGIN_SO_CALL', - 'ACCOUNT_MARGIN_SO_SO', 'ACCOUNT_NAME', - 'ACCOUNT_SERVER', 'ACCOUNT_CURRENCY', - 'ACCOUNT_COMPANY', 'ACCOUNT_TRADE_MODE_DEMO', - 'ACCOUNT_TRADE_MODE_CONTEST', - 'ACCOUNT_TRADE_MODE_REAL', 'ACCOUNT_STOPOUT_MODE_PERCENT', - 'ACCOUNT_STOPOUT_MODE_MONEY', - 'STAT_INITIAL_DEPOSIT', 'STAT_WITHDRAWAL', 'STAT_PROFIT', - 'STAT_GROSS_PROFIT', - 'STAT_GROSS_LOSS', 'STAT_MAX_PROFITTRADE', - 'STAT_MAX_LOSSTRADE', 'STAT_CONPROFITMAX', - 'STAT_CONPROFITMAX_TRADES', 'STAT_MAX_CONWINS', - 'STAT_MAX_CONPROFIT_TRADES', - 'STAT_CONLOSSMAX', 'STAT_CONLOSSMAX_TRADES', - 'STAT_MAX_CONLOSSES', - 'STAT_MAX_CONLOSS_TRADES', 'STAT_BALANCEMIN', - 'STAT_BALANCE_DD', - 'STAT_BALANCEDD_PERCENT', 'STAT_BALANCE_DDREL_PERCENT', - 'STAT_BALANCE_DD_RELATIVE', 'STAT_EQUITYMIN', - 'STAT_EQUITY_DD', - 'STAT_EQUITYDD_PERCENT', 'STAT_EQUITY_DDREL_PERCENT', - 'STAT_EQUITY_DD_RELATIVE', 'STAT_EXPECTED_PAYOFF', - 'STAT_PROFIT_FACTOR', - 'STAT_RECOVERY_FACTOR', 'STAT_SHARPE_RATIO', - 'STAT_MIN_MARGINLEVEL', - 'STAT_CUSTOM_ONTESTER', 'STAT_DEALS', 'STAT_TRADES', - 'STAT_PROFIT_TRADES', - 'STAT_LOSS_TRADES', 'STAT_SHORT_TRADES', 'STAT_LONG_TRADES', - 'STAT_PROFIT_SHORTTRADES', 'STAT_PROFIT_LONGTRADES', - 'STAT_PROFITTRADES_AVGCON', 'STAT_LOSSTRADES_AVGCON', - 'SERIES_BARS_COUNT', 'SERIES_FIRSTDATE', 'SERIES_LASTBAR_DATE', - 'SERIES_SERVER_FIRSTDATE', 'SERIES_TERMINAL_FIRSTDATE', - 'SERIES_SYNCHRONIZED', - 'OP_BUY', 'OP_SELL', 'OP_BUYLIMIT', 'OP_SELLLIMIT', - 'OP_BUYSTOP', 'OP_SELLSTOP', - 'TRADE_ACTION_DEAL', 'TRADE_ACTION_PENDING', - 'TRADE_ACTION_SLTP', - 'TRADE_ACTION_MODIFY', 'TRADE_ACTION_REMOVE', - '__DATE__', '__DATETIME__', '__LINE__', '__FILE__', - '__PATH__', '__FUNCTION__', - '__FUNCSIG__', '__MQLBUILD__', '__MQL4BUILD__', - 'M_E', 'M_LOG2E', 'M_LOG10E', 'M_LN2', 'M_LN10', - 'M_PI', 'M_PI_2', 'M_PI_4', 'M_1_PI', - 'M_2_PI', 'M_2_SQRTPI', 'M_SQRT2', 'M_SQRT1_2', - 'CHAR_MIN', 'CHAR_MAX', 'UCHAR_MAX', - 'SHORT_MIN', 'SHORT_MAX', 'USHORT_MAX', - 'INT_MIN', 'INT_MAX', 'UINT_MAX', - 'LONG_MIN', 'LONG_MAX', 
'ULONG_MAX', - 'DBL_MIN', 'DBL_MAX', 'DBL_EPSILON', 'DBL_DIG', 'DBL_MANT_DIG', - 'DBL_MAX_10_EXP', 'DBL_MAX_EXP', 'DBL_MIN_10_EXP', 'DBL_MIN_EXP', - 'FLT_MIN', 'FLT_MAX', 'FLT_EPSILON', - 'FLT_DIG', 'FLT_MANT_DIG', 'FLT_MAX_10_EXP', - 'FLT_MAX_EXP', 'FLT_MIN_10_EXP', 'FLT_MIN_EXP', 'REASON_PROGRAM' - 'REASON_REMOVE', 'REASON_RECOMPILE', - 'REASON_CHARTCHANGE', 'REASON_CHARTCLOSE', - 'REASON_PARAMETERS', 'REASON_ACCOUNT', - 'REASON_TEMPLATE', 'REASON_INITFAILED', - 'REASON_CLOSE', 'POINTER_INVALID' - 'POINTER_DYNAMIC', 'POINTER_AUTOMATIC', - 'NULL', 'EMPTY', 'EMPTY_VALUE', 'CLR_NONE', 'WHOLE_ARRAY', - 'CHARTS_MAX', 'clrNONE', 'EMPTY_VALUE', 'INVALID_HANDLE', - 'IS_DEBUG_MODE', 'IS_PROFILE_MODE', 'NULL', 'WHOLE_ARRAY', 'WRONG_VALUE', - 'ERR_NO_ERROR', 'ERR_NO_RESULT', 'ERR_COMMON_ERROR', - 'ERR_INVALID_TRADE_PARAMETERS', - 'ERR_SERVER_BUSY', 'ERR_OLD_VERSION', 'ERR_NO_CONNECTION', - 'ERR_NOT_ENOUGH_RIGHTS', - 'ERR_TOO_FREQUENT_REQUESTS', 'ERR_MALFUNCTIONAL_TRADE', - 'ERR_ACCOUNT_DISABLED', - 'ERR_INVALID_ACCOUNT', 'ERR_TRADE_TIMEOUT', - 'ERR_INVALID_PRICE', 'ERR_INVALID_STOPS', - 'ERR_INVALID_TRADE_VOLUME', 'ERR_MARKET_CLOSED', - 'ERR_TRADE_DISABLED', - 'ERR_NOT_ENOUGH_MONEY', 'ERR_PRICE_CHANGED', - 'ERR_OFF_QUOTES', 'ERR_BROKER_BUSY', - 'ERR_REQUOTE', 'ERR_ORDER_LOCKED', - 'ERR_LONG_POSITIONS_ONLY_ALLOWED', 'ERR_TOO_MANY_REQUESTS', - 'ERR_TRADE_MODIFY_DENIED', 'ERR_TRADE_CONTEXT_BUSY', - 'ERR_TRADE_EXPIRATION_DENIED', - 'ERR_TRADE_TOO_MANY_ORDERS', 'ERR_TRADE_HEDGE_PROHIBITED', - 'ERR_TRADE_PROHIBITED_BY_FIFO', - 'FILE_READ', 'FILE_WRITE', 'FILE_BIN', 'FILE_CSV', 'FILE_TXT', - 'FILE_ANSI', 'FILE_UNICODE', - 'FILE_SHARE_READ', 'FILE_SHARE_WRITE', 'FILE_REWRITE', - 'FILE_COMMON', 'FILE_EXISTS', - 'FILE_CREATE_DATE', 'FILE_MODIFY_DATE', - 'FILE_ACCESS_DATE', 'FILE_SIZE', 'FILE_POSITION', - 'FILE_END', 'FILE_LINE_END', 'FILE_IS_COMMON', - 'FILE_IS_TEXT', 'FILE_IS_BINARY', - 'FILE_IS_CSV', 'FILE_IS_ANSI', 'FILE_IS_READABLE', 'FILE_IS_WRITABLE', - 'SEEK_SET', 'SEEK_CUR', 'SEEK_END', 'CP_ACP', - 'CP_OEMCP', 'CP_MACCP', 'CP_THREAD_ACP', - 'CP_SYMBOL', 'CP_UTF7', 'CP_UTF8', 'IDOK', 'IDCANCEL', 'IDABORT', - 'IDRETRY', 'IDIGNORE', 'IDYES', 'IDNO', 'IDTRYAGAIN', 'IDCONTINUE', - 'MB_OK', 'MB_OKCANCEL', 'MB_ABORTRETRYIGNORE', 'MB_YESNOCANCEL', - 'MB_YESNO', 'MB_RETRYCANCEL', - 'MB_CANCELTRYCONTINUE', 'MB_ICONSTOP', 'MB_ICONERROR', - 'MB_ICONHAND', 'MB_ICONQUESTION', - 'MB_ICONEXCLAMATION', 'MB_ICONWARNING', - 'MB_ICONINFORMATION', 'MB_ICONASTERISK', - 'MB_DEFBUTTON1', 'MB_DEFBUTTON2', 'MB_DEFBUTTON3', - 'MB_DEFBUTTON4'), suffix=r'\b'), - Name.Constant), - (words(( - 'Black', 'DarkGreen', 'DarkSlateGray', 'Olive', - 'Green', 'Teal', 'Navy', 'Purple', - 'Maroon', 'Indigo', 'MidnightBlue', 'DarkBlue', - 'DarkOliveGreen', 'SaddleBrown', - 'ForestGreen', 'OliveDrab', 'SeaGreen', - 'DarkGoldenrod', 'DarkSlateBlue', - 'Sienna', 'MediumBlue', 'Brown', 'DarkTurquoise', - 'DimGray', 'LightSeaGreen', - 'DarkViolet', 'FireBrick', 'MediumVioletRed', - 'MediumSeaGreen', 'Chocolate', - 'Crimson', 'SteelBlue', 'Goldenrod', 'MediumSpringGreen', - 'LawnGreen', 'CadetBlue', - 'DarkOrchid', 'YellowGreen', 'LimeGreen', 'OrangeRed', - 'DarkOrange', 'Orange', - 'Gold', 'Yellow', 'Chartreuse', 'Lime', 'SpringGreen', - 'Aqua', 'DeepSkyBlue', 'Blue', - 'Magenta', 'Red', 'Gray', 'SlateGray', 'Peru', 'BlueViolet', - 'LightSlateGray', 'DeepPink', - 'MediumTurquoise', 'DodgerBlue', 'Turquoise', 'RoyalBlue', - 'SlateBlue', 'DarkKhaki', - 'IndianRed', 'MediumOrchid', 'GreenYellow', - 'MediumAquamarine', 'DarkSeaGreen', - 'Tomato', 
'RosyBrown', 'Orchid', 'MediumPurple',
-                'PaleVioletRed', 'Coral', 'CornflowerBlue',
-                'DarkGray', 'SandyBrown', 'MediumSlateBlue',
-                'Tan', 'DarkSalmon', 'BurlyWood',
-                'HotPink', 'Salmon', 'Violet', 'LightCoral', 'SkyBlue',
-                'LightSalmon', 'Plum',
-                'Khaki', 'LightGreen', 'Aquamarine', 'Silver',
-                'LightSkyBlue', 'LightSteelBlue',
-                'LightBlue', 'PaleGreen', 'Thistle', 'PowderBlue',
-                'PaleGoldenrod', 'PaleTurquoise',
-                'LightGray', 'Wheat', 'NavajoWhite', 'Moccasin',
-                'LightPink', 'Gainsboro', 'PeachPuff',
-                'Pink', 'Bisque', 'LightGoldenrod', 'BlanchedAlmond',
-                'LemonChiffon', 'Beige',
-                'AntiqueWhite', 'PapayaWhip', 'Cornsilk',
-                'LightYellow', 'LightCyan', 'Linen',
-                'Lavender', 'MistyRose', 'OldLace', 'WhiteSmoke',
-                'Seashell', 'Ivory', 'Honeydew',
-                'AliceBlue', 'LavenderBlush', 'MintCream', 'Snow',
-                'White'), prefix='(clr)?', suffix=r'\b'),
-             Name.Constant),
-            inherit,
-        ],
-    }
-
-
-class SwiftLexer(ObjectiveCLexer):
-    """
-    For `Swift `_ source.
-    """
-    name = 'Swift'
-    filenames = ['*.swift']
-    aliases = ['swift']
-    mimetypes = ['text/x-swift']
-
-    keywords_decl = set(('class', 'deinit', 'enum', 'extension', 'func', 'import',
-                         'init', 'let', 'protocol', 'static', 'struct', 'subscript',
-                         'typealias', 'var'))
-    keywords_stmt = set(('break', 'case', 'continue', 'default', 'do', 'else',
-                         'fallthrough', 'if', 'in', 'for', 'return', 'switch',
-                         'where', 'while'))
-    keywords_type = set(('as', 'dynamicType', 'is', 'new', 'super', 'self', 'Self',
-                         'Type', '__COLUMN__', '__FILE__', '__FUNCTION__',
-                         '__LINE__'))
-    keywords_resrv = set(('associativity', 'didSet', 'get', 'infix', 'inout', 'left',
-                          'mutating', 'none', 'nonmutating', 'operator', 'override',
-                          'postfix', 'precedence', 'prefix', 'right', 'set',
-                          'unowned', 'unowned(safe)', 'unowned(unsafe)', 'weak',
-                          'willSet'))
-    operators = set(('->',))
-
-    def get_tokens_unprocessed(self, text):
-        for index, token, value in ObjectiveCLexer.get_tokens_unprocessed(self, text):
-            if token is Name:
-                if value in self.keywords_decl:
-                    token = Keyword
-                elif value in self.keywords_stmt:
-                    token = Keyword
-                elif value in self.keywords_type:
-                    token = Keyword.Type
-                elif value in self.keywords_resrv:
-                    token = Keyword.Reserved
-                elif value in self.operators:
-                    token = Operator
-            yield index, token, value
-
-
-class NitLexer(RegexLexer):
-    """
-    For `nit `_ source.
-
-    .. versionadded:: 2.0
-    """
-
-    name = 'Nit'
-    aliases = ['nit']
-    filenames = ['*.nit']
-    tokens = {
-        'root': [
-            (r'#.*?$', Comment.Single),
-            (words((
-                'package', 'module', 'import', 'class', 'abstract', 'interface',
-                'universal', 'enum', 'end', 'fun', 'type', 'init', 'redef',
-                'isa', 'do', 'readable', 'writable', 'var', 'intern', 'extern',
-                'public', 'protected', 'private', 'intrude', 'if', 'then',
-                'else', 'while', 'loop', 'for', 'in', 'and', 'or', 'not',
-                'implies', 'return', 'continue', 'break', 'abort', 'assert',
-                'new', 'is', 'once', 'super', 'self', 'true', 'false', 'nullable',
-                'null', 'as', 'isset', 'label', '__debug__'), suffix='(?=( |\n|\t|\r|\())'),
-             Keyword),
-            (r'[A-Z][A-Za-z0-9_]*', Name.Class),
-            (r'"""(([^\'\\]|\\.)|\\r|\\n)*(({{?)?(""?{{?)*""""*)', String), # Simple long string
-            (r'\'\'\'(((\\.|[^\'\\])|\\r|\\n)|\'((\\.|[^\'\\])|\\r|\\n)|'
-             r'\'\'((\\.|[^\'\\])|\\r|\\n))*\'\'\'', String), # Simple long string alt
-            (r'"""(([^\'\\]|\\.)|\\r|\\n)*((""?)?({{?""?)*{{{{*)', String), # Start long string
-            (r'}}}(((\\.|[^\'\\])|\\r|\\n))*(""?)?({{?""?)*{{{{*', String), # Mid long string
-            (r'}}}(((\\.|[^\'\\])|\\r|\\n))*({{?)?(""?{{?)*""""*', String), # End long string
-            (r'"(\\.|([^"}{\\]))*"', String), # Simple String
-            (r'"(\\.|([^"}{\\]))*{', String), # Start string
-            (r'}(\\.|([^"}{\\]))*{', String), # Mid String
-            (r'}(\\.|([^"}{\\]))*"', String), # End String
-            (r'(\'[^\'\\]\')|(\'\\.\')', String.Char),
-            (r'[0-9]+', Number.Integer),
-            (r'[0-9]*.[0-9]+', Number.Float),
-            (r'0(x|X)[0-9A-Fa-f]+', Number.Hex),
-            (r'[a-z][A-Za-z0-9_]*', Name),
-            (r'_[A-Za-z0-9_]+', Name.Variable.Instance),
-            (r'==|!=|<==>|>=|>>|>|<=|<<|<|\+|-|=|/|\*|%|\+=|-=|!|@', Operator),
-            (r'\(|\)|\[|\]|,|\.\.\.|\.\.|\.|::|:', Punctuation),
-            (r'`{[^`]*`}', Text), # Extern blocks won't be Lexed by Nit
-            ('(\r|\n| |\t)+', Text),
-        ],
-    }
+from pygments.lexers.c_like.c_cpp import CLexer, CppLexer
+from pygments.lexers.c_like.d import DLexer
+from pygments.lexers.c_like.objective import ObjectiveCLexer, \
+    ObjectiveCppLexer, LogosLexer
+from pygments.lexers.c_like.go import GoLexer
+from pygments.lexers.c_like.rust import RustLexer
+from pygments.lexers.c_like.other import ECLexer, ValaLexer, CudaLexer
+from pygments.lexers.pascal import DelphiLexer, Modula2Lexer, AdaLexer
+from pygments.lexers.cobol import CobolLexer, CobolFreeformatLexer
+from pygments.lexers.fortran import FortranLexer
+from pygments.lexers.prolog import PrologLexer
+from pygments.lexers.python import CythonLexer
+from pygments.lexers.graphics import GLShaderLexer
+from pygments.lexers.misc.blitz import BlitzBasicLexer, BlitzMaxLexer, \
+    MonkeyLexer
+from pygments.lexers.misc.dylan import DylanLexer, DylanLidLexer, \
+    DylanConsoleLexer
+from pygments.lexers.misc.ooc import OocLexer
+from pygments.lexers.misc.felix import FelixLexer
+from pygments.lexers.misc.nimrod import NimrodLexer
+
+__all__ = []
diff --git a/pygments/lexers/dotnet.py b/pygments/lexers/dotnet.py
index 2ff57a73..b6313197 100644
--- a/pygments/lexers/dotnet.py
+++ b/pygments/lexers/dotnet.py
@@ -11,9 +11,9 @@
 import re
 
 from pygments.lexer import RegexLexer, DelegatingLexer, bygroups, include, \
-    using, this, default
+    using, this, default
 from pygments.token import Punctuation, \
-    Text, Comment, Operator, Keyword, Name, String, Number, Literal, Other
+    Text, Comment, Operator, Keyword, Name, String, Number, Literal, Other
 from pygments.util import get_choice_opt, iteritems
 from pygments import unistring as uni
 
@@ -50,7 +50,7 @@ class
CSharpLexer(RegexLexer): name = 'C#' aliases = ['csharp', 'c#'] filenames = ['*.cs'] - mimetypes = ['text/x-csharp'] # inferred + mimetypes = ['text/x-csharp'] # inferred flags = re.MULTILINE | re.DOTALL | re.UNICODE @@ -75,13 +75,13 @@ class CSharpLexer(RegexLexer): tokens[levelname] = { 'root': [ # method names - (r'^([ \t]*(?:' + cs_ident + r'(?:\[\])?\s+)+?)' # return type - r'(' + cs_ident + ')' # method name + (r'^([ \t]*(?:' + cs_ident + r'(?:\[\])?\s+)+?)' # return type + r'(' + cs_ident + ')' # method name r'(\s*)(\()', # signature start bygroups(using(this), Name.Function, Text, Punctuation)), (r'^\s*\[.*?\]', Name.Attribute), (r'[^\S\n]+', Text), - (r'\\\n', Text), # line continuation + (r'\\\n', Text), # line continuation (r'//.*?\n', Comment.Single), (r'/[*].*?[*]/', Comment.Multiline), (r'\n', Text), @@ -120,7 +120,7 @@ class CSharpLexer(RegexLexer): (cs_ident, Name.Class, '#pop') ], 'namespace': [ - (r'(?=\()', Text, '#pop'), # using (resource) + (r'(?=\()', Text, '#pop'), # using (resource) ('(' + cs_ident + r'|\.)+', Name.Namespace, '#pop') ] } @@ -162,7 +162,7 @@ class NemerleLexer(RegexLexer): name = 'Nemerle' aliases = ['nemerle'] filenames = ['*.n'] - mimetypes = ['text/x-nemerle'] # inferred + mimetypes = ['text/x-nemerle'] # inferred flags = re.MULTILINE | re.DOTALL | re.UNICODE @@ -170,14 +170,14 @@ class NemerleLexer(RegexLexer): # http://www.ecma-international.org/publications/files/ECMA-ST/Ecma-334.pdf levels = dict( - none = '@?[_a-zA-Z]\w*', - basic = ('@?[_' + uni.Lu + uni.Ll + uni.Lt + uni.Lm + uni.Nl + ']' + - '[' + uni.Lu + uni.Ll + uni.Lt + uni.Lm + uni.Nl + - uni.Nd + uni.Pc + uni.Cf + uni.Mn + uni.Mc + ']*'), - full = ('@?(?:_|[^' + uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', - 'Nl') + '])' - + '[^' + uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl', - 'Nd', 'Pc', 'Cf', 'Mn', 'Mc') + ']*'), + none='@?[_a-zA-Z]\w*', + basic=('@?[_' + uni.Lu + uni.Ll + uni.Lt + uni.Lm + uni.Nl + ']' + + '[' + uni.Lu + uni.Ll + uni.Lt + uni.Lm + uni.Nl + + uni.Nd + uni.Pc + uni.Cf + uni.Mn + uni.Mc + ']*'), + full=('@?(?:_|[^' + uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', + 'Nl') + '])' + + '[^' + uni.allexcept('Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Nl', + 'Nd', 'Pc', 'Cf', 'Mn', 'Mc') + ']*'), ) tokens = {} @@ -187,13 +187,13 @@ class NemerleLexer(RegexLexer): tokens[levelname] = { 'root': [ # method names - (r'^([ \t]*(?:' + cs_ident + r'(?:\[\])?\s+)+?)' # return type - r'(' + cs_ident + ')' # method name + (r'^([ \t]*(?:' + cs_ident + r'(?:\[\])?\s+)+?)' # return type + r'(' + cs_ident + ')' # method name r'(\s*)(\()', # signature start bygroups(using(this), Name.Function, Text, Punctuation)), (r'^\s*\[.*?\]', Name.Attribute), (r'[^\S\n]+', Text), - (r'\\\n', Text), # line continuation + (r'\\\n', Text), # line continuation (r'//.*?\n', Comment.Single), (r'/[*].*?[*]/', Comment.Multiline), (r'\n', Text), @@ -249,7 +249,7 @@ class NemerleLexer(RegexLexer): (cs_ident, Name.Class, '#pop') ], 'namespace': [ - (r'(?=\()', Text, '#pop'), # using (resource) + (r'(?=\()', Text, '#pop'), # using (resource) ('(' + cs_ident + r'|\.)+', Name.Namespace, '#pop') ], 'splice-string': [ @@ -372,7 +372,7 @@ class VbNetLexer(RegexLexer): name = 'VB.net' aliases = ['vb.net', 'vbnet'] filenames = ['*.vb', '*.bas'] - mimetypes = ['text/x-vbnet', 'text/x-vba'] # (?) + mimetypes = ['text/x-vbnet', 'text/x-vba'] # (?) 
flags = re.MULTILINE | re.IGNORECASE tokens = { @@ -431,7 +431,7 @@ class VbNetLexer(RegexLexer): (r'\d+([SILDFR]|US|UI|UL)?', Number.Integer), (r'&H[0-9a-f]+([SILDFR]|US|UI|UL)?', Number.Integer), (r'&O[0-7]+([SILDFR]|US|UI|UL)?', Number.Integer), - (r'_\n', Text), # Line continuation + (r'_\n', Text), # Line continuation ], 'string': [ (r'""', String), @@ -488,7 +488,7 @@ class GenericAspxLexer(RegexLexer): } -#TODO support multiple languages within the same source file +# TODO support multiple languages within the same source file class CSharpAspxLexer(DelegatingLexer): """ Lexer for highligting C# within ASP.NET pages. @@ -500,7 +500,7 @@ class CSharpAspxLexer(DelegatingLexer): mimetypes = [] def __init__(self, **options): - super(CSharpAspxLexer, self).__init__(CSharpLexer,GenericAspxLexer, + super(CSharpAspxLexer, self).__init__(CSharpLexer, GenericAspxLexer, **options) def analyse_text(text): @@ -521,8 +521,8 @@ class VbNetAspxLexer(DelegatingLexer): mimetypes = [] def __init__(self, **options): - super(VbNetAspxLexer, self).__init__(VbNetLexer,GenericAspxLexer, - **options) + super(VbNetAspxLexer, self).__init__(VbNetLexer, GenericAspxLexer, + **options) def analyse_text(text): if re.search(r'Page\s*Language="Vb"', text, re.I) is not None: @@ -548,15 +548,15 @@ class FSharpLexer(RegexLexer): mimetypes = ['text/x-fsharp'] keywords = [ - 'abstract', 'as', 'assert', 'base', 'begin', 'class', 'default', - 'delegate', 'do!', 'do', 'done', 'downcast', 'downto', 'elif', 'else', - 'end', 'exception', 'extern', 'false', 'finally', 'for', 'function', - 'fun', 'global', 'if', 'inherit', 'inline', 'interface', 'internal', - 'in', 'lazy', 'let!', 'let', 'match', 'member', 'module', 'mutable', - 'namespace', 'new', 'null', 'of', 'open', 'override', 'private', 'public', - 'rec', 'return!', 'return', 'select', 'static', 'struct', 'then', 'to', - 'true', 'try', 'type', 'upcast', 'use!', 'use', 'val', 'void', 'when', - 'while', 'with', 'yield!', 'yield', + 'abstract', 'as', 'assert', 'base', 'begin', 'class', 'default', + 'delegate', 'do!', 'do', 'done', 'downcast', 'downto', 'elif', 'else', + 'end', 'exception', 'extern', 'false', 'finally', 'for', 'function', + 'fun', 'global', 'if', 'inherit', 'inline', 'interface', 'internal', + 'in', 'lazy', 'let!', 'let', 'match', 'member', 'module', 'mutable', + 'namespace', 'new', 'null', 'of', 'open', 'override', 'private', 'public', + 'rec', 'return!', 'return', 'select', 'static', 'struct', 'then', 'to', + 'true', 'try', 'type', 'upcast', 'use!', 'use', 'val', 'void', 'when', + 'while', 'with', 'yield!', 'yield', ] # Reserved words; cannot hurt to color them as keywords too. 
keywords += [ @@ -567,10 +567,10 @@ class FSharpLexer(RegexLexer): 'virtual', 'volatile', ] keyopts = [ - '!=', '#', '&&', '&', '\(', '\)', '\*', '\+', ',', '-\.', - '->', '-', '\.\.', '\.', '::', ':=', ':>', ':', ';;', ';', '<-', - '<\]', '<', '>\]', '>', '\?\?', '\?', '\[<', '\[\|', '\[', '\]', - '_', '`', '{', '\|\]', '\|', '}', '~', '<@@', '<@', '=', '@>', '@@>', + '!=', '#', '&&', '&', '\(', '\)', '\*', '\+', ',', '-\.', + '->', '-', '\.\.', '\.', '::', ':=', ':>', ':', ';;', ';', '<-', + '<\]', '<', '>\]', '>', '\?\?', '\?', '\[<', '\[\|', '\[', '\]', + '_', '`', '{', '\|\]', '\|', '}', '~', '<@@', '<@', '=', '@>', '@@>', ] operators = r'[!$%&*+\./:<=>?@^|~-]' @@ -636,7 +636,7 @@ class FSharpLexer(RegexLexer): (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'B?", String.Char), (r"'.'", String.Char), - (r"'", Keyword), # a stray quote is another syntax element + (r"'", Keyword), # a stray quote is another syntax element (r'@?"', String.Double, 'string'), diff --git a/pygments/lexers/eiffel.py b/pygments/lexers/eiffel.py new file mode 100644 index 00000000..28e3fcac --- /dev/null +++ b/pygments/lexers/eiffel.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.eiffel + ~~~~~~~~~~~~~~~~~~~~~~ + + Lexer for the Eiffel language. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +from pygments.lexer import RegexLexer, include, words +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation + +__all__ = ['EiffelLexer'] + + +class EiffelLexer(RegexLexer): + """ + For `Eiffel `_ source code. + + .. versionadded:: 2.0 + """ + name = 'Eiffel' + aliases = ['eiffel'] + filenames = ['*.e'] + mimetypes = ['text/x-eiffel'] + + tokens = { + 'root': [ + (r'[^\S\n]+', Text), + (r'--.*?\n', Comment.Single), + (r'[^\S\n]+', Text), + # Please note that keyword and operator are case insensitive. + (r'(?i)(true|false|void|current|result|precursor)\b', Keyword.Constant), + (r'(?i)(and(\s+then)?|not|xor|implies|or(\s+else)?)\b', Operator.Word), + (words(( + 'across', 'agent', 'alias', 'all', 'as', 'assign', 'attached', + 'attribute', 'check', 'class', 'convert', 'create', 'debug', + 'deferred', 'detachable', 'do', 'else', 'elseif', 'end', 'ensure', + 'expanded', 'export', 'external', 'feature', 'from', 'frozen', 'if', + 'inherit', 'inspect', 'invariant', 'like', 'local', 'loop', 'none', + 'note', 'obsolete', 'old', 'once', 'only', 'redefine', 'rename', + 'require', 'rescue', 'retry', 'select', 'separate', 'then', + 'undefine', 'until', 'variant', 'when'), prefix=r'(?i)\b', suffix=r'\b'), + Keyword.Reserved), + (r'"\[(([^\]%]|\n)|%(.|\n)|\][^"])*?\]"', String), + (r'"([^"%\n]|%.)*?"', String), + include('numbers'), + (r"'([^'%]|%'|%%)'", String.Char), + (r"(//|\\\\|>=|<=|:=|/=|~|/~|[\\\?!#%&@|+/\-=\>\*$<|^\[\]])", Operator), + (r"([{}():;,.])", Punctuation), + (r'([a-z]\w*)|([A-Z][A-Z0-9_]*[a-z]\w*)', Name), + (r'([A-Z][A-Z0-9_]*)', Name.Class), + (r'\n+', Text), + ], + 'numbers': [ + (r'0[xX][a-fA-F0-9]+', Number.Hex), + (r'0[bB][0-1]+', Number.Bin), + (r'0[cC][0-7]+', Number.Oct), + (r'([0-9]+\.[0-9]*)|([0-9]*\.[0-9]+)', Number.Float), + (r'[0-9]+', Number.Integer), + ], + } diff --git a/pygments/lexers/fortran.py b/pygments/lexers/fortran.py new file mode 100644 index 00000000..2e08a0c2 --- /dev/null +++ b/pygments/lexers/fortran.py @@ -0,0 +1,160 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.fortran + ~~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for Fortran languages. 
+ + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import re + +from pygments.lexer import RegexLexer, include, words +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation + +__all__ = ['FortranLexer'] + + +class FortranLexer(RegexLexer): + """ + Lexer for FORTRAN 90 code. + + .. versionadded:: 0.10 + """ + name = 'Fortran' + aliases = ['fortran'] + filenames = ['*.f', '*.f90', '*.F', '*.F90'] + mimetypes = ['text/x-fortran'] + flags = re.IGNORECASE + + # Data Types: INTEGER, REAL, COMPLEX, LOGICAL, CHARACTER and DOUBLE PRECISION + # Operators: **, *, +, -, /, <, >, <=, >=, ==, /= + # Logical (?): NOT, AND, OR, EQV, NEQV + + # Builtins: + # http://gcc.gnu.org/onlinedocs/gcc-3.4.6/g77/Table-of-Intrinsic-Functions.html + + tokens = { + 'root': [ + (r'!.*\n', Comment), + include('strings'), + include('core'), + (r'[a-z]\w*', Name.Variable), + include('nums'), + (r'[\s]+', Text), + ], + 'core': [ + # Statements + (words(( + 'ABSTRACT', 'ACCEPT', 'ALL', 'ALLSTOP', 'ALLOCATABLE', 'ALLOCATE', + 'ARRAY', 'ASSIGN', 'ASSOCIATE', 'ASYNCHRONOUS', 'BACKSPACE', 'BIND', + 'BLOCK', 'BLOCKDATA', 'BYTE', 'CALL', 'CASE', 'CLASS', 'CLOSE', + 'CODIMENSION', 'COMMON', 'CONCURRRENT', 'CONTIGUOUS', 'CONTAINS', + 'CONTINUE', 'CRITICAL', 'CYCLE', 'DATA', 'DEALLOCATE', 'DECODE', + 'DEFERRED', 'DIMENSION', 'DO', 'ELEMENTAL', 'ELSE', 'ENCODE', 'END', + 'ENTRY', 'ENUM', 'ENUMERATOR', 'EQUIVALENCE', 'EXIT', 'EXTENDS', + 'EXTERNAL', 'EXTRINSIC', 'FILE', 'FINAL', 'FORALL', 'FORMAT', + 'FUNCTION', 'GENERIC', 'GOTO', 'IF', 'IMAGES', 'IMPLICIT', + 'IMPORT', 'IMPURE', 'INCLUDE', 'INQUIRE', 'INTENT', 'INTERFACE', + 'INTRINSIC', 'IS', 'LOCK', 'MEMORY', 'MODULE', 'NAMELIST', 'NULLIFY', + 'NONE', 'NON_INTRINSIC', 'NON_OVERRIDABLE', 'NOPASS', 'OPEN', 'OPTIONAL', + 'OPTIONS', 'PARAMETER', 'PASS', 'PAUSE', 'POINTER', 'PRINT', 'PRIVATE', + 'PROGRAM', 'PROCEDURE', 'PROTECTED', 'PUBLIC', 'PURE', 'READ', + 'RECURSIVE', 'RESULT', 'RETURN', 'REWIND', 'SAVE', 'SELECT', 'SEQUENCE', + 'STOP', 'SUBMODULE', 'SUBROUTINE', 'SYNC', 'SYNCALL', 'SYNCIMAGES', + 'SYNCMEMORY', 'TARGET', 'THEN', 'TYPE', 'UNLOCK', 'USE', 'VALUE', + 'VOLATILE', 'WHERE', 'WRITE', 'WHILE'), prefix=r'\b', suffix=r'\s*\b'), + Keyword), + + # Data Types + (words(( + 'CHARACTER', 'COMPLEX', 'DOUBLE PRECISION', 'DOUBLE COMPLEX', 'INTEGER', + 'LOGICAL', 'REAL', 'C_INT', 'C_SHORT', 'C_LONG', 'C_LONG_LONG', 'C_SIGNED_CHAR', + 'C_SIZE_T', 'C_INT8_T', 'C_INT16_T', 'C_INT32_T', 'C_INT64_T', 'C_INT_LEAST8_T', + 'C_INT_LEAST16_T', 'C_INT_LEAST32_T', 'C_INT_LEAST64_T', 'C_INT_FAST8_T', + 'C_INT_FAST16_T', 'C_INT_FAST32_T', 'C_INT_FAST64_T', 'C_INTMAX_T', + 'C_INTPTR_T', 'C_FLOAT', 'C_DOUBLE', 'C_LONG_DOUBLE', 'C_FLOAT_COMPLEX', + 'C_DOUBLE_COMPLEX', 'C_LONG_DOUBLE_COMPLEX', 'C_BOOL', 'C_CHAR', 'C_PTR', + 'C_FUNPTR'), prefix=r'\b', suffix=r'\s*\b'), + Keyword.Type), + + # Operators + (r'(\*\*|\*|\+|-|\/|<|>|<=|>=|==|\/=|=)', Operator), + + (r'(::)', Keyword.Declaration), + + (r'[()\[\],:&%;]', Punctuation), + # Intrinsics + (words(( + 'Abort', 'Abs', 'Access', 'AChar', 'ACos', 'ACosH', 'AdjustL', + 'AdjustR', 'AImag', 'AInt', 'Alarm', 'All', 'Allocated', 'ALog', + 'AMax', 'AMin', 'AMod', 'And', 'ANInt', 'Any', 'ASin', 'ASinH', + 'Associated', 'ATan', 'ATanH', 'Atomic_Define', 'Atomic_Ref', + 'BesJ', 'BesJN', 'Bessel_J0', 'Bessel_J1', 'Bessel_JN', 'Bessel_Y0', + 'Bessel_Y1', 'Bessel_YN', 'BesY', 'BesYN', 'BGE', 'BGT', 'BLE', + 'BLT', 'Bit_Size', 'BTest', 'CAbs', 'CCos', 
'Ceiling', 'CExp', + 'Char', 'ChDir', 'ChMod', 'CLog', 'Cmplx', 'Command_Argument_Count', + 'Complex', 'Conjg', 'Cos', 'CosH', 'Count', 'CPU_Time', 'CShift', + 'CSin', 'CSqRt', 'CTime', 'C_Funloc', 'C_Loc', 'C_Associated', + 'C_Null_Ptr', 'C_Null_Funptr', 'C_F_Pointer', 'C_F_ProcPointer', + 'C_Null_Char', 'C_Alert', 'C_Backspace', 'C_Form_Feed', 'C_FunLoc', + 'C_Loc', 'C_Sizeof', 'C_New_Line', 'C_Carriage_Return', + 'C_Horizontal_Tab', 'C_Vertical_Tab', 'DAbs', 'DACos', 'DASin', + 'DATan', 'Date_and_Time', 'DbesJ', 'DbesJ', 'DbesJN', 'DbesY', + 'DbesY', 'DbesYN', 'Dble', 'DCos', 'DCosH', 'DDiM', 'DErF', + 'DErFC', 'DExp', 'Digits', 'DiM', 'DInt', 'DLog', 'DLog', 'DMax', + 'DMin', 'DMod', 'DNInt', 'Dot_Product', 'DProd', 'DSign', 'DSinH', + 'DShiftL', 'DShiftR', 'DSin', 'DSqRt', 'DTanH', 'DTan', 'DTime', + 'EOShift', 'Epsilon', 'ErF', 'ErFC', 'ErFC_Scaled', 'ETime', + 'Execute_Command_Line', 'Exit', 'Exp', 'Exponent', 'Extends_Type_Of', + 'FDate', 'FGet', 'FGetC', 'FindLoc', 'Float', 'Floor', 'Flush', + 'FNum', 'FPutC', 'FPut', 'Fraction', 'FSeek', 'FStat', 'FTell', + 'Gamma', 'GError', 'GetArg', 'Get_Command', 'Get_Command_Argument', + 'Get_Environment_Variable', 'GetCWD', 'GetEnv', 'GetGId', 'GetLog', + 'GetPId', 'GetUId', 'GMTime', 'HostNm', 'Huge', 'Hypot', 'IAbs', + 'IAChar', 'IAll', 'IAnd', 'IAny', 'IArgC', 'IBClr', 'IBits', + 'IBSet', 'IChar', 'IDate', 'IDiM', 'IDInt', 'IDNInt', 'IEOr', + 'IErrNo', 'IFix', 'Imag', 'ImagPart', 'Image_Index', 'Index', + 'Int', 'IOr', 'IParity', 'IRand', 'IsaTty', 'IShft', 'IShftC', + 'ISign', 'Iso_C_Binding', 'Is_Contiguous', 'Is_Iostat_End', + 'Is_Iostat_Eor', 'ITime', 'Kill', 'Kind', 'LBound', 'LCoBound', + 'Len', 'Len_Trim', 'LGe', 'LGt', 'Link', 'LLe', 'LLt', 'LnBlnk', + 'Loc', 'Log', 'Log_Gamma', 'Logical', 'Long', 'LShift', 'LStat', + 'LTime', 'MaskL', 'MaskR', 'MatMul', 'Max', 'MaxExponent', + 'MaxLoc', 'MaxVal', 'MClock', 'Merge', 'Merge_Bits', 'Move_Alloc', + 'Min', 'MinExponent', 'MinLoc', 'MinVal', 'Mod', 'Modulo', 'MvBits', + 'Nearest', 'New_Line', 'NInt', 'Norm2', 'Not', 'Null', 'Num_Images', + 'Or', 'Pack', 'Parity', 'PError', 'Precision', 'Present', 'Product', + 'Radix', 'Rand', 'Random_Number', 'Random_Seed', 'Range', 'Real', + 'RealPart', 'Rename', 'Repeat', 'Reshape', 'RRSpacing', 'RShift', + 'Same_Type_As', 'Scale', 'Scan', 'Second', 'Selected_Char_Kind', + 'Selected_Int_Kind', 'Selected_Real_Kind', 'Set_Exponent', 'Shape', + 'ShiftA', 'ShiftL', 'ShiftR', 'Short', 'Sign', 'Signal', 'SinH', + 'Sin', 'Sleep', 'Sngl', 'Spacing', 'Spread', 'SqRt', 'SRand', + 'Stat', 'Storage_Size', 'Sum', 'SymLnk', 'System', 'System_Clock', + 'Tan', 'TanH', 'Time', 'This_Image', 'Tiny', 'TrailZ', 'Transfer', + 'Transpose', 'Trim', 'TtyNam', 'UBound', 'UCoBound', 'UMask', + 'Unlink', 'Unpack', 'Verify', 'XOr', 'ZAbs', 'ZCos', 'ZExp', + 'ZLog', 'ZSin', 'ZSqRt'), prefix=r'\b', suffix=r'\s*\b'), + Name.Builtin), + + # Booleans + (r'\.(true|false)\.', Name.Builtin), + # Comparing Operators + (r'\.(eq|ne|lt|le|gt|ge|not|and|or|eqv|neqv)\.', Operator.Word), + ], + + 'strings': [ + (r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double), + (r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single), + ], + + 'nums': [ + (r'\d+(?![.e])(_[a-z]\w+)?', Number.Integer), + (r'[+-]?\d*\.\d+(e[-+]?\d+)?(_[a-z]\w+)?', Number.Float), + (r'[+-]?\d+\.\d*(e[-+]?\d+)?(_[a-z]\w+)?', Number.Float), + ], + } diff --git a/pygments/lexers/graphics.py b/pygments/lexers/graphics.py new file mode 100644 index 00000000..de7db6ba --- /dev/null +++ b/pygments/lexers/graphics.py @@ -0,0 +1,73 @@ 
+# -*- coding: utf-8 -*- +""" + pygments.lexers.graphics + ~~~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for computer graphics related languages. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +from pygments.lexer import RegexLexer, words +from pygments.token import Text, Comment, Operator, Keyword, Name, \ + Number, Punctuation + +__all__ = ['GLShaderLexer'] + + +class GLShaderLexer(RegexLexer): + """ + GLSL (OpenGL Shader) lexer. + + .. versionadded:: 1.1 + """ + name = 'GLSL' + aliases = ['glsl'] + filenames = ['*.vert', '*.frag', '*.geo'] + mimetypes = ['text/x-glslsrc'] + + tokens = { + 'root': [ + (r'^#.*', Comment.Preproc), + (r'//.*', Comment.Single), + (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline), + (r'\+|-|~|!=?|\*|/|%|<<|>>|<=?|>=?|==?|&&?|\^|\|\|?', + Operator), + (r'[?:]', Operator), # quick hack for ternary + (r'\bdefined\b', Operator), + (r'[;{}(),\[\]]', Punctuation), + # FIXME when e is present, no decimal point needed + (r'[+-]?\d*\.\d+([eE][-+]?\d+)?', Number.Float), + (r'[+-]?\d+\.\d*([eE][-+]?\d+)?', Number.Float), + (r'0[xX][0-9a-fA-F]*', Number.Hex), + (r'0[0-7]*', Number.Oct), + (r'[1-9][0-9]*', Number.Integer), + (words(( + 'attribute', 'const', 'uniform', 'varying', 'centroid', 'break', + 'continue', 'do', 'for', 'while', 'if', 'else', 'in', 'out', + 'inout', 'float', 'int', 'void', 'bool', 'true', 'false', + 'invariant', 'discard', 'return', 'mat2', 'mat3' 'mat4', + 'mat2x2', 'mat3x2', 'mat4x2', 'mat2x3', 'mat3x3', 'mat4x3', + 'mat2x4', 'mat3x4', 'mat4x4', 'vec2', 'vec3', 'vec4', + 'ivec2', 'ivec3', 'ivec4', 'bvec2', 'bvec3', 'bvec4', + 'sampler1D', 'sampler2D', 'sampler3D' 'samplerCube', + 'sampler1DShadow', 'sampler2DShadow', 'struct'), + prefix=r'\b', suffix=r'\b'), + Keyword), + (words(( + 'asm', 'class', 'union', 'enum', 'typedef', 'template', 'this', + 'packed', 'goto', 'switch', 'default', 'inline', 'noinline', + 'volatile', 'public', 'static', 'extern', 'external', 'interface', + 'long', 'short', 'double', 'half', 'fixed', 'unsigned', 'lowp', + 'mediump', 'highp', 'precision', 'input', 'output', + 'hvec2', 'hvec3', 'hvec4', 'dvec2', 'dvec3', 'dvec4', + 'fvec2', 'fvec3', 'fvec4', 'sampler2DRect', 'sampler3DRect', + 'sampler2DRectShadow', 'sizeof', 'cast', 'namespace', 'using'), + prefix=r'\b', suffix=r'\b'), + Keyword), # future use + (r'[a-zA-Z_][a-zA-Z_0-9]*', Name), + (r'\.', Punctuation), + (r'\s+', Text), + ], + } diff --git a/pygments/lexers/inform.py b/pygments/lexers/inform.py new file mode 100644 index 00000000..c050cab1 --- /dev/null +++ b/pygments/lexers/inform.py @@ -0,0 +1,724 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.inform + ~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for Inform languages. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import re + +from pygments.lexer import RegexLexer, include, bygroups, using, \ + this, default, words +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation, Error, Generic + +__all__ = ['Inform6Lexer', 'Inform6TemplateLexer', 'Inform7Lexer'] + + +class Inform6Lexer(RegexLexer): + """ + For `Inform 6 `_ source code. + + .. versionadded:: 2.0 + """ + + name = 'Inform 6' + aliases = ['inform6', 'i6'] + filenames = ['*.inf'] + + flags = re.MULTILINE | re.DOTALL | re.UNICODE + + _name = r'[a-zA-Z_][a-zA-Z_0-9]*' + + # Inform 7 maps these four character classes to their ASCII + # equivalents. 
To support Inform 6 inclusions within Inform 7, + # Inform6Lexer maps them too. + _dash = u'\\-\u2010-\u2014' + _dquote = u'"\u201c\u201d' + _squote = u"'\u2018\u2019" + _newline = u'\\n\u0085\u2028\u2029' + + tokens = { + 'root': [ + (r'(\A(!%%[^%s]*[%s])+)?' % (_newline, _newline), Comment.Preproc, + 'directive') + ], + '_whitespace': [ + (r'\s+', Text), + (r'![^%s]*' % _newline, Comment.Single) + ], + 'default': [ + include('_whitespace'), + (r'\[', Punctuation, 'many-values'), # Array initialization + (r':|(?=;)', Punctuation, '#pop'), + (r'<', Punctuation), # Second angle bracket in an action statement + default(('expression', '_expression')) + ], + + # Expressions + '_expression': [ + include('_whitespace'), + (r'(?=sp\b)', Text, '#pop'), + (r'(?=[%s%s$0-9#a-zA-Z_])' % (_dquote, _squote), Text, + ('#pop', 'value')), + (r'\+\+|[%s]{1,2}(?!>)|~~?' % _dash, Operator), + (r'(?=[()\[%s,?@{:;])' % _dash, Text, '#pop') + ], + 'expression': [ + include('_whitespace'), + (r'\(', Punctuation, ('expression', '_expression')), + (r'\)', Punctuation, '#pop'), + (r'\[', Punctuation, ('#pop', 'statements', 'locals')), + (r'>(?=(\s+|(![^%s]*))*[>;])' % _newline, Punctuation), + (r'\+\+|[%s]{2}(?!>)' % _dash, Operator), + (r',', Punctuation, '_expression'), + (r'&&?|\|\|?|[=~><]?=|[%s]{1,2}>?|\.\.?[&#]?|::|[<>+*/%%]' % _dash, + Operator, '_expression'), + (r'(has|hasnt|in|notin|ofclass|or|provides)\b', Operator.Word, + '_expression'), + (r'sp\b', Name), + (r'\?~?', Name.Label, 'label?'), + (r'[@{]', Error), + default('#pop') + ], + '_assembly-expression': [ + (r'\(', Punctuation, ('#push', '_expression')), + (r'[\[\]]', Punctuation), + (r'[%s]>' % _dash, Punctuation, '_expression'), + (r'sp\b', Keyword.Pseudo), + (r';', Punctuation, '#pop:3'), + include('expression') + ], + '_for-expression': [ + (r'\)', Punctuation, '#pop:2'), + (r':', Punctuation, '#pop'), + include('expression') + ], + '_keyword-expression': [ + (r'(from|near|to)\b', Keyword, '_expression'), + include('expression') + ], + '_list-expression': [ + (r',', Punctuation, '#pop'), + include('expression') + ], + '_object-expression': [ + (r'has\b', Keyword.Declaration, '#pop'), + include('_list-expression') + ], + + # Values + 'value': [ + include('_whitespace'), + # Strings + (r'[%s][^@][%s]' % (_squote, _squote), String.Char, '#pop'), + (r'([%s])(@{[0-9a-fA-F]{1,4}})([%s])' % (_squote, _squote), + bygroups(String.Char, String.Escape, String.Char), '#pop'), + (r'([%s])(@..)([%s])' % (_squote, _squote), + bygroups(String.Char, String.Escape, String.Char), '#pop'), + (r'[%s]' % _squote, String.Single, ('#pop', 'dictionary-word')), + (r'[%s]' % _dquote, String.Double, ('#pop', 'string')), + # Numbers + (r'\$[+%s][0-9]*\.?[0-9]*([eE][+%s]?[0-9]+)?' 
% (_dash, _dash), + Number.Float, '#pop'), + (r'\$[0-9a-fA-F]+', Number.Hex, '#pop'), + (r'\$\$[01]+', Number.Bin, '#pop'), + (r'[0-9]+', Number.Integer, '#pop'), + # Values prefixed by hashes + (r'(##|#a\$)(%s)' % _name, bygroups(Operator, Name), '#pop'), + (r'(#g\$)(%s)' % _name, + bygroups(Operator, Name.Variable.Global), '#pop'), + (r'#[nw]\$', Operator, ('#pop', 'obsolete-dictionary-word')), + (r'(#r\$)(%s)' % _name, bygroups(Operator, Name.Function), '#pop'), + (r'#', Name.Builtin, ('#pop', 'system-constant')), + # System functions + (words(( + 'child', 'children', 'elder', 'eldest', 'glk', 'indirect', 'metaclass', + 'parent', 'random', 'sibling', 'younger', 'youngest'), suffix=r'\b'), + Name.Builtin, '#pop'), + # Metaclasses + (r'(?i)(Class|Object|Routine|String)\b', Name.Builtin, '#pop'), + # Veneer routines + (words(( + 'Box__Routine', 'CA__Pr', 'CDefArt', 'CInDefArt', 'Cl__Ms', + 'Copy__Primitive', 'CP__Tab', 'DA__Pr', 'DB__Pr', 'DefArt', 'Dynam__String', + 'EnglishNumber', 'Glk__Wrap', 'IA__Pr', 'IB__Pr', 'InDefArt', 'Main__', + 'Meta__class', 'OB__Move', 'OB__Remove', 'OC__Cl', 'OP__Pr', 'Print__Addr', + 'Print__PName', 'PrintShortName', 'RA__Pr', 'RA__Sc', 'RL__Pr', 'R_Process', + 'RT__ChG', 'RT__ChGt', 'RT__ChLDB', 'RT__ChLDW', 'RT__ChPR', 'RT__ChPrintA', + 'RT__ChPrintC', 'RT__ChPrintO', 'RT__ChPrintS', 'RT__ChPS', 'RT__ChR', + 'RT__ChSTB', 'RT__ChSTW', 'RT__ChT', 'RT__Err', 'RT__TrPS', 'RV__Pr', + 'Symb__Tab', 'Unsigned__Compare', 'WV__Pr', 'Z__Region'), + prefix='(?i)', suffix=r'\b'), + Name.Builtin, '#pop'), + # Other built-in symbols + (words(( + 'call', 'copy', 'create', 'DEBUG', 'destroy', 'DICT_CHAR_SIZE', + 'DICT_ENTRY_BYTES', 'DICT_IS_UNICODE', 'DICT_WORD_SIZE', 'false', + 'FLOAT_INFINITY', 'FLOAT_NAN', 'FLOAT_NINFINITY', 'GOBJFIELD_CHAIN', + 'GOBJFIELD_CHILD', 'GOBJFIELD_NAME', 'GOBJFIELD_PARENT', + 'GOBJFIELD_PROPTAB', 'GOBJFIELD_SIBLING', 'GOBJ_EXT_START', + 'GOBJ_TOTAL_LENGTH', 'Grammar__Version', 'INDIV_PROP_START', 'INFIX', + 'infix__watching', 'MODULE_MODE', 'name', 'nothing', 'NUM_ATTR_BYTES', 'print', + 'print_to_array', 'recreate', 'remaining', 'self', 'sender', 'STRICT_MODE', + 'sw__var', 'sys__glob0', 'sys__glob1', 'sys__glob2', 'sys_statusline_flag', + 'TARGET_GLULX', 'TARGET_ZCODE', 'temp__global2', 'temp__global3', + 'temp__global4', 'temp_global', 'true', 'USE_MODULES', 'WORDSIZE'), + prefix='(?i)', suffix=r'\b'), + Name.Builtin, '#pop'), + # Other values + (_name, Name, '#pop') + ], + # Strings + 'dictionary-word': [ + (r'[~^]+', String.Escape), + (r'[^~^\\@({%s]+' % _squote, String.Single), + (r'[({]', String.Single), + (r'@{[0-9a-fA-F]{,4}}', String.Escape), + (r'@..', String.Escape), + (r'[%s]' % _squote, String.Single, '#pop') + ], + 'string': [ + (r'[~^]+', String.Escape), + (r'[^~^\\@({%s]+' % _dquote, String.Double), + (r'[({]', String.Double), + (r'\\', String.Escape), + (r'@(\\\s*[%s]\s*)*@((\\\s*[%s]\s*)*[0-9])*' % + (_newline, _newline), String.Escape), + (r'@(\\\s*[%s]\s*)*{((\\\s*[%s]\s*)*[0-9a-fA-F]){,4}' + r'(\\\s*[%s]\s*)*}' % (_newline, _newline, _newline), + String.Escape), + (r'@(\\\s*[%s]\s*)*.(\\\s*[%s]\s*)*.' 
% (_newline, _newline), + String.Escape), + (r'[%s]' % _dquote, String.Double, '#pop') + ], + 'plain-string': [ + (r'[^~^\\({\[\]%s]+' % _dquote, String.Double), + (r'[~^({\[\]]', String.Double), + (r'\\', String.Escape), + (r'[%s]' % _dquote, String.Double, '#pop') + ], + # Names + '_constant': [ + include('_whitespace'), + (_name, Name.Constant, '#pop'), + include('value') + ], + '_global': [ + include('_whitespace'), + (_name, Name.Variable.Global, '#pop'), + include('value') + ], + 'label?': [ + include('_whitespace'), + (r'(%s)?' % _name, Name.Label, '#pop') + ], + 'variable?': [ + include('_whitespace'), + (r'(%s)?' % _name, Name.Variable, '#pop') + ], + # Values after hashes + 'obsolete-dictionary-word': [ + (r'\S[a-zA-Z_0-9]*', String.Other, '#pop') + ], + 'system-constant': [ + include('_whitespace'), + (_name, Name.Builtin, '#pop') + ], + + # Directives + 'directive': [ + include('_whitespace'), + (r'#', Punctuation), + (r';', Punctuation, '#pop'), + (r'\[', Punctuation, + ('default', 'statements', 'locals', 'routine-name?')), + (words(( + 'abbreviate', 'endif', 'dictionary', 'ifdef', 'iffalse', 'ifndef', 'ifnot', + 'iftrue', 'ifv3', 'ifv5', 'release', 'serial', 'switches', 'system_file', + 'version'), prefix='(?i)', suffix=r'\b'), + Keyword, 'default'), + (r'(?i)(array|global)\b', Keyword, + ('default', 'directive-keyword?', '_global')), + (r'(?i)attribute\b', Keyword, ('default', 'alias?', '_constant')), + (r'(?i)class\b', Keyword, + ('object-body', 'duplicates', 'class-name')), + (r'(?i)(constant|default)\b', Keyword, + ('default', 'expression', '_constant')), + (r'(?i)(end\b)(.*)', bygroups(Keyword, Text)), + (r'(?i)(extend|verb)\b', Keyword, 'grammar'), + (r'(?i)fake_action\b', Keyword, ('default', '_constant')), + (r'(?i)import\b', Keyword, 'manifest'), + (r'(?i)(include|link)\b', Keyword, + ('default', 'before-plain-string')), + (r'(?i)(lowstring|undef)\b', Keyword, ('default', '_constant')), + (r'(?i)message\b', Keyword, ('default', 'diagnostic')), + (r'(?i)(nearby|object)\b', Keyword, + ('object-body', '_object-head')), + (r'(?i)property\b', Keyword, + ('default', 'alias?', '_constant', 'property-keyword*')), + (r'(?i)replace\b', Keyword, + ('default', 'routine-name?', 'routine-name?')), + (r'(?i)statusline\b', Keyword, ('default', 'directive-keyword?')), + (r'(?i)stub\b', Keyword, ('default', 'routine-name?')), + (r'(?i)trace\b', Keyword, + ('default', 'trace-keyword?', 'trace-keyword?')), + (r'(?i)zcharacter\b', Keyword, + ('default', 'directive-keyword?', 'directive-keyword?')), + (_name, Name.Class, ('object-body', '_object-head')) + ], + # [, Replace, Stub + 'routine-name?': [ + include('_whitespace'), + (r'(%s)?' 
% _name, Name.Function, '#pop') + ], + 'locals': [ + include('_whitespace'), + (r';', Punctuation, '#pop'), + (r'\*', Punctuation), + (_name, Name.Variable) + ], + # Array + 'many-values': [ + include('_whitespace'), + (r';', Punctuation), + (r'\]', Punctuation, '#pop'), + (r':', Error), + default(('expression', '_expression')) + ], + # Attribute, Property + 'alias?': [ + include('_whitespace'), + (r'alias\b', Keyword, ('#pop', '_constant')), + default('#pop') + ], + # Class, Object, Nearby + 'class-name': [ + include('_whitespace'), + (r'(?=[,;]|(class|has|private|with)\b)', Text, '#pop'), + (_name, Name.Class, '#pop') + ], + 'duplicates': [ + include('_whitespace'), + (r'\(', Punctuation, ('#pop', 'expression', '_expression')), + default('#pop') + ], + '_object-head': [ + (r'[%s]>' % _dash, Punctuation), + (r'(class|has|private|with)\b', Keyword.Declaration, '#pop'), + include('_global') + ], + 'object-body': [ + include('_whitespace'), + (r';', Punctuation, '#pop:2'), + (r',', Punctuation), + (r'class\b', Keyword.Declaration, 'class-segment'), + (r'(has|private|with)\b', Keyword.Declaration), + (r':', Error), + default(('_object-expression', '_expression')) + ], + 'class-segment': [ + include('_whitespace'), + (r'(?=[,;]|(class|has|private|with)\b)', Text, '#pop'), + (_name, Name.Class), + default('value') + ], + # Extend, Verb + 'grammar': [ + include('_whitespace'), + (r'=', Punctuation, ('#pop', 'default')), + (r'\*', Punctuation, ('#pop', 'grammar-line')), + default('_directive-keyword') + ], + 'grammar-line': [ + include('_whitespace'), + (r';', Punctuation, '#pop'), + (r'[/*]', Punctuation), + (r'[%s]>' % _dash, Punctuation, 'value'), + (r'(noun|scope)\b', Keyword, '=routine'), + default('_directive-keyword') + ], + '=routine': [ + include('_whitespace'), + (r'=', Punctuation, 'routine-name?'), + default('#pop') + ], + # Import + 'manifest': [ + include('_whitespace'), + (r';', Punctuation, '#pop'), + (r',', Punctuation), + (r'(?i)(global\b)?', Keyword, '_global') + ], + # Include, Link, Message + 'diagnostic': [ + include('_whitespace'), + (r'[%s]' % _dquote, String.Double, ('#pop', 'message-string')), + default(('#pop', 'before-plain-string', 'directive-keyword?')) + ], + 'before-plain-string': [ + include('_whitespace'), + (r'[%s]' % _dquote, String.Double, ('#pop', 'plain-string')) + ], + 'message-string': [ + (r'[~^]+', String.Escape), + include('plain-string') + ], + + # Keywords used in directives + '_directive-keyword!': [ + include('_whitespace'), + (words(( + 'additive', 'alias', 'buffer', 'class', 'creature', 'data', 'error', 'fatalerror', + 'first', 'has', 'held', 'initial', 'initstr', 'last', 'long', 'meta', 'multi', + 'multiexcept', 'multiheld', 'multiinside', 'noun', 'number', 'only', 'private', + 'replace', 'reverse', 'scope', 'score', 'special', 'string', 'table', 'terminating', + 'time', 'topic', 'warning', 'with'), suffix=r'\b'), + Keyword, '#pop'), + (r'[%s]{1,2}>|[+=]' % _dash, Punctuation, '#pop') + ], + '_directive-keyword': [ + include('_directive-keyword!'), + include('value') + ], + 'directive-keyword?': [ + include('_directive-keyword!'), + default('#pop') + ], + 'property-keyword*': [ + include('_whitespace'), + (r'(additive|long)\b', Keyword), + default('#pop') + ], + 'trace-keyword?': [ + include('_whitespace'), + (words(( + 'assembly', 'dictionary', 'expressions', 'lines', 'linker', + 'objects', 'off', 'on', 'symbols', 'tokens', 'verbs'), suffix=r'\b'), + Keyword, '#pop'), + default('#pop') + ], + + # Statements + 'statements': [ + 
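+            # Routine bodies: lexed until the closing ']' pops this state.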
include('_whitespace'), + (r'\]', Punctuation, '#pop'), + (r'[;{}]', Punctuation), + (words(( + 'box', 'break', 'continue', 'default', 'give', 'inversion', + 'new_line', 'quit', 'read', 'remove', 'return', 'rfalse', 'rtrue', + 'spaces', 'string', 'until'), suffix=r'\b'), + Keyword, 'default'), + (r'(do|else)\b', Keyword), + (r'(font|style)\b', Keyword, + ('default', 'miscellaneous-keyword?')), + (r'for\b', Keyword, ('for', '(?')), + (r'(if|switch|while)', Keyword, + ('expression', '_expression', '(?')), + (r'(jump|save|restore)\b', Keyword, ('default', 'label?')), + (r'objectloop\b', Keyword, + ('_keyword-expression', 'variable?', '(?')), + (r'print(_ret)?\b|(?=[%s])' % _dquote, Keyword, 'print-list'), + (r'\.', Name.Label, 'label?'), + (r'@', Keyword, 'opcode'), + (r'#(?![agrnw]\$|#)', Punctuation, 'directive'), + (r'<', Punctuation, 'default'), + (r'(move\b)?', Keyword, + ('default', '_keyword-expression', '_expression')) + ], + 'miscellaneous-keyword?': [ + include('_whitespace'), + (r'(bold|fixed|from|near|off|on|reverse|roman|to|underline)\b', + Keyword, '#pop'), + (r'(a|A|an|address|char|name|number|object|property|string|the|' + r'The)\b(?=(\s+|(![^%s]*))*\))' % _newline, Keyword.Pseudo, + '#pop'), + (r'%s(?=(\s+|(![^%s]*))*\))' % (_name, _newline), Name.Function, + '#pop'), + default('#pop') + ], + '(?': [ + include('_whitespace'), + (r'\(?', Punctuation, '#pop') + ], + 'for': [ + include('_whitespace'), + (r';?', Punctuation, ('_for-expression', '_expression')) + ], + 'print-list': [ + include('_whitespace'), + (r';', Punctuation, '#pop'), + (r':', Error), + default(('_list-expression', '_expression', '_list-expression', 'form')) + ], + 'form': [ + include('_whitespace'), + (r'\(', Punctuation, ('#pop', 'miscellaneous-keyword?')), + default('#pop') + ], + + # Assembly + 'opcode': [ + include('_whitespace'), + (r'[%s]' % _dquote, String.Double, ('operands', 'plain-string')), + (_name, Keyword, 'operands') + ], + 'operands': [ + (r':', Error), + default(('_assembly-expression', '_expression')) + ] + } + + def get_tokens_unprocessed(self, text): + # 'in' is either a keyword or an operator. + # If the token two tokens after 'in' is ')', 'in' is a keyword: + # objectloop(a in b) + # Otherwise, it is an operator: + # objectloop(a in b && true) + objectloop_queue = [] + objectloop_token_count = -1 + previous_token = None + for index, token, value in RegexLexer.get_tokens_unprocessed(self, + text): + if previous_token is Name.Variable and value == 'in': + objectloop_queue = [[index, token, value]] + objectloop_token_count = 2 + elif objectloop_token_count > 0: + if token not in Comment and token not in Text: + objectloop_token_count -= 1 + objectloop_queue.append((index, token, value)) + else: + if objectloop_token_count == 0: + if objectloop_queue[-1][2] == ')': + objectloop_queue[0][1] = Keyword + while objectloop_queue: + yield objectloop_queue.pop(0) + objectloop_token_count = -1 + yield index, token, value + if token not in Comment and token not in Text: + previous_token = token + while objectloop_queue: + yield objectloop_queue.pop(0) + + +class Inform7Lexer(RegexLexer): + """ + For `Inform 7 `_ source code. + + .. 
versionadded:: 2.0 + """ + + name = 'Inform 7' + aliases = ['inform7', 'i7'] + filenames = ['*.ni', '*.i7x'] + + flags = re.MULTILINE | re.DOTALL | re.UNICODE + + _dash = Inform6Lexer._dash + _dquote = Inform6Lexer._dquote + _newline = Inform6Lexer._newline + _start = r'\A|(?<=[%s])' % _newline + + # There are three variants of Inform 7, differing in how to + # interpret at signs and braces in I6T. In top-level inclusions, at + # signs in the first column are inweb syntax. In phrase definitions + # and use options, tokens in braces are treated as I7. Use options + # also interpret "{N}". + tokens = {} + token_variants = ['+i6t-not-inline', '+i6t-inline', '+i6t-use-option'] + + for level in token_variants: + tokens[level] = { + '+i6-root': list(Inform6Lexer.tokens['root']), + '+i6t-root': [ # For Inform6TemplateLexer + (r'[^%s]*' % Inform6Lexer._newline, Comment.Preproc, + ('directive', '+p')) + ], + 'root': [ + (r'(\|?\s)+', Text), + (r'\[', Comment.Multiline, '+comment'), + (r'[%s]' % _dquote, Generic.Heading, + ('+main', '+titling', '+titling-string')), + default(('+main', '+heading?')) + ], + '+titling-string': [ + (r'[^%s]+' % _dquote, Generic.Heading), + (r'[%s]' % _dquote, Generic.Heading, '#pop') + ], + '+titling': [ + (r'\[', Comment.Multiline, '+comment'), + (r'[^%s.;:|%s]+' % (_dquote, _newline), Generic.Heading), + (r'[%s]' % _dquote, Generic.Heading, '+titling-string'), + (r'[%s]{2}|(?<=[\s%s])\|[\s%s]' % (_newline, _dquote, _dquote), + Text, ('#pop', '+heading?')), + (r'[.;:]|(?<=[\s%s])\|' % _dquote, Text, '#pop'), + (r'[|%s]' % _newline, Generic.Heading) + ], + '+main': [ + (r'(?i)[^%s:a\[(|%s]+' % (_dquote, _newline), Text), + (r'[%s]' % _dquote, String.Double, '+text'), + (r':', Text, '+phrase-definition'), + (r'(?i)\bas\b', Text, '+use-option'), + (r'\[', Comment.Multiline, '+comment'), + (r'(\([%s])(.*?)([%s]\))' % (_dash, _dash), + bygroups(Punctuation, + using(this, state=('+i6-root', 'directive'), + i6t='+i6t-not-inline'), Punctuation)), + (r'(%s|(?<=[\s;:.%s]))\|\s|[%s]{2,}' % + (_start, _dquote, _newline), Text, '+heading?'), + (r'(?i)[a(|%s]' % _newline, Text) + ], + '+phrase-definition': [ + (r'\s+', Text), + (r'\[', Comment.Multiline, '+comment'), + (r'(\([%s])(.*?)([%s]\))' % (_dash, _dash), + bygroups(Punctuation, + using(this, state=('+i6-root', 'directive', + 'default', 'statements'), + i6t='+i6t-inline'), Punctuation), '#pop'), + default('#pop') + ], + '+use-option': [ + (r'\s+', Text), + (r'\[', Comment.Multiline, '+comment'), + (r'(\([%s])(.*?)([%s]\))' % (_dash, _dash), + bygroups(Punctuation, + using(this, state=('+i6-root', 'directive'), + i6t='+i6t-use-option'), Punctuation), '#pop'), + default('#pop') + ], + '+comment': [ + (r'[^\[\]]+', Comment.Multiline), + (r'\[', Comment.Multiline, '#push'), + (r'\]', Comment.Multiline, '#pop') + ], + '+text': [ + (r'[^\[%s]+' % _dquote, String.Double), + (r'\[.*?\]', String.Interpol), + (r'[%s]' % _dquote, String.Double, '#pop') + ], + '+heading?': [ + (r'(\|?\s)+', Text), + (r'\[', Comment.Multiline, '+comment'), + (r'[%s]{4}\s+' % _dash, Text, '+documentation-heading'), + (r'[%s]{1,3}' % _dash, Text), + (r'(?i)(volume|book|part|chapter|section)\b[^%s]*' % _newline, + Generic.Heading, '#pop'), + default('#pop') + ], + '+documentation-heading': [ + (r'\s+', Text), + (r'\[', Comment.Multiline, '+comment'), + (r'(?i)documentation\s+', Text, '+documentation-heading2'), + default('#pop') + ], + '+documentation-heading2': [ + (r'\s+', Text), + (r'\[', Comment.Multiline, '+comment'), + (r'[%s]{4}\s' % _dash, Text, 
'+documentation'), + default('#pop:2') + ], + '+documentation': [ + (r'(?i)(%s)\s*(chapter|example)\s*:[^%s]*' % + (_start, _newline), Generic.Heading), + (r'(?i)(%s)\s*section\s*:[^%s]*' % (_start, _newline), + Generic.Subheading), + (r'((%s)\t.*?[%s])+' % (_start, _newline), + using(this, state='+main')), + (r'[^%s\[]+|[%s\[]' % (_newline, _newline), Text), + (r'\[', Comment.Multiline, '+comment'), + ], + '+i6t-not-inline': [ + (r'(%s)@c( .*?)?([%s]|\Z)' % (_start, _newline), + Comment.Preproc), + (r'(%s)@([%s]+|Purpose:)[^%s]*' % (_start, _dash, _newline), + Comment.Preproc), + (r'(%s)@p( .*?)?([%s]|\Z)' % (_start, _newline), + Generic.Heading, '+p') + ], + '+i6t-use-option': [ + include('+i6t-not-inline'), + (r'({)(N)(})', bygroups(Punctuation, Text, Punctuation)) + ], + '+i6t-inline': [ + (r'({)(\S[^}]*)?(})', + bygroups(Punctuation, using(this, state='+main'), + Punctuation)) + ], + '+i6t': [ + (r'({[%s])(![^}]*)(}?)' % _dash, + bygroups(Punctuation, Comment.Single, Punctuation)), + (r'({[%s])(lines)(:)([^}]*)(}?)' % _dash, + bygroups(Punctuation, Keyword, Punctuation, Text, + Punctuation), '+lines'), + (r'({[%s])([^:}]*)(:?)([^}]*)(}?)' % _dash, + bygroups(Punctuation, Keyword, Punctuation, Text, + Punctuation)), + (r'(\(\+)(.*?)(\+\)|\Z)', + bygroups(Punctuation, using(this, state='+main'), + Punctuation)) + ], + '+p': [ + (r'[^@]+', Comment.Preproc), + (r'(%s)@c( .*?)?([%s]|\Z)' % (_start, _newline), + Comment.Preproc, '#pop'), + (r'(%s)@([%s]|Purpose:)' % (_start, _dash), Comment.Preproc), + (r'(%s)@p( .*?)?([%s]|\Z)' % (_start, _newline), + Generic.Heading), + (r'@', Comment.Preproc) + ], + '+lines': [ + (r'(%s)@c( .*?)?([%s]|\Z)' % (_start, _newline), + Comment.Preproc), + (r'(%s)@([%s]|Purpose:)[^%s]*' % (_start, _dash, _newline), + Comment.Preproc), + (r'(%s)@p( .*?)?([%s]|\Z)' % (_start, _newline), + Generic.Heading, '+p'), + (r'(%s)@[a-zA-Z_0-9]*[ %s]' % (_start, _newline), Keyword), + (r'![^%s]*' % _newline, Comment.Single), + (r'({)([%s]endlines)(})' % _dash, + bygroups(Punctuation, Keyword, Punctuation), '#pop'), + (r'[^@!{]+?([%s]|\Z)|.' % _newline, Text) + ] + } + # Inform 7 can include snippets of Inform 6 template language, + # so all of Inform6Lexer's states are copied here, with + # modifications to account for template syntax. Inform7Lexer's + # own states begin with '+' to avoid name conflicts. Some of + # Inform6Lexer's states begin with '_': these are not modified. + # They deal with template syntax either by including modified + # states, or by matching r'' then pushing to modified states. + for token in Inform6Lexer.tokens: + if token == 'root': + continue + tokens[level][token] = list(Inform6Lexer.tokens[token]) + if not token.startswith('_'): + tokens[level][token][:0] = [include('+i6t'), include(level)] + + def __init__(self, **options): + level = options.get('i6t', '+i6t-not-inline') + if level not in self._all_tokens: + self._tokens = self.__class__.process_tokendef(level) + else: + self._tokens = self._all_tokens[level] + RegexLexer.__init__(self, **options) + + +class Inform6TemplateLexer(Inform7Lexer): + """ + For `Inform 6 template + `_ code. + + .. 
versionadded:: 2.0 + """ + + name = 'Inform 6 template' + aliases = ['i6t'] + filenames = ['*.i6t'] + + def get_tokens_unprocessed(self, text, stack=('+i6t-root',)): + return Inform7Lexer.get_tokens_unprocessed(self, text, stack) diff --git a/pygments/lexers/misc/__init__.py b/pygments/lexers/misc/__init__.py new file mode 100644 index 00000000..c10f5a29 --- /dev/null +++ b/pygments/lexers/misc/__init__.py @@ -0,0 +1,10 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.misc + ~~~~~~~~~~~~~~~~~~~~ + + Lexers for miscellaneous languages. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" diff --git a/pygments/lexers/misc/blitz.py b/pygments/lexers/misc/blitz.py new file mode 100644 index 00000000..9e324adb --- /dev/null +++ b/pygments/lexers/misc/blitz.py @@ -0,0 +1,318 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.misc.blitz + ~~~~~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for blitzbasic.com languages. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import re + +from pygments.lexer import RegexLexer, bygroups, default, words +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation + +__all__ = ['BlitzBasicLexer', 'BlitzMaxLexer', 'MonkeyLexer'] + + +class BlitzMaxLexer(RegexLexer): + """ + For `BlitzMax `_ source code. + + .. versionadded:: 1.4 + """ + + name = 'BlitzMax' + aliases = ['blitzmax', 'bmax'] + filenames = ['*.bmx'] + mimetypes = ['text/x-bmx'] + + bmax_vopwords = r'\b(Shl|Shr|Sar|Mod)\b' + bmax_sktypes = r'@{1,2}|[!#$%]' + bmax_lktypes = r'\b(Int|Byte|Short|Float|Double|Long)\b' + bmax_name = r'[a-z_]\w*' + bmax_var = (r'(%s)(?:(?:([ \t]*)(%s)|([ \t]*:[ \t]*\b(?:Shl|Shr|Sar|Mod)\b)' + r'|([ \t]*)(:)([ \t]*)(?:%s|(%s)))(?:([ \t]*)(Ptr))?)') % \ + (bmax_name, bmax_sktypes, bmax_lktypes, bmax_name) + bmax_func = bmax_var + r'?((?:[ \t]|\.\.\n)*)([(])' + + flags = re.MULTILINE | re.IGNORECASE + tokens = { + 'root': [ + # Text + (r'[ \t]+', Text), + (r'\.\.\n', Text), # Line continuation + # Comments + (r"'.*?\n", Comment.Single), + (r'([ \t]*)\bRem\n(\n|.)*?\s*\bEnd([ \t]*)Rem', Comment.Multiline), + # Data types + ('"', String.Double, 'string'), + # Numbers + (r'[0-9]+\.[0-9]*(?!\.)', Number.Float), + (r'\.[0-9]*(?!\.)', Number.Float), + (r'[0-9]+', Number.Integer), + (r'\$[0-9a-f]+', Number.Hex), + (r'\%[10]+', Number.Bin), + # Other + (r'(?:(?:(:)?([ \t]*)(:?%s|([+\-*/&|~]))|Or|And|Not|[=<>^]))' % + (bmax_vopwords), Operator), + (r'[(),.:\[\]]', Punctuation), + (r'(?:#[\w \t]*)', Name.Label), + (r'(?:\?[\w \t]*)', Comment.Preproc), + # Identifiers + (r'\b(New)\b([ \t]?)([(]?)(%s)' % (bmax_name), + bygroups(Keyword.Reserved, Text, Punctuation, Name.Class)), + (r'\b(Import|Framework|Module)([ \t]+)(%s\.%s)' % + (bmax_name, bmax_name), + bygroups(Keyword.Reserved, Text, Keyword.Namespace)), + (bmax_func, bygroups(Name.Function, Text, Keyword.Type, + Operator, Text, Punctuation, Text, + Keyword.Type, Name.Class, Text, + Keyword.Type, Text, Punctuation)), + (bmax_var, bygroups(Name.Variable, Text, Keyword.Type, Operator, + Text, Punctuation, Text, Keyword.Type, + Name.Class, Text, Keyword.Type)), + (r'\b(Type|Extends)([ \t]+)(%s)' % (bmax_name), + bygroups(Keyword.Reserved, Text, Name.Class)), + # Keywords + (r'\b(Ptr)\b', Keyword.Type), + (r'\b(Pi|True|False|Null|Self|Super)\b', Keyword.Constant), + (r'\b(Local|Global|Const|Field)\b', Keyword.Declaration), + (words(( + 'TNullMethodException', 
'TNullFunctionException', + 'TNullObjectException', 'TArrayBoundsException', + 'TRuntimeException'), prefix=r'\b', suffix=r'\b'), Name.Exception), + (words(( + 'Strict', 'SuperStrict', 'Module', 'ModuleInfo', + 'End', 'Return', 'Continue', 'Exit', 'Public', 'Private', + 'Var', 'VarPtr', 'Chr', 'Len', 'Asc', 'SizeOf', 'Sgn', 'Abs', 'Min', 'Max', + 'New', 'Release', 'Delete', 'Incbin', 'IncbinPtr', 'IncbinLen', + 'Framework', 'Include', 'Import', 'Extern', 'EndExtern', + 'Function', 'EndFunction', 'Type', 'EndType', 'Extends', 'Method', 'EndMethod', + 'Abstract', 'Final', 'If', 'Then', 'Else', 'ElseIf', 'EndIf', + 'For', 'To', 'Next', 'Step', 'EachIn', 'While', 'Wend', 'EndWhile', + 'Repeat', 'Until', 'Forever', 'Select', 'Case', 'Default', 'EndSelect', + 'Try', 'Catch', 'EndTry', 'Throw', 'Assert', 'Goto', 'DefData', 'ReadData', + 'RestoreData'), prefix=r'\b', suffix=r'\b'), + Keyword.Reserved), + # Final resolve (for variable names and such) + (r'(%s)' % (bmax_name), Name.Variable), + ], + 'string': [ + (r'""', String.Double), + (r'"C?', String.Double, '#pop'), + (r'[^"]+', String.Double), + ], + } + + +class BlitzBasicLexer(RegexLexer): + """ + For `BlitzBasic `_ source code. + + .. versionadded:: 2.0 + """ + + name = 'BlitzBasic' + aliases = ['blitzbasic', 'b3d', 'bplus'] + filenames = ['*.bb', '*.decls'] + mimetypes = ['text/x-bb'] + + bb_sktypes = r'@{1,2}|[#$%]' + bb_name = r'[a-z]\w*' + bb_var = (r'(%s)(?:([ \t]*)(%s)|([ \t]*)([.])([ \t]*)(?:(%s)))?') % \ + (bb_name, bb_sktypes, bb_name) + + flags = re.MULTILINE | re.IGNORECASE + tokens = { + 'root': [ + # Text + (r'[ \t]+', Text), + # Comments + (r";.*?\n", Comment.Single), + # Data types + ('"', String.Double, 'string'), + # Numbers + (r'[0-9]+\.[0-9]*(?!\.)', Number.Float), + (r'\.[0-9]+(?!\.)', Number.Float), + (r'[0-9]+', Number.Integer), + (r'\$[0-9a-f]+', Number.Hex), + (r'\%[10]+', Number.Bin), + # Other + (words(('Shl', 'Shr', 'Sar', 'Mod', 'Or', 'And', 'Not', + 'Abs', 'Sgn', 'Handle', 'Int', 'Float', 'Str', + 'First', 'Last', 'Before', 'After'), + prefix=r'\b', suffix=r'\b'), + Operator), + (r'([+\-*/~=<>^])', Operator), + (r'[(),:\[\]\\]', Punctuation), + (r'\.([ \t]*)(%s)' % bb_name, Name.Label), + # Identifiers + (r'\b(New)\b([ \t]+)(%s)' % (bb_name), + bygroups(Keyword.Reserved, Text, Name.Class)), + (r'\b(Gosub|Goto)\b([ \t]+)(%s)' % (bb_name), + bygroups(Keyword.Reserved, Text, Name.Label)), + (r'\b(Object)\b([ \t]*)([.])([ \t]*)(%s)\b' % (bb_name), + bygroups(Operator, Text, Punctuation, Text, Name.Class)), + (r'\b%s\b([ \t]*)(\()' % bb_var, + bygroups(Name.Function, Text, Keyword.Type, Text, Punctuation, + Text, Name.Class, Text, Punctuation)), + (r'\b(Function)\b([ \t]+)%s' % bb_var, + bygroups(Keyword.Reserved, Text, Name.Function, Text, Keyword.Type, + Text, Punctuation, Text, Name.Class)), + (r'\b(Type)([ \t]+)(%s)' % (bb_name), + bygroups(Keyword.Reserved, Text, Name.Class)), + # Keywords + (r'\b(Pi|True|False|Null)\b', Keyword.Constant), + (r'\b(Local|Global|Const|Field|Dim)\b', Keyword.Declaration), + (words(( + 'End', 'Return', 'Exit', 'Chr', 'Len', 'Asc', 'New', 'Delete', 'Insert', + 'Include', 'Function', 'Type', 'If', 'Then', 'Else', 'ElseIf', 'EndIf', + 'For', 'To', 'Next', 'Step', 'Each', 'While', 'Wend', + 'Repeat', 'Until', 'Forever', 'Select', 'Case', 'Default', + 'Goto', 'Gosub', 'Data', 'Read', 'Restore'), prefix=r'\b', suffix=r'\b'), + Keyword.Reserved), + # Final resolve (for variable names and such) + # (r'(%s)' % (bb_name), Name.Variable), + (bb_var, bygroups(Name.Variable, Text, Keyword.Type, 
+ Text, Punctuation, Text, Name.Class)), + ], + 'string': [ + (r'""', String.Double), + (r'"C?', String.Double, '#pop'), + (r'[^"]+', String.Double), + ], + } + + +class MonkeyLexer(RegexLexer): + """ + For + `Monkey `_ + source code. + + .. versionadded:: 1.6 + """ + + name = 'Monkey' + aliases = ['monkey'] + filenames = ['*.monkey'] + mimetypes = ['text/x-monkey'] + + name_variable = r'[a-z_]\w*' + name_function = r'[A-Z]\w*' + name_constant = r'[A-Z_][A-Z0-9_]*' + name_class = r'[A-Z]\w*' + name_module = r'[a-z0-9_]*' + + keyword_type = r'(?:Int|Float|String|Bool|Object|Array|Void)' + # ? == Bool // % == Int // # == Float // $ == String + keyword_type_special = r'[?%#$]' + + flags = re.MULTILINE + + tokens = { + 'root': [ + # Text + (r'\s+', Text), + # Comments + (r"'.*", Comment), + (r'(?i)^#rem\b', Comment.Multiline, 'comment'), + # preprocessor directives + (r'(?i)^(?:#If|#ElseIf|#Else|#EndIf|#End|#Print|#Error)\b', Comment.Preproc), + # preprocessor variable (any line starting with '#' that is not a directive) + (r'^#', Comment.Preproc, 'variables'), + # String + ('"', String.Double, 'string'), + # Numbers + (r'[0-9]+\.[0-9]*(?!\.)', Number.Float), + (r'\.[0-9]+(?!\.)', Number.Float), + (r'[0-9]+', Number.Integer), + (r'\$[0-9a-fA-Z]+', Number.Hex), + (r'\%[10]+', Number.Bin), + # Native data types + (r'\b%s\b' % keyword_type, Keyword.Type), + # Exception handling + (r'(?i)\b(?:Try|Catch|Throw)\b', Keyword.Reserved), + (r'Throwable', Name.Exception), + # Builtins + (r'(?i)\b(?:Null|True|False)\b', Name.Builtin), + (r'(?i)\b(?:Self|Super)\b', Name.Builtin.Pseudo), + (r'\b(?:HOST|LANG|TARGET|CONFIG)\b', Name.Constant), + # Keywords + (r'(?i)^(Import)(\s+)(.*)(\n)', + bygroups(Keyword.Namespace, Text, Name.Namespace, Text)), + (r'(?i)^Strict\b.*\n', Keyword.Reserved), + (r'(?i)(Const|Local|Global|Field)(\s+)', + bygroups(Keyword.Declaration, Text), 'variables'), + (r'(?i)(New|Class|Interface|Extends|Implements)(\s+)', + bygroups(Keyword.Reserved, Text), 'classname'), + (r'(?i)(Function|Method)(\s+)', + bygroups(Keyword.Reserved, Text), 'funcname'), + (r'(?i)(?:End|Return|Public|Private|Extern|Property|' + r'Final|Abstract)\b', Keyword.Reserved), + # Flow Control stuff + (r'(?i)(?:If|Then|Else|ElseIf|EndIf|' + r'Select|Case|Default|' + r'While|Wend|' + r'Repeat|Until|Forever|' + r'For|To|Until|Step|EachIn|Next|' + r'Exit|Continue)\s+', Keyword.Reserved), + # not used yet + (r'(?i)\b(?:Module|Inline)\b', Keyword.Reserved), + # Array + (r'[\[\]]', Punctuation), + # Other + (r'<=|>=|<>|\*=|/=|\+=|-=|&=|~=|\|=|[-&*/^+=<>|~]', Operator), + (r'(?i)(?:Not|Mod|Shl|Shr|And|Or)', Operator.Word), + (r'[\(\){}!#,.:]', Punctuation), + # catch the rest + (r'%s\b' % name_constant, Name.Constant), + (r'%s\b' % name_function, Name.Function), + (r'%s\b' % name_variable, Name.Variable), + ], + 'funcname': [ + (r'(?i)%s\b' % name_function, Name.Function), + (r':', Punctuation, 'classname'), + (r'\s+', Text), + (r'\(', Punctuation, 'variables'), + (r'\)', Punctuation, '#pop') + ], + 'classname': [ + (r'%s\.' 
% name_module, Name.Namespace), + (r'%s\b' % keyword_type, Keyword.Type), + (r'%s\b' % name_class, Name.Class), + # array (of given size) + (r'(\[)(\s*)(\d*)(\s*)(\])', + bygroups(Punctuation, Text, Number.Integer, Text, Punctuation)), + # generics + (r'\s+(?!<)', Text, '#pop'), + (r'<', Punctuation, '#push'), + (r'>', Punctuation, '#pop'), + (r'\n', Text, '#pop'), + default('#pop') + ], + 'variables': [ + (r'%s\b' % name_constant, Name.Constant), + (r'%s\b' % name_variable, Name.Variable), + (r'%s' % keyword_type_special, Keyword.Type), + (r'\s+', Text), + (r':', Punctuation, 'classname'), + (r',', Punctuation, '#push'), + default('#pop') + ], + 'string': [ + (r'[^"~]+', String.Double), + (r'~q|~n|~r|~t|~z|~~', String.Escape), + (r'"', String.Double, '#pop'), + ], + 'comment': [ + (r'(?i)^#rem.*?', Comment.Multiline, "#push"), + (r'(?i)^#end.*?', Comment.Multiline, "#pop"), + (r'\n', Comment.Multiline), + (r'.+', Comment.Multiline), + ], + } diff --git a/pygments/lexers/misc/chapel.py b/pygments/lexers/misc/chapel.py new file mode 100644 index 00000000..c2fe6936 --- /dev/null +++ b/pygments/lexers/misc/chapel.py @@ -0,0 +1,98 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.misc.chapel + ~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + Lexer for the Chapel language. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +from pygments.lexer import RegexLexer, bygroups, words +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation + +__all__ = ['ChapelLexer'] + + +class ChapelLexer(RegexLexer): + """ + For `Chapel `_ source. + + .. versionadded:: 2.0 + """ + name = 'Chapel' + filenames = ['*.chpl'] + aliases = ['chapel', 'chpl'] + # mimetypes = ['text/x-chapel'] + + tokens = { + 'root': [ + (r'\n', Text), + (r'\s+', Text), + (r'\\\n', Text), + + (r'//(.*?)\n', Comment.Single), + (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline), + + (r'(config|const|in|inout|out|param|ref|type|var)\b', + Keyword.Declaration), + (r'(false|nil|true)\b', Keyword.Constant), + (r'(bool|complex|imag|int|opaque|range|real|string|uint)\b', + Keyword.Type), + (words(( + 'align', 'atomic', 'begin', 'break', 'by', 'cobegin', 'coforall', + 'continue', 'delete', 'dmapped', 'do', 'domain', 'else', 'enum', + 'export', 'extern', 'for', 'forall', 'if', 'index', 'inline', + 'iter', 'label', 'lambda', 'let', 'local', 'new', 'noinit', 'on', + 'otherwise', 'pragma', 'reduce', 'return', 'scan', 'select', + 'serial', 'single', 'sparse', 'subdomain', 'sync', 'then', 'use', + 'when', 'where', 'while', 'with', 'yield', 'zip'), suffix=r'\b'), + Keyword), + (r'(proc)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'procname'), + (r'(class|module|record|union)(\s+)', bygroups(Keyword, Text), + 'classname'), + + # imaginary integers + (r'\d+i', Number), + (r'\d+\.\d*([Ee][-+]\d+)?i', Number), + (r'\.\d+([Ee][-+]\d+)?i', Number), + (r'\d+[Ee][-+]\d+i', Number), + + # reals cannot end with a period due to lexical ambiguity with + # .. operator. See reference for rationale. 
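+            # e.g. "1..10" lexes as the integer 1, the ".." operator and the
+            # integer 10, rather than as the real "1." followed by ".10".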
+ (r'(\d*\.\d+)([eE][+-]?[0-9]+)?i?', Number.Float), + (r'\d+[eE][+-]?[0-9]+i?', Number.Float), + + # integer literals + # -- binary + (r'0[bB][0-1]+', Number.Bin), + # -- hex + (r'0[xX][0-9a-fA-F]+', Number.Hex), + # -- octal + (r'0[oO][0-7]+', Number.Oct), + # -- decimal + (r'[0-9]+', Number.Integer), + + # strings + (r'["\'](\\\\|\\"|[^"\'])*["\']', String), + + # tokens + (r'(=|\+=|-=|\*=|/=|\*\*=|%=|&=|\|=|\^=|&&=|\|\|=|<<=|>>=|' + r'<=>|<~>|\.\.|by|#|\.\.\.|' + r'&&|\|\||!|&|\||\^|~|<<|>>|' + r'==|!=|<=|>=|<|>|' + r'[+\-*/%]|\*\*)', Operator), + (r'[:;,.?()\[\]{}]', Punctuation), + + # identifiers + (r'[a-zA-Z_][\w$]*', Name.Other), + ], + 'classname': [ + (r'[a-zA-Z_][\w$]*', Name.Class, '#pop'), + ], + 'procname': [ + (r'[a-zA-Z_][\w$]*', Name.Function, '#pop'), + ], + } diff --git a/pygments/lexers/misc/dylan.py b/pygments/lexers/misc/dylan.py new file mode 100644 index 00000000..80484cb4 --- /dev/null +++ b/pygments/lexers/misc/dylan.py @@ -0,0 +1,289 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.misc.dylan + ~~~~~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for the Dylan language. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import re + +from pygments.lexer import Lexer, RegexLexer, bygroups, do_insertions +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation, Generic, Literal + +__all__ = ['DylanLexer', 'DylanConsoleLexer', 'DylanLidLexer'] + + +class DylanLexer(RegexLexer): + """ + For the `Dylan `_ language. + + .. versionadded:: 0.7 + """ + + name = 'Dylan' + aliases = ['dylan'] + filenames = ['*.dylan', '*.dyl', '*.intr'] + mimetypes = ['text/x-dylan'] + + flags = re.IGNORECASE + + builtins = set(( + 'subclass', 'abstract', 'block', 'concrete', 'constant', 'class', + 'compiler-open', 'compiler-sideways', 'domain', 'dynamic', + 'each-subclass', 'exception', 'exclude', 'function', 'generic', + 'handler', 'inherited', 'inline', 'inline-only', 'instance', + 'interface', 'import', 'keyword', 'library', 'macro', 'method', + 'module', 'open', 'primary', 'required', 'sealed', 'sideways', + 'singleton', 'slot', 'thread', 'variable', 'virtual')) + + keywords = set(( + 'above', 'afterwards', 'begin', 'below', 'by', 'case', 'cleanup', + 'create', 'define', 'else', 'elseif', 'end', 'export', 'finally', + 'for', 'from', 'if', 'in', 'let', 'local', 'otherwise', 'rename', + 'select', 'signal', 'then', 'to', 'unless', 'until', 'use', 'when', + 'while')) + + operators = set(( + '~', '+', '-', '*', '|', '^', '=', '==', '~=', '~==', '<', '<=', + '>', '>=', '&', '|')) + + functions = set(( + 'abort', 'abs', 'add', 'add!', 'add-method', 'add-new', 'add-new!', + 'all-superclasses', 'always', 'any?', 'applicable-method?', 'apply', + 'aref', 'aref-setter', 'as', 'as-lowercase', 'as-lowercase!', + 'as-uppercase', 'as-uppercase!', 'ash', 'backward-iteration-protocol', + 'break', 'ceiling', 'ceiling/', 'cerror', 'check-type', 'choose', + 'choose-by', 'complement', 'compose', 'concatenate', 'concatenate-as', + 'condition-format-arguments', 'condition-format-string', 'conjoin', + 'copy-sequence', 'curry', 'default-handler', 'dimension', 'dimensions', + 'direct-subclasses', 'direct-superclasses', 'disjoin', 'do', + 'do-handlers', 'element', 'element-setter', 'empty?', 'error', 'even?', + 'every?', 'false-or', 'fill!', 'find-key', 'find-method', 'first', + 'first-setter', 'floor', 'floor/', 'forward-iteration-protocol', + 'function-arguments', 'function-return-values', + 
'function-specializers', 'gcd', 'generic-function-mandatory-keywords', + 'generic-function-methods', 'head', 'head-setter', 'identity', + 'initialize', 'instance?', 'integral?', 'intersection', + 'key-sequence', 'key-test', 'last', 'last-setter', 'lcm', 'limited', + 'list', 'logand', 'logbit?', 'logior', 'lognot', 'logxor', 'make', + 'map', 'map-as', 'map-into', 'max', 'member?', 'merge-hash-codes', + 'min', 'modulo', 'negative', 'negative?', 'next-method', + 'object-class', 'object-hash', 'odd?', 'one-of', 'pair', 'pop', + 'pop-last', 'positive?', 'push', 'push-last', 'range', 'rank', + 'rcurry', 'reduce', 'reduce1', 'remainder', 'remove', 'remove!', + 'remove-duplicates', 'remove-duplicates!', 'remove-key!', + 'remove-method', 'replace-elements!', 'replace-subsequence!', + 'restart-query', 'return-allowed?', 'return-description', + 'return-query', 'reverse', 'reverse!', 'round', 'round/', + 'row-major-index', 'second', 'second-setter', 'shallow-copy', + 'signal', 'singleton', 'size', 'size-setter', 'slot-initialized?', + 'sort', 'sort!', 'sorted-applicable-methods', 'subsequence-position', + 'subtype?', 'table-protocol', 'tail', 'tail-setter', 'third', + 'third-setter', 'truncate', 'truncate/', 'type-error-expected-type', + 'type-error-value', 'type-for-copy', 'type-union', 'union', 'values', + 'vector', 'zero?')) + + valid_name = '\\\\?[a-z0-9' + re.escape('!&*<>|^$%@_-+~?/=') + ']+' + + def get_tokens_unprocessed(self, text): + for index, token, value in RegexLexer.get_tokens_unprocessed(self, text): + if token is Name: + lowercase_value = value.lower() + if lowercase_value in self.builtins: + yield index, Name.Builtin, value + continue + if lowercase_value in self.keywords: + yield index, Keyword, value + continue + if lowercase_value in self.functions: + yield index, Name.Builtin, value + continue + if lowercase_value in self.operators: + yield index, Operator, value + continue + yield index, token, value + + tokens = { + 'root': [ + # Whitespace + (r'\s+', Text), + + # single line comment + (r'//.*?\n', Comment.Single), + + # lid header + (r'([a-z0-9-]+)(:)([ \t]*)(.*(?:\n[ \t].+)*)', + bygroups(Name.Attribute, Operator, Text, String)), + + ('', Text, 'code') # no header match, switch to code + ], + 'code': [ + # Whitespace + (r'\s+', Text), + + # single line comment + (r'//.*?\n', Comment.Single), + + # multi-line comment + (r'/\*', Comment.Multiline, 'comment'), + + # strings and characters + (r'"', String, 'string'), + (r"'(\\.|\\[0-7]{1,3}|\\x[a-f0-9]{1,2}|[^\\\'\n])'", String.Char), + + # binary integer + (r'#[bB][01]+', Number.Bin), + + # octal integer + (r'#[oO][0-7]+', Number.Oct), + + # floating point + (r'[-+]?(\d*\.\d+(e[-+]?\d+)?|\d+(\.\d*)?e[-+]?\d+)', Number.Float), + + # decimal integer + (r'[-+]?\d+', Number.Integer), + + # hex integer + (r'#[xX][0-9a-f]+', Number.Hex), + + # Macro parameters + (r'(\?' + valid_name + ')(:)' + r'(token|name|variable|expression|body|case-body|\*)', + bygroups(Name.Tag, Operator, Name.Builtin)), + (r'(\?)(:)(token|name|variable|expression|body|case-body|\*)', + bygroups(Name.Tag, Operator, Name.Builtin)), + (r'\?' + valid_name, Name.Tag), + + # Punctuation + (r'(=>|::|#\(|#\[|##|\?|\?\?|\?=|[(){}\[\],\.;])', Punctuation), + + # Most operators are picked up as names and then re-flagged. + # This one isn't valid in a name though, so we pick it up now. + (r':=', Operator), + + # Pick up #t / #f before we match other stuff with #. 
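+            # (otherwise the generic "#[a-z0-9-]+" rule below would match them)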
+ (r'#[tf]', Literal), + + # #"foo" style keywords + (r'#"', String.Symbol, 'keyword'), + + # #rest, #key, #all-keys, etc. + (r'#[a-z0-9-]+', Keyword), + + # required-init-keyword: style keywords. + (valid_name + ':', Keyword), + + # class names + (r'<' + valid_name + '>', Name.Class), + + # define variable forms. + (r'\*' + valid_name + '\*', Name.Variable.Global), + + # define constant forms. + (r'\$' + valid_name, Name.Constant), + + # everything else. We re-flag some of these in the method above. + (valid_name, Name), + ], + 'comment': [ + (r'[^*/]', Comment.Multiline), + (r'/\*', Comment.Multiline, '#push'), + (r'\*/', Comment.Multiline, '#pop'), + (r'[*/]', Comment.Multiline) + ], + 'keyword': [ + (r'"', String.Symbol, '#pop'), + (r'[^\\"]+', String.Symbol), # all other characters + ], + 'string': [ + (r'"', String, '#pop'), + (r'\\([\\abfnrtv"\']|x[a-f0-9]{2,4}|[0-7]{1,3})', String.Escape), + (r'[^\\"\n]+', String), # all other characters + (r'\\\n', String), # line continuation + (r'\\', String), # stray backslash + ] + } + + +class DylanLidLexer(RegexLexer): + """ + For Dylan LID (Library Interchange Definition) files. + + .. versionadded:: 1.6 + """ + + name = 'DylanLID' + aliases = ['dylan-lid', 'lid'] + filenames = ['*.lid', '*.hdp'] + mimetypes = ['text/x-dylan-lid'] + + flags = re.IGNORECASE + + tokens = { + 'root': [ + # Whitespace + (r'\s+', Text), + + # single line comment + (r'//.*?\n', Comment.Single), + + # lid header + (r'(.*?)(:)([ \t]*)(.*(?:\n[ \t].+)*)', + bygroups(Name.Attribute, Operator, Text, String)), + ] + } + + +class DylanConsoleLexer(Lexer): + """ + For Dylan interactive console output like: + + .. sourcecode:: dylan-console + + ? let a = 1; + => 1 + ? a + => 1 + + This is based on a copy of the RubyConsoleLexer. + + .. versionadded:: 1.6 + """ + name = 'Dylan session' + aliases = ['dylan-console', 'dylan-repl'] + filenames = ['*.dylan-console'] + mimetypes = ['text/x-dylan-console'] + + _line_re = re.compile('.*?\n') + _prompt_re = re.compile('\?| ') + + def get_tokens_unprocessed(self, text): + dylexer = DylanLexer(**self.options) + + curcode = '' + insertions = [] + for match in self._line_re.finditer(text): + line = match.group() + m = self._prompt_re.match(line) + if m is not None: + end = m.end() + insertions.append((len(curcode), + [(0, Generic.Prompt, line[:end])])) + curcode += line[end:] + else: + if curcode: + for item in do_insertions(insertions, + dylexer.get_tokens_unprocessed(curcode)): + yield item + curcode = '' + insertions = [] + yield match.start(), Generic.Output, line + if curcode: + for item in do_insertions(insertions, + dylexer.get_tokens_unprocessed(curcode)): + yield item diff --git a/pygments/lexers/misc/fantom.py b/pygments/lexers/misc/fantom.py new file mode 100644 index 00000000..d5a7550f --- /dev/null +++ b/pygments/lexers/misc/fantom.py @@ -0,0 +1,250 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.misc.fantom + ~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + Lexer for the Fantom language. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +from string import Template + +from pygments.lexer import RegexLexer, include, bygroups, using, \ + this, default, words +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation, Literal + +__all__ = ['FantomLexer'] + + +class FantomLexer(RegexLexer): + """ + For Fantom source code. + + .. 
versionadded:: 1.5 + """ + name = 'Fantom' + aliases = ['fan'] + filenames = ['*.fan'] + mimetypes = ['application/x-fantom'] + + # often used regexes + def s(str): + return Template(str).substitute( + dict( + pod=r'[\"\w\.]+', + eos=r'\n|;', + id=r'[a-zA-Z_]\w*', + # all chars which can be part of type definition. Starts with + # either letter, or [ (maps), or | (funcs) + type=r'(?:\[|[a-zA-Z_]|\|)[:\w\[\]\|\->\?]*?', + ) + ) + + tokens = { + 'comments': [ + (r'(?s)/\*.*?\*/', Comment.Multiline), # Multiline + (r'//.*?\n', Comment.Single), # Single line + # TODO: highlight references in fandocs + (r'\*\*.*?\n', Comment.Special), # Fandoc + (r'#.*\n', Comment.Single) # Shell-style + ], + 'literals': [ + (r'\b-?[\d_]+(ns|ms|sec|min|hr|day)', Number), # Duration + (r'\b-?[\d_]*\.[\d_]+(ns|ms|sec|min|hr|day)', Number), # Duration with dot + (r'\b-?(\d+)?\.\d+(f|F|d|D)?', Number.Float), # Float/Decimal + (r'\b-?0x[0-9a-fA-F_]+', Number.Hex), # Hex + (r'\b-?[\d_]+', Number.Integer), # Int + (r"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char), # Char + (r'"', Punctuation, 'insideStr'), # Opening quote + (r'`', Punctuation, 'insideUri'), # Opening accent + (r'\b(true|false|null)\b', Keyword.Constant), # Bool & null + (r'(?:(\w+)(::))?(\w+)(<\|)(.*?)(\|>)', # DSL + bygroups(Name.Namespace, Punctuation, Name.Class, + Punctuation, String, Punctuation)), + (r'(?:(\w+)(::))?(\w+)?(#)(\w+)?', # Type/slot literal + bygroups(Name.Namespace, Punctuation, Name.Class, + Punctuation, Name.Function)), + (r'\[,\]', Literal), # Empty list + (s(r'($type)(\[,\])'), # Typed empty list + bygroups(using(this, state='inType'), Literal)), + (r'\[:\]', Literal), # Empty Map + (s(r'($type)(\[:\])'), + bygroups(using(this, state='inType'), Literal)), + ], + 'insideStr': [ + (r'\\\\', String.Escape), # Escaped backslash + (r'\\"', String.Escape), # Escaped " + (r'\\`', String.Escape), # Escaped ` + (r'\$\w+', String.Interpol), # Subst var + (r'\${.*?}', String.Interpol), # Subst expr + (r'"', Punctuation, '#pop'), # Closing quot + (r'.', String) # String content + ], + 'insideUri': [ # TODO: remove copy/paste str/uri + (r'\\\\', String.Escape), # Escaped backslash + (r'\\"', String.Escape), # Escaped " + (r'\\`', String.Escape), # Escaped ` + (r'\$\w+', String.Interpol), # Subst var + (r'\${.*?}', String.Interpol), # Subst expr + (r'`', Punctuation, '#pop'), # Closing tick + (r'.', String.Backtick) # URI content + ], + 'protectionKeywords': [ + (r'\b(public|protected|private|internal)\b', Keyword), + ], + 'typeKeywords': [ + (r'\b(abstract|final|const|native|facet|enum)\b', Keyword), + ], + 'methodKeywords': [ + (r'\b(abstract|native|once|override|static|virtual|final)\b', + Keyword), + ], + 'fieldKeywords': [ + (r'\b(abstract|const|final|native|override|static|virtual|' + r'readonly)\b', Keyword) + ], + 'otherKeywords': [ + (words(( + 'try', 'catch', 'throw', 'finally', 'for', 'if', 'else', 'while', + 'as', 'is', 'isnot', 'switch', 'case', 'default', 'continue', + 'break', 'do', 'return', 'get', 'set'), prefix=r'\b', suffix=r'\b'), + Keyword), + (r'\b(it|this|super)\b', Name.Builtin.Pseudo), + ], + 'operators': [ + (r'\+\+|\-\-|\+|\-|\*|/|\|\||&&|<=>|<=|<|>=|>|=|!|\[|\]', Operator) + ], + 'inType': [ + (r'[\[\]\|\->:\?]', Punctuation), + (s(r'$id'), Name.Class), + default('#pop'), + + ], + 'root': [ + include('comments'), + include('protectionKeywords'), + include('typeKeywords'), + include('methodKeywords'), + include('fieldKeywords'), + include('literals'), + include('otherKeywords'), + include('operators'), + 
(r'using\b', Keyword.Namespace, 'using'), # Using stmt + (r'@\w+', Name.Decorator, 'facet'), # Symbol + (r'(class|mixin)(\s+)(\w+)', bygroups(Keyword, Text, Name.Class), + 'inheritance'), # Inheritance list + + # Type var := val + (s(r'($type)([ \t]+)($id)(\s*)(:=)'), + bygroups(using(this, state='inType'), Text, + Name.Variable, Text, Operator)), + + # var := val + (s(r'($id)(\s*)(:=)'), + bygroups(Name.Variable, Text, Operator)), + + # .someId( or ->someId( ### + (s(r'(\.|(?:\->))($id)(\s*)(\()'), + bygroups(Operator, Name.Function, Text, Punctuation), + 'insideParen'), + + # .someId or ->someId + (s(r'(\.|(?:\->))($id)'), + bygroups(Operator, Name.Function)), + + # new makeXXX ( + (r'(new)(\s+)(make\w*)(\s*)(\()', + bygroups(Keyword, Text, Name.Function, Text, Punctuation), + 'insideMethodDeclArgs'), + + # Type name ( + (s(r'($type)([ \t]+)' # Return type and whitespace + r'($id)(\s*)(\()'), # method name + open brace + bygroups(using(this, state='inType'), Text, + Name.Function, Text, Punctuation), + 'insideMethodDeclArgs'), + + # ArgType argName, + (s(r'($type)(\s+)($id)(\s*)(,)'), + bygroups(using(this, state='inType'), Text, Name.Variable, + Text, Punctuation)), + + # ArgType argName) + # Covered in 'insideParen' state + + # ArgType argName -> ArgType| + (s(r'($type)(\s+)($id)(\s*)(\->)(\s*)($type)(\|)'), + bygroups(using(this, state='inType'), Text, Name.Variable, + Text, Punctuation, Text, using(this, state='inType'), + Punctuation)), + + # ArgType argName| + (s(r'($type)(\s+)($id)(\s*)(\|)'), + bygroups(using(this, state='inType'), Text, Name.Variable, + Text, Punctuation)), + + # Type var + (s(r'($type)([ \t]+)($id)'), + bygroups(using(this, state='inType'), Text, + Name.Variable)), + + (r'\(', Punctuation, 'insideParen'), + (r'\{', Punctuation, 'insideBrace'), + (r'.', Text) + ], + 'insideParen': [ + (r'\)', Punctuation, '#pop'), + include('root'), + ], + 'insideMethodDeclArgs': [ + (r'\)', Punctuation, '#pop'), + (s(r'($type)(\s+)($id)(\s*)(\))'), + bygroups(using(this, state='inType'), Text, Name.Variable, + Text, Punctuation), '#pop'), + include('root'), + ], + 'insideBrace': [ + (r'\}', Punctuation, '#pop'), + include('root'), + ], + 'inheritance': [ + (r'\s+', Text), # Whitespace + (r':|,', Punctuation), + (r'(?:(\w+)(::))?(\w+)', + bygroups(Name.Namespace, Punctuation, Name.Class)), + (r'{', Punctuation, '#pop') + ], + 'using': [ + (r'[ \t]+', Text), # consume whitespaces + (r'(\[)(\w+)(\])', + bygroups(Punctuation, Comment.Special, Punctuation)), # ffi + (r'(\")?([\w\.]+)(\")?', + bygroups(Punctuation, Name.Namespace, Punctuation)), # podname + (r'::', Punctuation, 'usingClass'), + default('#pop') + ], + 'usingClass': [ + (r'[ \t]+', Text), # consume whitespaces + (r'(as)(\s+)(\w+)', + bygroups(Keyword.Declaration, Text, Name.Class), '#pop:2'), + (r'[\w\$]+', Name.Class), + default('#pop:2') # jump out to root state + ], + 'facet': [ + (r'\s+', Text), + (r'{', Punctuation, 'facetFields'), + default('#pop') + ], + 'facetFields': [ + include('comments'), + include('literals'), + include('operators'), + (r'\s+', Text), + (r'(\s*)(\w+)(\s*)(=)', bygroups(Text, Name, Text, Operator)), + (r'}', Punctuation, '#pop'), + (r'.', Text) + ], + } diff --git a/pygments/lexers/misc/felix.py b/pygments/lexers/misc/felix.py new file mode 100644 index 00000000..a33b2efe --- /dev/null +++ b/pygments/lexers/misc/felix.py @@ -0,0 +1,273 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.misc.felix + ~~~~~~~~~~~~~~~~~~~~~~~~~~ + + Lexer for the Felix language. 
+ + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +from pygments.lexer import RegexLexer, include, bygroups, default, words, \ + combined +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation + +__all__ = ['FelixLexer'] + + +class FelixLexer(RegexLexer): + """ + For `Felix `_ source code. + + .. versionadded:: 1.2 + """ + + name = 'Felix' + aliases = ['felix', 'flx'] + filenames = ['*.flx', '*.flxh'] + mimetypes = ['text/x-felix'] + + preproc = ( + 'elif', 'else', 'endif', 'if', 'ifdef', 'ifndef', + ) + + keywords = ( + '_', '_deref', 'all', 'as', + 'assert', 'attempt', 'call', 'callback', 'case', 'caseno', 'cclass', + 'code', 'compound', 'ctypes', 'do', 'done', 'downto', 'elif', 'else', + 'endattempt', 'endcase', 'endif', 'endmatch', 'enum', 'except', + 'exceptions', 'expect', 'finally', 'for', 'forall', 'forget', 'fork', + 'functor', 'goto', 'ident', 'if', 'incomplete', 'inherit', 'instance', + 'interface', 'jump', 'lambda', 'loop', 'match', 'module', 'namespace', + 'new', 'noexpand', 'nonterm', 'obj', 'of', 'open', 'parse', 'raise', + 'regexp', 'reglex', 'regmatch', 'rename', 'return', 'the', 'then', + 'to', 'type', 'typecase', 'typedef', 'typematch', 'typeof', 'upto', + 'when', 'whilst', 'with', 'yield', + ) + + keyword_directives = ( + '_gc_pointer', '_gc_type', 'body', 'comment', 'const', 'export', + 'header', 'inline', 'lval', 'macro', 'noinline', 'noreturn', + 'package', 'private', 'pod', 'property', 'public', 'publish', + 'requires', 'todo', 'virtual', 'use', + ) + + keyword_declarations = ( + 'def', 'let', 'ref', 'val', 'var', + ) + + keyword_types = ( + 'unit', 'void', 'any', 'bool', + 'byte', 'offset', + 'address', 'caddress', 'cvaddress', 'vaddress', + 'tiny', 'short', 'int', 'long', 'vlong', + 'utiny', 'ushort', 'vshort', 'uint', 'ulong', 'uvlong', + 'int8', 'int16', 'int32', 'int64', + 'uint8', 'uint16', 'uint32', 'uint64', + 'float', 'double', 'ldouble', + 'complex', 'dcomplex', 'lcomplex', + 'imaginary', 'dimaginary', 'limaginary', + 'char', 'wchar', 'uchar', + 'charp', 'charcp', 'ucharp', 'ucharcp', + 'string', 'wstring', 'ustring', + 'cont', + 'array', 'varray', 'list', + 'lvalue', 'opt', 'slice', + ) + + keyword_constants = ( + 'false', 'true', + ) + + operator_words = ( + 'and', 'not', 'in', 'is', 'isin', 'or', 'xor', + ) + + name_builtins = ( + '_svc', 'while', + ) + + name_pseudo = ( + 'root', 'self', 'this', + ) + + decimal_suffixes = '([tTsSiIlLvV]|ll|LL|([iIuU])(8|16|32|64))?' 
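+
+    # decimal_suffixes is spliced (via %s) into the binary/octal/hex/decimal
+    # integer rules in 'root' below, so that suffixed literals such as
+    # 42u32, 0xFF_FFi64, 0b1010v or 100LL are lexed as a single number token
+    # rather than as a number followed by a name.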
+ + tokens = { + 'root': [ + include('whitespace'), + + # Keywords + (words(('axiom', 'ctor', 'fun', 'gen', 'proc', 'reduce', + 'union'), suffix=r'\b'), + Keyword, 'funcname'), + (words(('class', 'cclass', 'cstruct', 'obj', 'struct'), suffix=r'\b'), + Keyword, 'classname'), + (r'(instance|module|typeclass)\b', Keyword, 'modulename'), + + (words(keywords, suffix=r'\b'), Keyword), + (words(keyword_directives, suffix=r'\b'), Name.Decorator), + (words(keyword_declarations, suffix=r'\b'), Keyword.Declaration), + (words(keyword_types, suffix=r'\b'), Keyword.Type), + (words(keyword_constants, suffix=r'\b'), Keyword.Constant), + + # Operators + include('operators'), + + # Float Literal + # -- Hex Float + (r'0[xX]([0-9a-fA-F_]*\.[0-9a-fA-F_]+|[0-9a-fA-F_]+)' + r'[pP][+\-]?[0-9_]+[lLfFdD]?', Number.Float), + # -- DecimalFloat + (r'[0-9_]+(\.[0-9_]+[eE][+\-]?[0-9_]+|' + r'\.[0-9_]*|[eE][+\-]?[0-9_]+)[lLfFdD]?', Number.Float), + (r'\.(0|[1-9][0-9_]*)([eE][+\-]?[0-9_]+)?[lLfFdD]?', + Number.Float), + + # IntegerLiteral + # -- Binary + (r'0[Bb][01_]+%s' % decimal_suffixes, Number.Bin), + # -- Octal + (r'0[0-7_]+%s' % decimal_suffixes, Number.Oct), + # -- Hexadecimal + (r'0[xX][0-9a-fA-F_]+%s' % decimal_suffixes, Number.Hex), + # -- Decimal + (r'(0|[1-9][0-9_]*)%s' % decimal_suffixes, Number.Integer), + + # Strings + ('([rR][cC]?|[cC][rR])"""', String, 'tdqs'), + ("([rR][cC]?|[cC][rR])'''", String, 'tsqs'), + ('([rR][cC]?|[cC][rR])"', String, 'dqs'), + ("([rR][cC]?|[cC][rR])'", String, 'sqs'), + ('[cCfFqQwWuU]?"""', String, combined('stringescape', 'tdqs')), + ("[cCfFqQwWuU]?'''", String, combined('stringescape', 'tsqs')), + ('[cCfFqQwWuU]?"', String, combined('stringescape', 'dqs')), + ("[cCfFqQwWuU]?'", String, combined('stringescape', 'sqs')), + + # Punctuation + (r'[\[\]{}:(),;?]', Punctuation), + + # Labels + (r'[a-zA-Z_]\w*:>', Name.Label), + + # Identifiers + (r'(%s)\b' % '|'.join(name_builtins), Name.Builtin), + (r'(%s)\b' % '|'.join(name_pseudo), Name.Builtin.Pseudo), + (r'[a-zA-Z_]\w*', Name), + ], + 'whitespace': [ + (r'\n', Text), + (r'\s+', Text), + + include('comment'), + + # Preprocessor + (r'#\s*if\s+0', Comment.Preproc, 'if0'), + (r'#', Comment.Preproc, 'macro'), + ], + 'operators': [ + (r'(%s)\b' % '|'.join(operator_words), Operator.Word), + (r'!=|==|<<|>>|\|\||&&|[-~+/*%=<>&^|.$]', Operator), + ], + 'comment': [ + (r'//(.*?)\n', Comment.Single), + (r'/[*]', Comment.Multiline, 'comment2'), + ], + 'comment2': [ + (r'[^\/*]', Comment.Multiline), + (r'/[*]', Comment.Multiline, '#push'), + (r'[*]/', Comment.Multiline, '#pop'), + (r'[\/*]', Comment.Multiline), + ], + 'if0': [ + (r'^\s*#if.*?(?]*?>)', + bygroups(Comment.Preproc, Text, String), '#pop'), + (r'(import|include)(\s+)("[^"]*?")', + bygroups(Comment.Preproc, Text, String), '#pop'), + (r"(import|include)(\s+)('[^']*?')", + bygroups(Comment.Preproc, Text, String), '#pop'), + (r'[^/\n]+', Comment.Preproc), + # (r'/[*](.|\n)*?[*]/', Comment), + # (r'//.*?\n', Comment, '#pop'), + (r'/', Comment.Preproc), + (r'(?<=\\)\n', Comment.Preproc), + (r'\n', Comment.Preproc, '#pop'), + ], + 'funcname': [ + include('whitespace'), + (r'[a-zA-Z_]\w*', Name.Function, '#pop'), + # anonymous functions + (r'(?=\()', Text, '#pop'), + ], + 'classname': [ + include('whitespace'), + (r'[a-zA-Z_]\w*', Name.Class, '#pop'), + # anonymous classes + (r'(?=\{)', Text, '#pop'), + ], + 'modulename': [ + include('whitespace'), + (r'\[', Punctuation, ('modulename2', 'tvarlist')), + default('modulename2'), + ], + 'modulename2': [ + include('whitespace'), + 
(r'([a-zA-Z_]\w*)', Name.Namespace, '#pop:2'), + ], + 'tvarlist': [ + include('whitespace'), + include('operators'), + (r'\[', Punctuation, '#push'), + (r'\]', Punctuation, '#pop'), + (r',', Punctuation), + (r'(with|where)\b', Keyword), + (r'[a-zA-Z_]\w*', Name), + ], + 'stringescape': [ + (r'\\([\\abfnrtv"\']|\n|N{.*?}|u[a-fA-F0-9]{4}|' + r'U[a-fA-F0-9]{8}|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape) + ], + 'strings': [ + (r'%(\([a-zA-Z0-9]+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?' + '[hlL]?[diouxXeEfFgGcrs%]', String.Interpol), + (r'[^\\\'"%\n]+', String), + # quotes, percents and backslashes must be parsed one at a time + (r'[\'"\\]', String), + # unhandled string formatting sign + (r'%', String) + # newlines are an error (use "nl" state) + ], + 'nl': [ + (r'\n', String) + ], + 'dqs': [ + (r'"', String, '#pop'), + # included here again for raw strings + (r'\\\\|\\"|\\\n', String.Escape), + include('strings') + ], + 'sqs': [ + (r"'", String, '#pop'), + # included here again for raw strings + (r"\\\\|\\'|\\\n", String.Escape), + include('strings') + ], + 'tdqs': [ + (r'"""', String, '#pop'), + include('strings'), + include('nl') + ], + 'tsqs': [ + (r"'''", String, '#pop'), + include('strings'), + include('nl') + ], + } diff --git a/pygments/lexers/misc/nimrod.py b/pygments/lexers/misc/nimrod.py new file mode 100644 index 00000000..60977c8a --- /dev/null +++ b/pygments/lexers/misc/nimrod.py @@ -0,0 +1,159 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.misc.nimrod + ~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + Lexer for the Nimrod language. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import re + +from pygments.lexer import RegexLexer, include, default +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation, Error + +__all__ = ['NimrodLexer'] + + +class NimrodLexer(RegexLexer): + """ + For `Nimrod `_ source code. + + .. 
versionadded:: 1.5 + """ + + name = 'Nimrod' + aliases = ['nimrod', 'nim'] + filenames = ['*.nim', '*.nimrod'] + mimetypes = ['text/x-nimrod'] + + flags = re.MULTILINE | re.IGNORECASE | re.UNICODE + + def underscorize(words): + newWords = [] + new = "" + for word in words: + for ch in word: + new += (ch + "_?") + newWords.append(new) + new = "" + return "|".join(newWords) + + keywords = [ + 'addr', 'and', 'as', 'asm', 'atomic', 'bind', 'block', 'break', + 'case', 'cast', 'const', 'continue', 'converter', 'discard', + 'distinct', 'div', 'elif', 'else', 'end', 'enum', 'except', 'finally', + 'for', 'generic', 'if', 'implies', 'in', 'yield', + 'is', 'isnot', 'iterator', 'lambda', 'let', 'macro', 'method', + 'mod', 'not', 'notin', 'object', 'of', 'or', 'out', 'proc', + 'ptr', 'raise', 'ref', 'return', 'shl', 'shr', 'template', 'try', + 'tuple', 'type', 'when', 'while', 'with', 'without', 'xor' + ] + + keywordsPseudo = [ + 'nil', 'true', 'false' + ] + + opWords = [ + 'and', 'or', 'not', 'xor', 'shl', 'shr', 'div', 'mod', 'in', + 'notin', 'is', 'isnot' + ] + + types = [ + 'int', 'int8', 'int16', 'int32', 'int64', 'float', 'float32', 'float64', + 'bool', 'char', 'range', 'array', 'seq', 'set', 'string' + ] + + tokens = { + 'root': [ + (r'##.*$', String.Doc), + (r'#.*$', Comment), + (r'\*|=|>|<|\+|-|/|@|\$|~|&|%|\!|\?|\||\\|\[|\]', Operator), + (r'\.\.|\.|,|\[\.|\.\]|{\.|\.}|\(\.|\.\)|{|}|\(|\)|:|\^|`|;', + Punctuation), + + # Strings + (r'(?:[\w]+)"', String, 'rdqs'), + (r'"""', String, 'tdqs'), + ('"', String, 'dqs'), + + # Char + ("'", String.Char, 'chars'), + + # Keywords + (r'(%s)\b' % underscorize(opWords), Operator.Word), + (r'(p_?r_?o_?c_?\s)(?![\(\[\]])', Keyword, 'funcname'), + (r'(%s)\b' % underscorize(keywords), Keyword), + (r'(%s)\b' % underscorize(['from', 'import', 'include']), + Keyword.Namespace), + (r'(v_?a_?r)\b', Keyword.Declaration), + (r'(%s)\b' % underscorize(types), Keyword.Type), + (r'(%s)\b' % underscorize(keywordsPseudo), Keyword.Pseudo), + # Identifiers + (r'\b((?![_\d])\w)(((?!_)\w)|(_(?!_)\w))*', Name), + # Numbers + (r'[0-9][0-9_]*(?=([eE.]|\'[fF](32|64)))', + Number.Float, ('float-suffix', 'float-number')), + (r'0[xX][a-f0-9][a-f0-9_]*', Number.Hex, 'int-suffix'), + (r'0[bB][01][01_]*', Number.Bin, 'int-suffix'), + (r'0o[0-7][0-7_]*', Number.Oct, 'int-suffix'), + (r'[0-9][0-9_]*', Number.Integer, 'int-suffix'), + # Whitespace + (r'\s+', Text), + (r'.+$', Error), + ], + 'chars': [ + (r'\\([\\abcefnrtvl"\']|x[a-f0-9]{2}|[0-9]{1,3})', String.Escape), + (r"'", String.Char, '#pop'), + (r".", String.Char) + ], + 'strings': [ + (r'(?`_ source. + + .. 
versionadded:: 2.0 + """ + + name = 'Nit' + aliases = ['nit'] + filenames = ['*.nit'] + tokens = { + 'root': [ + (r'#.*?$', Comment.Single), + (words(( + 'package', 'module', 'import', 'class', 'abstract', 'interface', + 'universal', 'enum', 'end', 'fun', 'type', 'init', 'redef', + 'isa', 'do', 'readable', 'writable', 'var', 'intern', 'extern', + 'public', 'protected', 'private', 'intrude', 'if', 'then', + 'else', 'while', 'loop', 'for', 'in', 'and', 'or', 'not', + 'implies', 'return', 'continue', 'break', 'abort', 'assert', + 'new', 'is', 'once', 'super', 'self', 'true', 'false', 'nullable', + 'null', 'as', 'isset', 'label', '__debug__'), suffix='(?=( |\n|\t|\r|\())'), + Keyword), + (r'[A-Z][A-Za-z0-9_]*', Name.Class), + (r'"""(([^\'\\]|\\.)|\\r|\\n)*(({{?)?(""?{{?)*""""*)', String), # Simple long string + (r'\'\'\'(((\\.|[^\'\\])|\\r|\\n)|\'((\\.|[^\'\\])|\\r|\\n)|' + r'\'\'((\\.|[^\'\\])|\\r|\\n))*\'\'\'', String), # Simple long string alt + (r'"""(([^\'\\]|\\.)|\\r|\\n)*((""?)?({{?""?)*{{{{*)', String), # Start long string + (r'}}}(((\\.|[^\'\\])|\\r|\\n))*(""?)?({{?""?)*{{{{*', String), # Mid long string + (r'}}}(((\\.|[^\'\\])|\\r|\\n))*({{?)?(""?{{?)*""""*', String), # End long string + (r'"(\\.|([^"}{\\]))*"', String), # Simple String + (r'"(\\.|([^"}{\\]))*{', String), # Start string + (r'}(\\.|([^"}{\\]))*{', String), # Mid String + (r'}(\\.|([^"}{\\]))*"', String), # End String + (r'(\'[^\'\\]\')|(\'\\.\')', String.Char), + (r'[0-9]+', Number.Integer), + (r'[0-9]*.[0-9]+', Number.Float), + (r'0(x|X)[0-9A-Fa-f]+', Number.Hex), + (r'[a-z][A-Za-z0-9_]*', Name), + (r'_[A-Za-z0-9_]+', Name.Variable.Instance), + (r'==|!=|<==>|>=|>>|>|<=|<<|<|\+|-|=|/|\*|%|\+=|-=|!|@', Operator), + (r'\(|\)|\[|\]|,|\.\.\.|\.\.|\.|::|:', Punctuation), + (r'`{[^`]*`}', Text), # Extern blocks won't be Lexed by Nit + ('(\r|\n| |\t)+', Text), + ], + } diff --git a/pygments/lexers/misc/ooc.py b/pygments/lexers/misc/ooc.py new file mode 100644 index 00000000..30376853 --- /dev/null +++ b/pygments/lexers/misc/ooc.py @@ -0,0 +1,85 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.misc.ooc + ~~~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for the Ooc language. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +from pygments.lexer import RegexLexer, bygroups, words +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation + +__all__ = ['OocLexer'] + + +class OocLexer(RegexLexer): + """ + For `Ooc `_ source code + + .. 
versionadded:: 1.2 + """ + name = 'Ooc' + aliases = ['ooc'] + filenames = ['*.ooc'] + mimetypes = ['text/x-ooc'] + + tokens = { + 'root': [ + (words(( + 'class', 'interface', 'implement', 'abstract', 'extends', 'from', + 'this', 'super', 'new', 'const', 'final', 'static', 'import', + 'use', 'extern', 'inline', 'proto', 'break', 'continue', + 'fallthrough', 'operator', 'if', 'else', 'for', 'while', 'do', + 'switch', 'case', 'as', 'in', 'version', 'return', 'true', + 'false', 'null'), prefix=r'\b', suffix=r'\b'), + Keyword), + (r'include\b', Keyword, 'include'), + (r'(cover)([ \t]+)(from)([ \t]+)(\w+[*@]?)', + bygroups(Keyword, Text, Keyword, Text, Name.Class)), + (r'(func)((?:[ \t]|\\\n)+)(~[a-z_]\w*)', + bygroups(Keyword, Text, Name.Function)), + (r'\bfunc\b', Keyword), + # Note: %= and ^= not listed on http://ooc-lang.org/syntax + (r'//.*', Comment), + (r'(?s)/\*.*?\*/', Comment.Multiline), + (r'(==?|\+=?|-[=>]?|\*=?|/=?|:=|!=?|%=?|\?|>{1,3}=?|<{1,3}=?|\.\.|' + r'&&?|\|\|?|\^=?)', Operator), + (r'(\.)([ \t]*)([a-z]\w*)', bygroups(Operator, Text, + Name.Function)), + (r'[A-Z][A-Z0-9_]+', Name.Constant), + (r'[A-Z]\w*([@*]|\[[ \t]*\])?', Name.Class), + + (r'([a-z]\w*(?:~[a-z]\w*)?)((?:[ \t]|\\\n)*)(?=\()', + bygroups(Name.Function, Text)), + (r'[a-z]\w*', Name.Variable), + + # : introduces types + (r'[:(){}\[\];,]', Punctuation), + + (r'0x[0-9a-fA-F]+', Number.Hex), + (r'0c[0-9]+', Number.Oct), + (r'0b[01]+', Number.Bin), + (r'[0-9_]\.[0-9_]*(?!\.)', Number.Float), + (r'[0-9_]+', Number.Decimal), + + (r'"(?:\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\"])*"', + String.Double), + (r"'(?:\\.|\\[0-9]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", + String.Char), + (r'@', Punctuation), # pointer dereference + (r'\.', Punctuation), # imports or chain operator + + (r'\\[ \t\n]', Text), + (r'[ \t]+', Text), + ], + 'include': [ + (r'[\w/]+', Name), + (r',', Punctuation), + (r'[ \t]', Text), + (r'[;\n]', Text, '#pop'), + ], + } diff --git a/pygments/lexers/pascal.py b/pygments/lexers/pascal.py new file mode 100644 index 00000000..0381f19f --- /dev/null +++ b/pygments/lexers/pascal.py @@ -0,0 +1,833 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.pascal + ~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for Pascal family languages. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import re + +from pygments.lexer import Lexer, RegexLexer, include, bygroups, words, \ + using, this, default +from pygments.util import get_bool_opt, get_list_opt +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation, Error +from pygments.scanner import Scanner + +__all__ = ['DelphiLexer', 'Modula2Lexer', 'AdaLexer'] + + +class DelphiLexer(Lexer): + """ + For `Delphi `_ (Borland Object Pascal), + Turbo Pascal and Free Pascal source code. + + Additional options accepted: + + `turbopascal` + Highlight Turbo Pascal specific keywords (default: ``True``). + `delphi` + Highlight Borland Delphi specific keywords (default: ``True``). + `freepascal` + Highlight Free Pascal specific keywords (default: ``True``). + `units` + A list of units that should be considered builtin, supported are + ``System``, ``SysUtils``, ``Classes`` and ``Math``. + Default is to consider all of them builtin. 
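+
+    A minimal usage sketch (the file name and option values below are only
+    illustrative)::
+
+        from pygments import highlight
+        from pygments.formatters import TerminalFormatter
+        from pygments.lexers.pascal import DelphiLexer
+
+        code = open('unit1.pas').read()
+        lexer = DelphiLexer(freepascal=False, units=['System', 'SysUtils'])
+        print(highlight(code, lexer, TerminalFormatter()))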
+ """ + name = 'Delphi' + aliases = ['delphi', 'pas', 'pascal', 'objectpascal'] + filenames = ['*.pas'] + mimetypes = ['text/x-pascal'] + + TURBO_PASCAL_KEYWORDS = ( + 'absolute', 'and', 'array', 'asm', 'begin', 'break', 'case', + 'const', 'constructor', 'continue', 'destructor', 'div', 'do', + 'downto', 'else', 'end', 'file', 'for', 'function', 'goto', + 'if', 'implementation', 'in', 'inherited', 'inline', 'interface', + 'label', 'mod', 'nil', 'not', 'object', 'of', 'on', 'operator', + 'or', 'packed', 'procedure', 'program', 'record', 'reintroduce', + 'repeat', 'self', 'set', 'shl', 'shr', 'string', 'then', 'to', + 'type', 'unit', 'until', 'uses', 'var', 'while', 'with', 'xor' + ) + + DELPHI_KEYWORDS = ( + 'as', 'class', 'except', 'exports', 'finalization', 'finally', + 'initialization', 'is', 'library', 'on', 'property', 'raise', + 'threadvar', 'try' + ) + + FREE_PASCAL_KEYWORDS = ( + 'dispose', 'exit', 'false', 'new', 'true' + ) + + BLOCK_KEYWORDS = set(( + 'begin', 'class', 'const', 'constructor', 'destructor', 'end', + 'finalization', 'function', 'implementation', 'initialization', + 'label', 'library', 'operator', 'procedure', 'program', 'property', + 'record', 'threadvar', 'type', 'unit', 'uses', 'var' + )) + + FUNCTION_MODIFIERS = set(( + 'alias', 'cdecl', 'export', 'inline', 'interrupt', 'nostackframe', + 'pascal', 'register', 'safecall', 'softfloat', 'stdcall', + 'varargs', 'name', 'dynamic', 'near', 'virtual', 'external', + 'override', 'assembler' + )) + + # XXX: those aren't global. but currently we know no way for defining + # them just for the type context. + DIRECTIVES = set(( + 'absolute', 'abstract', 'assembler', 'cppdecl', 'default', 'far', + 'far16', 'forward', 'index', 'oldfpccall', 'private', 'protected', + 'published', 'public' + )) + + BUILTIN_TYPES = set(( + 'ansichar', 'ansistring', 'bool', 'boolean', 'byte', 'bytebool', + 'cardinal', 'char', 'comp', 'currency', 'double', 'dword', + 'extended', 'int64', 'integer', 'iunknown', 'longbool', 'longint', + 'longword', 'pansichar', 'pansistring', 'pbool', 'pboolean', + 'pbyte', 'pbytearray', 'pcardinal', 'pchar', 'pcomp', 'pcurrency', + 'pdate', 'pdatetime', 'pdouble', 'pdword', 'pextended', 'phandle', + 'pint64', 'pinteger', 'plongint', 'plongword', 'pointer', + 'ppointer', 'pshortint', 'pshortstring', 'psingle', 'psmallint', + 'pstring', 'pvariant', 'pwidechar', 'pwidestring', 'pword', + 'pwordarray', 'pwordbool', 'real', 'real48', 'shortint', + 'shortstring', 'single', 'smallint', 'string', 'tclass', 'tdate', + 'tdatetime', 'textfile', 'thandle', 'tobject', 'ttime', 'variant', + 'widechar', 'widestring', 'word', 'wordbool' + )) + + BUILTIN_UNITS = { + 'System': ( + 'abs', 'acquireexceptionobject', 'addr', 'ansitoutf8', + 'append', 'arctan', 'assert', 'assigned', 'assignfile', + 'beginthread', 'blockread', 'blockwrite', 'break', 'chdir', + 'chr', 'close', 'closefile', 'comptocurrency', 'comptodouble', + 'concat', 'continue', 'copy', 'cos', 'dec', 'delete', + 'dispose', 'doubletocomp', 'endthread', 'enummodules', + 'enumresourcemodules', 'eof', 'eoln', 'erase', 'exceptaddr', + 'exceptobject', 'exclude', 'exit', 'exp', 'filepos', 'filesize', + 'fillchar', 'finalize', 'findclasshinstance', 'findhinstance', + 'findresourcehinstance', 'flush', 'frac', 'freemem', + 'get8087cw', 'getdir', 'getlasterror', 'getmem', + 'getmemorymanager', 'getmodulefilename', 'getvariantmanager', + 'halt', 'hi', 'high', 'inc', 'include', 'initialize', 'insert', + 'int', 'ioresult', 'ismemorymanagerset', 'isvariantmanagerset', + 'length', 'ln', 
'lo', 'low', 'mkdir', 'move', 'new', 'odd', + 'olestrtostring', 'olestrtostrvar', 'ord', 'paramcount', + 'paramstr', 'pi', 'pos', 'pred', 'ptr', 'pucs4chars', 'random', + 'randomize', 'read', 'readln', 'reallocmem', + 'releaseexceptionobject', 'rename', 'reset', 'rewrite', 'rmdir', + 'round', 'runerror', 'seek', 'seekeof', 'seekeoln', + 'set8087cw', 'setlength', 'setlinebreakstyle', + 'setmemorymanager', 'setstring', 'settextbuf', + 'setvariantmanager', 'sin', 'sizeof', 'slice', 'sqr', 'sqrt', + 'str', 'stringofchar', 'stringtoolestr', 'stringtowidechar', + 'succ', 'swap', 'trunc', 'truncate', 'typeinfo', + 'ucs4stringtowidestring', 'unicodetoutf8', 'uniquestring', + 'upcase', 'utf8decode', 'utf8encode', 'utf8toansi', + 'utf8tounicode', 'val', 'vararrayredim', 'varclear', + 'widecharlentostring', 'widecharlentostrvar', + 'widechartostring', 'widechartostrvar', + 'widestringtoucs4string', 'write', 'writeln' + ), + 'SysUtils': ( + 'abort', 'addexitproc', 'addterminateproc', 'adjustlinebreaks', + 'allocmem', 'ansicomparefilename', 'ansicomparestr', + 'ansicomparetext', 'ansidequotedstr', 'ansiextractquotedstr', + 'ansilastchar', 'ansilowercase', 'ansilowercasefilename', + 'ansipos', 'ansiquotedstr', 'ansisamestr', 'ansisametext', + 'ansistrcomp', 'ansistricomp', 'ansistrlastchar', 'ansistrlcomp', + 'ansistrlicomp', 'ansistrlower', 'ansistrpos', 'ansistrrscan', + 'ansistrscan', 'ansistrupper', 'ansiuppercase', + 'ansiuppercasefilename', 'appendstr', 'assignstr', 'beep', + 'booltostr', 'bytetocharindex', 'bytetocharlen', 'bytetype', + 'callterminateprocs', 'changefileext', 'charlength', + 'chartobyteindex', 'chartobytelen', 'comparemem', 'comparestr', + 'comparetext', 'createdir', 'createguid', 'currentyear', + 'currtostr', 'currtostrf', 'date', 'datetimetofiledate', + 'datetimetostr', 'datetimetostring', 'datetimetosystemtime', + 'datetimetotimestamp', 'datetostr', 'dayofweek', 'decodedate', + 'decodedatefully', 'decodetime', 'deletefile', 'directoryexists', + 'diskfree', 'disksize', 'disposestr', 'encodedate', 'encodetime', + 'exceptionerrormessage', 'excludetrailingbackslash', + 'excludetrailingpathdelimiter', 'expandfilename', + 'expandfilenamecase', 'expanduncfilename', 'extractfiledir', + 'extractfiledrive', 'extractfileext', 'extractfilename', + 'extractfilepath', 'extractrelativepath', 'extractshortpathname', + 'fileage', 'fileclose', 'filecreate', 'filedatetodatetime', + 'fileexists', 'filegetattr', 'filegetdate', 'fileisreadonly', + 'fileopen', 'fileread', 'filesearch', 'fileseek', 'filesetattr', + 'filesetdate', 'filesetreadonly', 'filewrite', 'finalizepackage', + 'findclose', 'findcmdlineswitch', 'findfirst', 'findnext', + 'floattocurr', 'floattodatetime', 'floattodecimal', 'floattostr', + 'floattostrf', 'floattotext', 'floattotextfmt', 'fmtloadstr', + 'fmtstr', 'forcedirectories', 'format', 'formatbuf', 'formatcurr', + 'formatdatetime', 'formatfloat', 'freeandnil', 'getcurrentdir', + 'getenvironmentvariable', 'getfileversion', 'getformatsettings', + 'getlocaleformatsettings', 'getmodulename', 'getpackagedescription', + 'getpackageinfo', 'gettime', 'guidtostring', 'incamonth', + 'includetrailingbackslash', 'includetrailingpathdelimiter', + 'incmonth', 'initializepackage', 'interlockeddecrement', + 'interlockedexchange', 'interlockedexchangeadd', + 'interlockedincrement', 'inttohex', 'inttostr', 'isdelimiter', + 'isequalguid', 'isleapyear', 'ispathdelimiter', 'isvalidident', + 'languages', 'lastdelimiter', 'loadpackage', 'loadstr', + 'lowercase', 'msecstotimestamp', 'newstr', 
'nextcharindex', 'now', + 'outofmemoryerror', 'quotedstr', 'raiselastoserror', + 'raiselastwin32error', 'removedir', 'renamefile', 'replacedate', + 'replacetime', 'safeloadlibrary', 'samefilename', 'sametext', + 'setcurrentdir', 'showexception', 'sleep', 'stralloc', 'strbufsize', + 'strbytetype', 'strcat', 'strcharlength', 'strcomp', 'strcopy', + 'strdispose', 'strecopy', 'strend', 'strfmt', 'stricomp', + 'stringreplace', 'stringtoguid', 'strlcat', 'strlcomp', 'strlcopy', + 'strlen', 'strlfmt', 'strlicomp', 'strlower', 'strmove', 'strnew', + 'strnextchar', 'strpas', 'strpcopy', 'strplcopy', 'strpos', + 'strrscan', 'strscan', 'strtobool', 'strtobooldef', 'strtocurr', + 'strtocurrdef', 'strtodate', 'strtodatedef', 'strtodatetime', + 'strtodatetimedef', 'strtofloat', 'strtofloatdef', 'strtoint', + 'strtoint64', 'strtoint64def', 'strtointdef', 'strtotime', + 'strtotimedef', 'strupper', 'supports', 'syserrormessage', + 'systemtimetodatetime', 'texttofloat', 'time', 'timestamptodatetime', + 'timestamptomsecs', 'timetostr', 'trim', 'trimleft', 'trimright', + 'tryencodedate', 'tryencodetime', 'tryfloattocurr', 'tryfloattodatetime', + 'trystrtobool', 'trystrtocurr', 'trystrtodate', 'trystrtodatetime', + 'trystrtofloat', 'trystrtoint', 'trystrtoint64', 'trystrtotime', + 'unloadpackage', 'uppercase', 'widecomparestr', 'widecomparetext', + 'widefmtstr', 'wideformat', 'wideformatbuf', 'widelowercase', + 'widesamestr', 'widesametext', 'wideuppercase', 'win32check', + 'wraptext' + ), + 'Classes': ( + 'activateclassgroup', 'allocatehwnd', 'bintohex', 'checksynchronize', + 'collectionsequal', 'countgenerations', 'deallocatehwnd', 'equalrect', + 'extractstrings', 'findclass', 'findglobalcomponent', 'getclass', + 'groupdescendantswith', 'hextobin', 'identtoint', + 'initinheritedcomponent', 'inttoident', 'invalidpoint', + 'isuniqueglobalcomponentname', 'linestart', 'objectbinarytotext', + 'objectresourcetotext', 'objecttexttobinary', 'objecttexttoresource', + 'pointsequal', 'readcomponentres', 'readcomponentresex', + 'readcomponentresfile', 'rect', 'registerclass', 'registerclassalias', + 'registerclasses', 'registercomponents', 'registerintegerconsts', + 'registernoicon', 'registernonactivex', 'smallpoint', 'startclassgroup', + 'teststreamformat', 'unregisterclass', 'unregisterclasses', + 'unregisterintegerconsts', 'unregistermoduleclasses', + 'writecomponentresfile' + ), + 'Math': ( + 'arccos', 'arccosh', 'arccot', 'arccoth', 'arccsc', 'arccsch', 'arcsec', + 'arcsech', 'arcsin', 'arcsinh', 'arctan2', 'arctanh', 'ceil', + 'comparevalue', 'cosecant', 'cosh', 'cot', 'cotan', 'coth', 'csc', + 'csch', 'cycletodeg', 'cycletograd', 'cycletorad', 'degtocycle', + 'degtograd', 'degtorad', 'divmod', 'doubledecliningbalance', + 'ensurerange', 'floor', 'frexp', 'futurevalue', 'getexceptionmask', + 'getprecisionmode', 'getroundmode', 'gradtocycle', 'gradtodeg', + 'gradtorad', 'hypot', 'inrange', 'interestpayment', 'interestrate', + 'internalrateofreturn', 'intpower', 'isinfinite', 'isnan', 'iszero', + 'ldexp', 'lnxp1', 'log10', 'log2', 'logn', 'max', 'maxintvalue', + 'maxvalue', 'mean', 'meanandstddev', 'min', 'minintvalue', 'minvalue', + 'momentskewkurtosis', 'netpresentvalue', 'norm', 'numberofperiods', + 'payment', 'periodpayment', 'poly', 'popnstddev', 'popnvariance', + 'power', 'presentvalue', 'radtocycle', 'radtodeg', 'radtograd', + 'randg', 'randomrange', 'roundto', 'samevalue', 'sec', 'secant', + 'sech', 'setexceptionmask', 'setprecisionmode', 'setroundmode', + 'sign', 'simpleroundto', 'sincos', 'sinh', 
'slndepreciation', 'stddev', + 'sum', 'sumint', 'sumofsquares', 'sumsandsquares', 'syddepreciation', + 'tan', 'tanh', 'totalvariance', 'variance' + ) + } + + ASM_REGISTERS = set(( + 'ah', 'al', 'ax', 'bh', 'bl', 'bp', 'bx', 'ch', 'cl', 'cr0', + 'cr1', 'cr2', 'cr3', 'cr4', 'cs', 'cx', 'dh', 'di', 'dl', 'dr0', + 'dr1', 'dr2', 'dr3', 'dr4', 'dr5', 'dr6', 'dr7', 'ds', 'dx', + 'eax', 'ebp', 'ebx', 'ecx', 'edi', 'edx', 'es', 'esi', 'esp', + 'fs', 'gs', 'mm0', 'mm1', 'mm2', 'mm3', 'mm4', 'mm5', 'mm6', + 'mm7', 'si', 'sp', 'ss', 'st0', 'st1', 'st2', 'st3', 'st4', 'st5', + 'st6', 'st7', 'xmm0', 'xmm1', 'xmm2', 'xmm3', 'xmm4', 'xmm5', + 'xmm6', 'xmm7' + )) + + ASM_INSTRUCTIONS = set(( + 'aaa', 'aad', 'aam', 'aas', 'adc', 'add', 'and', 'arpl', 'bound', + 'bsf', 'bsr', 'bswap', 'bt', 'btc', 'btr', 'bts', 'call', 'cbw', + 'cdq', 'clc', 'cld', 'cli', 'clts', 'cmc', 'cmova', 'cmovae', + 'cmovb', 'cmovbe', 'cmovc', 'cmovcxz', 'cmove', 'cmovg', + 'cmovge', 'cmovl', 'cmovle', 'cmovna', 'cmovnae', 'cmovnb', + 'cmovnbe', 'cmovnc', 'cmovne', 'cmovng', 'cmovnge', 'cmovnl', + 'cmovnle', 'cmovno', 'cmovnp', 'cmovns', 'cmovnz', 'cmovo', + 'cmovp', 'cmovpe', 'cmovpo', 'cmovs', 'cmovz', 'cmp', 'cmpsb', + 'cmpsd', 'cmpsw', 'cmpxchg', 'cmpxchg486', 'cmpxchg8b', 'cpuid', + 'cwd', 'cwde', 'daa', 'das', 'dec', 'div', 'emms', 'enter', 'hlt', + 'ibts', 'icebp', 'idiv', 'imul', 'in', 'inc', 'insb', 'insd', + 'insw', 'int', 'int01', 'int03', 'int1', 'int3', 'into', 'invd', + 'invlpg', 'iret', 'iretd', 'iretw', 'ja', 'jae', 'jb', 'jbe', + 'jc', 'jcxz', 'jcxz', 'je', 'jecxz', 'jg', 'jge', 'jl', 'jle', + 'jmp', 'jna', 'jnae', 'jnb', 'jnbe', 'jnc', 'jne', 'jng', 'jnge', + 'jnl', 'jnle', 'jno', 'jnp', 'jns', 'jnz', 'jo', 'jp', 'jpe', + 'jpo', 'js', 'jz', 'lahf', 'lar', 'lcall', 'lds', 'lea', 'leave', + 'les', 'lfs', 'lgdt', 'lgs', 'lidt', 'ljmp', 'lldt', 'lmsw', + 'loadall', 'loadall286', 'lock', 'lodsb', 'lodsd', 'lodsw', + 'loop', 'loope', 'loopne', 'loopnz', 'loopz', 'lsl', 'lss', 'ltr', + 'mov', 'movd', 'movq', 'movsb', 'movsd', 'movsw', 'movsx', + 'movzx', 'mul', 'neg', 'nop', 'not', 'or', 'out', 'outsb', 'outsd', + 'outsw', 'pop', 'popa', 'popad', 'popaw', 'popf', 'popfd', 'popfw', + 'push', 'pusha', 'pushad', 'pushaw', 'pushf', 'pushfd', 'pushfw', + 'rcl', 'rcr', 'rdmsr', 'rdpmc', 'rdshr', 'rdtsc', 'rep', 'repe', + 'repne', 'repnz', 'repz', 'ret', 'retf', 'retn', 'rol', 'ror', + 'rsdc', 'rsldt', 'rsm', 'sahf', 'sal', 'salc', 'sar', 'sbb', + 'scasb', 'scasd', 'scasw', 'seta', 'setae', 'setb', 'setbe', + 'setc', 'setcxz', 'sete', 'setg', 'setge', 'setl', 'setle', + 'setna', 'setnae', 'setnb', 'setnbe', 'setnc', 'setne', 'setng', + 'setnge', 'setnl', 'setnle', 'setno', 'setnp', 'setns', 'setnz', + 'seto', 'setp', 'setpe', 'setpo', 'sets', 'setz', 'sgdt', 'shl', + 'shld', 'shr', 'shrd', 'sidt', 'sldt', 'smi', 'smint', 'smintold', + 'smsw', 'stc', 'std', 'sti', 'stosb', 'stosd', 'stosw', 'str', + 'sub', 'svdc', 'svldt', 'svts', 'syscall', 'sysenter', 'sysexit', + 'sysret', 'test', 'ud1', 'ud2', 'umov', 'verr', 'verw', 'wait', + 'wbinvd', 'wrmsr', 'wrshr', 'xadd', 'xbts', 'xchg', 'xlat', + 'xlatb', 'xor' + )) + + def __init__(self, **options): + Lexer.__init__(self, **options) + self.keywords = set() + if get_bool_opt(options, 'turbopascal', True): + self.keywords.update(self.TURBO_PASCAL_KEYWORDS) + if get_bool_opt(options, 'delphi', True): + self.keywords.update(self.DELPHI_KEYWORDS) + if get_bool_opt(options, 'freepascal', True): + self.keywords.update(self.FREE_PASCAL_KEYWORDS) + self.builtins = set() + for unit in 
get_list_opt(options, 'units', list(self.BUILTIN_UNITS)): + self.builtins.update(self.BUILTIN_UNITS[unit]) + + def get_tokens_unprocessed(self, text): + scanner = Scanner(text, re.DOTALL | re.MULTILINE | re.IGNORECASE) + stack = ['initial'] + in_function_block = False + in_property_block = False + was_dot = False + next_token_is_function = False + next_token_is_property = False + collect_labels = False + block_labels = set() + brace_balance = [0, 0] + + while not scanner.eos: + token = Error + + if stack[-1] == 'initial': + if scanner.scan(r'\s+'): + token = Text + elif scanner.scan(r'\{.*?\}|\(\*.*?\*\)'): + if scanner.match.startswith('$'): + token = Comment.Preproc + else: + token = Comment.Multiline + elif scanner.scan(r'//.*?$'): + token = Comment.Single + elif scanner.scan(r'[-+*\/=<>:;,.@\^]'): + token = Operator + # stop label highlighting on next ";" + if collect_labels and scanner.match == ';': + collect_labels = False + elif scanner.scan(r'[\(\)\[\]]+'): + token = Punctuation + # abort function naming ``foo = Function(...)`` + next_token_is_function = False + # if we are in a function block we count the open + # braces because ootherwise it's impossible to + # determine the end of the modifier context + if in_function_block or in_property_block: + if scanner.match == '(': + brace_balance[0] += 1 + elif scanner.match == ')': + brace_balance[0] -= 1 + elif scanner.match == '[': + brace_balance[1] += 1 + elif scanner.match == ']': + brace_balance[1] -= 1 + elif scanner.scan(r'[A-Za-z_][A-Za-z_0-9]*'): + lowercase_name = scanner.match.lower() + if lowercase_name == 'result': + token = Name.Builtin.Pseudo + elif lowercase_name in self.keywords: + token = Keyword + # if we are in a special block and a + # block ending keyword occours (and the parenthesis + # is balanced) we end the current block context + if (in_function_block or in_property_block) and \ + lowercase_name in self.BLOCK_KEYWORDS and \ + brace_balance[0] <= 0 and \ + brace_balance[1] <= 0: + in_function_block = False + in_property_block = False + brace_balance = [0, 0] + block_labels = set() + if lowercase_name in ('label', 'goto'): + collect_labels = True + elif lowercase_name == 'asm': + stack.append('asm') + elif lowercase_name == 'property': + in_property_block = True + next_token_is_property = True + elif lowercase_name in ('procedure', 'operator', + 'function', 'constructor', + 'destructor'): + in_function_block = True + next_token_is_function = True + # we are in a function block and the current name + # is in the set of registered modifiers. highlight + # it as pseudo keyword + elif in_function_block and \ + lowercase_name in self.FUNCTION_MODIFIERS: + token = Keyword.Pseudo + # if we are in a property highlight some more + # modifiers + elif in_property_block and \ + lowercase_name in ('read', 'write'): + token = Keyword.Pseudo + next_token_is_function = True + # if the last iteration set next_token_is_function + # to true we now want this name highlighted as + # function. so do that and reset the state + elif next_token_is_function: + # Look if the next token is a dot. 
If yes it's + # not a function, but a class name and the + # part after the dot a function name + if scanner.test(r'\s*\.\s*'): + token = Name.Class + # it's not a dot, our job is done + else: + token = Name.Function + next_token_is_function = False + # same for properties + elif next_token_is_property: + token = Name.Property + next_token_is_property = False + # Highlight this token as label and add it + # to the list of known labels + elif collect_labels: + token = Name.Label + block_labels.add(scanner.match.lower()) + # name is in list of known labels + elif lowercase_name in block_labels: + token = Name.Label + elif lowercase_name in self.BUILTIN_TYPES: + token = Keyword.Type + elif lowercase_name in self.DIRECTIVES: + token = Keyword.Pseudo + # builtins are just builtins if the token + # before isn't a dot + elif not was_dot and lowercase_name in self.builtins: + token = Name.Builtin + else: + token = Name + elif scanner.scan(r"'"): + token = String + stack.append('string') + elif scanner.scan(r'\#(\d+|\$[0-9A-Fa-f]+)'): + token = String.Char + elif scanner.scan(r'\$[0-9A-Fa-f]+'): + token = Number.Hex + elif scanner.scan(r'\d+(?![eE]|\.[^.])'): + token = Number.Integer + elif scanner.scan(r'\d+(\.\d+([eE][+-]?\d+)?|[eE][+-]?\d+)'): + token = Number.Float + else: + # if the stack depth is deeper than once, pop + if len(stack) > 1: + stack.pop() + scanner.get_char() + + elif stack[-1] == 'string': + if scanner.scan(r"''"): + token = String.Escape + elif scanner.scan(r"'"): + token = String + stack.pop() + elif scanner.scan(r"[^']*"): + token = String + else: + scanner.get_char() + stack.pop() + + elif stack[-1] == 'asm': + if scanner.scan(r'\s+'): + token = Text + elif scanner.scan(r'end'): + token = Keyword + stack.pop() + elif scanner.scan(r'\{.*?\}|\(\*.*?\*\)'): + if scanner.match.startswith('$'): + token = Comment.Preproc + else: + token = Comment.Multiline + elif scanner.scan(r'//.*?$'): + token = Comment.Single + elif scanner.scan(r"'"): + token = String + stack.append('string') + elif scanner.scan(r'@@[A-Za-z_][A-Za-z_0-9]*'): + token = Name.Label + elif scanner.scan(r'[A-Za-z_][A-Za-z_0-9]*'): + lowercase_name = scanner.match.lower() + if lowercase_name in self.ASM_INSTRUCTIONS: + token = Keyword + elif lowercase_name in self.ASM_REGISTERS: + token = Name.Builtin + else: + token = Name + elif scanner.scan(r'[-+*\/=<>:;,.@\^]+'): + token = Operator + elif scanner.scan(r'[\(\)\[\]]+'): + token = Punctuation + elif scanner.scan(r'\$[0-9A-Fa-f]+'): + token = Number.Hex + elif scanner.scan(r'\d+(?![eE]|\.[^.])'): + token = Number.Integer + elif scanner.scan(r'\d+(\.\d+([eE][+-]?\d+)?|[eE][+-]?\d+)'): + token = Number.Float + else: + scanner.get_char() + stack.pop() + + # save the dot!!!11 + if scanner.match.strip(): + was_dot = scanner.match == '.' + yield scanner.start_pos, token, scanner.match or '' + + +class Modula2Lexer(RegexLexer): + """ + For `Modula-2 `_ source code. + + Additional options that determine which keywords are highlighted: + + `pim` + Select PIM Modula-2 dialect (default: True). + `iso` + Select ISO Modula-2 dialect (default: False). + `objm2` + Select Objective Modula-2 dialect (default: False). + `gm2ext` + Also highlight GNU extensions (default: False). + + .. 
versionadded:: 1.3 + """ + name = 'Modula-2' + aliases = ['modula2', 'm2'] + filenames = ['*.def', '*.mod'] + mimetypes = ['text/x-modula2'] + + flags = re.MULTILINE | re.DOTALL + + tokens = { + 'whitespace': [ + (r'\n+', Text), # blank lines + (r'\s+', Text), # whitespace + ], + 'identifiers': [ + (r'([a-zA-Z_\$][\w\$]*)', Name), + ], + 'numliterals': [ + (r'[01]+B', Number.Bin), # binary number (ObjM2) + (r'[0-7]+B', Number.Oct), # octal number (PIM + ISO) + (r'[0-7]+C', Number.Oct), # char code (PIM + ISO) + (r'[0-9A-F]+C', Number.Hex), # char code (ObjM2) + (r'[0-9A-F]+H', Number.Hex), # hexadecimal number + (r'[0-9]+\.[0-9]+E[+-][0-9]+', Number.Float), # real number + (r'[0-9]+\.[0-9]+', Number.Float), # real number + (r'[0-9]+', Number.Integer), # decimal whole number + ], + 'strings': [ + (r"'(\\\\|\\'|[^'])*'", String), # single quoted string + (r'"(\\\\|\\"|[^"])*"', String), # double quoted string + ], + 'operators': [ + (r'[*/+=#~&<>\^-]', Operator), + (r':=', Operator), # assignment + (r'@', Operator), # pointer deref (ISO) + (r'\.\.', Operator), # ellipsis or range + (r'`', Operator), # Smalltalk message (ObjM2) + (r'::', Operator), # type conversion (ObjM2) + ], + 'punctuation': [ + (r'[\(\)\[\]{},.:;|]', Punctuation), + ], + 'comments': [ + (r'//.*?\n', Comment.Single), # ObjM2 + (r'/\*(.*?)\*/', Comment.Multiline), # ObjM2 + (r'\(\*([^\$].*?)\*\)', Comment.Multiline), + # TO DO: nesting of (* ... *) comments + ], + 'pragmas': [ + (r'\(\*\$(.*?)\*\)', Comment.Preproc), # PIM + (r'<\*(.*?)\*>', Comment.Preproc), # ISO + ObjM2 + ], + 'root': [ + include('whitespace'), + include('comments'), + include('pragmas'), + include('identifiers'), + include('numliterals'), + include('strings'), + include('operators'), + include('punctuation'), + ] + } + + pim_reserved_words = [ + # 40 reserved words + 'AND', 'ARRAY', 'BEGIN', 'BY', 'CASE', 'CONST', 'DEFINITION', + 'DIV', 'DO', 'ELSE', 'ELSIF', 'END', 'EXIT', 'EXPORT', 'FOR', + 'FROM', 'IF', 'IMPLEMENTATION', 'IMPORT', 'IN', 'LOOP', 'MOD', + 'MODULE', 'NOT', 'OF', 'OR', 'POINTER', 'PROCEDURE', 'QUALIFIED', + 'RECORD', 'REPEAT', 'RETURN', 'SET', 'THEN', 'TO', 'TYPE', + 'UNTIL', 'VAR', 'WHILE', 'WITH', + ] + + pim_pervasives = [ + # 31 pervasives + 'ABS', 'BITSET', 'BOOLEAN', 'CAP', 'CARDINAL', 'CHAR', 'CHR', 'DEC', + 'DISPOSE', 'EXCL', 'FALSE', 'FLOAT', 'HALT', 'HIGH', 'INC', 'INCL', + 'INTEGER', 'LONGINT', 'LONGREAL', 'MAX', 'MIN', 'NEW', 'NIL', 'ODD', + 'ORD', 'PROC', 'REAL', 'SIZE', 'TRUE', 'TRUNC', 'VAL', + ] + + iso_reserved_words = [ + # 46 reserved words + 'AND', 'ARRAY', 'BEGIN', 'BY', 'CASE', 'CONST', 'DEFINITION', 'DIV', + 'DO', 'ELSE', 'ELSIF', 'END', 'EXCEPT', 'EXIT', 'EXPORT', 'FINALLY', + 'FOR', 'FORWARD', 'FROM', 'IF', 'IMPLEMENTATION', 'IMPORT', 'IN', + 'LOOP', 'MOD', 'MODULE', 'NOT', 'OF', 'OR', 'PACKEDSET', 'POINTER', + 'PROCEDURE', 'QUALIFIED', 'RECORD', 'REPEAT', 'REM', 'RETRY', + 'RETURN', 'SET', 'THEN', 'TO', 'TYPE', 'UNTIL', 'VAR', 'WHILE', + 'WITH', + ] + + iso_pervasives = [ + # 42 pervasives + 'ABS', 'BITSET', 'BOOLEAN', 'CAP', 'CARDINAL', 'CHAR', 'CHR', 'CMPLX', + 'COMPLEX', 'DEC', 'DISPOSE', 'EXCL', 'FALSE', 'FLOAT', 'HALT', 'HIGH', + 'IM', 'INC', 'INCL', 'INT', 'INTEGER', 'INTERRUPTIBLE', 'LENGTH', + 'LFLOAT', 'LONGCOMPLEX', 'LONGINT', 'LONGREAL', 'MAX', 'MIN', 'NEW', + 'NIL', 'ODD', 'ORD', 'PROC', 'PROTECTION', 'RE', 'REAL', 'SIZE', + 'TRUE', 'TRUNC', 'UNINTERRUBTIBLE', 'VAL', + ] + + objm2_reserved_words = [ + # base language, 42 reserved words + 'AND', 'ARRAY', 'BEGIN', 'BY', 'CASE', 'CONST', 'DEFINITION', 
'DIV', + 'DO', 'ELSE', 'ELSIF', 'END', 'ENUM', 'EXIT', 'FOR', 'FROM', 'IF', + 'IMMUTABLE', 'IMPLEMENTATION', 'IMPORT', 'IN', 'IS', 'LOOP', 'MOD', + 'MODULE', 'NOT', 'OF', 'OPAQUE', 'OR', 'POINTER', 'PROCEDURE', + 'RECORD', 'REPEAT', 'RETURN', 'SET', 'THEN', 'TO', 'TYPE', + 'UNTIL', 'VAR', 'VARIADIC', 'WHILE', + # OO extensions, 16 reserved words + 'BYCOPY', 'BYREF', 'CLASS', 'CONTINUE', 'CRITICAL', 'INOUT', 'METHOD', + 'ON', 'OPTIONAL', 'OUT', 'PRIVATE', 'PROTECTED', 'PROTOCOL', 'PUBLIC', + 'SUPER', 'TRY', + ] + + objm2_pervasives = [ + # base language, 38 pervasives + 'ABS', 'BITSET', 'BOOLEAN', 'CARDINAL', 'CHAR', 'CHR', 'DISPOSE', + 'FALSE', 'HALT', 'HIGH', 'INTEGER', 'INRANGE', 'LENGTH', 'LONGCARD', + 'LONGINT', 'LONGREAL', 'MAX', 'MIN', 'NEG', 'NEW', 'NEXTV', 'NIL', + 'OCTET', 'ODD', 'ORD', 'PRED', 'PROC', 'READ', 'REAL', 'SUCC', 'TMAX', + 'TMIN', 'TRUE', 'TSIZE', 'UNICHAR', 'VAL', 'WRITE', 'WRITEF', + # OO extensions, 3 pervasives + 'OBJECT', 'NO', 'YES', + ] + + gnu_reserved_words = [ + # 10 additional reserved words + 'ASM', '__ATTRIBUTE__', '__BUILTIN__', '__COLUMN__', '__DATE__', + '__FILE__', '__FUNCTION__', '__LINE__', '__MODULE__', 'VOLATILE', + ] + + gnu_pervasives = [ + # 21 identifiers, actually from pseudo-module SYSTEM + # but we will highlight them as if they were pervasives + 'BITSET8', 'BITSET16', 'BITSET32', 'CARDINAL8', 'CARDINAL16', + 'CARDINAL32', 'CARDINAL64', 'COMPLEX32', 'COMPLEX64', 'COMPLEX96', + 'COMPLEX128', 'INTEGER8', 'INTEGER16', 'INTEGER32', 'INTEGER64', + 'REAL8', 'REAL16', 'REAL32', 'REAL96', 'REAL128', 'THROW', + ] + + def __init__(self, **options): + self.reserved_words = set() + self.pervasives = set() + # ISO Modula-2 + if get_bool_opt(options, 'iso', False): + self.reserved_words.update(self.iso_reserved_words) + self.pervasives.update(self.iso_pervasives) + # Objective Modula-2 + elif get_bool_opt(options, 'objm2', False): + self.reserved_words.update(self.objm2_reserved_words) + self.pervasives.update(self.objm2_pervasives) + # PIM Modula-2 (DEFAULT) + else: + self.reserved_words.update(self.pim_reserved_words) + self.pervasives.update(self.pim_pervasives) + # GNU extensions + if get_bool_opt(options, 'gm2ext', False): + self.reserved_words.update(self.gnu_reserved_words) + self.pervasives.update(self.gnu_pervasives) + # initialise + RegexLexer.__init__(self, **options) + + def get_tokens_unprocessed(self, text): + for index, token, value in RegexLexer.get_tokens_unprocessed(self, text): + # check for reserved words and pervasives + if token is Name: + if value in self.reserved_words: + token = Keyword.Reserved + elif value in self.pervasives: + token = Keyword.Pervasive + # return result + yield index, token, value + + +class AdaLexer(RegexLexer): + """ + For Ada source code. + + .. 
versionadded:: 1.3 + """ + + name = 'Ada' + aliases = ['ada', 'ada95', 'ada2005'] + filenames = ['*.adb', '*.ads', '*.ada'] + mimetypes = ['text/x-ada'] + + flags = re.MULTILINE | re.I # Ignore case + + tokens = { + 'root': [ + (r'[^\S\n]+', Text), + (r'--.*?\n', Comment.Single), + (r'[^\S\n]+', Text), + (r'function|procedure|entry', Keyword.Declaration, 'subprogram'), + (r'(subtype|type)(\s+)([a-z0-9_]+)', + bygroups(Keyword.Declaration, Text, Keyword.Type), 'type_def'), + (r'task|protected', Keyword.Declaration), + (r'(subtype)(\s+)', bygroups(Keyword.Declaration, Text)), + (r'(end)(\s+)', bygroups(Keyword.Reserved, Text), 'end'), + (r'(pragma)(\s+)(\w+)', bygroups(Keyword.Reserved, Text, + Comment.Preproc)), + (r'(true|false|null)\b', Keyword.Constant), + (words(( + 'Address', 'Byte', 'Boolean', 'Character', 'Controlled', 'Count', 'Cursor', + 'Duration', 'File_Mode', 'File_Type', 'Float', 'Generator', 'Integer', 'Long_Float', + 'Long_Integer', 'Long_Long_Float', 'Long_Long_Integer', 'Natural', 'Positive', + 'Reference_Type', 'Short_Float', 'Short_Integer', 'Short_Short_Float', + 'Short_Short_Integer', 'String', 'Wide_Character', 'Wide_String'), suffix=r'\b'), + Keyword.Type), + (r'(and(\s+then)?|in|mod|not|or(\s+else)|rem)\b', Operator.Word), + (r'generic|private', Keyword.Declaration), + (r'package', Keyword.Declaration, 'package'), + (r'array\b', Keyword.Reserved, 'array_def'), + (r'(with|use)(\s+)', bygroups(Keyword.Namespace, Text), 'import'), + (r'([a-z0-9_]+)(\s*)(:)(\s*)(constant)', + bygroups(Name.Constant, Text, Punctuation, Text, + Keyword.Reserved)), + (r'<<[a-z0-9_]+>>', Name.Label), + (r'([a-z0-9_]+)(\s*)(:)(\s*)(declare|begin|loop|for|while)', + bygroups(Name.Label, Text, Punctuation, Text, Keyword.Reserved)), + (words(( + 'abort', 'abs', 'abstract', 'accept', 'access', 'aliased', 'all', + 'array', 'at', 'begin', 'body', 'case', 'constant', 'declare', + 'delay', 'delta', 'digits', 'do', 'else', 'elsif', 'end', 'entry', + 'exception', 'exit', 'interface', 'for', 'goto', 'if', 'is', 'limited', + 'loop', 'new', 'null', 'of', 'or', 'others', 'out', 'overriding', + 'pragma', 'protected', 'raise', 'range', 'record', 'renames', 'requeue', + 'return', 'reverse', 'select', 'separate', 'subtype', 'synchronized', + 'task', 'tagged', 'terminate', 'then', 'type', 'until', 'when', + 'while', 'xor'), prefix=r'\b', suffix=r'\b'), + Keyword.Reserved), + (r'"[^"]*"', String), + include('attribute'), + include('numbers'), + (r"'[^']'", String.Character), + (r'([a-z0-9_]+)(\s*|[(,])', bygroups(Name, using(this))), + (r"(<>|=>|:=|[()|:;,.'])", Punctuation), + (r'[*<>+=/&-]', Operator), + (r'\n+', Text), + ], + 'numbers': [ + (r'[0-9_]+#[0-9a-f]+#', Number.Hex), + (r'[0-9_]+\.[0-9_]*', Number.Float), + (r'[0-9_]+', Number.Integer), + ], + 'attribute': [ + (r"(')(\w+)", bygroups(Punctuation, Name.Attribute)), + ], + 'subprogram': [ + (r'\(', Punctuation, ('#pop', 'formal_part')), + (r';', Punctuation, '#pop'), + (r'is\b', Keyword.Reserved, '#pop'), + (r'"[^"]+"|[a-z0-9_]+', Name.Function), + include('root'), + ], + 'end': [ + ('(if|case|record|loop|select)', Keyword.Reserved), + ('"[^"]+"|[\w.]+', Name.Function), + ('\s+', Text), + (';', Punctuation, '#pop'), + ], + 'type_def': [ + (r';', Punctuation, '#pop'), + (r'\(', Punctuation, 'formal_part'), + (r'with|and|use', Keyword.Reserved), + (r'array\b', Keyword.Reserved, ('#pop', 'array_def')), + (r'record\b', Keyword.Reserved, ('record_def')), + (r'(null record)(;)', bygroups(Keyword.Reserved, Punctuation), '#pop'), + include('root'), + ], + 
'array_def': [ + (r';', Punctuation, '#pop'), + (r'([a-z0-9_]+)(\s+)(range)', bygroups(Keyword.Type, Text, + Keyword.Reserved)), + include('root'), + ], + 'record_def': [ + (r'end record', Keyword.Reserved, '#pop'), + include('root'), + ], + 'import': [ + (r'[a-z0-9_.]+', Name.Namespace, '#pop'), + default('#pop'), + ], + 'formal_part': [ + (r'\)', Punctuation, '#pop'), + (r'[a-z0-9_]+', Name.Variable), + (r',|:[^=]', Punctuation), + (r'(in|not|null|out|access)\b', Keyword.Reserved), + include('root'), + ], + 'package': [ + ('body', Keyword.Declaration), + ('is\s+new|renames', Keyword.Reserved), + ('is', Keyword.Reserved, '#pop'), + (';', Punctuation, '#pop'), + ('\(', Punctuation, 'package_instantiation'), + ('([\w.]+)', Name.Class), + include('root'), + ], + 'package_instantiation': [ + (r'("[^"]+"|[a-z0-9_]+)(\s+)(=>)', bygroups(Name.Variable, + Text, Punctuation)), + (r'[a-z0-9._\'"]', Text), + (r'\)', Punctuation, '#pop'), + include('root'), + ], + } diff --git a/pygments/lexers/prolog.py b/pygments/lexers/prolog.py new file mode 100644 index 00000000..5b705a0b --- /dev/null +++ b/pygments/lexers/prolog.py @@ -0,0 +1,87 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.prolog + ~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for Prolog and Prolog-like languages. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import re + +from pygments.lexer import RegexLexer, include, bygroups, using, \ + this, inherit, default, words +from pygments.util import get_bool_opt +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation, Error + +__all__ = ['PrologLexer'] + + +class PrologLexer(RegexLexer): + """ + Lexer for Prolog files. + """ + name = 'Prolog' + aliases = ['prolog'] + filenames = ['*.prolog', '*.pro', '*.pl'] + mimetypes = ['text/x-prolog'] + + flags = re.UNICODE + + tokens = { + 'root': [ + (r'^#.*', Comment.Single), + (r'/\*', Comment.Multiline, 'nested-comment'), + (r'%.*', Comment.Single), + # character literal + (r'0\'.', String.Char), + (r'0b[01]+', Number.Bin), + (r'0o[0-7]+', Number.Oct), + (r'0x[0-9a-fA-F]+', Number.Hex), + # literal with prepended base + (r'\d\d?\'[a-zA-Z0-9]+', Number.Integer), + (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float), + (r'\d+', Number.Integer), + (r'[\[\](){}|.,;!]', Punctuation), + (r':-|-->', Punctuation), + (r'"(?:\\x[0-9a-fA-F]+\\|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|' + r'\\[0-7]+\\|\\["\nabcefnrstv]|[^\\"])*"', String.Double), + (r"'(?:''|[^'])*'", String.Atom), # quoted atom + # Needs to not be followed by an atom. + # (r'=(?=\s|[a-zA-Z\[])', Operator), + (r'is\b', Operator), + (r'(<|>|=<|>=|==|=:=|=|/|//|\*|\+|-)(?=\s|[a-zA-Z0-9\[])', + Operator), + (r'(mod|div|not)\b', Operator), + (r'_', Keyword), # The don't-care variable + (r'([a-z]+)(:)', bygroups(Name.Namespace, Punctuation)), + (u'([a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]' + u'[\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*)' + u'(\\s*)(:-|-->)', + bygroups(Name.Function, Text, Operator)), # function defn + (u'([a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]' + u'[\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*)' + u'(\\s*)(\\()', + bygroups(Name.Function, Text, Punctuation)), + (u'[a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]' + u'[\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*', + String.Atom), # atom, characters + # This one includes ! 
+ (u'[#&*+\\-./:<=>?@\\\\^~\u00a1-\u00bf\u2010-\u303f]+', + String.Atom), # atom, graphics + (r'[A-Z_]\w*', Name.Variable), + (u'\\s+|[\u2000-\u200f\ufff0-\ufffe\uffef]', Text), + ], + 'nested-comment': [ + (r'\*/', Comment.Multiline, '#pop'), + (r'/\*', Comment.Multiline, '#push'), + (r'[^*/]+', Comment.Multiline), + (r'[*/]', Comment.Multiline), + ], + } + + def analyse_text(text): + return ':-' in text diff --git a/pygments/lexers/python.py b/pygments/lexers/python.py new file mode 100644 index 00000000..aea29355 --- /dev/null +++ b/pygments/lexers/python.py @@ -0,0 +1,196 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.python + ~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for Python and related languages. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import re + +from pygments.lexer import RegexLexer, include, bygroups, using, \ + this, inherit, default, words, combined +from pygments.util import get_bool_opt +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation, Error + +__all__ = ['CythonLexer'] + + +class CythonLexer(RegexLexer): + """ + For Pyrex and `Cython `_ source code. + + .. versionadded:: 1.1 + """ + + name = 'Cython' + aliases = ['cython', 'pyx', 'pyrex'] + filenames = ['*.pyx', '*.pxd', '*.pxi'] + mimetypes = ['text/x-cython', 'application/x-cython'] + + tokens = { + 'root': [ + (r'\n', Text), + (r'^(\s*)("""(?:.|\n)*?""")', bygroups(Text, String.Doc)), + (r"^(\s*)('''(?:.|\n)*?''')", bygroups(Text, String.Doc)), + (r'[^\S\n]+', Text), + (r'#.*$', Comment), + (r'[]{}:(),;[]', Punctuation), + (r'\\\n', Text), + (r'\\', Text), + (r'(in|is|and|or|not)\b', Operator.Word), + (r'(<)([a-zA-Z0-9.?]+)(>)', + bygroups(Punctuation, Keyword.Type, Punctuation)), + (r'!=|==|<<|>>|[-~+/*%=<>&^|.?]', Operator), + (r'(from)(\d+)(<=)(\s+)(<)(\d+)(:)', + bygroups(Keyword, Number.Integer, Operator, Name, Operator, + Name, Punctuation)), + include('keywords'), + (r'(def|property)(\s+)', bygroups(Keyword, Text), 'funcname'), + (r'(cp?def)(\s+)', bygroups(Keyword, Text), 'cdef'), + (r'(class|struct)(\s+)', bygroups(Keyword, Text), 'classname'), + (r'(from)(\s+)', bygroups(Keyword, Text), 'fromimport'), + (r'(c?import)(\s+)', bygroups(Keyword, Text), 'import'), + include('builtins'), + include('backtick'), + ('(?:[rR]|[uU][rR]|[rR][uU])"""', String, 'tdqs'), + ("(?:[rR]|[uU][rR]|[rR][uU])'''", String, 'tsqs'), + ('(?:[rR]|[uU][rR]|[rR][uU])"', String, 'dqs'), + ("(?:[rR]|[uU][rR]|[rR][uU])'", String, 'sqs'), + ('[uU]?"""', String, combined('stringescape', 'tdqs')), + ("[uU]?'''", String, combined('stringescape', 'tsqs')), + ('[uU]?"', String, combined('stringescape', 'dqs')), + ("[uU]?'", String, combined('stringescape', 'sqs')), + include('name'), + include('numbers'), + ], + 'keywords': [ + (words(( + 'assert', 'break', 'by', 'continue', 'ctypedef', 'del', 'elif', + 'else', 'except', 'except?', 'exec', 'finally', 'for', 'gil', + 'global', 'if', 'include', 'lambda', 'nogil', 'pass', 'print', + 'raise', 'return', 'try', 'while', 'yield', 'as', 'with'), suffix=r'\b'), + Keyword), + (r'(DEF|IF|ELIF|ELSE)\b', Comment.Preproc), + ], + 'builtins': [ + (words(( + '__import__', 'abs', 'all', 'any', 'apply', 'basestring', 'bin', + 'bool', 'buffer', 'bytearray', 'bytes', 'callable', 'chr', + 'classmethod', 'cmp', 'coerce', 'compile', 'complex', 'delattr', + 'dict', 'dir', 'divmod', 'enumerate', 'eval', 'execfile', 'exit', + 'file', 'filter', 'float', 'frozenset', 'getattr', 'globals', + 
'hasattr', 'hash', 'hex', 'id', 'input', 'int', 'intern', 'isinstance', + 'issubclass', 'iter', 'len', 'list', 'locals', 'long', 'map', 'max', + 'min', 'next', 'object', 'oct', 'open', 'ord', 'pow', 'property', + 'range', 'raw_input', 'reduce', 'reload', 'repr', 'reversed', + 'round', 'set', 'setattr', 'slice', 'sorted', 'staticmethod', + 'str', 'sum', 'super', 'tuple', 'type', 'unichr', 'unicode', + 'vars', 'xrange', 'zip'), prefix=r'(? Date: Tue, 16 Sep 2014 20:21:53 +0200 Subject: split up agile.py into individual modules --- pygments/lexers/_mapping.py | 40 +- pygments/lexers/agile.py | 2551 +--------------------------------------- pygments/lexers/c_like/d.py | 71 +- pygments/lexers/misc/factor.py | 350 ++++++ pygments/lexers/misc/iolang.py | 63 + pygments/lexers/misc/tcl.py | 145 +++ pygments/lexers/perl.py | 604 ++++++++++ pygments/lexers/python.py | 638 +++++++++- pygments/lexers/ruby.py | 498 ++++++++ pygments/lexers/scripting.py | 272 +++++ 10 files changed, 2666 insertions(+), 2566 deletions(-) create mode 100644 pygments/lexers/misc/factor.py create mode 100644 pygments/lexers/misc/iolang.py create mode 100644 pygments/lexers/misc/tcl.py create mode 100644 pygments/lexers/perl.py create mode 100644 pygments/lexers/ruby.py create mode 100644 pygments/lexers/scripting.py diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index 716e0009..f3c09f61 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -61,7 +61,7 @@ LEXERS = { 'CbmBasicV2Lexer': ('pygments.lexers.other', 'CBM BASIC V2', ('cbmbas',), ('*.bas',), ()), 'CeylonLexer': ('pygments.lexers.jvm', 'Ceylon', ('ceylon',), ('*.ceylon',), ('text/x-ceylon',)), 'Cfengine3Lexer': ('pygments.lexers.other', 'CFEngine3', ('cfengine3', 'cf3'), ('*.cf',), ()), - 'ChaiscriptLexer': ('pygments.lexers.agile', 'ChaiScript', ('chai', 'chaiscript'), ('*.chai',), ('text/x-chaiscript', 'application/x-chaiscript')), + 'ChaiscriptLexer': ('pygments.lexers.scripting', 'ChaiScript', ('chai', 'chaiscript'), ('*.chai',), ('text/x-chaiscript', 'application/x-chaiscript')), 'ChapelLexer': ('pygments.lexers.misc.chapel', 'Chapel', ('chapel', 'chpl'), ('*.chpl',), ()), 'CheetahHtmlLexer': ('pygments.lexers.templates', 'HTML+Cheetah', ('html+cheetah', 'html+spitfire', 'htmlcheetah'), (), ('text/html+cheetah', 'text/html+spitfire')), 'CheetahJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Cheetah', ('js+cheetah', 'javascript+cheetah', 'js+spitfire', 'javascript+spitfire'), (), ('application/x-javascript+cheetah', 'text/x-javascript+cheetah', 'text/javascript+cheetah', 'application/x-javascript+spitfire', 'text/x-javascript+spitfire', 'text/javascript+spitfire')), @@ -81,7 +81,7 @@ LEXERS = { 'CoqLexer': ('pygments.lexers.functional', 'Coq', ('coq',), ('*.v',), ('text/x-coq',)), 'CppLexer': ('pygments.lexers.c_like.c_cpp', 'C++', ('cpp', 'c++'), ('*.cpp', '*.hpp', '*.c++', '*.h++', '*.cc', '*.hh', '*.cxx', '*.hxx', '*.C', '*.H', '*.cp', '*.CPP'), ('text/x-c++hdr', 'text/x-c++src')), 'CppObjdumpLexer': ('pygments.lexers.asm', 'cpp-objdump', ('cpp-objdump', 'c++-objdumb', 'cxx-objdump'), ('*.cpp-objdump', '*.c++-objdump', '*.cxx-objdump'), ('text/x-cpp-objdump',)), - 'CrocLexer': ('pygments.lexers.agile', 'Croc', ('croc',), ('*.croc',), ('text/x-crocsrc',)), + 'CrocLexer': ('pygments.lexers.c_like.d', 'Croc', ('croc',), ('*.croc',), ('text/x-crocsrc',)), 'CryptolLexer': ('pygments.lexers.functional', 'Cryptol', ('cryptol', 'cry'), ('*.cry',), ('text/x-cryptol',)), 'CssDjangoLexer': 
('pygments.lexers.templates', 'CSS+Django/Jinja', ('css+django', 'css+jinja'), (), ('text/css+django', 'text/css+jinja')), 'CssErbLexer': ('pygments.lexers.templates', 'CSS+Ruby', ('css+erb', 'css+ruby'), (), ('text/css+ruby',)), @@ -98,7 +98,7 @@ LEXERS = { 'DartLexer': ('pygments.lexers.web', 'Dart', ('dart',), ('*.dart',), ('text/x-dart',)), 'DebianControlLexer': ('pygments.lexers.text', 'Debian Control file', ('control', 'debcontrol'), ('control',), ()), 'DelphiLexer': ('pygments.lexers.pascal', 'Delphi', ('delphi', 'pas', 'pascal', 'objectpascal'), ('*.pas',), ('text/x-pascal',)), - 'DgLexer': ('pygments.lexers.agile', 'dg', ('dg',), ('*.dg',), ('text/x-dg',)), + 'DgLexer': ('pygments.lexers.python', 'dg', ('dg',), ('*.dg',), ('text/x-dg',)), 'DiffLexer': ('pygments.lexers.text', 'Diff', ('diff', 'udiff'), ('*.diff', '*.patch'), ('text/x-diff', 'text/x-patch')), 'DjangoLexer': ('pygments.lexers.templates', 'Django/Jinja', ('django', 'jinja'), (), ('application/x-django-templating', 'application/x-jinja')), 'DockerLexer': ('pygments.lexers.text', 'Docker', ('docker', 'dockerfile'), ('Dockerfile', '*.docker'), ('text/x-dockerfile-config',)), @@ -120,8 +120,8 @@ LEXERS = { 'EvoqueLexer': ('pygments.lexers.templates', 'Evoque', ('evoque',), ('*.evoque',), ('application/x-evoque',)), 'EvoqueXmlLexer': ('pygments.lexers.templates', 'XML+Evoque', ('xml+evoque',), ('*.xml',), ('application/xml+evoque',)), 'FSharpLexer': ('pygments.lexers.dotnet', 'FSharp', ('fsharp',), ('*.fs', '*.fsi'), ('text/x-fsharp',)), - 'FactorLexer': ('pygments.lexers.agile', 'Factor', ('factor',), ('*.factor',), ('text/x-factor',)), - 'FancyLexer': ('pygments.lexers.agile', 'Fancy', ('fancy', 'fy'), ('*.fy', '*.fancypack'), ('text/x-fancysrc',)), + 'FactorLexer': ('pygments.lexers.misc.factor', 'Factor', ('factor',), ('*.factor',), ('text/x-factor',)), + 'FancyLexer': ('pygments.lexers.ruby', 'Fancy', ('fancy', 'fy'), ('*.fy', '*.fancypack'), ('text/x-fancysrc',)), 'FantomLexer': ('pygments.lexers.misc.fantom', 'Fantom', ('fan',), ('*.fan',), ('application/x-fantom',)), 'FelixLexer': ('pygments.lexers.misc.felix', 'Felix', ('felix', 'flx'), ('*.flx', '*.flxh'), ('text/x-felix',)), 'FortranLexer': ('pygments.lexers.fortran', 'Fortran', ('fortran',), ('*.f', '*.f90', '*.F', '*.F90'), ('text/x-fortran',)), @@ -153,7 +153,7 @@ LEXERS = { 'HtmlSmartyLexer': ('pygments.lexers.templates', 'HTML+Smarty', ('html+smarty',), (), ('text/html+smarty',)), 'HttpLexer': ('pygments.lexers.text', 'HTTP', ('http',), (), ()), 'HxmlLexer': ('pygments.lexers.text', 'Hxml', ('haxeml', 'hxml'), ('*.hxml',), ()), - 'HyLexer': ('pygments.lexers.agile', 'Hy', ('hylang',), ('*.hy',), ('text/x-hy', 'application/x-hy')), + 'HyLexer': ('pygments.lexers.python', 'Hy', ('hylang',), ('*.hy',), ('text/x-hy', 'application/x-hy')), 'HybrisLexer': ('pygments.lexers.other', 'Hybris', ('hybris', 'hy'), ('*.hy', '*.hyb'), ('text/x-hybris', 'application/x-hybris')), 'IDLLexer': ('pygments.lexers.math', 'IDL', ('idl',), ('*.pro',), ('text/idl',)), 'IdrisLexer': ('pygments.lexers.functional', 'Idris', ('idris', 'idr'), ('*.idr',), ('text/x-idris',)), @@ -162,7 +162,7 @@ LEXERS = { 'Inform6TemplateLexer': ('pygments.lexers.inform', 'Inform 6 template', ('i6t',), ('*.i6t',), ()), 'Inform7Lexer': ('pygments.lexers.inform', 'Inform 7', ('inform7', 'i7'), ('*.ni', '*.i7x'), ()), 'IniLexer': ('pygments.lexers.text', 'INI', ('ini', 'cfg', 'dosini'), ('*.ini', '*.cfg'), ('text/x-ini',)), - 'IoLexer': ('pygments.lexers.agile', 'Io', ('io',), ('*.io',), 
('text/x-iosrc',)), + 'IoLexer': ('pygments.lexers.misc.iolang', 'Io', ('io',), ('*.io',), ('text/x-iosrc',)), 'IokeLexer': ('pygments.lexers.jvm', 'Ioke', ('ioke', 'ik'), ('*.ik',), ('text/x-iokesrc',)), 'IrcLogsLexer': ('pygments.lexers.text', 'IRC logs', ('irc',), ('*.weechatlog',), ('text/x-irclog',)), 'JadeLexer': ('pygments.lexers.web', 'Jade', ('jade',), ('*.jade',), ('text/x-jade',)), @@ -200,7 +200,7 @@ LEXERS = { 'LlvmLexer': ('pygments.lexers.asm', 'LLVM', ('llvm',), ('*.ll',), ('text/x-llvm',)), 'LogosLexer': ('pygments.lexers.c_like.objective', 'Logos', ('logos',), ('*.x', '*.xi', '*.xm', '*.xmi'), ('text/x-logos',)), 'LogtalkLexer': ('pygments.lexers.other', 'Logtalk', ('logtalk',), ('*.lgt',), ('text/x-logtalk',)), - 'LuaLexer': ('pygments.lexers.agile', 'Lua', ('lua',), ('*.lua', '*.wlua'), ('text/x-lua', 'application/x-lua')), + 'LuaLexer': ('pygments.lexers.scripting', 'Lua', ('lua',), ('*.lua', '*.wlua'), ('text/x-lua', 'application/x-lua')), 'MOOCodeLexer': ('pygments.lexers.other', 'MOOCode', ('moocode', 'moo'), ('*.moo',), ('text/x-moocode',)), 'MakefileLexer': ('pygments.lexers.text', 'Makefile', ('make', 'makefile', 'mf', 'bsdmake'), ('*.mak', '*.mk', 'Makefile', 'makefile', 'Makefile.*', 'GNUmakefile'), ('text/x-makefile',)), 'MakoCssLexer': ('pygments.lexers.templates', 'CSS+Mako', ('css+mako',), (), ('text/css+mako',)), @@ -214,12 +214,12 @@ LEXERS = { 'MathematicaLexer': ('pygments.lexers.math', 'Mathematica', ('mathematica', 'mma', 'nb'), ('*.nb', '*.cdf', '*.nbp', '*.ma'), ('application/mathematica', 'application/vnd.wolfram.mathematica', 'application/vnd.wolfram.mathematica.package', 'application/vnd.wolfram.cdf')), 'MatlabLexer': ('pygments.lexers.math', 'Matlab', ('matlab',), ('*.m',), ('text/matlab',)), 'MatlabSessionLexer': ('pygments.lexers.math', 'Matlab session', ('matlabsession',), (), ()), - 'MiniDLexer': ('pygments.lexers.agile', 'MiniD', ('minid',), ('*.md',), ('text/x-minidsrc',)), + 'MiniDLexer': ('pygments.lexers.c_like.d', 'MiniD', ('minid',), ('*.md',), ('text/x-minidsrc',)), 'ModelicaLexer': ('pygments.lexers.other', 'Modelica', ('modelica',), ('*.mo',), ('text/x-modelica',)), 'Modula2Lexer': ('pygments.lexers.pascal', 'Modula-2', ('modula2', 'm2'), ('*.def', '*.mod'), ('text/x-modula2',)), 'MoinWikiLexer': ('pygments.lexers.text', 'MoinMoin/Trac Wiki markup', ('trac-wiki', 'moin'), (), ('text/x-trac-wiki',)), 'MonkeyLexer': ('pygments.lexers.misc.blitz', 'Monkey', ('monkey',), ('*.monkey',), ('text/x-monkey',)), - 'MoonScriptLexer': ('pygments.lexers.agile', 'MoonScript', ('moon', 'moonscript'), ('*.moon',), ('text/x-moonscript', 'application/x-moonscript')), + 'MoonScriptLexer': ('pygments.lexers.scripting', 'MoonScript', ('moon', 'moonscript'), ('*.moon',), ('text/x-moonscript', 'application/x-moonscript')), 'MqlLexer': ('pygments.lexers.c_like.other', 'MQL', ('mql', 'mq4', 'mq5', 'mql4', 'mql5'), ('*.mq4', '*.mq5', '*.mqh'), ('text/x-mql',)), 'MscgenLexer': ('pygments.lexers.other', 'Mscgen', ('mscgen', 'msc'), ('*.msc',), ()), 'MuPADLexer': ('pygments.lexers.math', 'MuPAD', ('mupad',), ('*.mu',), ()), @@ -253,8 +253,8 @@ LEXERS = { 'OpenEdgeLexer': ('pygments.lexers.other', 'OpenEdge ABL', ('openedge', 'abl', 'progress'), ('*.p', '*.cls'), ('text/x-openedge', 'application/x-openedge')), 'PanLexer': ('pygments.lexers.other', 'Pan', ('pan',), ('*.pan',), ()), 'PawnLexer': ('pygments.lexers.other', 'Pawn', ('pawn',), ('*.p', '*.pwn', '*.inc'), ('text/x-pawn',)), - 'Perl6Lexer': ('pygments.lexers.agile', 'Perl6', ('perl6', 'pl6'), ('*.pl', 
'*.pm', '*.nqp', '*.p6', '*.6pl', '*.p6l', '*.pl6', '*.6pm', '*.p6m', '*.pm6', '*.t'), ('text/x-perl6', 'application/x-perl6')), - 'PerlLexer': ('pygments.lexers.agile', 'Perl', ('perl', 'pl'), ('*.pl', '*.pm', '*.t'), ('text/x-perl', 'application/x-perl')), + 'Perl6Lexer': ('pygments.lexers.perl', 'Perl6', ('perl6', 'pl6'), ('*.pl', '*.pm', '*.nqp', '*.p6', '*.6pl', '*.p6l', '*.pl6', '*.6pm', '*.p6m', '*.pm6', '*.t'), ('text/x-perl6', 'application/x-perl6')), + 'PerlLexer': ('pygments.lexers.perl', 'Perl', ('perl', 'pl'), ('*.pl', '*.pm', '*.t'), ('text/x-perl', 'application/x-perl')), 'PhpLexer': ('pygments.lexers.web', 'PHP', ('php', 'php3', 'php4', 'php5'), ('*.php', '*.php[345]', '*.inc'), ('text/x-php',)), 'PigLexer': ('pygments.lexers.jvm', 'Pig', ('pig',), ('*.pig',), ('text/x-pig',)), 'PikeLexer': ('pygments.lexers.c_like.other', 'Pike', ('pike',), ('*.pike', '*.pmod'), ('text/x-pike',)), @@ -269,11 +269,11 @@ LEXERS = { 'ProtoBufLexer': ('pygments.lexers.other', 'Protocol Buffer', ('protobuf', 'proto'), ('*.proto',), ()), 'PuppetLexer': ('pygments.lexers.other', 'Puppet', ('puppet',), ('*.pp',), ()), 'PyPyLogLexer': ('pygments.lexers.text', 'PyPy Log', ('pypylog', 'pypy'), ('*.pypylog',), ('application/x-pypylog',)), - 'Python3Lexer': ('pygments.lexers.agile', 'Python 3', ('python3', 'py3'), (), ('text/x-python3', 'application/x-python3')), - 'Python3TracebackLexer': ('pygments.lexers.agile', 'Python 3.0 Traceback', ('py3tb',), ('*.py3tb',), ('text/x-python3-traceback',)), - 'PythonConsoleLexer': ('pygments.lexers.agile', 'Python console session', ('pycon',), (), ('text/x-python-doctest',)), - 'PythonLexer': ('pygments.lexers.agile', 'Python', ('python', 'py', 'sage'), ('*.py', '*.pyw', '*.sc', 'SConstruct', 'SConscript', '*.tac', '*.sage'), ('text/x-python', 'application/x-python')), - 'PythonTracebackLexer': ('pygments.lexers.agile', 'Python Traceback', ('pytb',), ('*.pytb',), ('text/x-python-traceback',)), + 'Python3Lexer': ('pygments.lexers.python', 'Python 3', ('python3', 'py3'), (), ('text/x-python3', 'application/x-python3')), + 'Python3TracebackLexer': ('pygments.lexers.python', 'Python 3.0 Traceback', ('py3tb',), ('*.py3tb',), ('text/x-python3-traceback',)), + 'PythonConsoleLexer': ('pygments.lexers.python', 'Python console session', ('pycon',), (), ('text/x-python-doctest',)), + 'PythonLexer': ('pygments.lexers.python', 'Python', ('python', 'py', 'sage'), ('*.py', '*.pyw', '*.sc', 'SConstruct', 'SConscript', '*.tac', '*.sage'), ('text/x-python', 'application/x-python')), + 'PythonTracebackLexer': ('pygments.lexers.python', 'Python Traceback', ('pytb',), ('*.pytb',), ('text/x-python-traceback',)), 'QBasicLexer': ('pygments.lexers.qbasic', 'QBasic', ('qbasic', 'basic'), ('*.BAS', '*.bas'), ('text/basic',)), 'QmlLexer': ('pygments.lexers.web', 'QML', ('qml',), ('*.qml',), ('application/x-qml',)), 'RConsoleLexer': ('pygments.lexers.math', 'RConsole', ('rconsole', 'rout'), ('*.Rout',), ()), @@ -299,8 +299,8 @@ LEXERS = { 'RqlLexer': ('pygments.lexers.sql', 'RQL', ('rql',), ('*.rql',), ('text/x-rql',)), 'RslLexer': ('pygments.lexers.other', 'RSL', ('rsl',), ('*.rsl',), ('text/rsl',)), 'RstLexer': ('pygments.lexers.text', 'reStructuredText', ('rst', 'rest', 'restructuredtext'), ('*.rst', '*.rest'), ('text/x-rst', 'text/prs.fallenstein.rst')), - 'RubyConsoleLexer': ('pygments.lexers.agile', 'Ruby irb session', ('rbcon', 'irb'), (), ('text/x-ruby-shellsession',)), - 'RubyLexer': ('pygments.lexers.agile', 'Ruby', ('rb', 'ruby', 'duby'), ('*.rb', '*.rbw', 'Rakefile', '*.rake', 
'*.gemspec', '*.rbx', '*.duby'), ('text/x-ruby', 'application/x-ruby')), + 'RubyConsoleLexer': ('pygments.lexers.ruby', 'Ruby irb session', ('rbcon', 'irb'), (), ('text/x-ruby-shellsession',)), + 'RubyLexer': ('pygments.lexers.ruby', 'Ruby', ('rb', 'ruby', 'duby'), ('*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec', '*.rbx', '*.duby'), ('text/x-ruby', 'application/x-ruby')), 'RustLexer': ('pygments.lexers.c_like.rust', 'Rust', ('rust',), ('*.rs',), ('text/x-rustsrc',)), 'SLexer': ('pygments.lexers.math', 'S', ('splus', 's', 'r'), ('*.S', '*.R', '.Rhistory', '.Rprofile', '.Renviron'), ('text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r', 'text/x-R', 'text/x-r-history', 'text/x-r-profile')), 'SMLLexer': ('pygments.lexers.functional', 'Standard ML', ('sml',), ('*.sml', '*.sig', '*.fun'), ('text/x-standardml', 'application/x-standardml')), @@ -327,7 +327,7 @@ LEXERS = { 'SwiftLexer': ('pygments.lexers.c_like.objective', 'Swift', ('swift',), ('*.swift',), ('text/x-swift',)), 'SwigLexer': ('pygments.lexers.c_like.other', 'SWIG', ('swig',), ('*.swg', '*.i'), ('text/swig',)), 'SystemVerilogLexer': ('pygments.lexers.hdl', 'systemverilog', ('systemverilog', 'sv'), ('*.sv', '*.svh'), ('text/x-systemverilog',)), - 'TclLexer': ('pygments.lexers.agile', 'Tcl', ('tcl',), ('*.tcl',), ('text/x-tcl', 'text/x-script.tcl', 'application/x-tcl')), + 'TclLexer': ('pygments.lexers.misc.tcl', 'Tcl', ('tcl',), ('*.tcl',), ('text/x-tcl', 'text/x-script.tcl', 'application/x-tcl')), 'TcshLexer': ('pygments.lexers.shell', 'Tcsh', ('tcsh', 'csh'), ('*.tcsh', '*.csh'), ('application/x-csh',)), 'TeaTemplateLexer': ('pygments.lexers.templates', 'Tea', ('tea',), ('*.tea',), ('text/x-tea',)), 'TexLexer': ('pygments.lexers.text', 'TeX', ('tex', 'latex'), ('*.tex', '*.aux', '*.toc'), ('text/x-tex', 'text/x-latex')), diff --git a/pygments/lexers/agile.py b/pygments/lexers/agile.py index 1ae369b9..e17d82ae 100644 --- a/pygments/lexers/agile.py +++ b/pygments/lexers/agile.py @@ -3,2550 +3,21 @@ pygments.lexers.agile ~~~~~~~~~~~~~~~~~~~~~ - Lexers for agile languages. + Just export lexer classes previously contained in this module. :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ -import re - -from pygments.lexer import Lexer, RegexLexer, ExtendedRegexLexer, \ - LexerContext, include, combined, do_insertions, bygroups, using, this, default -from pygments.token import Error, Text, Other, \ - Comment, Operator, Keyword, Name, String, Number, Generic, Punctuation -from pygments.util import get_bool_opt, get_list_opt, shebang_matches, iteritems -from pygments import unistring as uni - - -__all__ = ['PythonLexer', 'PythonConsoleLexer', 'PythonTracebackLexer', - 'Python3Lexer', 'Python3TracebackLexer', 'RubyLexer', - 'RubyConsoleLexer', 'PerlLexer', 'LuaLexer', 'MoonScriptLexer', - 'CrocLexer', 'MiniDLexer', 'IoLexer', 'TclLexer', 'FactorLexer', - 'FancyLexer', 'DgLexer', 'Perl6Lexer', 'HyLexer', - 'ChaiscriptLexer'] - -# b/w compatibility from pygments.lexers.functional import SchemeLexer from pygments.lexers.jvm import IokeLexer, ClojureLexer - -line_re = re.compile('.*?\n') - - -class PythonLexer(RegexLexer): - """ - For `Python `_ source code. 
- """ - - name = 'Python' - aliases = ['python', 'py', 'sage'] - filenames = ['*.py', '*.pyw', '*.sc', 'SConstruct', 'SConscript', '*.tac', '*.sage'] - mimetypes = ['text/x-python', 'application/x-python'] - - tokens = { - 'root': [ - (r'\n', Text), - (r'^(\s*)([rRuU]{,2}"""(?:.|\n)*?""")', bygroups(Text, String.Doc)), - (r"^(\s*)([rRuU]{,2}'''(?:.|\n)*?''')", bygroups(Text, String.Doc)), - (r'[^\S\n]+', Text), - (r'#.*$', Comment), - (r'[]{}:(),;[]', Punctuation), - (r'\\\n', Text), - (r'\\', Text), - (r'(in|is|and|or|not)\b', Operator.Word), - (r'!=|==|<<|>>|[-~+/*%=<>&^|.]', Operator), - include('keywords'), - (r'(def)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'funcname'), - (r'(class)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'classname'), - (r'(from)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text), - 'fromimport'), - (r'(import)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text), - 'import'), - include('builtins'), - include('backtick'), - ('(?:[rR]|[uU][rR]|[rR][uU])"""', String, 'tdqs'), - ("(?:[rR]|[uU][rR]|[rR][uU])'''", String, 'tsqs'), - ('(?:[rR]|[uU][rR]|[rR][uU])"', String, 'dqs'), - ("(?:[rR]|[uU][rR]|[rR][uU])'", String, 'sqs'), - ('[uU]?"""', String, combined('stringescape', 'tdqs')), - ("[uU]?'''", String, combined('stringescape', 'tsqs')), - ('[uU]?"', String, combined('stringescape', 'dqs')), - ("[uU]?'", String, combined('stringescape', 'sqs')), - include('name'), - include('numbers'), - ], - 'keywords': [ - (r'(assert|break|continue|del|elif|else|except|exec|' - r'finally|for|global|if|lambda|pass|print|raise|' - r'return|try|while|yield(\s+from)?|as|with)\b', Keyword), - ], - 'builtins': [ - (r'(?`_ source code (version 3.0). - - .. versionadded:: 0.10 - """ - - name = 'Python 3' - aliases = ['python3', 'py3'] - filenames = [] # Nothing until Python 3 gets widespread - mimetypes = ['text/x-python3', 'application/x-python3'] - - flags = re.MULTILINE | re.UNICODE - - uni_name = "[%s][%s]*" % (uni.xid_start, uni.xid_continue) - - tokens = PythonLexer.tokens.copy() - tokens['keywords'] = [ - (r'(assert|break|continue|del|elif|else|except|' - r'finally|for|global|if|lambda|pass|raise|nonlocal|' - r'return|try|while|yield(\s+from)?|as|with|True|False|None)\b', - Keyword), - ] - tokens['builtins'] = [ - (r'(?>> a = 'foo' - >>> print a - foo - >>> 1 / 0 - Traceback (most recent call last): - File "", line 1, in - ZeroDivisionError: integer division or modulo by zero - - Additional options: - - `python3` - Use Python 3 lexer for code. Default is ``False``. - - .. versionadded:: 1.0 - """ - name = 'Python console session' - aliases = ['pycon'] - mimetypes = ['text/x-python-doctest'] - - def __init__(self, **options): - self.python3 = get_bool_opt(options, 'python3', False) - Lexer.__init__(self, **options) - - def get_tokens_unprocessed(self, text): - if self.python3: - pylexer = Python3Lexer(**self.options) - tblexer = Python3TracebackLexer(**self.options) - else: - pylexer = PythonLexer(**self.options) - tblexer = PythonTracebackLexer(**self.options) - - curcode = '' - insertions = [] - curtb = '' - tbindex = 0 - tb = 0 - for match in line_re.finditer(text): - line = match.group() - if line.startswith(u'>>> ') or line.startswith(u'... '): - tb = 0 - insertions.append((len(curcode), - [(0, Generic.Prompt, line[:4])])) - curcode += line[4:] - elif line.rstrip() == u'...' 
and not tb: - # only a new >>> prompt can end an exception block - # otherwise an ellipsis in place of the traceback frames - # will be mishandled - insertions.append((len(curcode), - [(0, Generic.Prompt, u'...')])) - curcode += line[3:] - else: - if curcode: - for item in do_insertions(insertions, - pylexer.get_tokens_unprocessed(curcode)): - yield item - curcode = '' - insertions = [] - if (line.startswith(u'Traceback (most recent call last):') or - re.match(u' File "[^"]+", line \\d+\\n$', line)): - tb = 1 - curtb = line - tbindex = match.start() - elif line == 'KeyboardInterrupt\n': - yield match.start(), Name.Class, line - elif tb: - curtb += line - if not (line.startswith(' ') or line.strip() == u'...'): - tb = 0 - for i, t, v in tblexer.get_tokens_unprocessed(curtb): - yield tbindex+i, t, v - else: - yield match.start(), Generic.Output, line - if curcode: - for item in do_insertions(insertions, - pylexer.get_tokens_unprocessed(curcode)): - yield item - - -class PythonTracebackLexer(RegexLexer): - """ - For Python tracebacks. - - .. versionadded:: 0.7 - """ - - name = 'Python Traceback' - aliases = ['pytb'] - filenames = ['*.pytb'] - mimetypes = ['text/x-python-traceback'] - - tokens = { - 'root': [ - (r'^Traceback \(most recent call last\):\n', - Generic.Traceback, 'intb'), - # SyntaxError starts with this. - (r'^(?= File "[^"]+", line \d+)', Generic.Traceback, 'intb'), - (r'^.*\n', Other), - ], - 'intb': [ - (r'^( File )("[^"]+")(, line )(\d+)(, in )(.+)(\n)', - bygroups(Text, Name.Builtin, Text, Number, Text, Name, Text)), - (r'^( File )("[^"]+")(, line )(\d+)(\n)', - bygroups(Text, Name.Builtin, Text, Number, Text)), - (r'^( )(.+)(\n)', - bygroups(Text, using(PythonLexer), Text)), - (r'^([ \t]*)(\.\.\.)(\n)', - bygroups(Text, Comment, Text)), # for doctests... - (r'^([^:]+)(: )(.+)(\n)', - bygroups(Generic.Error, Text, Name, Text), '#pop'), - (r'^([a-zA-Z_]\w*)(:?\n)', - bygroups(Generic.Error, Text), '#pop') - ], - } - - -class Python3TracebackLexer(RegexLexer): - """ - For Python 3.0 tracebacks, with support for chained exceptions. - - .. versionadded:: 1.0 - """ - - name = 'Python 3.0 Traceback' - aliases = ['py3tb'] - filenames = ['*.py3tb'] - mimetypes = ['text/x-python3-traceback'] - - tokens = { - 'root': [ - (r'\n', Text), - (r'^Traceback \(most recent call last\):\n', Generic.Traceback, 'intb'), - (r'^During handling of the above exception, another ' - r'exception occurred:\n\n', Generic.Traceback), - (r'^The above exception was the direct cause of the ' - r'following exception:\n\n', Generic.Traceback), - (r'^(?= File "[^"]+", line \d+)', Generic.Traceback, 'intb'), - ], - 'intb': [ - (r'^( File )("[^"]+")(, line )(\d+)(, in )(.+)(\n)', - bygroups(Text, Name.Builtin, Text, Number, Text, Name, Text)), - (r'^( File )("[^"]+")(, line )(\d+)(\n)', - bygroups(Text, Name.Builtin, Text, Number, Text)), - (r'^( )(.+)(\n)', - bygroups(Text, using(Python3Lexer), Text)), - (r'^([ \t]*)(\.\.\.)(\n)', - bygroups(Text, Comment, Text)), # for doctests... - (r'^([^:]+)(: )(.+)(\n)', - bygroups(Generic.Error, Text, Name, Text), '#pop'), - (r'^([a-zA-Z_]\w*)(:?\n)', - bygroups(Generic.Error, Text), '#pop') - ], - } - - -class RubyLexer(ExtendedRegexLexer): - """ - For `Ruby `_ source code. 
- """ - - name = 'Ruby' - aliases = ['rb', 'ruby', 'duby'] - filenames = ['*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec', - '*.rbx', '*.duby'] - mimetypes = ['text/x-ruby', 'application/x-ruby'] - - flags = re.DOTALL | re.MULTILINE - - def heredoc_callback(self, match, ctx): - # okay, this is the hardest part of parsing Ruby... - # match: 1 = <<-?, 2 = quote? 3 = name 4 = quote? 5 = rest of line - - start = match.start(1) - yield start, Operator, match.group(1) # <<-? - yield match.start(2), String.Heredoc, match.group(2) # quote ", ', ` - yield match.start(3), Name.Constant, match.group(3) # heredoc name - yield match.start(4), String.Heredoc, match.group(4) # quote again - - heredocstack = ctx.__dict__.setdefault('heredocstack', []) - outermost = not bool(heredocstack) - heredocstack.append((match.group(1) == '<<-', match.group(3))) - - ctx.pos = match.start(5) - ctx.end = match.end(5) - # this may find other heredocs - for i, t, v in self.get_tokens_unprocessed(context=ctx): - yield i, t, v - ctx.pos = match.end() - - if outermost: - # this is the outer heredoc again, now we can process them all - for tolerant, hdname in heredocstack: - lines = [] - for match in line_re.finditer(ctx.text, ctx.pos): - if tolerant: - check = match.group().strip() - else: - check = match.group().rstrip() - if check == hdname: - for amatch in lines: - yield amatch.start(), String.Heredoc, amatch.group() - yield match.start(), Name.Constant, match.group() - ctx.pos = match.end() - break - else: - lines.append(match) - else: - # end of heredoc not found -- error! - for amatch in lines: - yield amatch.start(), Error, amatch.group() - ctx.end = len(ctx.text) - del heredocstack[:] - - - def gen_rubystrings_rules(): - def intp_regex_callback(self, match, ctx): - yield match.start(1), String.Regex, match.group(1) # begin - nctx = LexerContext(match.group(3), 0, ['interpolated-regex']) - for i, t, v in self.get_tokens_unprocessed(context=nctx): - yield match.start(3)+i, t, v - yield match.start(4), String.Regex, match.group(4) # end[mixounse]* - ctx.pos = match.end() - - def intp_string_callback(self, match, ctx): - yield match.start(1), String.Other, match.group(1) - nctx = LexerContext(match.group(3), 0, ['interpolated-string']) - for i, t, v in self.get_tokens_unprocessed(context=nctx): - yield match.start(3)+i, t, v - yield match.start(4), String.Other, match.group(4) # end - ctx.pos = match.end() - - states = {} - states['strings'] = [ - # easy ones - (r'\:@{0,2}([a-zA-Z_]\w*[\!\?]?|\*\*?|[-+]@?|' - r'[/%&|^`~]|\[\]=?|<<|>>|<=?>|>=?|===?)', String.Symbol), - (r":'(\\\\|\\'|[^'])*'", String.Symbol), - (r"'(\\\\|\\'|[^'])*'", String.Single), - (r':"', String.Symbol, 'simple-sym'), - (r'([a-zA-Z_]\w*)(:)(?!:)', - bygroups(String.Symbol, Punctuation)), # Since Ruby 1.9 - (r'"', String.Double, 'simple-string'), - (r'(?', 'ab'): - states[name+'-intp-string'] = [ - (r'\\[\\' + lbrace + rbrace + ']', String.Other), - (r'(?! - states['strings'] += [ - # %r regex - (r'(%r([^a-zA-Z0-9]))((?:\\\2|(?!\2).)*)(\2[mixounse]*)', - intp_regex_callback), - # regular fancy strings with qsw - (r'%[qsw]([^a-zA-Z0-9])((?:\\\1|(?!\1).)*)\1', String.Other), - (r'(%[QWx]([^a-zA-Z0-9]))((?:\\\2|(?!\2).)*)(\2)', - intp_string_callback), - # special forms of fancy strings after operators or - # in method calls with braces - (r'(?<=[-+/*%=<>&!^|~,(])(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)', - bygroups(Text, String.Other, None)), - # and because of fixed width lookbehinds the whole thing a - # second time for line startings... 
- (r'^(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)', - bygroups(Text, String.Other, None)), - # all regular fancy strings without qsw - (r'(%([^a-zA-Z0-9\s]))((?:\\\2|(?!\2).)*)(\2)', - intp_string_callback), - ] - - return states - - tokens = { - 'root': [ - (r'#.*?$', Comment.Single), - (r'=begin\s.*?\n=end.*?$', Comment.Multiline), - # keywords - (r'(BEGIN|END|alias|begin|break|case|defined\?|' - r'do|else|elsif|end|ensure|for|if|in|next|redo|' - r'rescue|raise|retry|return|super|then|undef|unless|until|when|' - r'while|yield)\b', Keyword), - # start of function, class and module names - (r'(module)(\s+)([a-zA-Z_]\w*' - r'(?:::[a-zA-Z_]\w*)*)', - bygroups(Keyword, Text, Name.Namespace)), - (r'(def)(\s+)', bygroups(Keyword, Text), 'funcname'), - (r'def(?=[*%&^`~+-/\[<>=])', Keyword, 'funcname'), - (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'), - # special methods - (r'(initialize|new|loop|include|extend|raise|attr_reader|' - r'attr_writer|attr_accessor|attr|catch|throw|private|' - r'module_function|public|protected|true|false|nil)\b', - Keyword.Pseudo), - (r'(not|and|or)\b', Operator.Word), - (r'(autoload|block_given|const_defined|eql|equal|frozen|include|' - r'instance_of|is_a|iterator|kind_of|method_defined|nil|' - r'private_method_defined|protected_method_defined|' - r'public_method_defined|respond_to|tainted)\?', Name.Builtin), - (r'(chomp|chop|exit|gsub|sub)!', Name.Builtin), - (r'(?~!:])|' - r'(?<=(?:\s|;)when\s)|' - r'(?<=(?:\s|;)or\s)|' - r'(?<=(?:\s|;)and\s)|' - r'(?<=(?:\s|;|\.)index\s)|' - r'(?<=(?:\s|;|\.)scan\s)|' - r'(?<=(?:\s|;|\.)sub\s)|' - r'(?<=(?:\s|;|\.)sub!\s)|' - r'(?<=(?:\s|;|\.)gsub\s)|' - r'(?<=(?:\s|;|\.)gsub!\s)|' - r'(?<=(?:\s|;|\.)match\s)|' - r'(?<=(?:\s|;)if\s)|' - r'(?<=(?:\s|;)elsif\s)|' - r'(?<=^when\s)|' - r'(?<=^index\s)|' - r'(?<=^scan\s)|' - r'(?<=^sub\s)|' - r'(?<=^gsub\s)|' - r'(?<=^sub!\s)|' - r'(?<=^gsub!\s)|' - r'(?<=^match\s)|' - r'(?<=^if\s)|' - r'(?<=^elsif\s)' - r')(\s*)(/)', bygroups(Text, String.Regex), 'multiline-regex'), - # multiline regex (in method calls or subscripts) - (r'(?<=\(|,|\[)/', String.Regex, 'multiline-regex'), - # multiline regex (this time the funny no whitespace rule) - (r'(\s+)(/)(?![\s=])', bygroups(Text, String.Regex), - 'multiline-regex'), - # lex numbers and ignore following regular expressions which - # are division operators in fact (grrrr. i hate that. any - # better ideas?) - # since pygments 0.7 we also eat a "?" operator after numbers - # so that the char operator does not work. Chars are not allowed - # there so that you can use the ternary operator. - # stupid example: - # x>=0?n[x]:"" - (r'(0_?[0-7]+(?:_[0-7]+)*)(\s*)([/?])?', - bygroups(Number.Oct, Text, Operator)), - (r'(0x[0-9A-Fa-f]+(?:_[0-9A-Fa-f]+)*)(\s*)([/?])?', - bygroups(Number.Hex, Text, Operator)), - (r'(0b[01]+(?:_[01]+)*)(\s*)([/?])?', - bygroups(Number.Bin, Text, Operator)), - (r'([\d]+(?:_\d+)*)(\s*)([/?])?', - bygroups(Number.Integer, Text, Operator)), - # Names - (r'@@[a-zA-Z_]\w*', Name.Variable.Class), - (r'@[a-zA-Z_]\w*', Name.Variable.Instance), - (r'\$\w+', Name.Variable.Global), - (r'\$[!@&`\'+~=/\\,;.<>_*$?:"]', Name.Variable.Global), - (r'\$-[0adFiIlpvw]', Name.Variable.Global), - (r'::', Operator), - include('strings'), - # chars - (r'\?(\\[MC]-)*' # modifiers - r'(\\([\\abefnrstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})|\S)' - r'(?!\w)', - String.Char), - (r'[A-Z]\w+', Name.Constant), - # this is needed because ruby attributes can look - # like keywords (class) or like this: ` ?!? 
- (r'(\.|::)([a-zA-Z_]\w*[\!\?]?|[*%&^`~+-/\[<>=])', - bygroups(Operator, Name)), - (r'[a-zA-Z_]\w*[\!\?]?', Name), - (r'(\[|\]|\*\*|<>?|>=|<=|<=>|=~|={3}|' - r'!~|&&?|\|\||\.{1,3})', Operator), - (r'[-+/*%=<>&!^|~]=?', Operator), - (r'[(){};,/?:\\]', Punctuation), - (r'\s+', Text) - ], - 'funcname': [ - (r'\(', Punctuation, 'defexpr'), - (r'(?:([a-zA-Z_]\w*)(\.))?' - r'([a-zA-Z_]\w*[\!\?]?|\*\*?|[-+]@?|' - r'[/%&|^`~]|\[\]=?|<<|>>|<=?>|>=?|===?)', - bygroups(Name.Class, Operator, Name.Function), '#pop'), - default('#pop') - ], - 'classname': [ - (r'\(', Punctuation, 'defexpr'), - (r'<<', Operator, '#pop'), - (r'[A-Z_]\w*', Name.Class, '#pop'), - default('#pop') - ], - 'defexpr': [ - (r'(\))(\.|::)?', bygroups(Punctuation, Operator), '#pop'), - (r'\(', Operator, '#push'), - include('root') - ], - 'in-intp': [ - ('}', String.Interpol, '#pop'), - include('root'), - ], - 'string-intp': [ - (r'#{', String.Interpol, 'in-intp'), - (r'#@@?[a-zA-Z_]\w*', String.Interpol), - (r'#\$[a-zA-Z_]\w*', String.Interpol) - ], - 'string-intp-escaped': [ - include('string-intp'), - (r'\\([\\abefnrstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})', - String.Escape) - ], - 'interpolated-regex': [ - include('string-intp'), - (r'[\\#]', String.Regex), - (r'[^\\#]+', String.Regex), - ], - 'interpolated-string': [ - include('string-intp'), - (r'[\\#]', String.Other), - (r'[^\\#]+', String.Other), - ], - 'multiline-regex': [ - include('string-intp'), - (r'\\\\', String.Regex), - (r'\\/', String.Regex), - (r'[\\#]', String.Regex), - (r'[^\\/#]+', String.Regex), - (r'/[mixounse]*', String.Regex, '#pop'), - ], - 'end-part': [ - (r'.+', Comment.Preproc, '#pop') - ] - } - tokens.update(gen_rubystrings_rules()) - - def analyse_text(text): - return shebang_matches(text, r'ruby(1\.\d)?') - - -class RubyConsoleLexer(Lexer): - """ - For Ruby interactive console (**irb**) output like: - - .. sourcecode:: rbcon - - irb(main):001:0> a = 1 - => 1 - irb(main):002:0> puts a - 1 - => nil - """ - name = 'Ruby irb session' - aliases = ['rbcon', 'irb'] - mimetypes = ['text/x-ruby-shellsession'] - - _prompt_re = re.compile('irb\([a-zA-Z_]\w*\):\d{3}:\d+[>*"\'] ' - '|>> |\?> ') - - def get_tokens_unprocessed(self, text): - rblexer = RubyLexer(**self.options) - - curcode = '' - insertions = [] - for match in line_re.finditer(text): - line = match.group() - m = self._prompt_re.match(line) - if m is not None: - end = m.end() - insertions.append((len(curcode), - [(0, Generic.Prompt, line[:end])])) - curcode += line[end:] - else: - if curcode: - for item in do_insertions(insertions, - rblexer.get_tokens_unprocessed(curcode)): - yield item - curcode = '' - insertions = [] - yield match.start(), Generic.Output, line - if curcode: - for item in do_insertions(insertions, - rblexer.get_tokens_unprocessed(curcode)): - yield item - - -class PerlLexer(RegexLexer): - """ - For `Perl `_ source code. - """ - - name = 'Perl' - aliases = ['perl', 'pl'] - filenames = ['*.pl', '*.pm', '*.t'] - mimetypes = ['text/x-perl', 'application/x-perl'] - - flags = re.DOTALL | re.MULTILINE - # TODO: give this to a perl guy who knows how to parse perl... 
- tokens = { - 'balanced-regex': [ - (r'/(\\\\|\\[^\\]|[^\\/])*/[egimosx]*', String.Regex, '#pop'), - (r'!(\\\\|\\[^\\]|[^\\!])*![egimosx]*', String.Regex, '#pop'), - (r'\\(\\\\|[^\\])*\\[egimosx]*', String.Regex, '#pop'), - (r'{(\\\\|\\[^\\]|[^\\}])*}[egimosx]*', String.Regex, '#pop'), - (r'<(\\\\|\\[^\\]|[^\\>])*>[egimosx]*', String.Regex, '#pop'), - (r'\[(\\\\|\\[^\\]|[^\\\]])*\][egimosx]*', String.Regex, '#pop'), - (r'\((\\\\|\\[^\\]|[^\\\)])*\)[egimosx]*', String.Regex, '#pop'), - (r'@(\\\\|\\[^\\]|[^\\\@])*@[egimosx]*', String.Regex, '#pop'), - (r'%(\\\\|\\[^\\]|[^\\\%])*%[egimosx]*', String.Regex, '#pop'), - (r'\$(\\\\|\\[^\\]|[^\\\$])*\$[egimosx]*', String.Regex, '#pop'), - ], - 'root': [ - (r'\#.*?$', Comment.Single), - (r'^=[a-zA-Z0-9]+\s+.*?\n=cut', Comment.Multiline), - (r'(case|continue|do|else|elsif|for|foreach|if|last|my|' - r'next|our|redo|reset|then|unless|until|while|use|' - r'print|new|BEGIN|CHECK|INIT|END|return)\b', Keyword), - (r'(format)(\s+)(\w+)(\s*)(=)(\s*\n)', - bygroups(Keyword, Text, Name, Text, Punctuation, Text), 'format'), - (r'(eq|lt|gt|le|ge|ne|not|and|or|cmp)\b', Operator.Word), - # common delimiters - (r's/(\\\\|\\[^\\]|[^\\/])*/(\\\\|\\[^\\]|[^\\/])*/[egimosx]*', - String.Regex), - (r's!(\\\\|\\!|[^!])*!(\\\\|\\!|[^!])*![egimosx]*', String.Regex), - (r's\\(\\\\|[^\\])*\\(\\\\|[^\\])*\\[egimosx]*', String.Regex), - (r's@(\\\\|\\[^\\]|[^\\@])*@(\\\\|\\[^\\]|[^\\@])*@[egimosx]*', - String.Regex), - (r's%(\\\\|\\[^\\]|[^\\%])*%(\\\\|\\[^\\]|[^\\%])*%[egimosx]*', - String.Regex), - # balanced delimiters - (r's{(\\\\|\\[^\\]|[^\\}])*}\s*', String.Regex, 'balanced-regex'), - (r's<(\\\\|\\[^\\]|[^\\>])*>\s*', String.Regex, 'balanced-regex'), - (r's\[(\\\\|\\[^\\]|[^\\\]])*\]\s*', String.Regex, - 'balanced-regex'), - (r's\((\\\\|\\[^\\]|[^\\\)])*\)\s*', String.Regex, - 'balanced-regex'), - - (r'm?/(\\\\|\\[^\\]|[^\\/\n])*/[gcimosx]*', String.Regex), - (r'm(?=[/!\\{<\[\(@%\$])', String.Regex, 'balanced-regex'), - (r'((?<==~)|(?<=\())\s*/(\\\\|\\[^\\]|[^\\/])*/[gcimosx]*', - String.Regex), - (r'\s+', Text), - (r'(abs|accept|alarm|atan2|bind|binmode|bless|caller|chdir|' - r'chmod|chomp|chop|chown|chr|chroot|close|closedir|connect|' - r'continue|cos|crypt|dbmclose|dbmopen|defined|delete|die|' - r'dump|each|endgrent|endhostent|endnetent|endprotoent|' - r'endpwent|endservent|eof|eval|exec|exists|exit|exp|fcntl|' - r'fileno|flock|fork|format|formline|getc|getgrent|getgrgid|' - r'getgrnam|gethostbyaddr|gethostbyname|gethostent|getlogin|' - r'getnetbyaddr|getnetbyname|getnetent|getpeername|getpgrp|' - r'getppid|getpriority|getprotobyname|getprotobynumber|' - r'getprotoent|getpwent|getpwnam|getpwuid|getservbyname|' - r'getservbyport|getservent|getsockname|getsockopt|glob|gmtime|' - r'goto|grep|hex|import|index|int|ioctl|join|keys|kill|last|' - r'lc|lcfirst|length|link|listen|local|localtime|log|lstat|' - r'map|mkdir|msgctl|msgget|msgrcv|msgsnd|my|next|no|oct|open|' - r'opendir|ord|our|pack|package|pipe|pop|pos|printf|' - r'prototype|push|quotemeta|rand|read|readdir|' - r'readline|readlink|readpipe|recv|redo|ref|rename|require|' - r'reverse|rewinddir|rindex|rmdir|scalar|seek|seekdir|' - r'select|semctl|semget|semop|send|setgrent|sethostent|setnetent|' - r'setpgrp|setpriority|setprotoent|setpwent|setservent|' - r'setsockopt|shift|shmctl|shmget|shmread|shmwrite|shutdown|' - r'sin|sleep|socket|socketpair|sort|splice|split|sprintf|sqrt|' - r'srand|stat|study|substr|symlink|syscall|sysopen|sysread|' - r'sysseek|system|syswrite|tell|telldir|tie|tied|time|times|tr|' - 
r'truncate|uc|ucfirst|umask|undef|unlink|unpack|unshift|untie|' - r'utime|values|vec|wait|waitpid|wantarray|warn|write' - r')\b', Name.Builtin), - (r'((__(DATA|DIE|WARN)__)|(STD(IN|OUT|ERR)))\b', Name.Builtin.Pseudo), - (r'<<([\'"]?)([a-zA-Z_]\w*)\1;?\n.*?\n\2\n', String), - (r'__END__', Comment.Preproc, 'end-part'), - (r'\$\^[ADEFHILMOPSTWX]', Name.Variable.Global), - (r"\$[\\\"\[\]'&`+*.,;=%~?@$!<>(^|/-](?!\w)", Name.Variable.Global), - (r'[$@%#]+', Name.Variable, 'varname'), - (r'0_?[0-7]+(_[0-7]+)*', Number.Oct), - (r'0x[0-9A-Fa-f]+(_[0-9A-Fa-f]+)*', Number.Hex), - (r'0b[01]+(_[01]+)*', Number.Bin), - (r'(?i)(\d*(_\d*)*\.\d+(_\d*)*|\d+(_\d*)*\.\d+(_\d*)*)(e[+-]?\d+)?', - Number.Float), - (r'(?i)\d+(_\d*)*e[+-]?\d+(_\d*)*', Number.Float), - (r'\d+(_\d+)*', Number.Integer), - (r"'(\\\\|\\[^\\]|[^'\\])*'", String), - (r'"(\\\\|\\[^\\]|[^"\\])*"', String), - (r'`(\\\\|\\[^\\]|[^`\\])*`', String.Backtick), - (r'<([^\s>]+)>', String.Regex), - (r'(q|qq|qw|qr|qx)\{', String.Other, 'cb-string'), - (r'(q|qq|qw|qr|qx)\(', String.Other, 'rb-string'), - (r'(q|qq|qw|qr|qx)\[', String.Other, 'sb-string'), - (r'(q|qq|qw|qr|qx)\<', String.Other, 'lt-string'), - (r'(q|qq|qw|qr|qx)([^a-zA-Z0-9])(.|\n)*?\2', String.Other), - (r'package\s+', Keyword, 'modulename'), - (r'sub\s+', Keyword, 'funcname'), - (r'(\[\]|\*\*|::|<<|>>|>=|<=>|<=|={3}|!=|=~|' - r'!~|&&?|\|\||\.{1,3})', Operator), - (r'[-+/*%=<>&^|!\\~]=?', Operator), - (r'[\(\)\[\]:;,<>/\?\{\}]', Punctuation), # yes, there's no shortage - # of punctuation in Perl! - (r'(?=\w)', Name, 'name'), - ], - 'format': [ - (r'\.\n', String.Interpol, '#pop'), - (r'[^\n]*\n', String.Interpol), - ], - 'varname': [ - (r'\s+', Text), - (r'\{', Punctuation, '#pop'), # hash syntax? - (r'\)|,', Punctuation, '#pop'), # argument specifier - (r'\w+::', Name.Namespace), - (r'[\w:]+', Name.Variable, '#pop'), - ], - 'name': [ - (r'\w+::', Name.Namespace), - (r'[\w:]+', Name, '#pop'), - (r'[A-Z_]+(?=\W)', Name.Constant, '#pop'), - (r'(?=\W)', Text, '#pop'), - ], - 'modulename': [ - (r'[a-zA-Z_]\w*', Name.Namespace, '#pop') - ], - 'funcname': [ - (r'[a-zA-Z_]\w*[\!\?]?', Name.Function), - (r'\s+', Text), - # argument declaration - (r'(\([$@%]*\))(\s*)', bygroups(Punctuation, Text)), - (r'.*?{', Punctuation, '#pop'), - (r';', Punctuation, '#pop'), - ], - 'cb-string': [ - (r'\\[\{\}\\]', String.Other), - (r'\\', String.Other), - (r'\{', String.Other, 'cb-string'), - (r'\}', String.Other, '#pop'), - (r'[^\{\}\\]+', String.Other) - ], - 'rb-string': [ - (r'\\[\(\)\\]', String.Other), - (r'\\', String.Other), - (r'\(', String.Other, 'rb-string'), - (r'\)', String.Other, '#pop'), - (r'[^\(\)]+', String.Other) - ], - 'sb-string': [ - (r'\\[\[\]\\]', String.Other), - (r'\\', String.Other), - (r'\[', String.Other, 'sb-string'), - (r'\]', String.Other, '#pop'), - (r'[^\[\]]+', String.Other) - ], - 'lt-string': [ - (r'\\[\<\>\\]', String.Other), - (r'\\', String.Other), - (r'\<', String.Other, 'lt-string'), - (r'\>', String.Other, '#pop'), - (r'[^\<\>]+', String.Other) - ], - 'end-part': [ - (r'.+', Comment.Preproc, '#pop') - ] - } - - def analyse_text(text): - if shebang_matches(text, r'perl'): - return True - if re.search('(?:my|our)\s+[$@%(]', text): - return 0.9 - - -class LuaLexer(RegexLexer): - """ - For `Lua `_ source code. - - Additional options accepted: - - `func_name_highlighting` - If given and ``True``, highlight builtin function names - (default: ``True``). - `disabled_modules` - If given, must be a list of module names whose function names - should not be highlighted. 
By default all modules are highlighted. - - To get a list of allowed modules have a look into the - `_luabuiltins` module: - - .. sourcecode:: pycon - - >>> from pygments.lexers._luabuiltins import MODULES - >>> MODULES.keys() - ['string', 'coroutine', 'modules', 'io', 'basic', ...] - """ - - name = 'Lua' - aliases = ['lua'] - filenames = ['*.lua', '*.wlua'] - mimetypes = ['text/x-lua', 'application/x-lua'] - - tokens = { - 'root': [ - # lua allows a file to start with a shebang - (r'#!(.*?)$', Comment.Preproc), - default('base'), - ], - 'base': [ - (r'(?s)--\[(=*)\[.*?\]\1\]', Comment.Multiline), - ('--.*$', Comment.Single), - - (r'(?i)(\d*\.\d+|\d+\.\d*)(e[+-]?\d+)?', Number.Float), - (r'(?i)\d+e[+-]?\d+', Number.Float), - ('(?i)0x[0-9a-f]*', Number.Hex), - (r'\d+', Number.Integer), - - (r'\n', Text), - (r'[^\S\n]', Text), - # multiline strings - (r'(?s)\[(=*)\[.*?\]\1\]', String), - - (r'(==|~=|<=|>=|\.\.\.|\.\.|[=+\-*/%^<>#])', Operator), - (r'[\[\]\{\}\(\)\.,:;]', Punctuation), - (r'(and|or|not)\b', Operator.Word), - - ('(break|do|else|elseif|end|for|if|in|repeat|return|then|until|' - r'while)\b', Keyword), - (r'(local)\b', Keyword.Declaration), - (r'(true|false|nil)\b', Keyword.Constant), - - (r'(function)\b', Keyword, 'funcname'), - - (r'[A-Za-z_]\w*(\.[A-Za-z_]\w*)?', Name), - - ("'", String.Single, combined('stringescape', 'sqs')), - ('"', String.Double, combined('stringescape', 'dqs')) - ], - - 'funcname': [ - (r'\s+', Text), - ('(?:([A-Za-z_]\w*)(\.))?([A-Za-z_]\w*)', - bygroups(Name.Class, Punctuation, Name.Function), '#pop'), - # inline function - ('\(', Punctuation, '#pop'), - ], - - # if I understand correctly, every character is valid in a lua string, - # so this state is only for later corrections - 'string': [ - ('.', String) - ], - - 'stringescape': [ - (r'''\\([abfnrtv\\"']|\d{1,3})''', String.Escape) - ], - - 'sqs': [ - ("'", String, '#pop'), - include('string') - ], - - 'dqs': [ - ('"', String, '#pop'), - include('string') - ] - } - - def __init__(self, **options): - self.func_name_highlighting = get_bool_opt( - options, 'func_name_highlighting', True) - self.disabled_modules = get_list_opt(options, 'disabled_modules', []) - - self._functions = set() - if self.func_name_highlighting: - from pygments.lexers._luabuiltins import MODULES - for mod, func in iteritems(MODULES): - if mod not in self.disabled_modules: - self._functions.update(func) - RegexLexer.__init__(self, **options) - - def get_tokens_unprocessed(self, text): - for index, token, value in \ - RegexLexer.get_tokens_unprocessed(self, text): - if token is Name: - if value in self._functions: - yield index, Name.Builtin, value - continue - elif '.' in value: - a, b = value.split('.') - yield index, Name, a - yield index + len(a), Punctuation, u'.' - yield index + len(a) + 1, Name, b - continue - yield index, token, value - - -class MoonScriptLexer(LuaLexer): - """ - For `MoonScript `_ source code. - - .. 
versionadded:: 1.5 - """ - - name = "MoonScript" - aliases = ["moon", "moonscript"] - filenames = ["*.moon"] - mimetypes = ['text/x-moonscript', 'application/x-moonscript'] - - tokens = { - 'root': [ - (r'#!(.*?)$', Comment.Preproc), - default('base'), - ], - 'base': [ - ('--.*$', Comment.Single), - (r'(?i)(\d*\.\d+|\d+\.\d*)(e[+-]?\d+)?', Number.Float), - (r'(?i)\d+e[+-]?\d+', Number.Float), - (r'(?i)0x[0-9a-f]*', Number.Hex), - (r'\d+', Number.Integer), - (r'\n', Text), - (r'[^\S\n]+', Text), - (r'(?s)\[(=*)\[.*?\]\1\]', String), - (r'(->|=>)', Name.Function), - (r':[a-zA-Z_]\w*', Name.Variable), - (r'(==|!=|~=|<=|>=|\.\.\.|\.\.|[=+\-*/%^<>#!.\\:])', Operator), - (r'[;,]', Punctuation), - (r'[\[\]\{\}\(\)]', Keyword.Type), - (r'[a-zA-Z_]\w*:', Name.Variable), - (r"(class|extends|if|then|super|do|with|import|export|" - r"while|elseif|return|for|in|from|when|using|else|" - r"and|or|not|switch|break)\b", Keyword), - (r'(true|false|nil)\b', Keyword.Constant), - (r'(and|or|not)\b', Operator.Word), - (r'(self)\b', Name.Builtin.Pseudo), - (r'@@?([a-zA-Z_]\w*)?', Name.Variable.Class), - (r'[A-Z]\w*', Name.Class), # proper name - (r'[A-Za-z_]\w*(\.[A-Za-z_]\w*)?', Name), - ("'", String.Single, combined('stringescape', 'sqs')), - ('"', String.Double, combined('stringescape', 'dqs')) - ], - 'stringescape': [ - (r'''\\([abfnrtv\\"']|\d{1,3})''', String.Escape) - ], - 'sqs': [ - ("'", String.Single, '#pop'), - (".", String) - ], - 'dqs': [ - ('"', String.Double, '#pop'), - (".", String) - ] - } - - def get_tokens_unprocessed(self, text): - # set . as Operator instead of Punctuation - for index, token, value in \ - LuaLexer.get_tokens_unprocessed(self, text): - if token == Punctuation and value == ".": - token = Operator - yield index, token, value - - -class CrocLexer(RegexLexer): - """ - For `Croc `_ source. - """ - name = 'Croc' - filenames = ['*.croc'] - aliases = ['croc'] - mimetypes = ['text/x-crocsrc'] - - tokens = { - 'root': [ - (r'\n', Text), - (r'\s+', Text), - # Comments - (r'//(.*?)\n', Comment.Single), - (r'/\*', Comment.Multiline, 'nestedcomment'), - # Keywords - (r'(as|assert|break|case|catch|class|continue|default' - r'|do|else|finally|for|foreach|function|global|namespace' - r'|if|import|in|is|local|module|return|scope|super|switch' - r'|this|throw|try|vararg|while|with|yield)\b', Keyword), - (r'(false|true|null)\b', Keyword.Constant), - # FloatLiteral - (r'([0-9][0-9_]*)(?=[.eE])(\.[0-9][0-9_]*)?([eE][+\-]?[0-9_]+)?', - Number.Float), - # IntegerLiteral - # -- Binary - (r'0[bB][01][01_]*', Number.Bin), - # -- Hexadecimal - (r'0[xX][0-9a-fA-F][0-9a-fA-F_]*', Number.Hex), - # -- Decimal - (r'([0-9][0-9_]*)(?![.eE])', Number.Integer), - # CharacterLiteral - (r"""'(\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\[0-9]{1,3}""" - r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|.)'""", - String.Char - ), - # StringLiteral - # -- WysiwygString - (r'@"(""|[^"])*"', String), - (r'@`(``|[^`])*`', String), - (r"@'(''|[^'])*'", String), - # -- DoubleQuotedString - (r'"(\\\\|\\"|[^"])*"', String), - # Tokens - ( - r'(~=|\^=|%=|\*=|==|!=|>>>=|>>>|>>=|>>|>=|<=>|\?=|-\>' - r'|<<=|<<|<=|\+\+|\+=|--|-=|\|\||\|=|&&|&=|\.\.|/=)' - r'|[-/.&$@|\+<>!()\[\]{}?,;:=*%^~#\\]', Punctuation - ), - # Identifier - (r'[a-zA-Z_]\w*', Name), - ], - 'nestedcomment': [ - (r'[^*/]+', Comment.Multiline), - (r'/\*', Comment.Multiline, '#push'), - (r'\*/', Comment.Multiline, '#pop'), - (r'[*/]', Comment.Multiline), - ], - } - - -class MiniDLexer(CrocLexer): - """ - For MiniD source. MiniD is now known as Croc. 
- """ - name = 'MiniD' - filenames = ['*.md'] - aliases = ['minid'] - mimetypes = ['text/x-minidsrc'] - - -class IoLexer(RegexLexer): - """ - For `Io `_ (a small, prototype-based - programming language) source. - - .. versionadded:: 0.10 - """ - name = 'Io' - filenames = ['*.io'] - aliases = ['io'] - mimetypes = ['text/x-iosrc'] - tokens = { - 'root': [ - (r'\n', Text), - (r'\s+', Text), - # Comments - (r'//(.*?)\n', Comment.Single), - (r'#(.*?)\n', Comment.Single), - (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline), - (r'/\+', Comment.Multiline, 'nestedcomment'), - # DoubleQuotedString - (r'"(\\\\|\\"|[^"])*"', String), - # Operators - (r'::=|:=|=|\(|\)|;|,|\*|-|\+|>|<|@|!|/|\||\^|\.|%|&|\[|\]|\{|\}', - Operator), - # keywords - (r'(clone|do|doFile|doString|method|for|if|else|elseif|then)\b', - Keyword), - # constants - (r'(nil|false|true)\b', Name.Constant), - # names - (r'(Object|list|List|Map|args|Sequence|Coroutine|File)\b', - Name.Builtin), - ('[a-zA-Z_]\w*', Name), - # numbers - (r'(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float), - (r'\d+', Number.Integer) - ], - 'nestedcomment': [ - (r'[^+/]+', Comment.Multiline), - (r'/\+', Comment.Multiline, '#push'), - (r'\+/', Comment.Multiline, '#pop'), - (r'[+/]', Comment.Multiline), - ] - } - - -class TclLexer(RegexLexer): - """ - For Tcl source code. - - .. versionadded:: 0.10 - """ - - keyword_cmds_re = ( - r'\b(after|apply|array|break|catch|continue|elseif|else|error|' - r'eval|expr|for|foreach|global|if|namespace|proc|rename|return|' - r'set|switch|then|trace|unset|update|uplevel|upvar|variable|' - r'vwait|while)\b' - ) - - builtin_cmds_re = ( - r'\b(append|bgerror|binary|cd|chan|clock|close|concat|dde|dict|' - r'encoding|eof|exec|exit|fblocked|fconfigure|fcopy|file|' - r'fileevent|flush|format|gets|glob|history|http|incr|info|interp|' - r'join|lappend|lassign|lindex|linsert|list|llength|load|loadTk|' - r'lrange|lrepeat|lreplace|lreverse|lsearch|lset|lsort|mathfunc|' - r'mathop|memory|msgcat|open|package|pid|pkg::create|pkg_mkIndex|' - r'platform|platform::shell|puts|pwd|re_syntax|read|refchan|' - r'regexp|registry|regsub|scan|seek|socket|source|split|string|' - r'subst|tell|time|tm|unknown|unload)\b' - ) - - name = 'Tcl' - aliases = ['tcl'] - filenames = ['*.tcl'] - mimetypes = ['text/x-tcl', 'text/x-script.tcl', 'application/x-tcl'] - - def _gen_command_rules(keyword_cmds_re, builtin_cmds_re, context=""): - return [ - (keyword_cmds_re, Keyword, 'params' + context), - (builtin_cmds_re, Name.Builtin, 'params' + context), - (r'([\w\.\-]+)', Name.Variable, 'params' + context), - (r'#', Comment, 'comment'), - ] - - tokens = { - 'root': [ - include('command'), - include('basic'), - include('data'), - (r'}', Keyword), # HACK: somehow we miscounted our braces - ], - 'command': _gen_command_rules(keyword_cmds_re, builtin_cmds_re), - 'command-in-brace': _gen_command_rules(keyword_cmds_re, - builtin_cmds_re, - "-in-brace"), - 'command-in-bracket': _gen_command_rules(keyword_cmds_re, - builtin_cmds_re, - "-in-bracket"), - 'command-in-paren': _gen_command_rules(keyword_cmds_re, - builtin_cmds_re, - "-in-paren"), - 'basic': [ - (r'\(', Keyword, 'paren'), - (r'\[', Keyword, 'bracket'), - (r'\{', Keyword, 'brace'), - (r'"', String.Double, 'string'), - (r'(eq|ne|in|ni)\b', Operator.Word), - (r'!=|==|<<|>>|<=|>=|&&|\|\||\*\*|[-+~!*/%<>&^|?:]', Operator), - ], - 'data': [ - (r'\s+', Text), - (r'0x[a-fA-F0-9]+', Number.Hex), - (r'0[0-7]+', Number.Oct), - (r'\d+\.\d+', Number.Float), - (r'\d+', Number.Integer), - (r'\$([\w\.\-\:]+)', 
Name.Variable), - (r'([\w\.\-\:]+)', Text), - ], - 'params': [ - (r';', Keyword, '#pop'), - (r'\n', Text, '#pop'), - (r'(else|elseif|then)\b', Keyword), - include('basic'), - include('data'), - ], - 'params-in-brace': [ - (r'}', Keyword, ('#pop', '#pop')), - include('params') - ], - 'params-in-paren': [ - (r'\)', Keyword, ('#pop', '#pop')), - include('params') - ], - 'params-in-bracket': [ - (r'\]', Keyword, ('#pop', '#pop')), - include('params') - ], - 'string': [ - (r'\[', String.Double, 'string-square'), - (r'(?s)(\\\\|\\[0-7]+|\\.|[^"\\])', String.Double), - (r'"', String.Double, '#pop') - ], - 'string-square': [ - (r'\[', String.Double, 'string-square'), - (r'(?s)(\\\\|\\[0-7]+|\\.|\\\n|[^\]\\])', String.Double), - (r'\]', String.Double, '#pop') - ], - 'brace': [ - (r'}', Keyword, '#pop'), - include('command-in-brace'), - include('basic'), - include('data'), - ], - 'paren': [ - (r'\)', Keyword, '#pop'), - include('command-in-paren'), - include('basic'), - include('data'), - ], - 'bracket': [ - (r'\]', Keyword, '#pop'), - include('command-in-bracket'), - include('basic'), - include('data'), - ], - 'comment': [ - (r'.*[^\\]\n', Comment, '#pop'), - (r'.*\\\n', Comment), - ], - } - - def analyse_text(text): - return shebang_matches(text, r'(tcl)') - - -class FactorLexer(RegexLexer): - """ - Lexer for the `Factor `_ language. - - .. versionadded:: 1.4 - """ - name = 'Factor' - aliases = ['factor'] - filenames = ['*.factor'] - mimetypes = ['text/x-factor'] - - flags = re.MULTILINE | re.UNICODE - - builtin_kernel = ( - r'(?:-rot|2bi|2bi@|2bi\*|2curry|2dip|2drop|2dup|2keep|2nip|' - r'2over|2tri|2tri@|2tri\*|3bi|3curry|3dip|3drop|3dup|3keep|' - r'3tri|4dip|4drop|4dup|4keep||=|>boolean|\(clone\)|' - r'\?|\?execute|\?if|and|assert|assert=|assert\?|bi|bi-curry|' - r'bi-curry@|bi-curry\*|bi@|bi\*|boa|boolean|boolean\?|both\?|' - r'build|call|callstack|callstack>array|callstack\?|clear|clone|' - r'compose|compose\?|curry|curry\?|datastack|die|dip|do|drop|' - r'dup|dupd|either\?|eq\?|equal\?|execute|hashcode|hashcode\*|' - r'identity-hashcode|identity-tuple|identity-tuple\?|if|if\*|' - r'keep|loop|most|new|nip|not|null|object|or|over|pick|prepose|' - r'retainstack|rot|same\?|swap|swapd|throw|tri|tri-curry|' - r'tri-curry@|tri-curry\*|tri@|tri\*|tuple|tuple\?|unless|' - r'unless\*|until|when|when\*|while|with|wrapper|wrapper\?|xor)\s' - ) - - builtin_assocs = ( - r'(?:2cache||>alist|\?at|\?of|assoc|assoc-all\?|' - r'assoc-any\?|assoc-clone-like|assoc-combine|assoc-diff|' - r'assoc-diff!|assoc-differ|assoc-each|assoc-empty\?|' - r'assoc-filter|assoc-filter!|assoc-filter-as|assoc-find|' - r'assoc-hashcode|assoc-intersect|assoc-like|assoc-map|' - r'assoc-map-as|assoc-partition|assoc-refine|assoc-size|' - r'assoc-stack|assoc-subset\?|assoc-union|assoc-union!|' - r'assoc=|assoc>map|assoc\?|at|at+|at\*|cache|change-at|' - r'clear-assoc|delete-at|delete-at\*|enum|enum\?|extract-keys|' - r'inc-at|key\?|keys|map>assoc|maybe-set-at|new-assoc|of|' - r'push-at|rename-at|set-at|sift-keys|sift-values|substitute|' - r'unzip|value-at|value-at\*|value\?|values|zip)\s' - ) - - builtin_combinators = ( - r'(?:2cleave|2cleave>quot|3cleave|3cleave>quot|4cleave|' - r'4cleave>quot|alist>quot|call-effect|case|case-find|' - r'case>quot|cleave|cleave>quot|cond|cond>quot|deep-spread>quot|' - r'execute-effect|linear-case-quot|no-case|no-case\?|no-cond|' - r'no-cond\?|recursive-hashcode|shallow-spread>quot|spread|' - r'to-fixed-point|wrong-values|wrong-values\?)\s' - ) - - builtin_math = ( - 
r'(?:-|/|/f|/i|/mod|2/|2\^|<|<=||>|>=|>bignum|' - r'>fixnum|>float|>integer|\(all-integers\?\)|' - r'\(each-integer\)|\(find-integer\)|\*|\+|\?1\+|' - r'abs|align|all-integers\?|bignum|bignum\?|bit\?|bitand|' - r'bitnot|bitor|bits>double|bits>float|bitxor|complex|' - r'complex\?|denominator|double>bits|each-integer|even\?|' - r'find-integer|find-last-integer|fixnum|fixnum\?|float|' - r'float>bits|float\?|fp-bitwise=|fp-infinity\?|fp-nan-payload|' - r'fp-nan\?|fp-qnan\?|fp-sign|fp-snan\?|fp-special\?|' - r'if-zero|imaginary-part|integer|integer>fixnum|' - r'integer>fixnum-strict|integer\?|log2|log2-expects-positive|' - r'log2-expects-positive\?|mod|neg|neg\?|next-float|' - r'next-power-of-2|number|number=|number\?|numerator|odd\?|' - r'out-of-fixnum-range|out-of-fixnum-range\?|power-of-2\?|' - r'prev-float|ratio|ratio\?|rational|rational\?|real|' - r'real-part|real\?|recip|rem|sgn|shift|sq|times|u<|u<=|u>|' - r'u>=|unless-zero|unordered\?|when-zero|zero\?)\s' - ) - - builtin_sequences = ( - r'(?:1sequence|2all\?|2each|2map|2map-as|2map-reduce|2reduce|' - r'2selector|2sequence|3append|3append-as|3each|3map|3map-as|' - r'3sequence|4sequence||||\?first|' - r'\?last|\?nth|\?second|\?set-nth|accumulate|accumulate!|' - r'accumulate-as|all\?|any\?|append|append!|append-as|' - r'assert-sequence|assert-sequence=|assert-sequence\?|' - r'binary-reduce|bounds-check|bounds-check\?|bounds-error|' - r'bounds-error\?|but-last|but-last-slice|cartesian-each|' - r'cartesian-map|cartesian-product|change-nth|check-slice|' - r'check-slice-error|clone-like|collapse-slice|collector|' - r'collector-for|concat|concat-as|copy|count|cut|cut-slice|' - r'cut\*|delete-all|delete-slice|drop-prefix|each|each-from|' - r'each-index|empty\?|exchange|filter|filter!|filter-as|find|' - r'find-from|find-index|find-index-from|find-last|find-last-from|' - r'first|first2|first3|first4|flip|follow|fourth|glue|halves|' - r'harvest|head|head-slice|head-slice\*|head\*|head\?|' - r'if-empty|immutable|immutable-sequence|immutable-sequence\?|' - r'immutable\?|index|index-from|indices|infimum|infimum-by|' - r'insert-nth|interleave|iota|iota-tuple|iota-tuple\?|join|' - r'join-as|last|last-index|last-index-from|length|lengthen|' - r'like|longer|longer\?|longest|map|map!|map-as|map-find|' - r'map-find-last|map-index|map-integers|map-reduce|map-sum|' - r'max-length|member-eq\?|member\?|midpoint@|min-length|' - r'mismatch|move|new-like|new-resizable|new-sequence|' - r'non-negative-integer-expected|non-negative-integer-expected\?|' - r'nth|nths|pad-head|pad-tail|padding|partition|pop|pop\*|' - r'prefix|prepend|prepend-as|produce|produce-as|product|push|' - r'push-all|push-either|push-if|reduce|reduce-index|remove|' - r'remove!|remove-eq|remove-eq!|remove-nth|remove-nth!|repetition|' - r'repetition\?|replace-slice|replicate|replicate-as|rest|' - r'rest-slice|reverse|reverse!|reversed|reversed\?|second|' - r'selector|selector-for|sequence|sequence-hashcode|sequence=|' - r'sequence\?|set-first|set-fourth|set-last|set-length|set-nth|' - r'set-second|set-third|short|shorten|shorter|shorter\?|' - r'shortest|sift|slice|slice-error|slice-error\?|slice\?|' - r'snip|snip-slice|start|start\*|subseq|subseq\?|suffix|' - r'suffix!|sum|sum-lengths|supremum|supremum-by|surround|tail|' - r'tail-slice|tail-slice\*|tail\*|tail\?|third|trim|' - r'trim-head|trim-head-slice|trim-slice|trim-tail|trim-tail-slice|' - r'unclip|unclip-last|unclip-last-slice|unclip-slice|unless-empty|' - r'virtual-exemplar|virtual-sequence|virtual-sequence\?|virtual@|' - r'when-empty)\s' - 
) - - builtin_namespaces = ( - r'(?:\+@|change|change-global|counter|dec|get|get-global|' - r'global|inc|init-namespaces|initialize|is-global|make-assoc|' - r'namespace|namestack|off|on|set|set-global|set-namestack|' - r'toggle|with-global|with-scope|with-variable|with-variables)\s' - ) - - builtin_arrays = ( - r'(?:1array|2array|3array|4array||>array|array|array\?|' - r'pair|pair\?|resize-array)\s' - ) - - builtin_io = ( - r'(?:\(each-stream-block-slice\)|\(each-stream-block\)|' - r'\(stream-contents-by-block\)|\(stream-contents-by-element\)|' - r'\(stream-contents-by-length-or-block\)|' - r'\(stream-contents-by-length\)|\+byte\+|\+character\+|' - r'bad-seek-type|bad-seek-type\?|bl|contents|each-block|' - r'each-block-size|each-block-slice|each-line|each-morsel|' - r'each-stream-block|each-stream-block-slice|each-stream-line|' - r'error-stream|flush|input-stream|input-stream\?|' - r'invalid-read-buffer|invalid-read-buffer\?|lines|nl|' - r'output-stream|output-stream\?|print|read|read-into|' - r'read-partial|read-partial-into|read-until|read1|readln|' - r'seek-absolute|seek-absolute\?|seek-end|seek-end\?|' - r'seek-input|seek-output|seek-relative|seek-relative\?|' - r'stream-bl|stream-contents|stream-contents\*|stream-copy|' - r'stream-copy\*|stream-element-type|stream-flush|' - r'stream-length|stream-lines|stream-nl|stream-print|' - r'stream-read|stream-read-into|stream-read-partial|' - r'stream-read-partial-into|stream-read-partial-unsafe|' - r'stream-read-unsafe|stream-read-until|stream-read1|' - r'stream-readln|stream-seek|stream-seekable\?|stream-tell|' - r'stream-write|stream-write1|tell-input|tell-output|' - r'with-error-stream|with-error-stream\*|with-error>output|' - r'with-input-output\+error-streams|' - r'with-input-output\+error-streams\*|with-input-stream|' - r'with-input-stream\*|with-output-stream|with-output-stream\*|' - r'with-output>error|with-output\+error-stream|' - r'with-output\+error-stream\*|with-streams|with-streams\*|' - r'write|write1)\s' - ) - - builtin_strings = ( - r'(?:1string||>string|resize-string|string|string\?)\s' - ) - - builtin_vectors = ( - r'(?:1vector||>vector|\?push|vector|vector\?)\s' - ) - - builtin_continuations = ( - r'(?:|||attempt-all|' - r'attempt-all-error|attempt-all-error\?|callback-error-hook|' - r'callcc0|callcc1|cleanup|compute-restarts|condition|' - r'condition\?|continuation|continuation\?|continue|' - r'continue-restart|continue-with|current-continuation|' - r'error|error-continuation|error-in-thread|error-thread|' - r'ifcc|ignore-errors|in-callback\?|original-error|recover|' - r'restart|restart\?|restarts|rethrow|rethrow-restarts|' - r'return|return-continuation|thread-error-hook|throw-continue|' - r'throw-restarts|with-datastack|with-return)\s' - ) - - tokens = { - 'root': [ - # factor allows a file to start with a shebang - (r'#!.*$', Comment.Preproc), - default('base'), - ], - 'base': [ - (r'\s+', Text), - - # defining words - (r'((?:MACRO|MEMO|TYPED)?:[:]?)(\s+)(\S+)', - bygroups(Keyword, Text, Name.Function)), - (r'(M:[:]?)(\s+)(\S+)(\s+)(\S+)', - bygroups(Keyword, Text, Name.Class, Text, Name.Function)), - (r'(C:)(\s+)(\S+)(\s+)(\S+)', - bygroups(Keyword, Text, Name.Function, Text, Name.Class)), - (r'(GENERIC:)(\s+)(\S+)', - bygroups(Keyword, Text, Name.Function)), - (r'(HOOK:|GENERIC#)(\s+)(\S+)(\s+)(\S+)', - bygroups(Keyword, Text, Name.Function, Text, Name.Function)), - (r'\(\s', Name.Function, 'stackeffect'), - (r';\s', Keyword), - - # imports and namespaces - (r'(USING:)(\s+)', - bygroups(Keyword.Namespace, Text), 
'vocabs'), - (r'(USE:|UNUSE:|IN:|QUALIFIED:)(\s+)(\S+)', - bygroups(Keyword.Namespace, Text, Name.Namespace)), - (r'(QUALIFIED-WITH:)(\s+)(\S+)(\s+)(\S+)', - bygroups(Keyword.Namespace, Text, Name.Namespace, Text, Name.Namespace)), - (r'(FROM:|EXCLUDE:)(\s+)(\S+)(\s+=>\s)', - bygroups(Keyword.Namespace, Text, Name.Namespace, Text), 'words'), - (r'(RENAME:)(\s+)(\S+)(\s+)(\S+)(\s+=>\s+)(\S+)', - bygroups(Keyword.Namespace, Text, Name.Function, Text, Name.Namespace, Text, Name.Function)), - (r'(ALIAS:|TYPEDEF:)(\s+)(\S+)(\s+)(\S+)', - bygroups(Keyword.Namespace, Text, Name.Function, Text, Name.Function)), - (r'(DEFER:|FORGET:|POSTPONE:)(\s+)(\S+)', - bygroups(Keyword.Namespace, Text, Name.Function)), - - # tuples and classes - (r'(TUPLE:|ERROR:)(\s+)(\S+)(\s+<\s+)(\S+)', - bygroups(Keyword, Text, Name.Class, Text, Name.Class), 'slots'), - (r'(TUPLE:|ERROR:|BUILTIN:)(\s+)(\S+)', - bygroups(Keyword, Text, Name.Class), 'slots'), - (r'(MIXIN:|UNION:|INTERSECTION:)(\s+)(\S+)', - bygroups(Keyword, Text, Name.Class)), - (r'(PREDICATE:)(\s+)(\S+)(\s+<\s+)(\S+)', - bygroups(Keyword, Text, Name.Class, Text, Name.Class)), - (r'(C:)(\s+)(\S+)(\s+)(\S+)', - bygroups(Keyword, Text, Name.Function, Text, Name.Class)), - (r'(INSTANCE:)(\s+)(\S+)(\s+)(\S+)', - bygroups(Keyword, Text, Name.Class, Text, Name.Class)), - (r'(SLOT:)(\s+)(\S+)', bygroups(Keyword, Text, Name.Function)), - (r'(SINGLETON:)(\s+)(\S+)', bygroups(Keyword, Text, Name.Class)), - (r'SINGLETONS:', Keyword, 'classes'), - - # other syntax - (r'(CONSTANT:|SYMBOL:|MAIN:|HELP:)(\s+)(\S+)', - bygroups(Keyword, Text, Name.Function)), - (r'SYMBOLS:\s', Keyword, 'words'), - (r'SYNTAX:\s', Keyword), - (r'ALIEN:\s', Keyword), - (r'(STRUCT:)(\s+)(\S+)', bygroups(Keyword, Text, Name.Class)), - (r'(FUNCTION:)(\s+\S+\s+)(\S+)(\s+\(\s+[^\)]+\)\s)', - bygroups(Keyword.Namespace, Text, Name.Function, Text)), - (r'(FUNCTION-ALIAS:)(\s+)(\S+)(\s+\S+\s+)(\S+)(\s+\(\s+[^\)]+\)\s)', - bygroups(Keyword.Namespace, Text, Name.Function, Text, Name.Function, Text)), - - # vocab.private - (r'(?:)\s', Keyword.Namespace), - - # strings - (r'"""\s+(?:.|\n)*?\s+"""', String), - (r'"(?:\\\\|\\"|[^"])*"', String), - (r'\S+"\s+(?:\\\\|\\"|[^"])*"', String), - (r'CHAR:\s+(?:\\[\\abfnrstv]|[^\\]\S*)\s', String.Char), - - # comments - (r'!\s+.*$', Comment), - (r'#!\s+.*$', Comment), - (r'/\*\s+(?:.|\n)*?\s\*/\s', Comment), - - # boolean constants - (r'[tf]\s', Name.Constant), - - # symbols and literals - (r'[\\$]\s+\S+', Name.Constant), - (r'M\\\s+\S+\s+\S+', Name.Constant), - - # numbers - (r'[+-]?(?:[\d,]*\d)?\.(?:\d([\d,]*\d)?)?(?:[eE][+-]?\d+)?\s', Number), - (r'[+-]?\d(?:[\d,]*\d)?(?:[eE][+-]?\d+)?\s', Number), - (r'0x[a-fA-F\d](?:[a-fA-F\d,]*[a-fA-F\d])?(?:p\d([\d,]*\d)?)?\s', Number), - (r'NAN:\s+[a-fA-F\d](?:[a-fA-F\d,]*[a-fA-F\d])?(?:p\d([\d,]*\d)?)?\s', Number), - (r'0b[01]+\s', Number.Bin), - (r'0o[0-7]+\s', Number.Oct), - (r'(?:\d([\d,]*\d)?)?\+\d(?:[\d,]*\d)?/\d(?:[\d,]*\d)?\s', Number), - (r'(?:\-\d([\d,]*\d)?)?\-\d(?:[\d,]*\d)?/\d(?:[\d,]*\d)?\s', Number), - - # keywords - (r'(?:deprecated|final|foldable|flushable|inline|recursive)\s', - Keyword), - - # builtins - (builtin_kernel, Name.Builtin), - (builtin_assocs, Name.Builtin), - (builtin_combinators, Name.Builtin), - (builtin_math, Name.Builtin), - (builtin_sequences, Name.Builtin), - (builtin_namespaces, Name.Builtin), - (builtin_arrays, Name.Builtin), - (builtin_io, Name.Builtin), - (builtin_strings, Name.Builtin), - (builtin_vectors, Name.Builtin), - (builtin_continuations, Name.Builtin), - - # everything else 
is text - (r'\S+', Text), - ], - 'stackeffect': [ - (r'\s+', Text), - (r'\(\s+', Name.Function, 'stackeffect'), - (r'\)\s', Name.Function, '#pop'), - (r'--\s', Name.Function), - (r'\S+', Name.Variable), - ], - 'slots': [ - (r'\s+', Text), - (r';\s', Keyword, '#pop'), - (r'({\s+)(\S+)(\s+[^}]+\s+}\s)', - bygroups(Text, Name.Variable, Text)), - (r'\S+', Name.Variable), - ], - 'vocabs': [ - (r'\s+', Text), - (r';\s', Keyword, '#pop'), - (r'\S+', Name.Namespace), - ], - 'classes': [ - (r'\s+', Text), - (r';\s', Keyword, '#pop'), - (r'\S+', Name.Class), - ], - 'words': [ - (r'\s+', Text), - (r';\s', Keyword, '#pop'), - (r'\S+', Name.Function), - ], - } - - -class FancyLexer(RegexLexer): - """ - Pygments Lexer For `Fancy `_. - - Fancy is a self-hosted, pure object-oriented, dynamic, - class-based, concurrent general-purpose programming language - running on Rubinius, the Ruby VM. - - .. versionadded:: 1.5 - """ - name = 'Fancy' - filenames = ['*.fy', '*.fancypack'] - aliases = ['fancy', 'fy'] - mimetypes = ['text/x-fancysrc'] - - tokens = { - # copied from PerlLexer: - 'balanced-regex': [ - (r'/(\\\\|\\/|[^/])*/[egimosx]*', String.Regex, '#pop'), - (r'!(\\\\|\\!|[^!])*![egimosx]*', String.Regex, '#pop'), - (r'\\(\\\\|[^\\])*\\[egimosx]*', String.Regex, '#pop'), - (r'{(\\\\|\\}|[^}])*}[egimosx]*', String.Regex, '#pop'), - (r'<(\\\\|\\>|[^>])*>[egimosx]*', String.Regex, '#pop'), - (r'\[(\\\\|\\\]|[^\]])*\][egimosx]*', String.Regex, '#pop'), - (r'\((\\\\|\\\)|[^\)])*\)[egimosx]*', String.Regex, '#pop'), - (r'@(\\\\|\\\@|[^\@])*@[egimosx]*', String.Regex, '#pop'), - (r'%(\\\\|\\\%|[^\%])*%[egimosx]*', String.Regex, '#pop'), - (r'\$(\\\\|\\\$|[^\$])*\$[egimosx]*', String.Regex, '#pop'), - ], - 'root': [ - (r'\s+', Text), - - # balanced delimiters (copied from PerlLexer): - (r's{(\\\\|\\}|[^}])*}\s*', String.Regex, 'balanced-regex'), - (r's<(\\\\|\\>|[^>])*>\s*', String.Regex, 'balanced-regex'), - (r's\[(\\\\|\\\]|[^\]])*\]\s*', String.Regex, 'balanced-regex'), - (r's\((\\\\|\\\)|[^\)])*\)\s*', String.Regex, 'balanced-regex'), - (r'm?/(\\\\|\\/|[^/\n])*/[gcimosx]*', String.Regex), - (r'm(?=[/!\\{<\[\(@%\$])', String.Regex, 'balanced-regex'), - - # Comments - (r'#(.*?)\n', Comment.Single), - # Symbols - (r'\'([^\'\s\[\]\(\)\{\}]+|\[\])', String.Symbol), - # Multi-line DoubleQuotedString - (r'"""(\\\\|\\"|[^"])*"""', String), - # DoubleQuotedString - (r'"(\\\\|\\"|[^"])*"', String), - # keywords - (r'(def|class|try|catch|finally|retry|return|return_local|match|' - r'case|->|=>)\b', Keyword), - # constants - (r'(self|super|nil|false|true)\b', Name.Constant), - (r'[(){};,/?\|:\\]', Punctuation), - # names - (r'(Object|Array|Hash|Directory|File|Class|String|Number|' - r'Enumerable|FancyEnumerable|Block|TrueClass|NilClass|' - r'FalseClass|Tuple|Symbol|Stack|Set|FancySpec|Method|Package|' - r'Range)\b', Name.Builtin), - # functions - (r'[a-zA-Z](\w|[-+?!=*/^><%])*:', Name.Function), - # operators, must be below functions - (r'[-+*/~,<>=&!?%^\[\]\.$]+', Operator), - ('[A-Z]\w*', Name.Constant), - ('@[a-zA-Z_]\w*', Name.Variable.Instance), - ('@@[a-zA-Z_]\w*', Name.Variable.Class), - ('@@?', Operator), - ('[a-zA-Z_]\w*', Name), - # numbers - / checks are necessary to avoid mismarking regexes, - # see comment in RubyLexer - (r'(0[oO]?[0-7]+(?:_[0-7]+)*)(\s*)([/?])?', - bygroups(Number.Oct, Text, Operator)), - (r'(0[xX][0-9A-Fa-f]+(?:_[0-9A-Fa-f]+)*)(\s*)([/?])?', - bygroups(Number.Hex, Text, Operator)), - (r'(0[bB][01]+(?:_[01]+)*)(\s*)([/?])?', - bygroups(Number.Bin, Text, Operator)), - 
(r'([\d]+(?:_\d+)*)(\s*)([/?])?', - bygroups(Number.Integer, Text, Operator)), - (r'\d+([eE][+-]?[0-9]+)|\d+\.\d+([eE][+-]?[0-9]+)?', Number.Float), - (r'\d+', Number.Integer) - ] - } - - -class DgLexer(RegexLexer): - """ - Lexer for `dg `_, - a functional and object-oriented programming language - running on the CPython 3 VM. - - .. versionadded:: 1.6 - """ - name = 'dg' - aliases = ['dg'] - filenames = ['*.dg'] - mimetypes = ['text/x-dg'] - - tokens = { - 'root': [ - (r'\s+', Text), - (r'#.*?$', Comment.Single), - - (r'(?i)0b[01]+', Number.Bin), - (r'(?i)0o[0-7]+', Number.Oct), - (r'(?i)0x[0-9a-f]+', Number.Hex), - (r'(?i)[+-]?[0-9]+\.[0-9]+(e[+-]?[0-9]+)?j?', Number.Float), - (r'(?i)[+-]?[0-9]+e[+-]?\d+j?', Number.Float), - (r'(?i)[+-]?[0-9]+j?', Number.Integer), - - (r"(?i)(br|r?b?)'''", String, combined('stringescape', 'tsqs', 'string')), - (r'(?i)(br|r?b?)"""', String, combined('stringescape', 'tdqs', 'string')), - (r"(?i)(br|r?b?)'", String, combined('stringescape', 'sqs', 'string')), - (r'(?i)(br|r?b?)"', String, combined('stringescape', 'dqs', 'string')), - - (r"`\w+'*`", Operator), - (r'\b(and|in|is|or|where)\b', Operator.Word), - (r'[!$%&*+\-./:<-@\\^|~;,]+', Operator), - - (r"(?`_ source code. - - .. versionadded:: 2.0 - """ - - name = 'Perl6' - aliases = ['perl6', 'pl6'] - filenames = ['*.pl', '*.pm', '*.nqp', '*.p6', '*.6pl', '*.p6l', '*.pl6', - '*.6pm', '*.p6m', '*.pm6', '*.t'] - mimetypes = ['text/x-perl6', 'application/x-perl6'] - flags = re.MULTILINE | re.DOTALL | re.UNICODE - - PERL6_IDENTIFIER_RANGE = "['\w:-]" - - PERL6_KEYWORDS = ( - 'BEGIN', 'CATCH', 'CHECK', 'CONTROL', 'END', 'ENTER', 'FIRST', 'INIT', - 'KEEP', 'LAST', 'LEAVE', 'NEXT', 'POST', 'PRE', 'START', 'TEMP', - 'UNDO', 'as', 'assoc', 'async', 'augment', 'binary', 'break', 'but', - 'cached', 'category', 'class', 'constant', 'contend', 'continue', - 'copy', 'deep', 'default', 'defequiv', 'defer', 'die', 'do', 'else', - 'elsif', 'enum', 'equiv', 'exit', 'export', 'fail', 'fatal', 'for', - 'gather', 'given', 'goto', 'grammar', 'handles', 'has', 'if', 'inline', - 'irs', 'is', 'last', 'leave', 'let', 'lift', 'loop', 'looser', 'macro', - 'make', 'maybe', 'method', 'module', 'multi', 'my', 'next', 'of', - 'ofs', 'only', 'oo', 'ors', 'our', 'package', 'parsed', 'prec', - 'proto', 'readonly', 'redo', 'ref', 'regex', 'reparsed', 'repeat', - 'require', 'required', 'return', 'returns', 'role', 'rule', 'rw', - 'self', 'slang', 'state', 'sub', 'submethod', 'subset', 'supersede', - 'take', 'temp', 'tighter', 'token', 'trusts', 'try', 'unary', - 'unless', 'until', 'use', 'warn', 'when', 'where', 'while', 'will', - ) - - PERL6_BUILTINS = ( - 'ACCEPTS', 'HOW', 'REJECTS', 'VAR', 'WHAT', 'WHENCE', 'WHERE', 'WHICH', - 'WHO', 'abs', 'acos', 'acosec', 'acosech', 'acosh', 'acotan', 'acotanh', - 'all', 'any', 'approx', 'arity', 'asec', 'asech', 'asin', 'asinh', - 'assuming', 'atan', 'atan2', 'atanh', 'attr', 'bless', 'body', 'by', - 'bytes', 'caller', 'callsame', 'callwith', 'can', 'capitalize', 'cat', - 'ceiling', 'chars', 'chmod', 'chomp', 'chop', 'chr', 'chroot', - 'circumfix', 'cis', 'classify', 'clone', 'close', 'cmp_ok', 'codes', - 'comb', 'connect', 'contains', 'context', 'cos', 'cosec', 'cosech', - 'cosh', 'cotan', 'cotanh', 'count', 'defined', 'delete', 'diag', - 'dies_ok', 'does', 'e', 'each', 'eager', 'elems', 'end', 'eof', 'eval', - 'eval_dies_ok', 'eval_elsewhere', 'eval_lives_ok', 'evalfile', 'exists', - 'exp', 'first', 'flip', 'floor', 'flunk', 'flush', 'fmt', 'force_todo', - 'fork', 'from', 'getc', 'gethost', 
'getlogin', 'getpeername', 'getpw', - 'gmtime', 'graphs', 'grep', 'hints', 'hyper', 'im', 'index', 'infix', - 'invert', 'is_approx', 'is_deeply', 'isa', 'isa_ok', 'isnt', 'iterator', - 'join', 'key', 'keys', 'kill', 'kv', 'lastcall', 'lazy', 'lc', 'lcfirst', - 'like', 'lines', 'link', 'lives_ok', 'localtime', 'log', 'log10', 'map', - 'max', 'min', 'minmax', 'name', 'new', 'nextsame', 'nextwith', 'nfc', - 'nfd', 'nfkc', 'nfkd', 'nok_error', 'nonce', 'none', 'normalize', 'not', - 'nothing', 'ok', 'once', 'one', 'open', 'opendir', 'operator', 'ord', - 'p5chomp', 'p5chop', 'pack', 'pair', 'pairs', 'pass', 'perl', 'pi', - 'pick', 'plan', 'plan_ok', 'polar', 'pop', 'pos', 'postcircumfix', - 'postfix', 'pred', 'prefix', 'print', 'printf', 'push', 'quasi', - 'quotemeta', 'rand', 're', 'read', 'readdir', 'readline', 'reduce', - 'reverse', 'rewind', 'rewinddir', 'rindex', 'roots', 'round', - 'roundrobin', 'run', 'runinstead', 'sameaccent', 'samecase', 'say', - 'sec', 'sech', 'sech', 'seek', 'shape', 'shift', 'sign', 'signature', - 'sin', 'sinh', 'skip', 'skip_rest', 'sleep', 'slurp', 'sort', 'splice', - 'split', 'sprintf', 'sqrt', 'srand', 'strand', 'subst', 'substr', 'succ', - 'sum', 'symlink', 'tan', 'tanh', 'throws_ok', 'time', 'times', 'to', - 'todo', 'trim', 'trim_end', 'trim_start', 'true', 'truncate', 'uc', - 'ucfirst', 'undef', 'undefine', 'uniq', 'unlike', 'unlink', 'unpack', - 'unpolar', 'unshift', 'unwrap', 'use_ok', 'value', 'values', 'vec', - 'version_lt', 'void', 'wait', 'want', 'wrap', 'write', 'zip', - ) - - PERL6_BUILTIN_CLASSES = ( - 'Abstraction', 'Any', 'AnyChar', 'Array', 'Associative', 'Bag', 'Bit', - 'Blob', 'Block', 'Bool', 'Buf', 'Byte', 'Callable', 'Capture', 'Char', 'Class', - 'Code', 'Codepoint', 'Comparator', 'Complex', 'Decreasing', 'Exception', - 'Failure', 'False', 'Grammar', 'Grapheme', 'Hash', 'IO', 'Increasing', - 'Int', 'Junction', 'KeyBag', 'KeyExtractor', 'KeyHash', 'KeySet', - 'KitchenSink', 'List', 'Macro', 'Mapping', 'Match', 'Matcher', 'Method', - 'Module', 'Num', 'Object', 'Ordered', 'Ordering', 'OrderingPair', - 'Package', 'Pair', 'Positional', 'Proxy', 'Range', 'Rat', 'Regex', - 'Role', 'Routine', 'Scalar', 'Seq', 'Set', 'Signature', 'Str', 'StrLen', - 'StrPos', 'Sub', 'Submethod', 'True', 'UInt', 'Undef', 'Version', 'Void', - 'Whatever', 'bit', 'bool', 'buf', 'buf1', 'buf16', 'buf2', 'buf32', - 'buf4', 'buf64', 'buf8', 'complex', 'int', 'int1', 'int16', 'int2', - 'int32', 'int4', 'int64', 'int8', 'num', 'rat', 'rat1', 'rat16', 'rat2', - 'rat32', 'rat4', 'rat64', 'rat8', 'uint', 'uint1', 'uint16', 'uint2', - 'uint32', 'uint4', 'uint64', 'uint8', 'utf16', 'utf32', 'utf8', - ) - - PERL6_OPERATORS = ( - 'X', 'Z', 'after', 'also', 'and', 'andthen', 'before', 'cmp', 'div', - 'eq', 'eqv', 'extra', 'ff', 'fff', 'ge', 'gt', 'le', 'leg', 'lt', 'm', - 'mm', 'mod', 'ne', 'or', 'orelse', 'rx', 's', 'tr', 'x', 'xor', 'xx', - '++', '--', '**', '!', '+', '-', '~', '?', '|', '||', '+^', '~^', '?^', - '^', '*', '/', '%', '%%', '+&', '+<', '+>', '~&', '~<', '~>', '?&', - 'gcd', 'lcm', '+', '-', '+|', '+^', '~|', '~^', '?|', '?^', - '~', '&', '^', 'but', 'does', '<=>', '..', '..^', '^..', '^..^', - '!=', '==', '<', '<=', '>', '>=', '~~', '===', '!eqv', - '&&', '||', '^^', '//', 'min', 'max', '??', '!!', 'ff', 'fff', 'so', - 'not', '<==', '==>', '<<==', '==>>', - ) - - # Perl 6 has a *lot* of possible bracketing characters - # this list was lifted from STD.pm6 (https://github.com/perl6/std) - PERL6_BRACKETS = { - u'\u0028' : u'\u0029', u'\u003c' : u'\u003e', u'\u005b' : 
u'\u005d', - u'\u007b' : u'\u007d', u'\u00ab' : u'\u00bb', u'\u0f3a' : u'\u0f3b', - u'\u0f3c' : u'\u0f3d', u'\u169b' : u'\u169c', u'\u2018' : u'\u2019', - u'\u201a' : u'\u2019', u'\u201b' : u'\u2019', u'\u201c' : u'\u201d', - u'\u201e' : u'\u201d', u'\u201f' : u'\u201d', u'\u2039' : u'\u203a', - u'\u2045' : u'\u2046', u'\u207d' : u'\u207e', u'\u208d' : u'\u208e', - u'\u2208' : u'\u220b', u'\u2209' : u'\u220c', u'\u220a' : u'\u220d', - u'\u2215' : u'\u29f5', u'\u223c' : u'\u223d', u'\u2243' : u'\u22cd', - u'\u2252' : u'\u2253', u'\u2254' : u'\u2255', u'\u2264' : u'\u2265', - u'\u2266' : u'\u2267', u'\u2268' : u'\u2269', u'\u226a' : u'\u226b', - u'\u226e' : u'\u226f', u'\u2270' : u'\u2271', u'\u2272' : u'\u2273', - u'\u2274' : u'\u2275', u'\u2276' : u'\u2277', u'\u2278' : u'\u2279', - u'\u227a' : u'\u227b', u'\u227c' : u'\u227d', u'\u227e' : u'\u227f', - u'\u2280' : u'\u2281', u'\u2282' : u'\u2283', u'\u2284' : u'\u2285', - u'\u2286' : u'\u2287', u'\u2288' : u'\u2289', u'\u228a' : u'\u228b', - u'\u228f' : u'\u2290', u'\u2291' : u'\u2292', u'\u2298' : u'\u29b8', - u'\u22a2' : u'\u22a3', u'\u22a6' : u'\u2ade', u'\u22a8' : u'\u2ae4', - u'\u22a9' : u'\u2ae3', u'\u22ab' : u'\u2ae5', u'\u22b0' : u'\u22b1', - u'\u22b2' : u'\u22b3', u'\u22b4' : u'\u22b5', u'\u22b6' : u'\u22b7', - u'\u22c9' : u'\u22ca', u'\u22cb' : u'\u22cc', u'\u22d0' : u'\u22d1', - u'\u22d6' : u'\u22d7', u'\u22d8' : u'\u22d9', u'\u22da' : u'\u22db', - u'\u22dc' : u'\u22dd', u'\u22de' : u'\u22df', u'\u22e0' : u'\u22e1', - u'\u22e2' : u'\u22e3', u'\u22e4' : u'\u22e5', u'\u22e6' : u'\u22e7', - u'\u22e8' : u'\u22e9', u'\u22ea' : u'\u22eb', u'\u22ec' : u'\u22ed', - u'\u22f0' : u'\u22f1', u'\u22f2' : u'\u22fa', u'\u22f3' : u'\u22fb', - u'\u22f4' : u'\u22fc', u'\u22f6' : u'\u22fd', u'\u22f7' : u'\u22fe', - u'\u2308' : u'\u2309', u'\u230a' : u'\u230b', u'\u2329' : u'\u232a', - u'\u23b4' : u'\u23b5', u'\u2768' : u'\u2769', u'\u276a' : u'\u276b', - u'\u276c' : u'\u276d', u'\u276e' : u'\u276f', u'\u2770' : u'\u2771', - u'\u2772' : u'\u2773', u'\u2774' : u'\u2775', u'\u27c3' : u'\u27c4', - u'\u27c5' : u'\u27c6', u'\u27d5' : u'\u27d6', u'\u27dd' : u'\u27de', - u'\u27e2' : u'\u27e3', u'\u27e4' : u'\u27e5', u'\u27e6' : u'\u27e7', - u'\u27e8' : u'\u27e9', u'\u27ea' : u'\u27eb', u'\u2983' : u'\u2984', - u'\u2985' : u'\u2986', u'\u2987' : u'\u2988', u'\u2989' : u'\u298a', - u'\u298b' : u'\u298c', u'\u298d' : u'\u298e', u'\u298f' : u'\u2990', - u'\u2991' : u'\u2992', u'\u2993' : u'\u2994', u'\u2995' : u'\u2996', - u'\u2997' : u'\u2998', u'\u29c0' : u'\u29c1', u'\u29c4' : u'\u29c5', - u'\u29cf' : u'\u29d0', u'\u29d1' : u'\u29d2', u'\u29d4' : u'\u29d5', - u'\u29d8' : u'\u29d9', u'\u29da' : u'\u29db', u'\u29f8' : u'\u29f9', - u'\u29fc' : u'\u29fd', u'\u2a2b' : u'\u2a2c', u'\u2a2d' : u'\u2a2e', - u'\u2a34' : u'\u2a35', u'\u2a3c' : u'\u2a3d', u'\u2a64' : u'\u2a65', - u'\u2a79' : u'\u2a7a', u'\u2a7d' : u'\u2a7e', u'\u2a7f' : u'\u2a80', - u'\u2a81' : u'\u2a82', u'\u2a83' : u'\u2a84', u'\u2a8b' : u'\u2a8c', - u'\u2a91' : u'\u2a92', u'\u2a93' : u'\u2a94', u'\u2a95' : u'\u2a96', - u'\u2a97' : u'\u2a98', u'\u2a99' : u'\u2a9a', u'\u2a9b' : u'\u2a9c', - u'\u2aa1' : u'\u2aa2', u'\u2aa6' : u'\u2aa7', u'\u2aa8' : u'\u2aa9', - u'\u2aaa' : u'\u2aab', u'\u2aac' : u'\u2aad', u'\u2aaf' : u'\u2ab0', - u'\u2ab3' : u'\u2ab4', u'\u2abb' : u'\u2abc', u'\u2abd' : u'\u2abe', - u'\u2abf' : u'\u2ac0', u'\u2ac1' : u'\u2ac2', u'\u2ac3' : u'\u2ac4', - u'\u2ac5' : u'\u2ac6', u'\u2acd' : u'\u2ace', u'\u2acf' : u'\u2ad0', - u'\u2ad1' : u'\u2ad2', u'\u2ad3' : u'\u2ad4', u'\u2ad5' : 
u'\u2ad6', - u'\u2aec' : u'\u2aed', u'\u2af7' : u'\u2af8', u'\u2af9' : u'\u2afa', - u'\u2e02' : u'\u2e03', u'\u2e04' : u'\u2e05', u'\u2e09' : u'\u2e0a', - u'\u2e0c' : u'\u2e0d', u'\u2e1c' : u'\u2e1d', u'\u2e20' : u'\u2e21', - u'\u3008' : u'\u3009', u'\u300a' : u'\u300b', u'\u300c' : u'\u300d', - u'\u300e' : u'\u300f', u'\u3010' : u'\u3011', u'\u3014' : u'\u3015', - u'\u3016' : u'\u3017', u'\u3018' : u'\u3019', u'\u301a' : u'\u301b', - u'\u301d' : u'\u301e', u'\ufd3e' : u'\ufd3f', u'\ufe17' : u'\ufe18', - u'\ufe35' : u'\ufe36', u'\ufe37' : u'\ufe38', u'\ufe39' : u'\ufe3a', - u'\ufe3b' : u'\ufe3c', u'\ufe3d' : u'\ufe3e', u'\ufe3f' : u'\ufe40', - u'\ufe41' : u'\ufe42', u'\ufe43' : u'\ufe44', u'\ufe47' : u'\ufe48', - u'\ufe59' : u'\ufe5a', u'\ufe5b' : u'\ufe5c', u'\ufe5d' : u'\ufe5e', - u'\uff08' : u'\uff09', u'\uff1c' : u'\uff1e', u'\uff3b' : u'\uff3d', - u'\uff5b' : u'\uff5d', u'\uff5f' : u'\uff60', u'\uff62' : u'\uff63', - } - - def _build_word_match(words, boundary_regex_fragment = None, prefix = '', suffix = ''): - if boundary_regex_fragment is None: - return r'\b(' + prefix + r'|'.join([ re.escape(x) for x in words]) + \ - suffix + r')\b' - else: - return r'(? 0: - next_open_pos = text.find(opening_chars, search_pos + n_chars) - next_close_pos = text.find(closing_chars, search_pos + n_chars) - - if next_close_pos == -1: - next_close_pos = len(text) - nesting_level = 0 - elif next_open_pos != -1 and next_open_pos < next_close_pos: - nesting_level += 1 - search_pos = next_open_pos - else: # next_close_pos < next_open_pos - nesting_level -= 1 - search_pos = next_close_pos - - end_pos = next_close_pos - - if end_pos < 0: # if we didn't find a closer, just highlight the - # rest of the text in this class - end_pos = len(text) - - if adverbs is not None and re.search(r':to\b', adverbs): - heredoc_terminator = text[match.start('delimiter') + n_chars : end_pos] - end_heredoc = re.search(r'^\s*' + re.escape(heredoc_terminator) + r'\s*$', text[ end_pos : ], re.MULTILINE) - - if end_heredoc: - end_pos += end_heredoc.end() - else: - end_pos = len(text) - - yield match.start(), token_class, text[match.start() : end_pos + n_chars] - context.pos = end_pos + n_chars - - return callback - - def opening_brace_callback(lexer, match, context): - stack = context.stack - - yield match.start(), Text, context.text[match.start() : match.end()] - context.pos = match.end() - - # if we encounter an opening brace and we're one level - # below a token state, it means we need to increment - # the nesting level for braces so we know later when - # we should return to the token rules. - if len(stack) > 2 and stack[-2] == 'token': - context.perl6_token_nesting_level += 1 - - def closing_brace_callback(lexer, match, context): - stack = context.stack - - yield match.start(), Text, context.text[match.start() : match.end()] - context.pos = match.end() - - # if we encounter a free closing brace and we're one level - # below a token state, it means we need to check the nesting - # level to see if we need to return to the token state. - if len(stack) > 2 and stack[-2] == 'token': - context.perl6_token_nesting_level -= 1 - if context.perl6_token_nesting_level == 0: - stack.pop() - - def embedded_perl6_callback(lexer, match, context): - context.perl6_token_nesting_level = 1 - yield match.start(), Text, context.text[match.start() : match.end()] - context.pos = match.end() - context.stack.append('root') - - # If you're modifying these rules, be careful if you need to process '{' or '}' - # characters. 
We have special logic for processing these characters (due to the fact - # that you can nest Perl 6 code in regex blocks), so if you need to process one of - # them, make sure you also process the corresponding one! - tokens = { - 'common' : [ - (r'#[`|=](?P(?P[' + ''.join(PERL6_BRACKETS) + r'])(?P=first_char)*)', brackets_callback(Comment.Multiline)), - (r'#[^\n]*$', Comment.Singleline), - (r'^(\s*)=begin\s+(\w+)\b.*?^\1=end\s+\2', Comment.Multiline), - (r'^(\s*)=for.*?\n\s*?\n', Comment.Multiline), - (r'^=.*?\n\s*?\n', Comment.Multiline), - (r'(regex|token|rule)(\s*' + PERL6_IDENTIFIER_RANGE + '+:sym)', - bygroups(Keyword, Name), 'token-sym-brackets'), - (r'(regex|token|rule)(?!' + PERL6_IDENTIFIER_RANGE + ')(\s*' + PERL6_IDENTIFIER_RANGE + '+)?', bygroups(Keyword, Name), 'pre-token'), - # deal with a special case in the Perl 6 grammar (role q { ... }) - (r'(role)(\s+)(q)(\s*)', bygroups(Keyword, Text, Name, Text)), - (_build_word_match(PERL6_KEYWORDS, PERL6_IDENTIFIER_RANGE), Keyword), - (_build_word_match(PERL6_BUILTIN_CLASSES, PERL6_IDENTIFIER_RANGE, suffix = '(?::[UD])?'), Name.Builtin), - (_build_word_match(PERL6_BUILTINS, PERL6_IDENTIFIER_RANGE), Name.Builtin), - # copied from PerlLexer - (r'[$@%&][.^:?=!~]?' + PERL6_IDENTIFIER_RANGE + u'+(?:<<.*?>>|<.*?>|«.*?»)*', - Name.Variable), - (r'\$[!/](?:<<.*?>>|<.*?>|«.*?»)*', Name.Variable.Global), - (r'::\?\w+', Name.Variable.Global), - (r'[$@%&]\*' + PERL6_IDENTIFIER_RANGE + u'+(?:<<.*?>>|<.*?>|«.*?»)*', - Name.Variable.Global), - (r'\$(?:<.*?>)+', Name.Variable), - (r'(?:q|qq|Q)[a-zA-Z]?\s*(?P:[\w\s:]+)?\s*(?P(?P[^0-9a-zA-Z:\s])(?P=first_char)*)', brackets_callback(String)), - # copied from PerlLexer - (r'0_?[0-7]+(_[0-7]+)*', Number.Oct), - (r'0x[0-9A-Fa-f]+(_[0-9A-Fa-f]+)*', Number.Hex), - (r'0b[01]+(_[01]+)*', Number.Bin), - (r'(?i)(\d*(_\d*)*\.\d+(_\d*)*|\d+(_\d*)*\.\d+(_\d*)*)(e[+-]?\d+)?', - Number.Float), - (r'(?i)\d+(_\d*)*e[+-]?\d+(_\d*)*', Number.Float), - (r'\d+(_\d+)*', Number.Integer), - (r'(?<=~~)\s*/(?:\\\\|\\/|.)*?/', String.Regex), - (r'(?<=[=(,])\s*/(?:\\\\|\\/|.)*?/', String.Regex), - (r'm\w+(?=\()', Name), - (r'(?:m|ms|rx)\s*(?P:[\w\s:]+)?\s*(?P(?P[^0-9a-zA-Z_:\s])(?P=first_char)*)', brackets_callback(String.Regex)), - (r'(?:s|ss|tr)\s*(?::[\w\s:]+)?\s*/(?:\\\\|\\/|.)*?/(?:\\\\|\\/|.)*?/', - String.Regex), - (r'<[^\s=].*?\S>', String), - (_build_word_match(PERL6_OPERATORS), Operator), - (r'[0-9a-zA-Z_]' + PERL6_IDENTIFIER_RANGE + '*', Name), - (r"'(\\\\|\\[^\\]|[^'\\])*'", String), - (r'"(\\\\|\\[^\\]|[^"\\])*"', String), - ], - 'root' : [ - include('common'), - (r'\{', opening_brace_callback), - (r'\}', closing_brace_callback), - (r'.+?', Text), - ], - 'pre-token' : [ - include('common'), - (r'\{', Text, ('#pop', 'token')), - (r'.+?', Text), - ], - 'token-sym-brackets' : [ - (r'(?P(?P[' + ''.join(PERL6_BRACKETS) + '])(?P=first_char)*)', brackets_callback(Name), ('#pop', 'pre-token')), - default(('#pop', 'pre-token')), - ], - 'token': [ - (r'}', Text, '#pop'), - (r'(?<=:)(?:my|our|state|constant|temp|let).*?;', using(this)), - # make sure that quotes in character classes aren't treated as strings - (r'<(?:[-!?+.]\s*)?\[.*?\]>', String.Regex), - # make sure that '#' characters in quotes aren't treated as comments - (r"(?my|our)\s+)?(?:module|class|role|enum|grammar)', line) - if class_decl: - if saw_perl_decl or class_decl.group('scope') is not None: - return True - rating = 0.05 - continue - break - - return rating - - def __init__(self, **options): - super(Perl6Lexer, self).__init__(**options) - self.encoding = 
options.get('encoding', 'utf-8') - - -class HyLexer(RegexLexer): - """ - Lexer for `Hy `_ source code. - - .. versionadded:: 2.0 - """ - name = 'Hy' - aliases = ['hylang'] - filenames = ['*.hy'] - mimetypes = ['text/x-hy', 'application/x-hy'] - - special_forms = [ - 'cond', 'for', '->', '->>', 'car', - 'cdr', 'first', 'rest', 'let', 'when', 'unless', - 'import', 'do', 'progn', 'get', 'slice', 'assoc', 'with-decorator', - ',', 'list_comp', 'kwapply', '~', 'is', 'in', 'is-not', 'not-in', - 'quasiquote', 'unquote', 'unquote-splice', 'quote', '|', '<<=', '>>=', - 'foreach', 'while', - 'eval-and-compile', 'eval-when-compile' - ] - - declarations = [ - 'def', 'defn', 'defun', 'defmacro', 'defclass', 'lambda', 'fn', 'setv' - ] - - hy_builtins = [] - - hy_core = [ - 'cycle', 'dec', 'distinct', 'drop', 'even?', 'filter', 'inc', - 'instance?', 'iterable?', 'iterate', 'iterator?', 'neg?', - 'none?', 'nth', 'numeric?', 'odd?', 'pos?', 'remove', 'repeat', - 'repeatedly', 'take', 'take_nth', 'take_while', 'zero?' - ] - - builtins = hy_builtins + hy_core - - # valid names for identifiers - # well, names can only not consist fully of numbers - # but this should be good enough for now - valid_name = r'(?!#)[\w!$%*+<=>?/.#-]+' - - def _multi_escape(entries): - return '(%s)' % ('|'.join(re.escape(entry) + ' ' for entry in entries)) - - tokens = { - 'root': [ - # the comments - always starting with semicolon - # and going to the end of the line - (r';.*$', Comment.Single), - - # whitespaces - usually not relevant - (r'[,\s]+', Text), - - # numbers - (r'-?\d+\.\d+', Number.Float), - (r'-?\d+', Number.Integer), - (r'0[0-7]+j?', Number.Oct), - (r'0[xX][a-fA-F0-9]+', Number.Hex), - - # strings, symbols and characters - (r'"(\\\\|\\"|[^"])*"', String), - (r"'" + valid_name, String.Symbol), - (r"\\(.|[a-z]+)", String.Char), - (r'^(\s*)([rRuU]{,2}"""(?:.|\n)*?""")', bygroups(Text, String.Doc)), - (r"^(\s*)([rRuU]{,2}'''(?:.|\n)*?''')", bygroups(Text, String.Doc)), - - # keywords - (r'::?' + valid_name, String.Symbol), - - # special operators - (r'~@|[`\'#^~&@]', Operator), - - include('py-keywords'), - include('py-builtins'), - - # highlight the special forms - (_multi_escape(special_forms), Keyword), - - # Technically, only the special forms are 'keywords'. The problem - # is that only treating them as keywords means that things like - # 'defn' and 'ns' need to be highlighted as builtins. This is ugly - # and weird for most styles. So, as a compromise we're going to - # highlight them as Keyword.Declarations. - (_multi_escape(declarations), Keyword.Declaration), - - # highlight the builtins - (_multi_escape(builtins), Name.Builtin), - - # the remaining functions - (r'(?<=\()' + valid_name, Name.Function), - - # find the remaining variables - (valid_name, Name.Variable), - - # Hy accepts vector notation - (r'(\[|\])', Punctuation), - - # Hy accepts map notation - (r'(\{|\})', Punctuation), - - # the famous parentheses! - (r'(\(|\))', Punctuation), - - ], - 'py-keywords': PythonLexer.tokens['keywords'], - 'py-builtins': PythonLexer.tokens['builtins'], - } - - def analyse_text(text): - if '(import ' in text or '(defn ' in text: - return 0.9 - - -class ChaiscriptLexer(RegexLexer): - """ - For `ChaiScript `_ source code. - - .. 
versionadded:: 2.0 - """ - - name = 'ChaiScript' - aliases = ['chai', 'chaiscript'] - filenames = ['*.chai'] - mimetypes = ['text/x-chaiscript', 'application/x-chaiscript'] - - flags = re.DOTALL - tokens = { - 'commentsandwhitespace': [ - (r'\s+', Text), - (r'//.*?\n', Comment.Single), - (r'/\*.*?\*/', Comment.Multiline), - (r'^\#.*?\n', Comment.Single) - ], - 'slashstartsregex': [ - include('commentsandwhitespace'), - (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/' - r'([gim]+\b|\B)', String.Regex, '#pop'), - (r'(?=/)', Text, ('#pop', 'badregex')), - default('#pop') - ], - 'badregex': [ - ('\n', Text, '#pop') - ], - 'root': [ - include('commentsandwhitespace'), - (r'\n', Text), - (r'[^\S\n]+', Text), - (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|\.\.' - r'(<<|>>>?|==?|!=?|[-<>+*%&\|\^/])=?', Operator, 'slashstartsregex'), - (r'[{(\[;,]', Punctuation, 'slashstartsregex'), - (r'[})\].]', Punctuation), - (r'[=+\-*/]', Operator), - (r'(for|in|while|do|break|return|continue|if|else|' - r'throw|try|catch' - r')\b', Keyword, 'slashstartsregex'), - (r'(var)\b', Keyword.Declaration, 'slashstartsregex'), - (r'(attr|def|fun)\b', Keyword.Reserved), - (r'(true|false)\b', Keyword.Constant), - (r'(eval|throw)\b', Name.Builtin), - (r'`\S+`', Name.Builtin), - (r'[$a-zA-Z_]\w*', Name.Other), - (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float), - (r'0x[0-9a-fA-F]+', Number.Hex), - (r'[0-9]+', Number.Integer), - (r'"', String.Double, 'dqstring'), - (r"'(\\\\|\\'|[^'])*'", String.Single), - ], - 'dqstring': [ - (r'\${[^"}]+?}', String.Iterpol), - (r'\$', String.Double), - (r'\\\\', String.Double), - (r'\\"', String.Double), - (r'[^\\\\\\"$]+', String.Double), - (r'"', String.Double, '#pop'), - ], - } +from pygments.lexers.python import PythonLexer, PythonConsoleLexer, \ + PythonTracebackLexer, Python3Lexer, Python3TracebackLexer, DgLexer +from pygments.lexers.ruby import RubyLexer, RubyConsoleLexer, FancyLexer +from pygments.lexers.perl import PerlLexer, Perl6Lexer +from pygments.lexers.lua import LuaLexer, MoonScriptLexer +from pygments.lexers.c_like.d import CrocLexer, MiniDLexer +from pygments.lexers.misc.iolang import IoLexer +from pygments.lexers.misc.tcl import TclLexer + +__all__ = [] diff --git a/pygments/lexers/c_like/d.py b/pygments/lexers/c_like/d.py index e4aadd40..e2440f37 100644 --- a/pygments/lexers/c_like/d.py +++ b/pygments/lexers/c_like/d.py @@ -13,7 +13,7 @@ from pygments.lexer import RegexLexer, include, words from pygments.token import Text, Comment, Keyword, Name, String, \ Number, Punctuation -__all__ = ['DLexer'] +__all__ = ['DLexer', 'CrocLexer', 'MiniDLexer'] class DLexer(RegexLexer): @@ -177,3 +177,72 @@ class DLexer(RegexLexer): (r'}', String, '#pop'), ], } + + +class CrocLexer(RegexLexer): + """ + For `Croc `_ source. 
+ """ + name = 'Croc' + filenames = ['*.croc'] + aliases = ['croc'] + mimetypes = ['text/x-crocsrc'] + + tokens = { + 'root': [ + (r'\n', Text), + (r'\s+', Text), + # Comments + (r'//(.*?)\n', Comment.Single), + (r'/\*', Comment.Multiline, 'nestedcomment'), + # Keywords + (r'(as|assert|break|case|catch|class|continue|default' + r'|do|else|finally|for|foreach|function|global|namespace' + r'|if|import|in|is|local|module|return|scope|super|switch' + r'|this|throw|try|vararg|while|with|yield)\b', Keyword), + (r'(false|true|null)\b', Keyword.Constant), + # FloatLiteral + (r'([0-9][0-9_]*)(?=[.eE])(\.[0-9][0-9_]*)?([eE][+\-]?[0-9_]+)?', + Number.Float), + # IntegerLiteral + # -- Binary + (r'0[bB][01][01_]*', Number.Bin), + # -- Hexadecimal + (r'0[xX][0-9a-fA-F][0-9a-fA-F_]*', Number.Hex), + # -- Decimal + (r'([0-9][0-9_]*)(?![.eE])', Number.Integer), + # CharacterLiteral + (r"""'(\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\[0-9]{1,3}""" + r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|.)'""", + String.Char), + # StringLiteral + # -- WysiwygString + (r'@"(""|[^"])*"', String), + (r'@`(``|[^`])*`', String), + (r"@'(''|[^'])*'", String), + # -- DoubleQuotedString + (r'"(\\\\|\\"|[^"])*"', String), + # Tokens + (r'(~=|\^=|%=|\*=|==|!=|>>>=|>>>|>>=|>>|>=|<=>|\?=|-\>' + r'|<<=|<<|<=|\+\+|\+=|--|-=|\|\||\|=|&&|&=|\.\.|/=)' + r'|[-/.&$@|\+<>!()\[\]{}?,;:=*%^~#\\]', Punctuation), + # Identifier + (r'[a-zA-Z_]\w*', Name), + ], + 'nestedcomment': [ + (r'[^*/]+', Comment.Multiline), + (r'/\*', Comment.Multiline, '#push'), + (r'\*/', Comment.Multiline, '#pop'), + (r'[*/]', Comment.Multiline), + ], + } + + +class MiniDLexer(CrocLexer): + """ + For MiniD source. MiniD is now known as Croc. + """ + name = 'MiniD' + filenames = ['*.md'] + aliases = ['minid'] + mimetypes = ['text/x-minidsrc'] diff --git a/pygments/lexers/misc/factor.py b/pygments/lexers/misc/factor.py new file mode 100644 index 00000000..c51a45a6 --- /dev/null +++ b/pygments/lexers/misc/factor.py @@ -0,0 +1,350 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.misc.factor + ~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for the Factor language. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import re + +from pygments.lexer import RegexLexer, bygroups, default +from pygments.token import Text, Comment, Keyword, Name, String, Number + +__all__ = ['FactorLexer'] + + +class FactorLexer(RegexLexer): + """ + Lexer for the `Factor `_ language. + + .. 
versionadded:: 1.4 + """ + name = 'Factor' + aliases = ['factor'] + filenames = ['*.factor'] + mimetypes = ['text/x-factor'] + + flags = re.MULTILINE | re.UNICODE + + builtin_kernel = ( + r'(?:-rot|2bi|2bi@|2bi\*|2curry|2dip|2drop|2dup|2keep|2nip|' + r'2over|2tri|2tri@|2tri\*|3bi|3curry|3dip|3drop|3dup|3keep|' + r'3tri|4dip|4drop|4dup|4keep||=|>boolean|\(clone\)|' + r'\?|\?execute|\?if|and|assert|assert=|assert\?|bi|bi-curry|' + r'bi-curry@|bi-curry\*|bi@|bi\*|boa|boolean|boolean\?|both\?|' + r'build|call|callstack|callstack>array|callstack\?|clear|clone|' + r'compose|compose\?|curry|curry\?|datastack|die|dip|do|drop|' + r'dup|dupd|either\?|eq\?|equal\?|execute|hashcode|hashcode\*|' + r'identity-hashcode|identity-tuple|identity-tuple\?|if|if\*|' + r'keep|loop|most|new|nip|not|null|object|or|over|pick|prepose|' + r'retainstack|rot|same\?|swap|swapd|throw|tri|tri-curry|' + r'tri-curry@|tri-curry\*|tri@|tri\*|tuple|tuple\?|unless|' + r'unless\*|until|when|when\*|while|with|wrapper|wrapper\?|xor)\s' + ) + + builtin_assocs = ( + r'(?:2cache||>alist|\?at|\?of|assoc|assoc-all\?|' + r'assoc-any\?|assoc-clone-like|assoc-combine|assoc-diff|' + r'assoc-diff!|assoc-differ|assoc-each|assoc-empty\?|' + r'assoc-filter|assoc-filter!|assoc-filter-as|assoc-find|' + r'assoc-hashcode|assoc-intersect|assoc-like|assoc-map|' + r'assoc-map-as|assoc-partition|assoc-refine|assoc-size|' + r'assoc-stack|assoc-subset\?|assoc-union|assoc-union!|' + r'assoc=|assoc>map|assoc\?|at|at+|at\*|cache|change-at|' + r'clear-assoc|delete-at|delete-at\*|enum|enum\?|extract-keys|' + r'inc-at|key\?|keys|map>assoc|maybe-set-at|new-assoc|of|' + r'push-at|rename-at|set-at|sift-keys|sift-values|substitute|' + r'unzip|value-at|value-at\*|value\?|values|zip)\s' + ) + + builtin_combinators = ( + r'(?:2cleave|2cleave>quot|3cleave|3cleave>quot|4cleave|' + r'4cleave>quot|alist>quot|call-effect|case|case-find|' + r'case>quot|cleave|cleave>quot|cond|cond>quot|deep-spread>quot|' + r'execute-effect|linear-case-quot|no-case|no-case\?|no-cond|' + r'no-cond\?|recursive-hashcode|shallow-spread>quot|spread|' + r'to-fixed-point|wrong-values|wrong-values\?)\s' + ) + + builtin_math = ( + r'(?:-|/|/f|/i|/mod|2/|2\^|<|<=||>|>=|>bignum|' + r'>fixnum|>float|>integer|\(all-integers\?\)|' + r'\(each-integer\)|\(find-integer\)|\*|\+|\?1\+|' + r'abs|align|all-integers\?|bignum|bignum\?|bit\?|bitand|' + r'bitnot|bitor|bits>double|bits>float|bitxor|complex|' + r'complex\?|denominator|double>bits|each-integer|even\?|' + r'find-integer|find-last-integer|fixnum|fixnum\?|float|' + r'float>bits|float\?|fp-bitwise=|fp-infinity\?|fp-nan-payload|' + r'fp-nan\?|fp-qnan\?|fp-sign|fp-snan\?|fp-special\?|' + r'if-zero|imaginary-part|integer|integer>fixnum|' + r'integer>fixnum-strict|integer\?|log2|log2-expects-positive|' + r'log2-expects-positive\?|mod|neg|neg\?|next-float|' + r'next-power-of-2|number|number=|number\?|numerator|odd\?|' + r'out-of-fixnum-range|out-of-fixnum-range\?|power-of-2\?|' + r'prev-float|ratio|ratio\?|rational|rational\?|real|' + r'real-part|real\?|recip|rem|sgn|shift|sq|times|u<|u<=|u>|' + r'u>=|unless-zero|unordered\?|when-zero|zero\?)\s' + ) + + builtin_sequences = ( + r'(?:1sequence|2all\?|2each|2map|2map-as|2map-reduce|2reduce|' + r'2selector|2sequence|3append|3append-as|3each|3map|3map-as|' + r'3sequence|4sequence||||\?first|' + r'\?last|\?nth|\?second|\?set-nth|accumulate|accumulate!|' + r'accumulate-as|all\?|any\?|append|append!|append-as|' + r'assert-sequence|assert-sequence=|assert-sequence\?|' + 
r'binary-reduce|bounds-check|bounds-check\?|bounds-error|' + r'bounds-error\?|but-last|but-last-slice|cartesian-each|' + r'cartesian-map|cartesian-product|change-nth|check-slice|' + r'check-slice-error|clone-like|collapse-slice|collector|' + r'collector-for|concat|concat-as|copy|count|cut|cut-slice|' + r'cut\*|delete-all|delete-slice|drop-prefix|each|each-from|' + r'each-index|empty\?|exchange|filter|filter!|filter-as|find|' + r'find-from|find-index|find-index-from|find-last|find-last-from|' + r'first|first2|first3|first4|flip|follow|fourth|glue|halves|' + r'harvest|head|head-slice|head-slice\*|head\*|head\?|' + r'if-empty|immutable|immutable-sequence|immutable-sequence\?|' + r'immutable\?|index|index-from|indices|infimum|infimum-by|' + r'insert-nth|interleave|iota|iota-tuple|iota-tuple\?|join|' + r'join-as|last|last-index|last-index-from|length|lengthen|' + r'like|longer|longer\?|longest|map|map!|map-as|map-find|' + r'map-find-last|map-index|map-integers|map-reduce|map-sum|' + r'max-length|member-eq\?|member\?|midpoint@|min-length|' + r'mismatch|move|new-like|new-resizable|new-sequence|' + r'non-negative-integer-expected|non-negative-integer-expected\?|' + r'nth|nths|pad-head|pad-tail|padding|partition|pop|pop\*|' + r'prefix|prepend|prepend-as|produce|produce-as|product|push|' + r'push-all|push-either|push-if|reduce|reduce-index|remove|' + r'remove!|remove-eq|remove-eq!|remove-nth|remove-nth!|repetition|' + r'repetition\?|replace-slice|replicate|replicate-as|rest|' + r'rest-slice|reverse|reverse!|reversed|reversed\?|second|' + r'selector|selector-for|sequence|sequence-hashcode|sequence=|' + r'sequence\?|set-first|set-fourth|set-last|set-length|set-nth|' + r'set-second|set-third|short|shorten|shorter|shorter\?|' + r'shortest|sift|slice|slice-error|slice-error\?|slice\?|' + r'snip|snip-slice|start|start\*|subseq|subseq\?|suffix|' + r'suffix!|sum|sum-lengths|supremum|supremum-by|surround|tail|' + r'tail-slice|tail-slice\*|tail\*|tail\?|third|trim|' + r'trim-head|trim-head-slice|trim-slice|trim-tail|trim-tail-slice|' + r'unclip|unclip-last|unclip-last-slice|unclip-slice|unless-empty|' + r'virtual-exemplar|virtual-sequence|virtual-sequence\?|virtual@|' + r'when-empty)\s' + ) + + builtin_namespaces = ( + r'(?:\+@|change|change-global|counter|dec|get|get-global|' + r'global|inc|init-namespaces|initialize|is-global|make-assoc|' + r'namespace|namestack|off|on|set|set-global|set-namestack|' + r'toggle|with-global|with-scope|with-variable|with-variables)\s' + ) + + builtin_arrays = ( + r'(?:1array|2array|3array|4array||>array|array|array\?|' + r'pair|pair\?|resize-array)\s' + ) + + builtin_io = ( + r'(?:\(each-stream-block-slice\)|\(each-stream-block\)|' + r'\(stream-contents-by-block\)|\(stream-contents-by-element\)|' + r'\(stream-contents-by-length-or-block\)|' + r'\(stream-contents-by-length\)|\+byte\+|\+character\+|' + r'bad-seek-type|bad-seek-type\?|bl|contents|each-block|' + r'each-block-size|each-block-slice|each-line|each-morsel|' + r'each-stream-block|each-stream-block-slice|each-stream-line|' + r'error-stream|flush|input-stream|input-stream\?|' + r'invalid-read-buffer|invalid-read-buffer\?|lines|nl|' + r'output-stream|output-stream\?|print|read|read-into|' + r'read-partial|read-partial-into|read-until|read1|readln|' + r'seek-absolute|seek-absolute\?|seek-end|seek-end\?|' + r'seek-input|seek-output|seek-relative|seek-relative\?|' + r'stream-bl|stream-contents|stream-contents\*|stream-copy|' + r'stream-copy\*|stream-element-type|stream-flush|' + 
r'stream-length|stream-lines|stream-nl|stream-print|' + r'stream-read|stream-read-into|stream-read-partial|' + r'stream-read-partial-into|stream-read-partial-unsafe|' + r'stream-read-unsafe|stream-read-until|stream-read1|' + r'stream-readln|stream-seek|stream-seekable\?|stream-tell|' + r'stream-write|stream-write1|tell-input|tell-output|' + r'with-error-stream|with-error-stream\*|with-error>output|' + r'with-input-output\+error-streams|' + r'with-input-output\+error-streams\*|with-input-stream|' + r'with-input-stream\*|with-output-stream|with-output-stream\*|' + r'with-output>error|with-output\+error-stream|' + r'with-output\+error-stream\*|with-streams|with-streams\*|' + r'write|write1)\s' + ) + + builtin_strings = ( + r'(?:1string||>string|resize-string|string|string\?)\s' + ) + + builtin_vectors = ( + r'(?:1vector||>vector|\?push|vector|vector\?)\s' + ) + + builtin_continuations = ( + r'(?:|||attempt-all|' + r'attempt-all-error|attempt-all-error\?|callback-error-hook|' + r'callcc0|callcc1|cleanup|compute-restarts|condition|' + r'condition\?|continuation|continuation\?|continue|' + r'continue-restart|continue-with|current-continuation|' + r'error|error-continuation|error-in-thread|error-thread|' + r'ifcc|ignore-errors|in-callback\?|original-error|recover|' + r'restart|restart\?|restarts|rethrow|rethrow-restarts|' + r'return|return-continuation|thread-error-hook|throw-continue|' + r'throw-restarts|with-datastack|with-return)\s' + ) + + tokens = { + 'root': [ + # factor allows a file to start with a shebang + (r'#!.*$', Comment.Preproc), + default('base'), + ], + 'base': [ + (r'\s+', Text), + + # defining words + (r'((?:MACRO|MEMO|TYPED)?:[:]?)(\s+)(\S+)', + bygroups(Keyword, Text, Name.Function)), + (r'(M:[:]?)(\s+)(\S+)(\s+)(\S+)', + bygroups(Keyword, Text, Name.Class, Text, Name.Function)), + (r'(C:)(\s+)(\S+)(\s+)(\S+)', + bygroups(Keyword, Text, Name.Function, Text, Name.Class)), + (r'(GENERIC:)(\s+)(\S+)', + bygroups(Keyword, Text, Name.Function)), + (r'(HOOK:|GENERIC#)(\s+)(\S+)(\s+)(\S+)', + bygroups(Keyword, Text, Name.Function, Text, Name.Function)), + (r'\(\s', Name.Function, 'stackeffect'), + (r';\s', Keyword), + + # imports and namespaces + (r'(USING:)(\s+)', + bygroups(Keyword.Namespace, Text), 'vocabs'), + (r'(USE:|UNUSE:|IN:|QUALIFIED:)(\s+)(\S+)', + bygroups(Keyword.Namespace, Text, Name.Namespace)), + (r'(QUALIFIED-WITH:)(\s+)(\S+)(\s+)(\S+)', + bygroups(Keyword.Namespace, Text, Name.Namespace, Text, Name.Namespace)), + (r'(FROM:|EXCLUDE:)(\s+)(\S+)(\s+=>\s)', + bygroups(Keyword.Namespace, Text, Name.Namespace, Text), 'words'), + (r'(RENAME:)(\s+)(\S+)(\s+)(\S+)(\s+=>\s+)(\S+)', + bygroups(Keyword.Namespace, Text, Name.Function, Text, Name.Namespace, Text, Name.Function)), + (r'(ALIAS:|TYPEDEF:)(\s+)(\S+)(\s+)(\S+)', + bygroups(Keyword.Namespace, Text, Name.Function, Text, Name.Function)), + (r'(DEFER:|FORGET:|POSTPONE:)(\s+)(\S+)', + bygroups(Keyword.Namespace, Text, Name.Function)), + + # tuples and classes + (r'(TUPLE:|ERROR:)(\s+)(\S+)(\s+<\s+)(\S+)', + bygroups(Keyword, Text, Name.Class, Text, Name.Class), 'slots'), + (r'(TUPLE:|ERROR:|BUILTIN:)(\s+)(\S+)', + bygroups(Keyword, Text, Name.Class), 'slots'), + (r'(MIXIN:|UNION:|INTERSECTION:)(\s+)(\S+)', + bygroups(Keyword, Text, Name.Class)), + (r'(PREDICATE:)(\s+)(\S+)(\s+<\s+)(\S+)', + bygroups(Keyword, Text, Name.Class, Text, Name.Class)), + (r'(C:)(\s+)(\S+)(\s+)(\S+)', + bygroups(Keyword, Text, Name.Function, Text, Name.Class)), + (r'(INSTANCE:)(\s+)(\S+)(\s+)(\S+)', + bygroups(Keyword, Text, Name.Class, Text, 
Name.Class)), + (r'(SLOT:)(\s+)(\S+)', bygroups(Keyword, Text, Name.Function)), + (r'(SINGLETON:)(\s+)(\S+)', bygroups(Keyword, Text, Name.Class)), + (r'SINGLETONS:', Keyword, 'classes'), + + # other syntax + (r'(CONSTANT:|SYMBOL:|MAIN:|HELP:)(\s+)(\S+)', + bygroups(Keyword, Text, Name.Function)), + (r'SYMBOLS:\s', Keyword, 'words'), + (r'SYNTAX:\s', Keyword), + (r'ALIEN:\s', Keyword), + (r'(STRUCT:)(\s+)(\S+)', bygroups(Keyword, Text, Name.Class)), + (r'(FUNCTION:)(\s+\S+\s+)(\S+)(\s+\(\s+[^\)]+\)\s)', + bygroups(Keyword.Namespace, Text, Name.Function, Text)), + (r'(FUNCTION-ALIAS:)(\s+)(\S+)(\s+\S+\s+)(\S+)(\s+\(\s+[^\)]+\)\s)', + bygroups(Keyword.Namespace, Text, Name.Function, Text, Name.Function, Text)), + + # vocab.private + (r'(?:)\s', Keyword.Namespace), + + # strings + (r'"""\s+(?:.|\n)*?\s+"""', String), + (r'"(?:\\\\|\\"|[^"])*"', String), + (r'\S+"\s+(?:\\\\|\\"|[^"])*"', String), + (r'CHAR:\s+(?:\\[\\abfnrstv]|[^\\]\S*)\s', String.Char), + + # comments + (r'!\s+.*$', Comment), + (r'#!\s+.*$', Comment), + (r'/\*\s+(?:.|\n)*?\s\*/\s', Comment), + + # boolean constants + (r'[tf]\s', Name.Constant), + + # symbols and literals + (r'[\\$]\s+\S+', Name.Constant), + (r'M\\\s+\S+\s+\S+', Name.Constant), + + # numbers + (r'[+-]?(?:[\d,]*\d)?\.(?:\d([\d,]*\d)?)?(?:[eE][+-]?\d+)?\s', Number), + (r'[+-]?\d(?:[\d,]*\d)?(?:[eE][+-]?\d+)?\s', Number), + (r'0x[a-fA-F\d](?:[a-fA-F\d,]*[a-fA-F\d])?(?:p\d([\d,]*\d)?)?\s', Number), + (r'NAN:\s+[a-fA-F\d](?:[a-fA-F\d,]*[a-fA-F\d])?(?:p\d([\d,]*\d)?)?\s', Number), + (r'0b[01]+\s', Number.Bin), + (r'0o[0-7]+\s', Number.Oct), + (r'(?:\d([\d,]*\d)?)?\+\d(?:[\d,]*\d)?/\d(?:[\d,]*\d)?\s', Number), + (r'(?:\-\d([\d,]*\d)?)?\-\d(?:[\d,]*\d)?/\d(?:[\d,]*\d)?\s', Number), + + # keywords + (r'(?:deprecated|final|foldable|flushable|inline|recursive)\s', + Keyword), + + # builtins + (builtin_kernel, Name.Builtin), + (builtin_assocs, Name.Builtin), + (builtin_combinators, Name.Builtin), + (builtin_math, Name.Builtin), + (builtin_sequences, Name.Builtin), + (builtin_namespaces, Name.Builtin), + (builtin_arrays, Name.Builtin), + (builtin_io, Name.Builtin), + (builtin_strings, Name.Builtin), + (builtin_vectors, Name.Builtin), + (builtin_continuations, Name.Builtin), + + # everything else is text + (r'\S+', Text), + ], + 'stackeffect': [ + (r'\s+', Text), + (r'\(\s+', Name.Function, 'stackeffect'), + (r'\)\s', Name.Function, '#pop'), + (r'--\s', Name.Function), + (r'\S+', Name.Variable), + ], + 'slots': [ + (r'\s+', Text), + (r';\s', Keyword, '#pop'), + (r'({\s+)(\S+)(\s+[^}]+\s+}\s)', + bygroups(Text, Name.Variable, Text)), + (r'\S+', Name.Variable), + ], + 'vocabs': [ + (r'\s+', Text), + (r';\s', Keyword, '#pop'), + (r'\S+', Name.Namespace), + ], + 'classes': [ + (r'\s+', Text), + (r';\s', Keyword, '#pop'), + (r'\S+', Name.Class), + ], + 'words': [ + (r'\s+', Text), + (r';\s', Keyword, '#pop'), + (r'\S+', Name.Function), + ], + } diff --git a/pygments/lexers/misc/iolang.py b/pygments/lexers/misc/iolang.py new file mode 100644 index 00000000..f6d6bb47 --- /dev/null +++ b/pygments/lexers/misc/iolang.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.misc.iolang + ~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for the Io language. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. 
+""" + +from pygments.lexer import RegexLexer +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number + +__all__ = ['IoLexer'] + + +class IoLexer(RegexLexer): + """ + For `Io `_ (a small, prototype-based + programming language) source. + + .. versionadded:: 0.10 + """ + name = 'Io' + filenames = ['*.io'] + aliases = ['io'] + mimetypes = ['text/x-iosrc'] + tokens = { + 'root': [ + (r'\n', Text), + (r'\s+', Text), + # Comments + (r'//(.*?)\n', Comment.Single), + (r'#(.*?)\n', Comment.Single), + (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline), + (r'/\+', Comment.Multiline, 'nestedcomment'), + # DoubleQuotedString + (r'"(\\\\|\\"|[^"])*"', String), + # Operators + (r'::=|:=|=|\(|\)|;|,|\*|-|\+|>|<|@|!|/|\||\^|\.|%|&|\[|\]|\{|\}', + Operator), + # keywords + (r'(clone|do|doFile|doString|method|for|if|else|elseif|then)\b', + Keyword), + # constants + (r'(nil|false|true)\b', Name.Constant), + # names + (r'(Object|list|List|Map|args|Sequence|Coroutine|File)\b', + Name.Builtin), + ('[a-zA-Z_]\w*', Name), + # numbers + (r'(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float), + (r'\d+', Number.Integer) + ], + 'nestedcomment': [ + (r'[^+/]+', Comment.Multiline), + (r'/\+', Comment.Multiline, '#push'), + (r'\+/', Comment.Multiline, '#pop'), + (r'[+/]', Comment.Multiline), + ] + } diff --git a/pygments/lexers/misc/tcl.py b/pygments/lexers/misc/tcl.py new file mode 100644 index 00000000..d276aa94 --- /dev/null +++ b/pygments/lexers/misc/tcl.py @@ -0,0 +1,145 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.misc.tcl + ~~~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for Tcl and related languages. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +from pygments.lexer import RegexLexer, include, words +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number +from pygments.util import shebang_matches + +__all__ = ['TclLexer'] + + +class TclLexer(RegexLexer): + """ + For Tcl source code. + + .. 
versionadded:: 0.10 + """ + + keyword_cmds_re = words(( + 'after', 'apply', 'array', 'break', 'catch', 'continue', 'elseif', 'else', 'error', + 'eval', 'expr', 'for', 'foreach', 'global', 'if', 'namespace', 'proc', 'rename', 'return', + 'set', 'switch', 'then', 'trace', 'unset', 'update', 'uplevel', 'upvar', 'variable', + 'vwait', 'while'), prefix=r'\b', suffix=r'\b') + + builtin_cmds_re = words(( + 'append', 'bgerror', 'binary', 'cd', 'chan', 'clock', 'close', 'concat', 'dde', 'dict', + 'encoding', 'eof', 'exec', 'exit', 'fblocked', 'fconfigure', 'fcopy', 'file', + 'fileevent', 'flush', 'format', 'gets', 'glob', 'history', 'http', 'incr', 'info', 'interp', + 'join', 'lappend', 'lassign', 'lindex', 'linsert', 'list', 'llength', 'load', 'loadTk', + 'lrange', 'lrepeat', 'lreplace', 'lreverse', 'lsearch', 'lset', 'lsort', 'mathfunc', + 'mathop', 'memory', 'msgcat', 'open', 'package', 'pid', 'pkg::create', 'pkg_mkIndex', + 'platform', 'platform::shell', 'puts', 'pwd', 're_syntax', 'read', 'refchan', + 'regexp', 'registry', 'regsub', 'scan', 'seek', 'socket', 'source', 'split', 'string', + 'subst', 'tell', 'time', 'tm', 'unknown', 'unload'), prefix=r'\b', suffix=r'\b') + + name = 'Tcl' + aliases = ['tcl'] + filenames = ['*.tcl'] + mimetypes = ['text/x-tcl', 'text/x-script.tcl', 'application/x-tcl'] + + def _gen_command_rules(keyword_cmds_re, builtin_cmds_re, context=""): + return [ + (keyword_cmds_re, Keyword, 'params' + context), + (builtin_cmds_re, Name.Builtin, 'params' + context), + (r'([\w\.\-]+)', Name.Variable, 'params' + context), + (r'#', Comment, 'comment'), + ] + + tokens = { + 'root': [ + include('command'), + include('basic'), + include('data'), + (r'}', Keyword), # HACK: somehow we miscounted our braces + ], + 'command': _gen_command_rules(keyword_cmds_re, builtin_cmds_re), + 'command-in-brace': _gen_command_rules(keyword_cmds_re, + builtin_cmds_re, + "-in-brace"), + 'command-in-bracket': _gen_command_rules(keyword_cmds_re, + builtin_cmds_re, + "-in-bracket"), + 'command-in-paren': _gen_command_rules(keyword_cmds_re, + builtin_cmds_re, + "-in-paren"), + 'basic': [ + (r'\(', Keyword, 'paren'), + (r'\[', Keyword, 'bracket'), + (r'\{', Keyword, 'brace'), + (r'"', String.Double, 'string'), + (r'(eq|ne|in|ni)\b', Operator.Word), + (r'!=|==|<<|>>|<=|>=|&&|\|\||\*\*|[-+~!*/%<>&^|?:]', Operator), + ], + 'data': [ + (r'\s+', Text), + (r'0x[a-fA-F0-9]+', Number.Hex), + (r'0[0-7]+', Number.Oct), + (r'\d+\.\d+', Number.Float), + (r'\d+', Number.Integer), + (r'\$([\w\.\-\:]+)', Name.Variable), + (r'([\w\.\-\:]+)', Text), + ], + 'params': [ + (r';', Keyword, '#pop'), + (r'\n', Text, '#pop'), + (r'(else|elseif|then)\b', Keyword), + include('basic'), + include('data'), + ], + 'params-in-brace': [ + (r'}', Keyword, ('#pop', '#pop')), + include('params') + ], + 'params-in-paren': [ + (r'\)', Keyword, ('#pop', '#pop')), + include('params') + ], + 'params-in-bracket': [ + (r'\]', Keyword, ('#pop', '#pop')), + include('params') + ], + 'string': [ + (r'\[', String.Double, 'string-square'), + (r'(?s)(\\\\|\\[0-7]+|\\.|[^"\\])', String.Double), + (r'"', String.Double, '#pop') + ], + 'string-square': [ + (r'\[', String.Double, 'string-square'), + (r'(?s)(\\\\|\\[0-7]+|\\.|\\\n|[^\]\\])', String.Double), + (r'\]', String.Double, '#pop') + ], + 'brace': [ + (r'}', Keyword, '#pop'), + include('command-in-brace'), + include('basic'), + include('data'), + ], + 'paren': [ + (r'\)', Keyword, '#pop'), + include('command-in-paren'), + include('basic'), + include('data'), + ], + 'bracket': [ + (r'\]', Keyword, 
'#pop'), + include('command-in-bracket'), + include('basic'), + include('data'), + ], + 'comment': [ + (r'.*[^\\]\n', Comment, '#pop'), + (r'.*\\\n', Comment), + ], + } + + def analyse_text(text): + return shebang_matches(text, r'(tcl)') diff --git a/pygments/lexers/perl.py b/pygments/lexers/perl.py new file mode 100644 index 00000000..fbd1c47f --- /dev/null +++ b/pygments/lexers/perl.py @@ -0,0 +1,604 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.perl + ~~~~~~~~~~~~~~~~~~~~ + + Lexers for Perl and related languages. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import re + +from pygments.lexer import RegexLexer, ExtendedRegexLexer, include, bygroups, \ + using, this, default +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation +from pygments.util import shebang_matches + +__all__ = ['PerlLexer', 'Perl6Lexer'] + + +class PerlLexer(RegexLexer): + """ + For `Perl `_ source code. + """ + + name = 'Perl' + aliases = ['perl', 'pl'] + filenames = ['*.pl', '*.pm', '*.t'] + mimetypes = ['text/x-perl', 'application/x-perl'] + + flags = re.DOTALL | re.MULTILINE + # TODO: give this to a perl guy who knows how to parse perl... + tokens = { + 'balanced-regex': [ + (r'/(\\\\|\\[^\\]|[^\\/])*/[egimosx]*', String.Regex, '#pop'), + (r'!(\\\\|\\[^\\]|[^\\!])*![egimosx]*', String.Regex, '#pop'), + (r'\\(\\\\|[^\\])*\\[egimosx]*', String.Regex, '#pop'), + (r'{(\\\\|\\[^\\]|[^\\}])*}[egimosx]*', String.Regex, '#pop'), + (r'<(\\\\|\\[^\\]|[^\\>])*>[egimosx]*', String.Regex, '#pop'), + (r'\[(\\\\|\\[^\\]|[^\\\]])*\][egimosx]*', String.Regex, '#pop'), + (r'\((\\\\|\\[^\\]|[^\\\)])*\)[egimosx]*', String.Regex, '#pop'), + (r'@(\\\\|\\[^\\]|[^\\\@])*@[egimosx]*', String.Regex, '#pop'), + (r'%(\\\\|\\[^\\]|[^\\\%])*%[egimosx]*', String.Regex, '#pop'), + (r'\$(\\\\|\\[^\\]|[^\\\$])*\$[egimosx]*', String.Regex, '#pop'), + ], + 'root': [ + (r'\#.*?$', Comment.Single), + (r'^=[a-zA-Z0-9]+\s+.*?\n=cut', Comment.Multiline), + (r'(case|continue|do|else|elsif|for|foreach|if|last|my|' + r'next|our|redo|reset|then|unless|until|while|use|' + r'print|new|BEGIN|CHECK|INIT|END|return)\b', Keyword), + (r'(format)(\s+)(\w+)(\s*)(=)(\s*\n)', + bygroups(Keyword, Text, Name, Text, Punctuation, Text), 'format'), + (r'(eq|lt|gt|le|ge|ne|not|and|or|cmp)\b', Operator.Word), + # common delimiters + (r's/(\\\\|\\[^\\]|[^\\/])*/(\\\\|\\[^\\]|[^\\/])*/[egimosx]*', + String.Regex), + (r's!(\\\\|\\!|[^!])*!(\\\\|\\!|[^!])*![egimosx]*', String.Regex), + (r's\\(\\\\|[^\\])*\\(\\\\|[^\\])*\\[egimosx]*', String.Regex), + (r's@(\\\\|\\[^\\]|[^\\@])*@(\\\\|\\[^\\]|[^\\@])*@[egimosx]*', + String.Regex), + (r's%(\\\\|\\[^\\]|[^\\%])*%(\\\\|\\[^\\]|[^\\%])*%[egimosx]*', + String.Regex), + # balanced delimiters + (r's{(\\\\|\\[^\\]|[^\\}])*}\s*', String.Regex, 'balanced-regex'), + (r's<(\\\\|\\[^\\]|[^\\>])*>\s*', String.Regex, 'balanced-regex'), + (r's\[(\\\\|\\[^\\]|[^\\\]])*\]\s*', String.Regex, + 'balanced-regex'), + (r's\((\\\\|\\[^\\]|[^\\\)])*\)\s*', String.Regex, + 'balanced-regex'), + + (r'm?/(\\\\|\\[^\\]|[^\\/\n])*/[gcimosx]*', String.Regex), + (r'm(?=[/!\\{<\[\(@%\$])', String.Regex, 'balanced-regex'), + (r'((?<==~)|(?<=\())\s*/(\\\\|\\[^\\]|[^\\/])*/[gcimosx]*', + String.Regex), + (r'\s+', Text), + (r'(abs|accept|alarm|atan2|bind|binmode|bless|caller|chdir|' + r'chmod|chomp|chop|chown|chr|chroot|close|closedir|connect|' + r'continue|cos|crypt|dbmclose|dbmopen|defined|delete|die|' + 
r'dump|each|endgrent|endhostent|endnetent|endprotoent|' + r'endpwent|endservent|eof|eval|exec|exists|exit|exp|fcntl|' + r'fileno|flock|fork|format|formline|getc|getgrent|getgrgid|' + r'getgrnam|gethostbyaddr|gethostbyname|gethostent|getlogin|' + r'getnetbyaddr|getnetbyname|getnetent|getpeername|getpgrp|' + r'getppid|getpriority|getprotobyname|getprotobynumber|' + r'getprotoent|getpwent|getpwnam|getpwuid|getservbyname|' + r'getservbyport|getservent|getsockname|getsockopt|glob|gmtime|' + r'goto|grep|hex|import|index|int|ioctl|join|keys|kill|last|' + r'lc|lcfirst|length|link|listen|local|localtime|log|lstat|' + r'map|mkdir|msgctl|msgget|msgrcv|msgsnd|my|next|no|oct|open|' + r'opendir|ord|our|pack|package|pipe|pop|pos|printf|' + r'prototype|push|quotemeta|rand|read|readdir|' + r'readline|readlink|readpipe|recv|redo|ref|rename|require|' + r'reverse|rewinddir|rindex|rmdir|scalar|seek|seekdir|' + r'select|semctl|semget|semop|send|setgrent|sethostent|setnetent|' + r'setpgrp|setpriority|setprotoent|setpwent|setservent|' + r'setsockopt|shift|shmctl|shmget|shmread|shmwrite|shutdown|' + r'sin|sleep|socket|socketpair|sort|splice|split|sprintf|sqrt|' + r'srand|stat|study|substr|symlink|syscall|sysopen|sysread|' + r'sysseek|system|syswrite|tell|telldir|tie|tied|time|times|tr|' + r'truncate|uc|ucfirst|umask|undef|unlink|unpack|unshift|untie|' + r'utime|values|vec|wait|waitpid|wantarray|warn|write' + r')\b', Name.Builtin), + (r'((__(DATA|DIE|WARN)__)|(STD(IN|OUT|ERR)))\b', Name.Builtin.Pseudo), + (r'<<([\'"]?)([a-zA-Z_]\w*)\1;?\n.*?\n\2\n', String), + (r'__END__', Comment.Preproc, 'end-part'), + (r'\$\^[ADEFHILMOPSTWX]', Name.Variable.Global), + (r"\$[\\\"\[\]'&`+*.,;=%~?@$!<>(^|/-](?!\w)", Name.Variable.Global), + (r'[$@%#]+', Name.Variable, 'varname'), + (r'0_?[0-7]+(_[0-7]+)*', Number.Oct), + (r'0x[0-9A-Fa-f]+(_[0-9A-Fa-f]+)*', Number.Hex), + (r'0b[01]+(_[01]+)*', Number.Bin), + (r'(?i)(\d*(_\d*)*\.\d+(_\d*)*|\d+(_\d*)*\.\d+(_\d*)*)(e[+-]?\d+)?', + Number.Float), + (r'(?i)\d+(_\d*)*e[+-]?\d+(_\d*)*', Number.Float), + (r'\d+(_\d+)*', Number.Integer), + (r"'(\\\\|\\[^\\]|[^'\\])*'", String), + (r'"(\\\\|\\[^\\]|[^"\\])*"', String), + (r'`(\\\\|\\[^\\]|[^`\\])*`', String.Backtick), + (r'<([^\s>]+)>', String.Regex), + (r'(q|qq|qw|qr|qx)\{', String.Other, 'cb-string'), + (r'(q|qq|qw|qr|qx)\(', String.Other, 'rb-string'), + (r'(q|qq|qw|qr|qx)\[', String.Other, 'sb-string'), + (r'(q|qq|qw|qr|qx)\<', String.Other, 'lt-string'), + (r'(q|qq|qw|qr|qx)([^a-zA-Z0-9])(.|\n)*?\2', String.Other), + (r'package\s+', Keyword, 'modulename'), + (r'sub\s+', Keyword, 'funcname'), + (r'(\[\]|\*\*|::|<<|>>|>=|<=>|<=|={3}|!=|=~|' + r'!~|&&?|\|\||\.{1,3})', Operator), + (r'[-+/*%=<>&^|!\\~]=?', Operator), + (r'[\(\)\[\]:;,<>/\?\{\}]', Punctuation), # yes, there's no shortage + # of punctuation in Perl! + (r'(?=\w)', Name, 'name'), + ], + 'format': [ + (r'\.\n', String.Interpol, '#pop'), + (r'[^\n]*\n', String.Interpol), + ], + 'varname': [ + (r'\s+', Text), + (r'\{', Punctuation, '#pop'), # hash syntax? 
+ (r'\)|,', Punctuation, '#pop'), # argument specifier + (r'\w+::', Name.Namespace), + (r'[\w:]+', Name.Variable, '#pop'), + ], + 'name': [ + (r'\w+::', Name.Namespace), + (r'[\w:]+', Name, '#pop'), + (r'[A-Z_]+(?=\W)', Name.Constant, '#pop'), + (r'(?=\W)', Text, '#pop'), + ], + 'modulename': [ + (r'[a-zA-Z_]\w*', Name.Namespace, '#pop') + ], + 'funcname': [ + (r'[a-zA-Z_]\w*[\!\?]?', Name.Function), + (r'\s+', Text), + # argument declaration + (r'(\([$@%]*\))(\s*)', bygroups(Punctuation, Text)), + (r'.*?{', Punctuation, '#pop'), + (r';', Punctuation, '#pop'), + ], + 'cb-string': [ + (r'\\[\{\}\\]', String.Other), + (r'\\', String.Other), + (r'\{', String.Other, 'cb-string'), + (r'\}', String.Other, '#pop'), + (r'[^\{\}\\]+', String.Other) + ], + 'rb-string': [ + (r'\\[\(\)\\]', String.Other), + (r'\\', String.Other), + (r'\(', String.Other, 'rb-string'), + (r'\)', String.Other, '#pop'), + (r'[^\(\)]+', String.Other) + ], + 'sb-string': [ + (r'\\[\[\]\\]', String.Other), + (r'\\', String.Other), + (r'\[', String.Other, 'sb-string'), + (r'\]', String.Other, '#pop'), + (r'[^\[\]]+', String.Other) + ], + 'lt-string': [ + (r'\\[\<\>\\]', String.Other), + (r'\\', String.Other), + (r'\<', String.Other, 'lt-string'), + (r'\>', String.Other, '#pop'), + (r'[^\<\>]+', String.Other) + ], + 'end-part': [ + (r'.+', Comment.Preproc, '#pop') + ] + } + + def analyse_text(text): + if shebang_matches(text, r'perl'): + return True + if re.search('(?:my|our)\s+[$@%(]', text): + return 0.9 + + +class Perl6Lexer(ExtendedRegexLexer): + """ + For `Perl 6 `_ source code. + + .. versionadded:: 2.0 + """ + + name = 'Perl6' + aliases = ['perl6', 'pl6'] + filenames = ['*.pl', '*.pm', '*.nqp', '*.p6', '*.6pl', '*.p6l', '*.pl6', + '*.6pm', '*.p6m', '*.pm6', '*.t'] + mimetypes = ['text/x-perl6', 'application/x-perl6'] + flags = re.MULTILINE | re.DOTALL | re.UNICODE + + PERL6_IDENTIFIER_RANGE = "['\w:-]" + + PERL6_KEYWORDS = ( + 'BEGIN', 'CATCH', 'CHECK', 'CONTROL', 'END', 'ENTER', 'FIRST', 'INIT', + 'KEEP', 'LAST', 'LEAVE', 'NEXT', 'POST', 'PRE', 'START', 'TEMP', + 'UNDO', 'as', 'assoc', 'async', 'augment', 'binary', 'break', 'but', + 'cached', 'category', 'class', 'constant', 'contend', 'continue', + 'copy', 'deep', 'default', 'defequiv', 'defer', 'die', 'do', 'else', + 'elsif', 'enum', 'equiv', 'exit', 'export', 'fail', 'fatal', 'for', + 'gather', 'given', 'goto', 'grammar', 'handles', 'has', 'if', 'inline', + 'irs', 'is', 'last', 'leave', 'let', 'lift', 'loop', 'looser', 'macro', + 'make', 'maybe', 'method', 'module', 'multi', 'my', 'next', 'of', + 'ofs', 'only', 'oo', 'ors', 'our', 'package', 'parsed', 'prec', + 'proto', 'readonly', 'redo', 'ref', 'regex', 'reparsed', 'repeat', + 'require', 'required', 'return', 'returns', 'role', 'rule', 'rw', + 'self', 'slang', 'state', 'sub', 'submethod', 'subset', 'supersede', + 'take', 'temp', 'tighter', 'token', 'trusts', 'try', 'unary', + 'unless', 'until', 'use', 'warn', 'when', 'where', 'while', 'will', + ) + + PERL6_BUILTINS = ( + 'ACCEPTS', 'HOW', 'REJECTS', 'VAR', 'WHAT', 'WHENCE', 'WHERE', 'WHICH', + 'WHO', 'abs', 'acos', 'acosec', 'acosech', 'acosh', 'acotan', 'acotanh', + 'all', 'any', 'approx', 'arity', 'asec', 'asech', 'asin', 'asinh', + 'assuming', 'atan', 'atan2', 'atanh', 'attr', 'bless', 'body', 'by', + 'bytes', 'caller', 'callsame', 'callwith', 'can', 'capitalize', 'cat', + 'ceiling', 'chars', 'chmod', 'chomp', 'chop', 'chr', 'chroot', + 'circumfix', 'cis', 'classify', 'clone', 'close', 'cmp_ok', 'codes', + 'comb', 'connect', 'contains', 'context', 'cos', 
'cosec', 'cosech', + 'cosh', 'cotan', 'cotanh', 'count', 'defined', 'delete', 'diag', + 'dies_ok', 'does', 'e', 'each', 'eager', 'elems', 'end', 'eof', 'eval', + 'eval_dies_ok', 'eval_elsewhere', 'eval_lives_ok', 'evalfile', 'exists', + 'exp', 'first', 'flip', 'floor', 'flunk', 'flush', 'fmt', 'force_todo', + 'fork', 'from', 'getc', 'gethost', 'getlogin', 'getpeername', 'getpw', + 'gmtime', 'graphs', 'grep', 'hints', 'hyper', 'im', 'index', 'infix', + 'invert', 'is_approx', 'is_deeply', 'isa', 'isa_ok', 'isnt', 'iterator', + 'join', 'key', 'keys', 'kill', 'kv', 'lastcall', 'lazy', 'lc', 'lcfirst', + 'like', 'lines', 'link', 'lives_ok', 'localtime', 'log', 'log10', 'map', + 'max', 'min', 'minmax', 'name', 'new', 'nextsame', 'nextwith', 'nfc', + 'nfd', 'nfkc', 'nfkd', 'nok_error', 'nonce', 'none', 'normalize', 'not', + 'nothing', 'ok', 'once', 'one', 'open', 'opendir', 'operator', 'ord', + 'p5chomp', 'p5chop', 'pack', 'pair', 'pairs', 'pass', 'perl', 'pi', + 'pick', 'plan', 'plan_ok', 'polar', 'pop', 'pos', 'postcircumfix', + 'postfix', 'pred', 'prefix', 'print', 'printf', 'push', 'quasi', + 'quotemeta', 'rand', 're', 'read', 'readdir', 'readline', 'reduce', + 'reverse', 'rewind', 'rewinddir', 'rindex', 'roots', 'round', + 'roundrobin', 'run', 'runinstead', 'sameaccent', 'samecase', 'say', + 'sec', 'sech', 'sech', 'seek', 'shape', 'shift', 'sign', 'signature', + 'sin', 'sinh', 'skip', 'skip_rest', 'sleep', 'slurp', 'sort', 'splice', + 'split', 'sprintf', 'sqrt', 'srand', 'strand', 'subst', 'substr', 'succ', + 'sum', 'symlink', 'tan', 'tanh', 'throws_ok', 'time', 'times', 'to', + 'todo', 'trim', 'trim_end', 'trim_start', 'true', 'truncate', 'uc', + 'ucfirst', 'undef', 'undefine', 'uniq', 'unlike', 'unlink', 'unpack', + 'unpolar', 'unshift', 'unwrap', 'use_ok', 'value', 'values', 'vec', + 'version_lt', 'void', 'wait', 'want', 'wrap', 'write', 'zip', + ) + + PERL6_BUILTIN_CLASSES = ( + 'Abstraction', 'Any', 'AnyChar', 'Array', 'Associative', 'Bag', 'Bit', + 'Blob', 'Block', 'Bool', 'Buf', 'Byte', 'Callable', 'Capture', 'Char', 'Class', + 'Code', 'Codepoint', 'Comparator', 'Complex', 'Decreasing', 'Exception', + 'Failure', 'False', 'Grammar', 'Grapheme', 'Hash', 'IO', 'Increasing', + 'Int', 'Junction', 'KeyBag', 'KeyExtractor', 'KeyHash', 'KeySet', + 'KitchenSink', 'List', 'Macro', 'Mapping', 'Match', 'Matcher', 'Method', + 'Module', 'Num', 'Object', 'Ordered', 'Ordering', 'OrderingPair', + 'Package', 'Pair', 'Positional', 'Proxy', 'Range', 'Rat', 'Regex', + 'Role', 'Routine', 'Scalar', 'Seq', 'Set', 'Signature', 'Str', 'StrLen', + 'StrPos', 'Sub', 'Submethod', 'True', 'UInt', 'Undef', 'Version', 'Void', + 'Whatever', 'bit', 'bool', 'buf', 'buf1', 'buf16', 'buf2', 'buf32', + 'buf4', 'buf64', 'buf8', 'complex', 'int', 'int1', 'int16', 'int2', + 'int32', 'int4', 'int64', 'int8', 'num', 'rat', 'rat1', 'rat16', 'rat2', + 'rat32', 'rat4', 'rat64', 'rat8', 'uint', 'uint1', 'uint16', 'uint2', + 'uint32', 'uint4', 'uint64', 'uint8', 'utf16', 'utf32', 'utf8', + ) + + PERL6_OPERATORS = ( + 'X', 'Z', 'after', 'also', 'and', 'andthen', 'before', 'cmp', 'div', + 'eq', 'eqv', 'extra', 'ff', 'fff', 'ge', 'gt', 'le', 'leg', 'lt', 'm', + 'mm', 'mod', 'ne', 'or', 'orelse', 'rx', 's', 'tr', 'x', 'xor', 'xx', + '++', '--', '**', '!', '+', '-', '~', '?', '|', '||', '+^', '~^', '?^', + '^', '*', '/', '%', '%%', '+&', '+<', '+>', '~&', '~<', '~>', '?&', + 'gcd', 'lcm', '+', '-', '+|', '+^', '~|', '~^', '?|', '?^', + '~', '&', '^', 'but', 'does', '<=>', '..', '..^', '^..', '^..^', + '!=', '==', '<', '<=', '>', '>=', 
'~~', '===', '!eqv', + '&&', '||', '^^', '//', 'min', 'max', '??', '!!', 'ff', 'fff', 'so', + 'not', '<==', '==>', '<<==', '==>>', + ) + + # Perl 6 has a *lot* of possible bracketing characters + # this list was lifted from STD.pm6 (https://github.com/perl6/std) + PERL6_BRACKETS = { + u'\u0028': u'\u0029', u'\u003c': u'\u003e', u'\u005b': u'\u005d', + u'\u007b': u'\u007d', u'\u00ab': u'\u00bb', u'\u0f3a': u'\u0f3b', + u'\u0f3c': u'\u0f3d', u'\u169b': u'\u169c', u'\u2018': u'\u2019', + u'\u201a': u'\u2019', u'\u201b': u'\u2019', u'\u201c': u'\u201d', + u'\u201e': u'\u201d', u'\u201f': u'\u201d', u'\u2039': u'\u203a', + u'\u2045': u'\u2046', u'\u207d': u'\u207e', u'\u208d': u'\u208e', + u'\u2208': u'\u220b', u'\u2209': u'\u220c', u'\u220a': u'\u220d', + u'\u2215': u'\u29f5', u'\u223c': u'\u223d', u'\u2243': u'\u22cd', + u'\u2252': u'\u2253', u'\u2254': u'\u2255', u'\u2264': u'\u2265', + u'\u2266': u'\u2267', u'\u2268': u'\u2269', u'\u226a': u'\u226b', + u'\u226e': u'\u226f', u'\u2270': u'\u2271', u'\u2272': u'\u2273', + u'\u2274': u'\u2275', u'\u2276': u'\u2277', u'\u2278': u'\u2279', + u'\u227a': u'\u227b', u'\u227c': u'\u227d', u'\u227e': u'\u227f', + u'\u2280': u'\u2281', u'\u2282': u'\u2283', u'\u2284': u'\u2285', + u'\u2286': u'\u2287', u'\u2288': u'\u2289', u'\u228a': u'\u228b', + u'\u228f': u'\u2290', u'\u2291': u'\u2292', u'\u2298': u'\u29b8', + u'\u22a2': u'\u22a3', u'\u22a6': u'\u2ade', u'\u22a8': u'\u2ae4', + u'\u22a9': u'\u2ae3', u'\u22ab': u'\u2ae5', u'\u22b0': u'\u22b1', + u'\u22b2': u'\u22b3', u'\u22b4': u'\u22b5', u'\u22b6': u'\u22b7', + u'\u22c9': u'\u22ca', u'\u22cb': u'\u22cc', u'\u22d0': u'\u22d1', + u'\u22d6': u'\u22d7', u'\u22d8': u'\u22d9', u'\u22da': u'\u22db', + u'\u22dc': u'\u22dd', u'\u22de': u'\u22df', u'\u22e0': u'\u22e1', + u'\u22e2': u'\u22e3', u'\u22e4': u'\u22e5', u'\u22e6': u'\u22e7', + u'\u22e8': u'\u22e9', u'\u22ea': u'\u22eb', u'\u22ec': u'\u22ed', + u'\u22f0': u'\u22f1', u'\u22f2': u'\u22fa', u'\u22f3': u'\u22fb', + u'\u22f4': u'\u22fc', u'\u22f6': u'\u22fd', u'\u22f7': u'\u22fe', + u'\u2308': u'\u2309', u'\u230a': u'\u230b', u'\u2329': u'\u232a', + u'\u23b4': u'\u23b5', u'\u2768': u'\u2769', u'\u276a': u'\u276b', + u'\u276c': u'\u276d', u'\u276e': u'\u276f', u'\u2770': u'\u2771', + u'\u2772': u'\u2773', u'\u2774': u'\u2775', u'\u27c3': u'\u27c4', + u'\u27c5': u'\u27c6', u'\u27d5': u'\u27d6', u'\u27dd': u'\u27de', + u'\u27e2': u'\u27e3', u'\u27e4': u'\u27e5', u'\u27e6': u'\u27e7', + u'\u27e8': u'\u27e9', u'\u27ea': u'\u27eb', u'\u2983': u'\u2984', + u'\u2985': u'\u2986', u'\u2987': u'\u2988', u'\u2989': u'\u298a', + u'\u298b': u'\u298c', u'\u298d': u'\u298e', u'\u298f': u'\u2990', + u'\u2991': u'\u2992', u'\u2993': u'\u2994', u'\u2995': u'\u2996', + u'\u2997': u'\u2998', u'\u29c0': u'\u29c1', u'\u29c4': u'\u29c5', + u'\u29cf': u'\u29d0', u'\u29d1': u'\u29d2', u'\u29d4': u'\u29d5', + u'\u29d8': u'\u29d9', u'\u29da': u'\u29db', u'\u29f8': u'\u29f9', + u'\u29fc': u'\u29fd', u'\u2a2b': u'\u2a2c', u'\u2a2d': u'\u2a2e', + u'\u2a34': u'\u2a35', u'\u2a3c': u'\u2a3d', u'\u2a64': u'\u2a65', + u'\u2a79': u'\u2a7a', u'\u2a7d': u'\u2a7e', u'\u2a7f': u'\u2a80', + u'\u2a81': u'\u2a82', u'\u2a83': u'\u2a84', u'\u2a8b': u'\u2a8c', + u'\u2a91': u'\u2a92', u'\u2a93': u'\u2a94', u'\u2a95': u'\u2a96', + u'\u2a97': u'\u2a98', u'\u2a99': u'\u2a9a', u'\u2a9b': u'\u2a9c', + u'\u2aa1': u'\u2aa2', u'\u2aa6': u'\u2aa7', u'\u2aa8': u'\u2aa9', + u'\u2aaa': u'\u2aab', u'\u2aac': u'\u2aad', u'\u2aaf': u'\u2ab0', + u'\u2ab3': u'\u2ab4', u'\u2abb': u'\u2abc', u'\u2abd': u'\u2abe', + 
u'\u2abf': u'\u2ac0', u'\u2ac1': u'\u2ac2', u'\u2ac3': u'\u2ac4', + u'\u2ac5': u'\u2ac6', u'\u2acd': u'\u2ace', u'\u2acf': u'\u2ad0', + u'\u2ad1': u'\u2ad2', u'\u2ad3': u'\u2ad4', u'\u2ad5': u'\u2ad6', + u'\u2aec': u'\u2aed', u'\u2af7': u'\u2af8', u'\u2af9': u'\u2afa', + u'\u2e02': u'\u2e03', u'\u2e04': u'\u2e05', u'\u2e09': u'\u2e0a', + u'\u2e0c': u'\u2e0d', u'\u2e1c': u'\u2e1d', u'\u2e20': u'\u2e21', + u'\u3008': u'\u3009', u'\u300a': u'\u300b', u'\u300c': u'\u300d', + u'\u300e': u'\u300f', u'\u3010': u'\u3011', u'\u3014': u'\u3015', + u'\u3016': u'\u3017', u'\u3018': u'\u3019', u'\u301a': u'\u301b', + u'\u301d': u'\u301e', u'\ufd3e': u'\ufd3f', u'\ufe17': u'\ufe18', + u'\ufe35': u'\ufe36', u'\ufe37': u'\ufe38', u'\ufe39': u'\ufe3a', + u'\ufe3b': u'\ufe3c', u'\ufe3d': u'\ufe3e', u'\ufe3f': u'\ufe40', + u'\ufe41': u'\ufe42', u'\ufe43': u'\ufe44', u'\ufe47': u'\ufe48', + u'\ufe59': u'\ufe5a', u'\ufe5b': u'\ufe5c', u'\ufe5d': u'\ufe5e', + u'\uff08': u'\uff09', u'\uff1c': u'\uff1e', u'\uff3b': u'\uff3d', + u'\uff5b': u'\uff5d', u'\uff5f': u'\uff60', u'\uff62': u'\uff63', + } + + def _build_word_match(words, boundary_regex_fragment=None, prefix='', suffix=''): + if boundary_regex_fragment is None: + return r'\b(' + prefix + r'|'.join(re.escape(x) for x in words) + \ + suffix + r')\b' + else: + return r'(? 0: + next_open_pos = text.find(opening_chars, search_pos + n_chars) + next_close_pos = text.find(closing_chars, search_pos + n_chars) + + if next_close_pos == -1: + next_close_pos = len(text) + nesting_level = 0 + elif next_open_pos != -1 and next_open_pos < next_close_pos: + nesting_level += 1 + search_pos = next_open_pos + else: # next_close_pos < next_open_pos + nesting_level -= 1 + search_pos = next_close_pos + + end_pos = next_close_pos + + if end_pos < 0: # if we didn't find a closer, just highlight the + # rest of the text in this class + end_pos = len(text) + + if adverbs is not None and re.search(r':to\b', adverbs): + heredoc_terminator = text[match.start('delimiter') + n_chars:end_pos] + end_heredoc = re.search(r'^\s*' + re.escape(heredoc_terminator) + + r'\s*$', text[end_pos:], re.MULTILINE) + + if end_heredoc: + end_pos += end_heredoc.end() + else: + end_pos = len(text) + + yield match.start(), token_class, text[match.start():end_pos + n_chars] + context.pos = end_pos + n_chars + + return callback + + def opening_brace_callback(lexer, match, context): + stack = context.stack + + yield match.start(), Text, context.text[match.start():match.end()] + context.pos = match.end() + + # if we encounter an opening brace and we're one level + # below a token state, it means we need to increment + # the nesting level for braces so we know later when + # we should return to the token rules. + if len(stack) > 2 and stack[-2] == 'token': + context.perl6_token_nesting_level += 1 + + def closing_brace_callback(lexer, match, context): + stack = context.stack + + yield match.start(), Text, context.text[match.start():match.end()] + context.pos = match.end() + + # if we encounter a free closing brace and we're one level + # below a token state, it means we need to check the nesting + # level to see if we need to return to the token state. 
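+ # e.g. a quoted '{' inside a token body must not end it; only the matching outer '}' pops the state.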
+ if len(stack) > 2 and stack[-2] == 'token': + context.perl6_token_nesting_level -= 1 + if context.perl6_token_nesting_level == 0: + stack.pop() + + def embedded_perl6_callback(lexer, match, context): + context.perl6_token_nesting_level = 1 + yield match.start(), Text, context.text[match.start():match.end()] + context.pos = match.end() + context.stack.append('root') + + # If you're modifying these rules, be careful if you need to process '{' or '}' + # characters. We have special logic for processing these characters (due to the fact + # that you can nest Perl 6 code in regex blocks), so if you need to process one of + # them, make sure you also process the corresponding one! + tokens = { + 'common': [ + (r'#[`|=](?P(?P[' + ''.join(PERL6_BRACKETS) + r'])(?P=first_char)*)', brackets_callback(Comment.Multiline)), + (r'#[^\n]*$', Comment.Singleline), + (r'^(\s*)=begin\s+(\w+)\b.*?^\1=end\s+\2', Comment.Multiline), + (r'^(\s*)=for.*?\n\s*?\n', Comment.Multiline), + (r'^=.*?\n\s*?\n', Comment.Multiline), + (r'(regex|token|rule)(\s*' + PERL6_IDENTIFIER_RANGE + '+:sym)', + bygroups(Keyword, Name), 'token-sym-brackets'), + (r'(regex|token|rule)(?!' + PERL6_IDENTIFIER_RANGE + ')(\s*' + PERL6_IDENTIFIER_RANGE + '+)?', bygroups(Keyword, Name), 'pre-token'), + # deal with a special case in the Perl 6 grammar (role q { ... }) + (r'(role)(\s+)(q)(\s*)', bygroups(Keyword, Text, Name, Text)), + (_build_word_match(PERL6_KEYWORDS, PERL6_IDENTIFIER_RANGE), Keyword), + (_build_word_match(PERL6_BUILTIN_CLASSES, PERL6_IDENTIFIER_RANGE, suffix='(?::[UD])?'), Name.Builtin), + (_build_word_match(PERL6_BUILTINS, PERL6_IDENTIFIER_RANGE), Name.Builtin), + # copied from PerlLexer + (r'[$@%&][.^:?=!~]?' + PERL6_IDENTIFIER_RANGE + u'+(?:<<.*?>>|<.*?>|«.*?»)*', + Name.Variable), + (r'\$[!/](?:<<.*?>>|<.*?>|«.*?»)*', Name.Variable.Global), + (r'::\?\w+', Name.Variable.Global), + (r'[$@%&]\*' + PERL6_IDENTIFIER_RANGE + u'+(?:<<.*?>>|<.*?>|«.*?»)*', + Name.Variable.Global), + (r'\$(?:<.*?>)+', Name.Variable), + (r'(?:q|qq|Q)[a-zA-Z]?\s*(?P:[\w\s:]+)?\s*(?P(?P[^0-9a-zA-Z:\s])(?P=first_char)*)', brackets_callback(String)), + # copied from PerlLexer + (r'0_?[0-7]+(_[0-7]+)*', Number.Oct), + (r'0x[0-9A-Fa-f]+(_[0-9A-Fa-f]+)*', Number.Hex), + (r'0b[01]+(_[01]+)*', Number.Bin), + (r'(?i)(\d*(_\d*)*\.\d+(_\d*)*|\d+(_\d*)*\.\d+(_\d*)*)(e[+-]?\d+)?', + Number.Float), + (r'(?i)\d+(_\d*)*e[+-]?\d+(_\d*)*', Number.Float), + (r'\d+(_\d+)*', Number.Integer), + (r'(?<=~~)\s*/(?:\\\\|\\/|.)*?/', String.Regex), + (r'(?<=[=(,])\s*/(?:\\\\|\\/|.)*?/', String.Regex), + (r'm\w+(?=\()', Name), + (r'(?:m|ms|rx)\s*(?P:[\w\s:]+)?\s*(?P(?P[^0-9a-zA-Z_:\s])(?P=first_char)*)', brackets_callback(String.Regex)), + (r'(?:s|ss|tr)\s*(?::[\w\s:]+)?\s*/(?:\\\\|\\/|.)*?/(?:\\\\|\\/|.)*?/', + String.Regex), + (r'<[^\s=].*?\S>', String), + (_build_word_match(PERL6_OPERATORS), Operator), + (r'[0-9a-zA-Z_]' + PERL6_IDENTIFIER_RANGE + '*', Name), + (r"'(\\\\|\\[^\\]|[^'\\])*'", String), + (r'"(\\\\|\\[^\\]|[^"\\])*"', String), + ], + 'root': [ + include('common'), + (r'\{', opening_brace_callback), + (r'\}', closing_brace_callback), + (r'.+?', Text), + ], + 'pre-token': [ + include('common'), + (r'\{', Text, ('#pop', 'token')), + (r'.+?', Text), + ], + 'token-sym-brackets': [ + (r'(?P(?P[' + ''.join(PERL6_BRACKETS) + '])(?P=first_char)*)', brackets_callback(Name), ('#pop', 'pre-token')), + default(('#pop', 'pre-token')), + ], + 'token': [ + (r'}', Text, '#pop'), + (r'(?<=:)(?:my|our|state|constant|temp|let).*?;', using(this)), + # make sure that quotes in 
character classes aren't treated as strings + (r'<(?:[-!?+.]\s*)?\[.*?\]>', String.Regex), + # make sure that '#' characters in quotes aren't treated as comments + (r"(?my|our)\s+)?(?:module|class|role|enum|grammar)', line) + if class_decl: + if saw_perl_decl or class_decl.group('scope') is not None: + return True + rating = 0.05 + continue + break + + return rating + + def __init__(self, **options): + super(Perl6Lexer, self).__init__(**options) + self.encoding = options.get('encoding', 'utf-8') diff --git a/pygments/lexers/python.py b/pygments/lexers/python.py index aea29355..2a29aadb 100644 --- a/pygments/lexers/python.py +++ b/pygments/lexers/python.py @@ -11,13 +11,436 @@ import re -from pygments.lexer import RegexLexer, include, bygroups, using, \ - this, inherit, default, words, combined -from pygments.util import get_bool_opt +from pygments.lexer import Lexer, RegexLexer, include, bygroups, using, \ + default, words, combined, do_insertions +from pygments.util import get_bool_opt, shebang_matches from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ - Number, Punctuation, Error + Number, Punctuation, Generic, Other, Error +from pygments import unistring as uni -__all__ = ['CythonLexer'] +__all__ = ['PythonLexer', 'PythonConsoleLexer', 'PythonTracebackLexer', + 'Python3Lexer', 'Python3TracebackLexer', 'CythonLexer', + 'HyLexer', 'DgLexer'] + +line_re = re.compile('.*?\n') + + +class PythonLexer(RegexLexer): + """ + For `Python `_ source code. + """ + + name = 'Python' + aliases = ['python', 'py', 'sage'] + filenames = ['*.py', '*.pyw', '*.sc', 'SConstruct', 'SConscript', '*.tac', '*.sage'] + mimetypes = ['text/x-python', 'application/x-python'] + + tokens = { + 'root': [ + (r'\n', Text), + (r'^(\s*)([rRuU]{,2}"""(?:.|\n)*?""")', bygroups(Text, String.Doc)), + (r"^(\s*)([rRuU]{,2}'''(?:.|\n)*?''')", bygroups(Text, String.Doc)), + (r'[^\S\n]+', Text), + (r'#.*$', Comment), + (r'[]{}:(),;[]', Punctuation), + (r'\\\n', Text), + (r'\\', Text), + (r'(in|is|and|or|not)\b', Operator.Word), + (r'!=|==|<<|>>|[-~+/*%=<>&^|.]', Operator), + include('keywords'), + (r'(def)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'funcname'), + (r'(class)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'classname'), + (r'(from)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text), + 'fromimport'), + (r'(import)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text), + 'import'), + include('builtins'), + include('backtick'), + ('(?:[rR]|[uU][rR]|[rR][uU])"""', String, 'tdqs'), + ("(?:[rR]|[uU][rR]|[rR][uU])'''", String, 'tsqs'), + ('(?:[rR]|[uU][rR]|[rR][uU])"', String, 'dqs'), + ("(?:[rR]|[uU][rR]|[rR][uU])'", String, 'sqs'), + ('[uU]?"""', String, combined('stringescape', 'tdqs')), + ("[uU]?'''", String, combined('stringescape', 'tsqs')), + ('[uU]?"', String, combined('stringescape', 'dqs')), + ("[uU]?'", String, combined('stringescape', 'sqs')), + include('name'), + include('numbers'), + ], + 'keywords': [ + (r'(assert|break|continue|del|elif|else|except|exec|' + r'finally|for|global|if|lambda|pass|print|raise|' + r'return|try|while|yield(\s+from)?|as|with)\b', Keyword), + ], + 'builtins': [ + (r'(?`_ source code (version 3.0). + + .. 
versionadded:: 0.10 + """ + + name = 'Python 3' + aliases = ['python3', 'py3'] + filenames = [] # Nothing until Python 3 gets widespread + mimetypes = ['text/x-python3', 'application/x-python3'] + + flags = re.MULTILINE | re.UNICODE + + uni_name = "[%s][%s]*" % (uni.xid_start, uni.xid_continue) + + tokens = PythonLexer.tokens.copy() + tokens['keywords'] = [ + (r'(assert|break|continue|del|elif|else|except|' + r'finally|for|global|if|lambda|pass|raise|nonlocal|' + r'return|try|while|yield(\s+from)?|as|with|True|False|None)\b', + Keyword), + ] + tokens['builtins'] = [ + (r'(?>> a = 'foo' + >>> print a + foo + >>> 1 / 0 + Traceback (most recent call last): + File "", line 1, in + ZeroDivisionError: integer division or modulo by zero + + Additional options: + + `python3` + Use Python 3 lexer for code. Default is ``False``. + + .. versionadded:: 1.0 + """ + name = 'Python console session' + aliases = ['pycon'] + mimetypes = ['text/x-python-doctest'] + + def __init__(self, **options): + self.python3 = get_bool_opt(options, 'python3', False) + Lexer.__init__(self, **options) + + def get_tokens_unprocessed(self, text): + if self.python3: + pylexer = Python3Lexer(**self.options) + tblexer = Python3TracebackLexer(**self.options) + else: + pylexer = PythonLexer(**self.options) + tblexer = PythonTracebackLexer(**self.options) + + curcode = '' + insertions = [] + curtb = '' + tbindex = 0 + tb = 0 + for match in line_re.finditer(text): + line = match.group() + if line.startswith(u'>>> ') or line.startswith(u'... '): + tb = 0 + insertions.append((len(curcode), + [(0, Generic.Prompt, line[:4])])) + curcode += line[4:] + elif line.rstrip() == u'...' and not tb: + # only a new >>> prompt can end an exception block + # otherwise an ellipsis in place of the traceback frames + # will be mishandled + insertions.append((len(curcode), + [(0, Generic.Prompt, u'...')])) + curcode += line[3:] + else: + if curcode: + for item in do_insertions( + insertions, pylexer.get_tokens_unprocessed(curcode)): + yield item + curcode = '' + insertions = [] + if (line.startswith(u'Traceback (most recent call last):') or + re.match(u' File "[^"]+", line \\d+\\n$', line)): + tb = 1 + curtb = line + tbindex = match.start() + elif line == 'KeyboardInterrupt\n': + yield match.start(), Name.Class, line + elif tb: + curtb += line + if not (line.startswith(' ') or line.strip() == u'...'): + tb = 0 + for i, t, v in tblexer.get_tokens_unprocessed(curtb): + yield tbindex+i, t, v + else: + yield match.start(), Generic.Output, line + if curcode: + for item in do_insertions(insertions, + pylexer.get_tokens_unprocessed(curcode)): + yield item + + +class PythonTracebackLexer(RegexLexer): + """ + For Python tracebacks. + + .. versionadded:: 0.7 + """ + + name = 'Python Traceback' + aliases = ['pytb'] + filenames = ['*.pytb'] + mimetypes = ['text/x-python-traceback'] + + tokens = { + 'root': [ + (r'^Traceback \(most recent call last\):\n', + Generic.Traceback, 'intb'), + # SyntaxError starts with this. + (r'^(?= File "[^"]+", line \d+)', Generic.Traceback, 'intb'), + (r'^.*\n', Other), + ], + 'intb': [ + (r'^( File )("[^"]+")(, line )(\d+)(, in )(.+)(\n)', + bygroups(Text, Name.Builtin, Text, Number, Text, Name, Text)), + (r'^( File )("[^"]+")(, line )(\d+)(\n)', + bygroups(Text, Name.Builtin, Text, Number, Text)), + (r'^( )(.+)(\n)', + bygroups(Text, using(PythonLexer), Text)), + (r'^([ \t]*)(\.\.\.)(\n)', + bygroups(Text, Comment, Text)), # for doctests... 
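+ # the final exception line, e.g. "ZeroDivisionError: integer division or modulo by zero"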
+ (r'^([^:]+)(: )(.+)(\n)', + bygroups(Generic.Error, Text, Name, Text), '#pop'), + (r'^([a-zA-Z_]\w*)(:?\n)', + bygroups(Generic.Error, Text), '#pop') + ], + } + + +class Python3TracebackLexer(RegexLexer): + """ + For Python 3.0 tracebacks, with support for chained exceptions. + + .. versionadded:: 1.0 + """ + + name = 'Python 3.0 Traceback' + aliases = ['py3tb'] + filenames = ['*.py3tb'] + mimetypes = ['text/x-python3-traceback'] + + tokens = { + 'root': [ + (r'\n', Text), + (r'^Traceback \(most recent call last\):\n', Generic.Traceback, 'intb'), + (r'^During handling of the above exception, another ' + r'exception occurred:\n\n', Generic.Traceback), + (r'^The above exception was the direct cause of the ' + r'following exception:\n\n', Generic.Traceback), + (r'^(?= File "[^"]+", line \d+)', Generic.Traceback, 'intb'), + ], + 'intb': [ + (r'^( File )("[^"]+")(, line )(\d+)(, in )(.+)(\n)', + bygroups(Text, Name.Builtin, Text, Number, Text, Name, Text)), + (r'^( File )("[^"]+")(, line )(\d+)(\n)', + bygroups(Text, Name.Builtin, Text, Number, Text)), + (r'^( )(.+)(\n)', + bygroups(Text, using(Python3Lexer), Text)), + (r'^([ \t]*)(\.\.\.)(\n)', + bygroups(Text, Comment, Text)), # for doctests... + (r'^([^:]+)(: )(.+)(\n)', + bygroups(Generic.Error, Text, Name, Text), '#pop'), + (r'^([a-zA-Z_]\w*)(:?\n)', + bygroups(Generic.Error, Text), '#pop') + ], + } class CythonLexer(RegexLexer): @@ -194,3 +617,208 @@ class CythonLexer(RegexLexer): include('nl') ], } + + +class HyLexer(RegexLexer): + """ + Lexer for `Hy `_ source code. + + .. versionadded:: 2.0 + """ + name = 'Hy' + aliases = ['hylang'] + filenames = ['*.hy'] + mimetypes = ['text/x-hy', 'application/x-hy'] + + special_forms = [ + 'cond', 'for', '->', '->>', 'car', + 'cdr', 'first', 'rest', 'let', 'when', 'unless', + 'import', 'do', 'progn', 'get', 'slice', 'assoc', 'with-decorator', + ',', 'list_comp', 'kwapply', '~', 'is', 'in', 'is-not', 'not-in', + 'quasiquote', 'unquote', 'unquote-splice', 'quote', '|', '<<=', '>>=', + 'foreach', 'while', + 'eval-and-compile', 'eval-when-compile' + ] + + declarations = [ + 'def', 'defn', 'defun', 'defmacro', 'defclass', 'lambda', 'fn', 'setv' + ] + + hy_builtins = [] + + hy_core = [ + 'cycle', 'dec', 'distinct', 'drop', 'even?', 'filter', 'inc', + 'instance?', 'iterable?', 'iterate', 'iterator?', 'neg?', + 'none?', 'nth', 'numeric?', 'odd?', 'pos?', 'remove', 'repeat', + 'repeatedly', 'take', 'take_nth', 'take_while', 'zero?' + ] + + builtins = hy_builtins + hy_core + + # valid names for identifiers + # well, names can only not consist fully of numbers + # but this should be good enough for now + valid_name = r'(?!#)[\w!$%*+<=>?/.#-]+' + + def _multi_escape(entries): + return '(%s)' % ('|'.join(re.escape(entry) + ' ' for entry in entries)) + + tokens = { + 'root': [ + # the comments - always starting with semicolon + # and going to the end of the line + (r';.*$', Comment.Single), + + # whitespaces - usually not relevant + (r'[,\s]+', Text), + + # numbers + (r'-?\d+\.\d+', Number.Float), + (r'-?\d+', Number.Integer), + (r'0[0-7]+j?', Number.Oct), + (r'0[xX][a-fA-F0-9]+', Number.Hex), + + # strings, symbols and characters + (r'"(\\\\|\\"|[^"])*"', String), + (r"'" + valid_name, String.Symbol), + (r"\\(.|[a-z]+)", String.Char), + (r'^(\s*)([rRuU]{,2}"""(?:.|\n)*?""")', bygroups(Text, String.Doc)), + (r"^(\s*)([rRuU]{,2}'''(?:.|\n)*?''')", bygroups(Text, String.Doc)), + + # keywords + (r'::?' 
+ valid_name, String.Symbol), + + # special operators + (r'~@|[`\'#^~&@]', Operator), + + include('py-keywords'), + include('py-builtins'), + + # highlight the special forms + (_multi_escape(special_forms), Keyword), + + # Technically, only the special forms are 'keywords'. The problem + # is that only treating them as keywords means that things like + # 'defn' and 'ns' need to be highlighted as builtins. This is ugly + # and weird for most styles. So, as a compromise we're going to + # highlight them as Keyword.Declarations. + (_multi_escape(declarations), Keyword.Declaration), + + # highlight the builtins + (_multi_escape(builtins), Name.Builtin), + + # the remaining functions + (r'(?<=\()' + valid_name, Name.Function), + + # find the remaining variables + (valid_name, Name.Variable), + + # Hy accepts vector notation + (r'(\[|\])', Punctuation), + + # Hy accepts map notation + (r'(\{|\})', Punctuation), + + # the famous parentheses! + (r'(\(|\))', Punctuation), + + ], + 'py-keywords': PythonLexer.tokens['keywords'], + 'py-builtins': PythonLexer.tokens['builtins'], + } + + def analyse_text(text): + if '(import ' in text or '(defn ' in text: + return 0.9 + + +class DgLexer(RegexLexer): + """ + Lexer for `dg `_, + a functional and object-oriented programming language + running on the CPython 3 VM. + + .. versionadded:: 1.6 + """ + name = 'dg' + aliases = ['dg'] + filenames = ['*.dg'] + mimetypes = ['text/x-dg'] + + tokens = { + 'root': [ + (r'\s+', Text), + (r'#.*?$', Comment.Single), + + (r'(?i)0b[01]+', Number.Bin), + (r'(?i)0o[0-7]+', Number.Oct), + (r'(?i)0x[0-9a-f]+', Number.Hex), + (r'(?i)[+-]?[0-9]+\.[0-9]+(e[+-]?[0-9]+)?j?', Number.Float), + (r'(?i)[+-]?[0-9]+e[+-]?\d+j?', Number.Float), + (r'(?i)[+-]?[0-9]+j?', Number.Integer), + + (r"(?i)(br|r?b?)'''", String, combined('stringescape', 'tsqs', 'string')), + (r'(?i)(br|r?b?)"""', String, combined('stringescape', 'tdqs', 'string')), + (r"(?i)(br|r?b?)'", String, combined('stringescape', 'sqs', 'string')), + (r'(?i)(br|r?b?)"', String, combined('stringescape', 'dqs', 'string')), + + (r"`\w+'*`", Operator), + (r'\b(and|in|is|or|where)\b', Operator.Word), + (r'[!$%&*+\-./:<-@\\^|~;,]+', Operator), + + (r"(?`_ source code. + """ + + name = 'Ruby' + aliases = ['rb', 'ruby', 'duby'] + filenames = ['*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec', + '*.rbx', '*.duby'] + mimetypes = ['text/x-ruby', 'application/x-ruby'] + + flags = re.DOTALL | re.MULTILINE + + def heredoc_callback(self, match, ctx): + # okay, this is the hardest part of parsing Ruby... + # match: 1 = <<-?, 2 = quote? 3 = name 4 = quote? 5 = rest of line + + start = match.start(1) + yield start, Operator, match.group(1) # <<-? 
+ yield match.start(2), String.Heredoc, match.group(2) # quote ", ', ` + yield match.start(3), Name.Constant, match.group(3) # heredoc name + yield match.start(4), String.Heredoc, match.group(4) # quote again + + heredocstack = ctx.__dict__.setdefault('heredocstack', []) + outermost = not bool(heredocstack) + heredocstack.append((match.group(1) == '<<-', match.group(3))) + + ctx.pos = match.start(5) + ctx.end = match.end(5) + # this may find other heredocs + for i, t, v in self.get_tokens_unprocessed(context=ctx): + yield i, t, v + ctx.pos = match.end() + + if outermost: + # this is the outer heredoc again, now we can process them all + for tolerant, hdname in heredocstack: + lines = [] + for match in line_re.finditer(ctx.text, ctx.pos): + if tolerant: + check = match.group().strip() + else: + check = match.group().rstrip() + if check == hdname: + for amatch in lines: + yield amatch.start(), String.Heredoc, amatch.group() + yield match.start(), Name.Constant, match.group() + ctx.pos = match.end() + break + else: + lines.append(match) + else: + # end of heredoc not found -- error! + for amatch in lines: + yield amatch.start(), Error, amatch.group() + ctx.end = len(ctx.text) + del heredocstack[:] + + def gen_rubystrings_rules(): + def intp_regex_callback(self, match, ctx): + yield match.start(1), String.Regex, match.group(1) # begin + nctx = LexerContext(match.group(3), 0, ['interpolated-regex']) + for i, t, v in self.get_tokens_unprocessed(context=nctx): + yield match.start(3)+i, t, v + yield match.start(4), String.Regex, match.group(4) # end[mixounse]* + ctx.pos = match.end() + + def intp_string_callback(self, match, ctx): + yield match.start(1), String.Other, match.group(1) + nctx = LexerContext(match.group(3), 0, ['interpolated-string']) + for i, t, v in self.get_tokens_unprocessed(context=nctx): + yield match.start(3)+i, t, v + yield match.start(4), String.Other, match.group(4) # end + ctx.pos = match.end() + + states = {} + states['strings'] = [ + # easy ones + (r'\:@{0,2}([a-zA-Z_]\w*[\!\?]?|\*\*?|[-+]@?|' + r'[/%&|^`~]|\[\]=?|<<|>>|<=?>|>=?|===?)', String.Symbol), + (r":'(\\\\|\\'|[^'])*'", String.Symbol), + (r"'(\\\\|\\'|[^'])*'", String.Single), + (r':"', String.Symbol, 'simple-sym'), + (r'([a-zA-Z_]\w*)(:)(?!:)', + bygroups(String.Symbol, Punctuation)), # Since Ruby 1.9 + (r'"', String.Double, 'simple-string'), + (r'(?', 'ab'): + states[name+'-intp-string'] = [ + (r'\\[\\' + lbrace + rbrace + ']', String.Other), + (r'(?! + states['strings'] += [ + # %r regex + (r'(%r([^a-zA-Z0-9]))((?:\\\2|(?!\2).)*)(\2[mixounse]*)', + intp_regex_callback), + # regular fancy strings with qsw + (r'%[qsw]([^a-zA-Z0-9])((?:\\\1|(?!\1).)*)\1', String.Other), + (r'(%[QWx]([^a-zA-Z0-9]))((?:\\\2|(?!\2).)*)(\2)', + intp_string_callback), + # special forms of fancy strings after operators or + # in method calls with braces + (r'(?<=[-+/*%=<>&!^|~,(])(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)', + bygroups(Text, String.Other, None)), + # and because of fixed width lookbehinds the whole thing a + # second time for line startings... 
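+ # e.g. a line beginning with "% foo " (a %-string delimited by a space or tab)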
+ (r'^(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)', + bygroups(Text, String.Other, None)), + # all regular fancy strings without qsw + (r'(%([^a-zA-Z0-9\s]))((?:\\\2|(?!\2).)*)(\2)', + intp_string_callback), + ] + + return states + + tokens = { + 'root': [ + (r'#.*?$', Comment.Single), + (r'=begin\s.*?\n=end.*?$', Comment.Multiline), + # keywords + (r'(BEGIN|END|alias|begin|break|case|defined\?|' + r'do|else|elsif|end|ensure|for|if|in|next|redo|' + r'rescue|raise|retry|return|super|then|undef|unless|until|when|' + r'while|yield)\b', Keyword), + # start of function, class and module names + (r'(module)(\s+)([a-zA-Z_]\w*' + r'(?:::[a-zA-Z_]\w*)*)', + bygroups(Keyword, Text, Name.Namespace)), + (r'(def)(\s+)', bygroups(Keyword, Text), 'funcname'), + (r'def(?=[*%&^`~+-/\[<>=])', Keyword, 'funcname'), + (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'), + # special methods + (r'(initialize|new|loop|include|extend|raise|attr_reader|' + r'attr_writer|attr_accessor|attr|catch|throw|private|' + r'module_function|public|protected|true|false|nil)\b', + Keyword.Pseudo), + (r'(not|and|or)\b', Operator.Word), + (r'(autoload|block_given|const_defined|eql|equal|frozen|include|' + r'instance_of|is_a|iterator|kind_of|method_defined|nil|' + r'private_method_defined|protected_method_defined|' + r'public_method_defined|respond_to|tainted)\?', Name.Builtin), + (r'(chomp|chop|exit|gsub|sub)!', Name.Builtin), + (r'(?~!:])|' + r'(?<=(?:\s|;)when\s)|' + r'(?<=(?:\s|;)or\s)|' + r'(?<=(?:\s|;)and\s)|' + r'(?<=(?:\s|;|\.)index\s)|' + r'(?<=(?:\s|;|\.)scan\s)|' + r'(?<=(?:\s|;|\.)sub\s)|' + r'(?<=(?:\s|;|\.)sub!\s)|' + r'(?<=(?:\s|;|\.)gsub\s)|' + r'(?<=(?:\s|;|\.)gsub!\s)|' + r'(?<=(?:\s|;|\.)match\s)|' + r'(?<=(?:\s|;)if\s)|' + r'(?<=(?:\s|;)elsif\s)|' + r'(?<=^when\s)|' + r'(?<=^index\s)|' + r'(?<=^scan\s)|' + r'(?<=^sub\s)|' + r'(?<=^gsub\s)|' + r'(?<=^sub!\s)|' + r'(?<=^gsub!\s)|' + r'(?<=^match\s)|' + r'(?<=^if\s)|' + r'(?<=^elsif\s)' + r')(\s*)(/)', bygroups(Text, String.Regex), 'multiline-regex'), + # multiline regex (in method calls or subscripts) + (r'(?<=\(|,|\[)/', String.Regex, 'multiline-regex'), + # multiline regex (this time the funny no whitespace rule) + (r'(\s+)(/)(?![\s=])', bygroups(Text, String.Regex), + 'multiline-regex'), + # lex numbers and ignore following regular expressions which + # are division operators in fact (grrrr. i hate that. any + # better ideas?) + # since pygments 0.7 we also eat a "?" operator after numbers + # so that the char operator does not work. Chars are not allowed + # there so that you can use the ternary operator. + # stupid example: + # x>=0?n[x]:"" + (r'(0_?[0-7]+(?:_[0-7]+)*)(\s*)([/?])?', + bygroups(Number.Oct, Text, Operator)), + (r'(0x[0-9A-Fa-f]+(?:_[0-9A-Fa-f]+)*)(\s*)([/?])?', + bygroups(Number.Hex, Text, Operator)), + (r'(0b[01]+(?:_[01]+)*)(\s*)([/?])?', + bygroups(Number.Bin, Text, Operator)), + (r'([\d]+(?:_\d+)*)(\s*)([/?])?', + bygroups(Number.Integer, Text, Operator)), + # Names + (r'@@[a-zA-Z_]\w*', Name.Variable.Class), + (r'@[a-zA-Z_]\w*', Name.Variable.Instance), + (r'\$\w+', Name.Variable.Global), + (r'\$[!@&`\'+~=/\\,;.<>_*$?:"]', Name.Variable.Global), + (r'\$-[0adFiIlpvw]', Name.Variable.Global), + (r'::', Operator), + include('strings'), + # chars + (r'\?(\\[MC]-)*' # modifiers + r'(\\([\\abefnrstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})|\S)' + r'(?!\w)', + String.Char), + (r'[A-Z]\w+', Name.Constant), + # this is needed because ruby attributes can look + # like keywords (class) or like this: ` ?!? 
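+ # e.g. "obj.class" or "str.empty?": the part after '.' or '::' is always a plain Name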
+ (r'(\.|::)([a-zA-Z_]\w*[\!\?]?|[*%&^`~+-/\[<>=])', + bygroups(Operator, Name)), + (r'[a-zA-Z_]\w*[\!\?]?', Name), + (r'(\[|\]|\*\*|<>?|>=|<=|<=>|=~|={3}|' + r'!~|&&?|\|\||\.{1,3})', Operator), + (r'[-+/*%=<>&!^|~]=?', Operator), + (r'[(){};,/?:\\]', Punctuation), + (r'\s+', Text) + ], + 'funcname': [ + (r'\(', Punctuation, 'defexpr'), + (r'(?:([a-zA-Z_]\w*)(\.))?' + r'([a-zA-Z_]\w*[\!\?]?|\*\*?|[-+]@?|' + r'[/%&|^`~]|\[\]=?|<<|>>|<=?>|>=?|===?)', + bygroups(Name.Class, Operator, Name.Function), '#pop'), + default('#pop') + ], + 'classname': [ + (r'\(', Punctuation, 'defexpr'), + (r'<<', Operator, '#pop'), + (r'[A-Z_]\w*', Name.Class, '#pop'), + default('#pop') + ], + 'defexpr': [ + (r'(\))(\.|::)?', bygroups(Punctuation, Operator), '#pop'), + (r'\(', Operator, '#push'), + include('root') + ], + 'in-intp': [ + ('}', String.Interpol, '#pop'), + include('root'), + ], + 'string-intp': [ + (r'#{', String.Interpol, 'in-intp'), + (r'#@@?[a-zA-Z_]\w*', String.Interpol), + (r'#\$[a-zA-Z_]\w*', String.Interpol) + ], + 'string-intp-escaped': [ + include('string-intp'), + (r'\\([\\abefnrstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})', + String.Escape) + ], + 'interpolated-regex': [ + include('string-intp'), + (r'[\\#]', String.Regex), + (r'[^\\#]+', String.Regex), + ], + 'interpolated-string': [ + include('string-intp'), + (r'[\\#]', String.Other), + (r'[^\\#]+', String.Other), + ], + 'multiline-regex': [ + include('string-intp'), + (r'\\\\', String.Regex), + (r'\\/', String.Regex), + (r'[\\#]', String.Regex), + (r'[^\\/#]+', String.Regex), + (r'/[mixounse]*', String.Regex, '#pop'), + ], + 'end-part': [ + (r'.+', Comment.Preproc, '#pop') + ] + } + tokens.update(gen_rubystrings_rules()) + + def analyse_text(text): + return shebang_matches(text, r'ruby(1\.\d)?') + + +class RubyConsoleLexer(Lexer): + """ + For Ruby interactive console (**irb**) output like: + + .. sourcecode:: rbcon + + irb(main):001:0> a = 1 + => 1 + irb(main):002:0> puts a + 1 + => nil + """ + name = 'Ruby irb session' + aliases = ['rbcon', 'irb'] + mimetypes = ['text/x-ruby-shellsession'] + + _prompt_re = re.compile('irb\([a-zA-Z_]\w*\):\d{3}:\d+[>*"\'] ' + '|>> |\?> ') + + def get_tokens_unprocessed(self, text): + rblexer = RubyLexer(**self.options) + + curcode = '' + insertions = [] + for match in line_re.finditer(text): + line = match.group() + m = self._prompt_re.match(line) + if m is not None: + end = m.end() + insertions.append((len(curcode), + [(0, Generic.Prompt, line[:end])])) + curcode += line[end:] + else: + if curcode: + for item in do_insertions( + insertions, rblexer.get_tokens_unprocessed(curcode)): + yield item + curcode = '' + insertions = [] + yield match.start(), Generic.Output, line + if curcode: + for item in do_insertions( + insertions, rblexer.get_tokens_unprocessed(curcode)): + yield item + + +class FancyLexer(RegexLexer): + """ + Pygments Lexer For `Fancy `_. + + Fancy is a self-hosted, pure object-oriented, dynamic, + class-based, concurrent general-purpose programming language + running on Rubinius, the Ruby VM. + + .. 
versionadded:: 1.5 + """ + name = 'Fancy' + filenames = ['*.fy', '*.fancypack'] + aliases = ['fancy', 'fy'] + mimetypes = ['text/x-fancysrc'] + + tokens = { + # copied from PerlLexer: + 'balanced-regex': [ + (r'/(\\\\|\\/|[^/])*/[egimosx]*', String.Regex, '#pop'), + (r'!(\\\\|\\!|[^!])*![egimosx]*', String.Regex, '#pop'), + (r'\\(\\\\|[^\\])*\\[egimosx]*', String.Regex, '#pop'), + (r'{(\\\\|\\}|[^}])*}[egimosx]*', String.Regex, '#pop'), + (r'<(\\\\|\\>|[^>])*>[egimosx]*', String.Regex, '#pop'), + (r'\[(\\\\|\\\]|[^\]])*\][egimosx]*', String.Regex, '#pop'), + (r'\((\\\\|\\\)|[^\)])*\)[egimosx]*', String.Regex, '#pop'), + (r'@(\\\\|\\\@|[^\@])*@[egimosx]*', String.Regex, '#pop'), + (r'%(\\\\|\\\%|[^\%])*%[egimosx]*', String.Regex, '#pop'), + (r'\$(\\\\|\\\$|[^\$])*\$[egimosx]*', String.Regex, '#pop'), + ], + 'root': [ + (r'\s+', Text), + + # balanced delimiters (copied from PerlLexer): + (r's{(\\\\|\\}|[^}])*}\s*', String.Regex, 'balanced-regex'), + (r's<(\\\\|\\>|[^>])*>\s*', String.Regex, 'balanced-regex'), + (r's\[(\\\\|\\\]|[^\]])*\]\s*', String.Regex, 'balanced-regex'), + (r's\((\\\\|\\\)|[^\)])*\)\s*', String.Regex, 'balanced-regex'), + (r'm?/(\\\\|\\/|[^/\n])*/[gcimosx]*', String.Regex), + (r'm(?=[/!\\{<\[\(@%\$])', String.Regex, 'balanced-regex'), + + # Comments + (r'#(.*?)\n', Comment.Single), + # Symbols + (r'\'([^\'\s\[\]\(\)\{\}]+|\[\])', String.Symbol), + # Multi-line DoubleQuotedString + (r'"""(\\\\|\\"|[^"])*"""', String), + # DoubleQuotedString + (r'"(\\\\|\\"|[^"])*"', String), + # keywords + (r'(def|class|try|catch|finally|retry|return|return_local|match|' + r'case|->|=>)\b', Keyword), + # constants + (r'(self|super|nil|false|true)\b', Name.Constant), + (r'[(){};,/?\|:\\]', Punctuation), + # names + (r'(Object|Array|Hash|Directory|File|Class|String|Number|' + r'Enumerable|FancyEnumerable|Block|TrueClass|NilClass|' + r'FalseClass|Tuple|Symbol|Stack|Set|FancySpec|Method|Package|' + r'Range)\b', Name.Builtin), + # functions + (r'[a-zA-Z](\w|[-+?!=*/^><%])*:', Name.Function), + # operators, must be below functions + (r'[-+*/~,<>=&!?%^\[\]\.$]+', Operator), + ('[A-Z]\w*', Name.Constant), + ('@[a-zA-Z_]\w*', Name.Variable.Instance), + ('@@[a-zA-Z_]\w*', Name.Variable.Class), + ('@@?', Operator), + ('[a-zA-Z_]\w*', Name), + # numbers - / checks are necessary to avoid mismarking regexes, + # see comment in RubyLexer + (r'(0[oO]?[0-7]+(?:_[0-7]+)*)(\s*)([/?])?', + bygroups(Number.Oct, Text, Operator)), + (r'(0[xX][0-9A-Fa-f]+(?:_[0-9A-Fa-f]+)*)(\s*)([/?])?', + bygroups(Number.Hex, Text, Operator)), + (r'(0[bB][01]+(?:_[01]+)*)(\s*)([/?])?', + bygroups(Number.Bin, Text, Operator)), + (r'([\d]+(?:_\d+)*)(\s*)([/?])?', + bygroups(Number.Integer, Text, Operator)), + (r'\d+([eE][+-]?[0-9]+)|\d+\.\d+([eE][+-]?[0-9]+)?', Number.Float), + (r'\d+', Number.Integer) + ] + } diff --git a/pygments/lexers/scripting.py b/pygments/lexers/scripting.py new file mode 100644 index 00000000..d5ec2405 --- /dev/null +++ b/pygments/lexers/scripting.py @@ -0,0 +1,272 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.scripting + ~~~~~~~~~~~~~~~~~~~~~~~~~ + + Lexer for scripting and embedded languages. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. 
+""" + +import re + +from pygments.lexer import RegexLexer, include, bygroups, default, combined +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation +from pygments.util import get_bool_opt, get_list_opt, iteritems + +__all__ = ['LuaLexer', 'MoonScriptLexer', 'ChaiscriptLexer'] + + +class LuaLexer(RegexLexer): + """ + For `Lua `_ source code. + + Additional options accepted: + + `func_name_highlighting` + If given and ``True``, highlight builtin function names + (default: ``True``). + `disabled_modules` + If given, must be a list of module names whose function names + should not be highlighted. By default all modules are highlighted. + + To get a list of allowed modules have a look into the + `_luabuiltins` module: + + .. sourcecode:: pycon + + >>> from pygments.lexers._luabuiltins import MODULES + >>> MODULES.keys() + ['string', 'coroutine', 'modules', 'io', 'basic', ...] + """ + + name = 'Lua' + aliases = ['lua'] + filenames = ['*.lua', '*.wlua'] + mimetypes = ['text/x-lua', 'application/x-lua'] + + tokens = { + 'root': [ + # lua allows a file to start with a shebang + (r'#!(.*?)$', Comment.Preproc), + default('base'), + ], + 'base': [ + (r'(?s)--\[(=*)\[.*?\]\1\]', Comment.Multiline), + ('--.*$', Comment.Single), + + (r'(?i)(\d*\.\d+|\d+\.\d*)(e[+-]?\d+)?', Number.Float), + (r'(?i)\d+e[+-]?\d+', Number.Float), + ('(?i)0x[0-9a-f]*', Number.Hex), + (r'\d+', Number.Integer), + + (r'\n', Text), + (r'[^\S\n]', Text), + # multiline strings + (r'(?s)\[(=*)\[.*?\]\1\]', String), + + (r'(==|~=|<=|>=|\.\.\.|\.\.|[=+\-*/%^<>#])', Operator), + (r'[\[\]\{\}\(\)\.,:;]', Punctuation), + (r'(and|or|not)\b', Operator.Word), + + ('(break|do|else|elseif|end|for|if|in|repeat|return|then|until|' + r'while)\b', Keyword), + (r'(local)\b', Keyword.Declaration), + (r'(true|false|nil)\b', Keyword.Constant), + + (r'(function)\b', Keyword, 'funcname'), + + (r'[A-Za-z_]\w*(\.[A-Za-z_]\w*)?', Name), + + ("'", String.Single, combined('stringescape', 'sqs')), + ('"', String.Double, combined('stringescape', 'dqs')) + ], + + 'funcname': [ + (r'\s+', Text), + ('(?:([A-Za-z_]\w*)(\.))?([A-Za-z_]\w*)', + bygroups(Name.Class, Punctuation, Name.Function), '#pop'), + # inline function + ('\(', Punctuation, '#pop'), + ], + + # if I understand correctly, every character is valid in a lua string, + # so this state is only for later corrections + 'string': [ + ('.', String) + ], + + 'stringescape': [ + (r'''\\([abfnrtv\\"']|\d{1,3})''', String.Escape) + ], + + 'sqs': [ + ("'", String, '#pop'), + include('string') + ], + + 'dqs': [ + ('"', String, '#pop'), + include('string') + ] + } + + def __init__(self, **options): + self.func_name_highlighting = get_bool_opt( + options, 'func_name_highlighting', True) + self.disabled_modules = get_list_opt(options, 'disabled_modules', []) + + self._functions = set() + if self.func_name_highlighting: + from pygments.lexers._luabuiltins import MODULES + for mod, func in iteritems(MODULES): + if mod not in self.disabled_modules: + self._functions.update(func) + RegexLexer.__init__(self, **options) + + def get_tokens_unprocessed(self, text): + for index, token, value in \ + RegexLexer.get_tokens_unprocessed(self, text): + if token is Name: + if value in self._functions: + yield index, Name.Builtin, value + continue + elif '.' in value: + a, b = value.split('.') + yield index, Name, a + yield index + len(a), Punctuation, u'.' 
+ yield index + len(a) + 1, Name, b + continue + yield index, token, value + + +class MoonScriptLexer(LuaLexer): + """ + For `MoonScript `_ source code. + + .. versionadded:: 1.5 + """ + + name = "MoonScript" + aliases = ["moon", "moonscript"] + filenames = ["*.moon"] + mimetypes = ['text/x-moonscript', 'application/x-moonscript'] + + tokens = { + 'root': [ + (r'#!(.*?)$', Comment.Preproc), + default('base'), + ], + 'base': [ + ('--.*$', Comment.Single), + (r'(?i)(\d*\.\d+|\d+\.\d*)(e[+-]?\d+)?', Number.Float), + (r'(?i)\d+e[+-]?\d+', Number.Float), + (r'(?i)0x[0-9a-f]*', Number.Hex), + (r'\d+', Number.Integer), + (r'\n', Text), + (r'[^\S\n]+', Text), + (r'(?s)\[(=*)\[.*?\]\1\]', String), + (r'(->|=>)', Name.Function), + (r':[a-zA-Z_]\w*', Name.Variable), + (r'(==|!=|~=|<=|>=|\.\.\.|\.\.|[=+\-*/%^<>#!.\\:])', Operator), + (r'[;,]', Punctuation), + (r'[\[\]\{\}\(\)]', Keyword.Type), + (r'[a-zA-Z_]\w*:', Name.Variable), + (r"(class|extends|if|then|super|do|with|import|export|" + r"while|elseif|return|for|in|from|when|using|else|" + r"and|or|not|switch|break)\b", Keyword), + (r'(true|false|nil)\b', Keyword.Constant), + (r'(and|or|not)\b', Operator.Word), + (r'(self)\b', Name.Builtin.Pseudo), + (r'@@?([a-zA-Z_]\w*)?', Name.Variable.Class), + (r'[A-Z]\w*', Name.Class), # proper name + (r'[A-Za-z_]\w*(\.[A-Za-z_]\w*)?', Name), + ("'", String.Single, combined('stringescape', 'sqs')), + ('"', String.Double, combined('stringescape', 'dqs')) + ], + 'stringescape': [ + (r'''\\([abfnrtv\\"']|\d{1,3})''', String.Escape) + ], + 'sqs': [ + ("'", String.Single, '#pop'), + (".", String) + ], + 'dqs': [ + ('"', String.Double, '#pop'), + (".", String) + ] + } + + def get_tokens_unprocessed(self, text): + # set . as Operator instead of Punctuation + for index, token, value in LuaLexer.get_tokens_unprocessed(self, text): + if token == Punctuation and value == ".": + token = Operator + yield index, token, value + + +class ChaiscriptLexer(RegexLexer): + """ + For `ChaiScript `_ source code. + + .. versionadded:: 2.0 + """ + + name = 'ChaiScript' + aliases = ['chai', 'chaiscript'] + filenames = ['*.chai'] + mimetypes = ['text/x-chaiscript', 'application/x-chaiscript'] + + flags = re.DOTALL + tokens = { + 'commentsandwhitespace': [ + (r'\s+', Text), + (r'//.*?\n', Comment.Single), + (r'/\*.*?\*/', Comment.Multiline), + (r'^\#.*?\n', Comment.Single) + ], + 'slashstartsregex': [ + include('commentsandwhitespace'), + (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/' + r'([gim]+\b|\B)', String.Regex, '#pop'), + (r'(?=/)', Text, ('#pop', 'badregex')), + default('#pop') + ], + 'badregex': [ + ('\n', Text, '#pop') + ], + 'root': [ + include('commentsandwhitespace'), + (r'\n', Text), + (r'[^\S\n]+', Text), + (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|\.\.' 
+ r'(<<|>>>?|==?|!=?|[-<>+*%&\|\^/])=?', Operator, 'slashstartsregex'), + (r'[{(\[;,]', Punctuation, 'slashstartsregex'), + (r'[})\].]', Punctuation), + (r'[=+\-*/]', Operator), + (r'(for|in|while|do|break|return|continue|if|else|' + r'throw|try|catch' + r')\b', Keyword, 'slashstartsregex'), + (r'(var)\b', Keyword.Declaration, 'slashstartsregex'), + (r'(attr|def|fun)\b', Keyword.Reserved), + (r'(true|false)\b', Keyword.Constant), + (r'(eval|throw)\b', Name.Builtin), + (r'`\S+`', Name.Builtin), + (r'[$a-zA-Z_]\w*', Name.Other), + (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float), + (r'0x[0-9a-fA-F]+', Number.Hex), + (r'[0-9]+', Number.Integer), + (r'"', String.Double, 'dqstring'), + (r"'(\\\\|\\'|[^'])*'", String.Single), + ], + 'dqstring': [ + (r'\${[^"}]+?}', String.Iterpol), + (r'\$', String.Double), + (r'\\\\', String.Double), + (r'\\"', String.Double), + (r'[^\\\\\\"$]+', String.Double), + (r'"', String.Double, '#pop'), + ], + } -- cgit v1.2.1 From dc5e347d72ee03f968abad62c6d0be46f64e8f44 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Tue, 16 Sep 2014 20:36:59 +0200 Subject: move Hy to lisp, use words() in python.py --- pygments/lexers/_mapping.py | 2 +- pygments/lexers/lisp.py | 133 +++++++++++++++++++++ pygments/lexers/python.py | 273 +++++++++++++++----------------------------- 3 files changed, 226 insertions(+), 182 deletions(-) create mode 100644 pygments/lexers/lisp.py diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index f3c09f61..9907a9ac 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -153,7 +153,7 @@ LEXERS = { 'HtmlSmartyLexer': ('pygments.lexers.templates', 'HTML+Smarty', ('html+smarty',), (), ('text/html+smarty',)), 'HttpLexer': ('pygments.lexers.text', 'HTTP', ('http',), (), ()), 'HxmlLexer': ('pygments.lexers.text', 'Hxml', ('haxeml', 'hxml'), ('*.hxml',), ()), - 'HyLexer': ('pygments.lexers.python', 'Hy', ('hylang',), ('*.hy',), ('text/x-hy', 'application/x-hy')), + 'HyLexer': ('pygments.lexers.lisp', 'Hy', ('hylang',), ('*.hy',), ('text/x-hy', 'application/x-hy')), 'HybrisLexer': ('pygments.lexers.other', 'Hybris', ('hybris', 'hy'), ('*.hy', '*.hyb'), ('text/x-hybris', 'application/x-hybris')), 'IDLLexer': ('pygments.lexers.math', 'IDL', ('idl',), ('*.pro',), ('text/idl',)), 'IdrisLexer': ('pygments.lexers.functional', 'Idris', ('idris', 'idr'), ('*.idr',), ('text/x-idris',)), diff --git a/pygments/lexers/lisp.py b/pygments/lexers/lisp.py new file mode 100644 index 00000000..9e5b037c --- /dev/null +++ b/pygments/lexers/lisp.py @@ -0,0 +1,133 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.lisp + ~~~~~~~~~~~~~~~~~~~~ + + Lexers for Lispy languages. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import re + +from pygments.lexer import RegexLexer, include, bygroups +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation + +from pygments.lexers.python import PythonLexer + +__all__ = ['HyLexer'] + + +class HyLexer(RegexLexer): + """ + Lexer for `Hy `_ source code. + + .. 
versionadded:: 2.0 + """ + name = 'Hy' + aliases = ['hylang'] + filenames = ['*.hy'] + mimetypes = ['text/x-hy', 'application/x-hy'] + + special_forms = [ + 'cond', 'for', '->', '->>', 'car', + 'cdr', 'first', 'rest', 'let', 'when', 'unless', + 'import', 'do', 'progn', 'get', 'slice', 'assoc', 'with-decorator', + ',', 'list_comp', 'kwapply', '~', 'is', 'in', 'is-not', 'not-in', + 'quasiquote', 'unquote', 'unquote-splice', 'quote', '|', '<<=', '>>=', + 'foreach', 'while', + 'eval-and-compile', 'eval-when-compile' + ] + + declarations = [ + 'def', 'defn', 'defun', 'defmacro', 'defclass', 'lambda', 'fn', 'setv' + ] + + hy_builtins = [] + + hy_core = [ + 'cycle', 'dec', 'distinct', 'drop', 'even?', 'filter', 'inc', + 'instance?', 'iterable?', 'iterate', 'iterator?', 'neg?', + 'none?', 'nth', 'numeric?', 'odd?', 'pos?', 'remove', 'repeat', + 'repeatedly', 'take', 'take_nth', 'take_while', 'zero?' + ] + + builtins = hy_builtins + hy_core + + # valid names for identifiers + # well, names can only not consist fully of numbers + # but this should be good enough for now + valid_name = r'(?!#)[\w!$%*+<=>?/.#-]+' + + def _multi_escape(entries): + return '(%s)' % ('|'.join(re.escape(entry) + ' ' for entry in entries)) + + tokens = { + 'root': [ + # the comments - always starting with semicolon + # and going to the end of the line + (r';.*$', Comment.Single), + + # whitespaces - usually not relevant + (r'[,\s]+', Text), + + # numbers + (r'-?\d+\.\d+', Number.Float), + (r'-?\d+', Number.Integer), + (r'0[0-7]+j?', Number.Oct), + (r'0[xX][a-fA-F0-9]+', Number.Hex), + + # strings, symbols and characters + (r'"(\\\\|\\"|[^"])*"', String), + (r"'" + valid_name, String.Symbol), + (r"\\(.|[a-z]+)", String.Char), + (r'^(\s*)([rRuU]{,2}"""(?:.|\n)*?""")', bygroups(Text, String.Doc)), + (r"^(\s*)([rRuU]{,2}'''(?:.|\n)*?''')", bygroups(Text, String.Doc)), + + # keywords + (r'::?' + valid_name, String.Symbol), + + # special operators + (r'~@|[`\'#^~&@]', Operator), + + include('py-keywords'), + include('py-builtins'), + + # highlight the special forms + (_multi_escape(special_forms), Keyword), + + # Technically, only the special forms are 'keywords'. The problem + # is that only treating them as keywords means that things like + # 'defn' and 'ns' need to be highlighted as builtins. This is ugly + # and weird for most styles. So, as a compromise we're going to + # highlight them as Keyword.Declarations. + (_multi_escape(declarations), Keyword.Declaration), + + # highlight the builtins + (_multi_escape(builtins), Name.Builtin), + + # the remaining functions + (r'(?<=\()' + valid_name, Name.Function), + + # find the remaining variables + (valid_name, Name.Variable), + + # Hy accepts vector notation + (r'(\[|\])', Punctuation), + + # Hy accepts map notation + (r'(\{|\})', Punctuation), + + # the famous parentheses! 
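+            # (plain parentheses delimit forms; matched as Punctuation,
+            #  just like the vector and map brackets above)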
+ (r'(\(|\))', Punctuation), + + ], + 'py-keywords': PythonLexer.tokens['keywords'], + 'py-builtins': PythonLexer.tokens['builtins'], + } + + def analyse_text(text): + if '(import ' in text or '(defn ' in text: + return 0.9 diff --git a/pygments/lexers/python.py b/pygments/lexers/python.py index 2a29aadb..db747d2e 100644 --- a/pygments/lexers/python.py +++ b/pygments/lexers/python.py @@ -20,7 +20,7 @@ from pygments import unistring as uni __all__ = ['PythonLexer', 'PythonConsoleLexer', 'PythonTracebackLexer', 'Python3Lexer', 'Python3TracebackLexer', 'CythonLexer', - 'HyLexer', 'DgLexer'] + 'DgLexer'] line_re = re.compile('.*?\n') @@ -68,35 +68,46 @@ class PythonLexer(RegexLexer): include('numbers'), ], 'keywords': [ - (r'(assert|break|continue|del|elif|else|except|exec|' - r'finally|for|global|if|lambda|pass|print|raise|' - r'return|try|while|yield(\s+from)?|as|with)\b', Keyword), + (words(( + 'assert', 'break', 'continue', 'del', 'elif', 'else', 'except', + 'exec', 'finally', 'for', 'global', 'if', 'lambda', 'pass', + 'print', 'raise', 'return', 'try', 'while', 'yield', + 'yield from', 'as', 'with'), suffix=r'\b'), + Keyword), ], 'builtins': [ - (r'(?`_ source code. - - .. versionadded:: 2.0 - """ - name = 'Hy' - aliases = ['hylang'] - filenames = ['*.hy'] - mimetypes = ['text/x-hy', 'application/x-hy'] - - special_forms = [ - 'cond', 'for', '->', '->>', 'car', - 'cdr', 'first', 'rest', 'let', 'when', 'unless', - 'import', 'do', 'progn', 'get', 'slice', 'assoc', 'with-decorator', - ',', 'list_comp', 'kwapply', '~', 'is', 'in', 'is-not', 'not-in', - 'quasiquote', 'unquote', 'unquote-splice', 'quote', '|', '<<=', '>>=', - 'foreach', 'while', - 'eval-and-compile', 'eval-when-compile' - ] - - declarations = [ - 'def', 'defn', 'defun', 'defmacro', 'defclass', 'lambda', 'fn', 'setv' - ] - - hy_builtins = [] - - hy_core = [ - 'cycle', 'dec', 'distinct', 'drop', 'even?', 'filter', 'inc', - 'instance?', 'iterable?', 'iterate', 'iterator?', 'neg?', - 'none?', 'nth', 'numeric?', 'odd?', 'pos?', 'remove', 'repeat', - 'repeatedly', 'take', 'take_nth', 'take_while', 'zero?' - ] - - builtins = hy_builtins + hy_core - - # valid names for identifiers - # well, names can only not consist fully of numbers - # but this should be good enough for now - valid_name = r'(?!#)[\w!$%*+<=>?/.#-]+' - - def _multi_escape(entries): - return '(%s)' % ('|'.join(re.escape(entry) + ' ' for entry in entries)) - - tokens = { - 'root': [ - # the comments - always starting with semicolon - # and going to the end of the line - (r';.*$', Comment.Single), - - # whitespaces - usually not relevant - (r'[,\s]+', Text), - - # numbers - (r'-?\d+\.\d+', Number.Float), - (r'-?\d+', Number.Integer), - (r'0[0-7]+j?', Number.Oct), - (r'0[xX][a-fA-F0-9]+', Number.Hex), - - # strings, symbols and characters - (r'"(\\\\|\\"|[^"])*"', String), - (r"'" + valid_name, String.Symbol), - (r"\\(.|[a-z]+)", String.Char), - (r'^(\s*)([rRuU]{,2}"""(?:.|\n)*?""")', bygroups(Text, String.Doc)), - (r"^(\s*)([rRuU]{,2}'''(?:.|\n)*?''')", bygroups(Text, String.Doc)), - - # keywords - (r'::?' + valid_name, String.Symbol), - - # special operators - (r'~@|[`\'#^~&@]', Operator), - - include('py-keywords'), - include('py-builtins'), - - # highlight the special forms - (_multi_escape(special_forms), Keyword), - - # Technically, only the special forms are 'keywords'. The problem - # is that only treating them as keywords means that things like - # 'defn' and 'ns' need to be highlighted as builtins. This is ugly - # and weird for most styles. 
So, as a compromise we're going to - # highlight them as Keyword.Declarations. - (_multi_escape(declarations), Keyword.Declaration), - - # highlight the builtins - (_multi_escape(builtins), Name.Builtin), - - # the remaining functions - (r'(?<=\()' + valid_name, Name.Function), - - # find the remaining variables - (valid_name, Name.Variable), - - # Hy accepts vector notation - (r'(\[|\])', Punctuation), - - # Hy accepts map notation - (r'(\{|\})', Punctuation), - - # the famous parentheses! - (r'(\(|\))', Punctuation), - - ], - 'py-keywords': PythonLexer.tokens['keywords'], - 'py-builtins': PythonLexer.tokens['builtins'], - } - - def analyse_text(text): - if '(import ' in text or '(defn ' in text: - return 0.9 - - class DgLexer(RegexLexer): """ Lexer for `dg `_, @@ -766,17 +672,22 @@ class DgLexer(RegexLexer): (r'\b(and|in|is|or|where)\b', Operator.Word), (r'[!$%&*+\-./:<-@\\^|~;,]+', Operator), - (r"(? Date: Fri, 19 Sep 2014 17:17:59 +0200 Subject: More application of words(). --- pygments/lexers/_mapping.py | 2 +- pygments/lexers/agile.py | 3 +- pygments/lexers/c_like/d.py | 11 +- pygments/lexers/lisp.py | 20 ++- pygments/lexers/misc/factor.py | 332 ++++++++++++++++++++--------------------- pygments/lexers/perl.py | 66 ++++---- pygments/lexers/ruby.py | 84 ++++++----- pygments/lexers/scripting.py | 12 +- 8 files changed, 272 insertions(+), 258 deletions(-) diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index 9907a9ac..068edc4f 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -199,7 +199,7 @@ LEXERS = { 'LiveScriptLexer': ('pygments.lexers.web', 'LiveScript', ('live-script', 'livescript'), ('*.ls',), ('text/livescript',)), 'LlvmLexer': ('pygments.lexers.asm', 'LLVM', ('llvm',), ('*.ll',), ('text/x-llvm',)), 'LogosLexer': ('pygments.lexers.c_like.objective', 'Logos', ('logos',), ('*.x', '*.xi', '*.xm', '*.xmi'), ('text/x-logos',)), - 'LogtalkLexer': ('pygments.lexers.other', 'Logtalk', ('logtalk',), ('*.lgt',), ('text/x-logtalk',)), + 'LogtalkLexer': ('pygments.lexers.other', 'Logtalk', ('logtalk',), ('*.lgt', '*.logtalk'), ('text/x-logtalk',)), 'LuaLexer': ('pygments.lexers.scripting', 'Lua', ('lua',), ('*.lua', '*.wlua'), ('text/x-lua', 'application/x-lua')), 'MOOCodeLexer': ('pygments.lexers.other', 'MOOCode', ('moocode', 'moo'), ('*.moo',), ('text/x-moocode',)), 'MakefileLexer': ('pygments.lexers.text', 'Makefile', ('make', 'makefile', 'mf', 'bsdmake'), ('*.mak', '*.mk', 'Makefile', 'makefile', 'Makefile.*', 'GNUmakefile'), ('text/x-makefile',)), diff --git a/pygments/lexers/agile.py b/pygments/lexers/agile.py index e17d82ae..c90d3847 100644 --- a/pygments/lexers/agile.py +++ b/pygments/lexers/agile.py @@ -15,9 +15,10 @@ from pygments.lexers.python import PythonLexer, PythonConsoleLexer, \ PythonTracebackLexer, Python3Lexer, Python3TracebackLexer, DgLexer from pygments.lexers.ruby import RubyLexer, RubyConsoleLexer, FancyLexer from pygments.lexers.perl import PerlLexer, Perl6Lexer -from pygments.lexers.lua import LuaLexer, MoonScriptLexer from pygments.lexers.c_like.d import CrocLexer, MiniDLexer from pygments.lexers.misc.iolang import IoLexer from pygments.lexers.misc.tcl import TclLexer +from pygments.lexers.misc.factor import FactorLexer +from pygments.lexers.scripting import LuaLexer, MoonScriptLexer __all__ = [] diff --git a/pygments/lexers/c_like/d.py b/pygments/lexers/c_like/d.py index e2440f37..d629673c 100644 --- a/pygments/lexers/c_like/d.py +++ b/pygments/lexers/c_like/d.py @@ -196,10 +196,13 @@ class 
CrocLexer(RegexLexer): (r'//(.*?)\n', Comment.Single), (r'/\*', Comment.Multiline, 'nestedcomment'), # Keywords - (r'(as|assert|break|case|catch|class|continue|default' - r'|do|else|finally|for|foreach|function|global|namespace' - r'|if|import|in|is|local|module|return|scope|super|switch' - r'|this|throw|try|vararg|while|with|yield)\b', Keyword), + (words(( + 'as', 'assert', 'break', 'case', 'catch', 'class', 'continue', + 'default', 'do', 'else', 'finally', 'for', 'foreach', 'function', + 'global', 'namespace', 'if', 'import', 'in', 'is', 'local', + 'module', 'return', 'scope', 'super', 'switch', 'this', 'throw', + 'try', 'vararg', 'while', 'with', 'yield'), suffix=r'\b'), + Keyword), (r'(false|true|null)\b', Keyword.Constant), # FloatLiteral (r'([0-9][0-9_]*)(?=[.eE])(\.[0-9][0-9_]*)?([eE][+\-]?[0-9_]+)?', diff --git a/pygments/lexers/lisp.py b/pygments/lexers/lisp.py index 9e5b037c..f09f2c8e 100644 --- a/pygments/lexers/lisp.py +++ b/pygments/lexers/lisp.py @@ -9,9 +9,7 @@ :license: BSD, see LICENSE for details. """ -import re - -from pygments.lexer import RegexLexer, include, bygroups +from pygments.lexer import RegexLexer, include, bygroups, words from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ Number, Punctuation @@ -31,7 +29,7 @@ class HyLexer(RegexLexer): filenames = ['*.hy'] mimetypes = ['text/x-hy', 'application/x-hy'] - special_forms = [ + special_forms = ( 'cond', 'for', '->', '->>', 'car', 'cdr', 'first', 'rest', 'let', 'when', 'unless', 'import', 'do', 'progn', 'get', 'slice', 'assoc', 'with-decorator', @@ -39,20 +37,20 @@ class HyLexer(RegexLexer): 'quasiquote', 'unquote', 'unquote-splice', 'quote', '|', '<<=', '>>=', 'foreach', 'while', 'eval-and-compile', 'eval-when-compile' - ] + ) - declarations = [ + declarations = ( 'def', 'defn', 'defun', 'defmacro', 'defclass', 'lambda', 'fn', 'setv' - ] + ) - hy_builtins = [] + hy_builtins = () - hy_core = [ + hy_core = ( 'cycle', 'dec', 'distinct', 'drop', 'even?', 'filter', 'inc', 'instance?', 'iterable?', 'iterate', 'iterator?', 'neg?', 'none?', 'nth', 'numeric?', 'odd?', 'pos?', 'remove', 'repeat', 'repeatedly', 'take', 'take_nth', 'take_while', 'zero?' 
- ] + ) builtins = hy_builtins + hy_core @@ -62,7 +60,7 @@ class HyLexer(RegexLexer): valid_name = r'(?!#)[\w!$%*+<=>?/.#-]+' def _multi_escape(entries): - return '(%s)' % ('|'.join(re.escape(entry) + ' ' for entry in entries)) + return words(entries, suffix=' ') tokens = { 'root': [ diff --git a/pygments/lexers/misc/factor.py b/pygments/lexers/misc/factor.py index c51a45a6..04051976 100644 --- a/pygments/lexers/misc/factor.py +++ b/pygments/lexers/misc/factor.py @@ -11,7 +11,7 @@ import re -from pygments.lexer import RegexLexer, bygroups, default +from pygments.lexer import RegexLexer, bygroups, default, words from pygments.token import Text, Comment, Keyword, Name, String, Number __all__ = ['FactorLexer'] @@ -30,174 +30,168 @@ class FactorLexer(RegexLexer): flags = re.MULTILINE | re.UNICODE - builtin_kernel = ( - r'(?:-rot|2bi|2bi@|2bi\*|2curry|2dip|2drop|2dup|2keep|2nip|' - r'2over|2tri|2tri@|2tri\*|3bi|3curry|3dip|3drop|3dup|3keep|' - r'3tri|4dip|4drop|4dup|4keep||=|>boolean|\(clone\)|' - r'\?|\?execute|\?if|and|assert|assert=|assert\?|bi|bi-curry|' - r'bi-curry@|bi-curry\*|bi@|bi\*|boa|boolean|boolean\?|both\?|' - r'build|call|callstack|callstack>array|callstack\?|clear|clone|' - r'compose|compose\?|curry|curry\?|datastack|die|dip|do|drop|' - r'dup|dupd|either\?|eq\?|equal\?|execute|hashcode|hashcode\*|' - r'identity-hashcode|identity-tuple|identity-tuple\?|if|if\*|' - r'keep|loop|most|new|nip|not|null|object|or|over|pick|prepose|' - r'retainstack|rot|same\?|swap|swapd|throw|tri|tri-curry|' - r'tri-curry@|tri-curry\*|tri@|tri\*|tuple|tuple\?|unless|' - r'unless\*|until|when|when\*|while|with|wrapper|wrapper\?|xor)\s' - ) - - builtin_assocs = ( - r'(?:2cache||>alist|\?at|\?of|assoc|assoc-all\?|' - r'assoc-any\?|assoc-clone-like|assoc-combine|assoc-diff|' - r'assoc-diff!|assoc-differ|assoc-each|assoc-empty\?|' - r'assoc-filter|assoc-filter!|assoc-filter-as|assoc-find|' - r'assoc-hashcode|assoc-intersect|assoc-like|assoc-map|' - r'assoc-map-as|assoc-partition|assoc-refine|assoc-size|' - r'assoc-stack|assoc-subset\?|assoc-union|assoc-union!|' - r'assoc=|assoc>map|assoc\?|at|at+|at\*|cache|change-at|' - r'clear-assoc|delete-at|delete-at\*|enum|enum\?|extract-keys|' - r'inc-at|key\?|keys|map>assoc|maybe-set-at|new-assoc|of|' - r'push-at|rename-at|set-at|sift-keys|sift-values|substitute|' - r'unzip|value-at|value-at\*|value\?|values|zip)\s' - ) - - builtin_combinators = ( - r'(?:2cleave|2cleave>quot|3cleave|3cleave>quot|4cleave|' - r'4cleave>quot|alist>quot|call-effect|case|case-find|' - r'case>quot|cleave|cleave>quot|cond|cond>quot|deep-spread>quot|' - r'execute-effect|linear-case-quot|no-case|no-case\?|no-cond|' - r'no-cond\?|recursive-hashcode|shallow-spread>quot|spread|' - r'to-fixed-point|wrong-values|wrong-values\?)\s' - ) - - builtin_math = ( - r'(?:-|/|/f|/i|/mod|2/|2\^|<|<=||>|>=|>bignum|' - r'>fixnum|>float|>integer|\(all-integers\?\)|' - r'\(each-integer\)|\(find-integer\)|\*|\+|\?1\+|' - r'abs|align|all-integers\?|bignum|bignum\?|bit\?|bitand|' - r'bitnot|bitor|bits>double|bits>float|bitxor|complex|' - r'complex\?|denominator|double>bits|each-integer|even\?|' - r'find-integer|find-last-integer|fixnum|fixnum\?|float|' - r'float>bits|float\?|fp-bitwise=|fp-infinity\?|fp-nan-payload|' - r'fp-nan\?|fp-qnan\?|fp-sign|fp-snan\?|fp-special\?|' - r'if-zero|imaginary-part|integer|integer>fixnum|' - r'integer>fixnum-strict|integer\?|log2|log2-expects-positive|' - r'log2-expects-positive\?|mod|neg|neg\?|next-float|' - r'next-power-of-2|number|number=|number\?|numerator|odd\?|' - 
r'out-of-fixnum-range|out-of-fixnum-range\?|power-of-2\?|' - r'prev-float|ratio|ratio\?|rational|rational\?|real|' - r'real-part|real\?|recip|rem|sgn|shift|sq|times|u<|u<=|u>|' - r'u>=|unless-zero|unordered\?|when-zero|zero\?)\s' - ) - - builtin_sequences = ( - r'(?:1sequence|2all\?|2each|2map|2map-as|2map-reduce|2reduce|' - r'2selector|2sequence|3append|3append-as|3each|3map|3map-as|' - r'3sequence|4sequence||||\?first|' - r'\?last|\?nth|\?second|\?set-nth|accumulate|accumulate!|' - r'accumulate-as|all\?|any\?|append|append!|append-as|' - r'assert-sequence|assert-sequence=|assert-sequence\?|' - r'binary-reduce|bounds-check|bounds-check\?|bounds-error|' - r'bounds-error\?|but-last|but-last-slice|cartesian-each|' - r'cartesian-map|cartesian-product|change-nth|check-slice|' - r'check-slice-error|clone-like|collapse-slice|collector|' - r'collector-for|concat|concat-as|copy|count|cut|cut-slice|' - r'cut\*|delete-all|delete-slice|drop-prefix|each|each-from|' - r'each-index|empty\?|exchange|filter|filter!|filter-as|find|' - r'find-from|find-index|find-index-from|find-last|find-last-from|' - r'first|first2|first3|first4|flip|follow|fourth|glue|halves|' - r'harvest|head|head-slice|head-slice\*|head\*|head\?|' - r'if-empty|immutable|immutable-sequence|immutable-sequence\?|' - r'immutable\?|index|index-from|indices|infimum|infimum-by|' - r'insert-nth|interleave|iota|iota-tuple|iota-tuple\?|join|' - r'join-as|last|last-index|last-index-from|length|lengthen|' - r'like|longer|longer\?|longest|map|map!|map-as|map-find|' - r'map-find-last|map-index|map-integers|map-reduce|map-sum|' - r'max-length|member-eq\?|member\?|midpoint@|min-length|' - r'mismatch|move|new-like|new-resizable|new-sequence|' - r'non-negative-integer-expected|non-negative-integer-expected\?|' - r'nth|nths|pad-head|pad-tail|padding|partition|pop|pop\*|' - r'prefix|prepend|prepend-as|produce|produce-as|product|push|' - r'push-all|push-either|push-if|reduce|reduce-index|remove|' - r'remove!|remove-eq|remove-eq!|remove-nth|remove-nth!|repetition|' - r'repetition\?|replace-slice|replicate|replicate-as|rest|' - r'rest-slice|reverse|reverse!|reversed|reversed\?|second|' - r'selector|selector-for|sequence|sequence-hashcode|sequence=|' - r'sequence\?|set-first|set-fourth|set-last|set-length|set-nth|' - r'set-second|set-third|short|shorten|shorter|shorter\?|' - r'shortest|sift|slice|slice-error|slice-error\?|slice\?|' - r'snip|snip-slice|start|start\*|subseq|subseq\?|suffix|' - r'suffix!|sum|sum-lengths|supremum|supremum-by|surround|tail|' - r'tail-slice|tail-slice\*|tail\*|tail\?|third|trim|' - r'trim-head|trim-head-slice|trim-slice|trim-tail|trim-tail-slice|' - r'unclip|unclip-last|unclip-last-slice|unclip-slice|unless-empty|' - r'virtual-exemplar|virtual-sequence|virtual-sequence\?|virtual@|' - r'when-empty)\s' - ) - - builtin_namespaces = ( - r'(?:\+@|change|change-global|counter|dec|get|get-global|' - r'global|inc|init-namespaces|initialize|is-global|make-assoc|' - r'namespace|namestack|off|on|set|set-global|set-namestack|' - r'toggle|with-global|with-scope|with-variable|with-variables)\s' - ) - - builtin_arrays = ( - r'(?:1array|2array|3array|4array||>array|array|array\?|' - r'pair|pair\?|resize-array)\s' - ) - - builtin_io = ( - r'(?:\(each-stream-block-slice\)|\(each-stream-block\)|' - r'\(stream-contents-by-block\)|\(stream-contents-by-element\)|' - r'\(stream-contents-by-length-or-block\)|' - r'\(stream-contents-by-length\)|\+byte\+|\+character\+|' - r'bad-seek-type|bad-seek-type\?|bl|contents|each-block|' - 
r'each-block-size|each-block-slice|each-line|each-morsel|' - r'each-stream-block|each-stream-block-slice|each-stream-line|' - r'error-stream|flush|input-stream|input-stream\?|' - r'invalid-read-buffer|invalid-read-buffer\?|lines|nl|' - r'output-stream|output-stream\?|print|read|read-into|' - r'read-partial|read-partial-into|read-until|read1|readln|' - r'seek-absolute|seek-absolute\?|seek-end|seek-end\?|' - r'seek-input|seek-output|seek-relative|seek-relative\?|' - r'stream-bl|stream-contents|stream-contents\*|stream-copy|' - r'stream-copy\*|stream-element-type|stream-flush|' - r'stream-length|stream-lines|stream-nl|stream-print|' - r'stream-read|stream-read-into|stream-read-partial|' - r'stream-read-partial-into|stream-read-partial-unsafe|' - r'stream-read-unsafe|stream-read-until|stream-read1|' - r'stream-readln|stream-seek|stream-seekable\?|stream-tell|' - r'stream-write|stream-write1|tell-input|tell-output|' - r'with-error-stream|with-error-stream\*|with-error>output|' - r'with-input-output\+error-streams|' - r'with-input-output\+error-streams\*|with-input-stream|' - r'with-input-stream\*|with-output-stream|with-output-stream\*|' - r'with-output>error|with-output\+error-stream|' - r'with-output\+error-stream\*|with-streams|with-streams\*|' - r'write|write1)\s' - ) - - builtin_strings = ( - r'(?:1string||>string|resize-string|string|string\?)\s' - ) - - builtin_vectors = ( - r'(?:1vector||>vector|\?push|vector|vector\?)\s' - ) - - builtin_continuations = ( - r'(?:|||attempt-all|' - r'attempt-all-error|attempt-all-error\?|callback-error-hook|' - r'callcc0|callcc1|cleanup|compute-restarts|condition|' - r'condition\?|continuation|continuation\?|continue|' - r'continue-restart|continue-with|current-continuation|' - r'error|error-continuation|error-in-thread|error-thread|' - r'ifcc|ignore-errors|in-callback\?|original-error|recover|' - r'restart|restart\?|restarts|rethrow|rethrow-restarts|' - r'return|return-continuation|thread-error-hook|throw-continue|' - r'throw-restarts|with-datastack|with-return)\s' - ) + builtin_kernel = words(( + '-rot', '2bi', '2bi@', '2bi*', '2curry', '2dip', '2drop', '2dup', '2keep', '2nip', + '2over', '2tri', '2tri@', '2tri*', '3bi', '3curry', '3dip', '3drop', '3dup', '3keep', + '3tri', '4dip', '4drop', '4dup', '4keep', '', '=', '>boolean', 'clone', + '?', '?execute', '?if', 'and', 'assert', 'assert=', 'assert?', 'bi', 'bi-curry', + 'bi-curry@', 'bi-curry*', 'bi@', 'bi*', 'boa', 'boolean', 'boolean?', 'both?', + 'build', 'call', 'callstack', 'callstack>array', 'callstack?', 'clear', '(clone)', + 'compose', 'compose?', 'curry', 'curry?', 'datastack', 'die', 'dip', 'do', 'drop', + 'dup', 'dupd', 'either?', 'eq?', 'equal?', 'execute', 'hashcode', 'hashcode*', + 'identity-hashcode', 'identity-tuple', 'identity-tuple?', 'if', 'if*', + 'keep', 'loop', 'most', 'new', 'nip', 'not', 'null', 'object', 'or', 'over', + 'pick', 'prepose', 'retainstack', 'rot', 'same?', 'swap', 'swapd', 'throw', + 'tri', 'tri-curry', 'tri-curry@', 'tri-curry*', 'tri@', 'tri*', 'tuple', + 'tuple?', 'unless', 'unless*', 'until', 'when', 'when*', 'while', 'with', + 'wrapper', 'wrapper?', 'xor'), suffix=r'\s') + + builtin_assocs = words(( + '2cache', '', '>alist', '?at', '?of', 'assoc', 'assoc-all?', + 'assoc-any?', 'assoc-clone-like', 'assoc-combine', 'assoc-diff', + 'assoc-diff!', 'assoc-differ', 'assoc-each', 'assoc-empty?', + 'assoc-filter', 'assoc-filter!', 'assoc-filter-as', 'assoc-find', + 'assoc-hashcode', 'assoc-intersect', 'assoc-like', 'assoc-map', + 'assoc-map-as', 'assoc-partition', 
'assoc-refine', 'assoc-size', + 'assoc-stack', 'assoc-subset?', 'assoc-union', 'assoc-union!', + 'assoc=', 'assoc>map', 'assoc?', 'at', 'at+', 'at*', 'cache', 'change-at', + 'clear-assoc', 'delete-at', 'delete-at*', 'enum', 'enum?', 'extract-keys', + 'inc-at', 'key?', 'keys', 'map>assoc', 'maybe-set-at', 'new-assoc', 'of', + 'push-at', 'rename-at', 'set-at', 'sift-keys', 'sift-values', 'substitute', + 'unzip', 'value-at', 'value-at*', 'value?', 'values', 'zip'), suffix=r'\s') + + builtin_combinators = words(( + '2cleave', '2cleave>quot', '3cleave', '3cleave>quot', '4cleave', + '4cleave>quot', 'alist>quot', 'call-effect', 'case', 'case-find', + 'case>quot', 'cleave', 'cleave>quot', 'cond', 'cond>quot', 'deep-spread>quot', + 'execute-effect', 'linear-case-quot', 'no-case', 'no-case?', 'no-cond', + 'no-cond?', 'recursive-hashcode', 'shallow-spread>quot', 'spread', + 'to-fixed-point', 'wrong-values', 'wrong-values?'), suffix=r'\s') + + builtin_math = words(( + '-', '/', '/f', '/i', '/mod', '2/', '2^', '<', '<=', '', '>', + '>=', '>bignum', '>fixnum', '>float', '>integer', '(all-integers?)', + '(each-integer)', '(find-integer)', '*', '+', '?1+', + 'abs', 'align', 'all-integers?', 'bignum', 'bignum?', 'bit?', 'bitand', + 'bitnot', 'bitor', 'bits>double', 'bits>float', 'bitxor', 'complex', + 'complex?', 'denominator', 'double>bits', 'each-integer', 'even?', + 'find-integer', 'find-last-integer', 'fixnum', 'fixnum?', 'float', + 'float>bits', 'float?', 'fp-bitwise=', 'fp-infinity?', 'fp-nan-payload', + 'fp-nan?', 'fp-qnan?', 'fp-sign', 'fp-snan?', 'fp-special?', + 'if-zero', 'imaginary-part', 'integer', 'integer>fixnum', + 'integer>fixnum-strict', 'integer?', 'log2', 'log2-expects-positive', + 'log2-expects-positive?', 'mod', 'neg', 'neg?', 'next-float', + 'next-power-of-2', 'number', 'number=', 'number?', 'numerator', 'odd?', + 'out-of-fixnum-range', 'out-of-fixnum-range?', 'power-of-2?', + 'prev-float', 'ratio', 'ratio?', 'rational', 'rational?', 'real', + 'real-part', 'real?', 'recip', 'rem', 'sgn', 'shift', 'sq', 'times', + 'u<', 'u<=', 'u>', 'u>=', 'unless-zero', 'unordered?', 'when-zero', + 'zero?'), suffix=r'\s') + + builtin_sequences = words(( + '1sequence', '2all?', '2each', '2map', '2map-as', '2map-reduce', '2reduce', + '2selector', '2sequence', '3append', '3append-as', '3each', '3map', '3map-as', + '3sequence', '4sequence', '', '', '', '?first', + '?last', '?nth', '?second', '?set-nth', 'accumulate', 'accumulate!', + 'accumulate-as', 'all?', 'any?', 'append', 'append!', 'append-as', + 'assert-sequence', 'assert-sequence=', 'assert-sequence?', + 'binary-reduce', 'bounds-check', 'bounds-check?', 'bounds-error', + 'bounds-error?', 'but-last', 'but-last-slice', 'cartesian-each', + 'cartesian-map', 'cartesian-product', 'change-nth', 'check-slice', + 'check-slice-error', 'clone-like', 'collapse-slice', 'collector', + 'collector-for', 'concat', 'concat-as', 'copy', 'count', 'cut', 'cut-slice', + 'cut*', 'delete-all', 'delete-slice', 'drop-prefix', 'each', 'each-from', + 'each-index', 'empty?', 'exchange', 'filter', 'filter!', 'filter-as', 'find', + 'find-from', 'find-index', 'find-index-from', 'find-last', 'find-last-from', + 'first', 'first2', 'first3', 'first4', 'flip', 'follow', 'fourth', 'glue', 'halves', + 'harvest', 'head', 'head-slice', 'head-slice*', 'head*', 'head?', + 'if-empty', 'immutable', 'immutable-sequence', 'immutable-sequence?', + 'immutable?', 'index', 'index-from', 'indices', 'infimum', 'infimum-by', + 'insert-nth', 'interleave', 'iota', 'iota-tuple', 'iota-tuple?', 'join', 
+ 'join-as', 'last', 'last-index', 'last-index-from', 'length', 'lengthen', + 'like', 'longer', 'longer?', 'longest', 'map', 'map!', 'map-as', 'map-find', + 'map-find-last', 'map-index', 'map-integers', 'map-reduce', 'map-sum', + 'max-length', 'member-eq?', 'member?', 'midpoint@', 'min-length', + 'mismatch', 'move', 'new-like', 'new-resizable', 'new-sequence', + 'non-negative-integer-expected', 'non-negative-integer-expected?', + 'nth', 'nths', 'pad-head', 'pad-tail', 'padding', 'partition', 'pop', 'pop*', + 'prefix', 'prepend', 'prepend-as', 'produce', 'produce-as', 'product', 'push', + 'push-all', 'push-either', 'push-if', 'reduce', 'reduce-index', 'remove', + 'remove!', 'remove-eq', 'remove-eq!', 'remove-nth', 'remove-nth!', 'repetition', + 'repetition?', 'replace-slice', 'replicate', 'replicate-as', 'rest', + 'rest-slice', 'reverse', 'reverse!', 'reversed', 'reversed?', 'second', + 'selector', 'selector-for', 'sequence', 'sequence-hashcode', 'sequence=', + 'sequence?', 'set-first', 'set-fourth', 'set-last', 'set-length', 'set-nth', + 'set-second', 'set-third', 'short', 'shorten', 'shorter', 'shorter?', + 'shortest', 'sift', 'slice', 'slice-error', 'slice-error?', 'slice?', + 'snip', 'snip-slice', 'start', 'start*', 'subseq', 'subseq?', 'suffix', + 'suffix!', 'sum', 'sum-lengths', 'supremum', 'supremum-by', 'surround', 'tail', + 'tail-slice', 'tail-slice*', 'tail*', 'tail?', 'third', 'trim', + 'trim-head', 'trim-head-slice', 'trim-slice', 'trim-tail', 'trim-tail-slice', + 'unclip', 'unclip-last', 'unclip-last-slice', 'unclip-slice', 'unless-empty', + 'virtual-exemplar', 'virtual-sequence', 'virtual-sequence?', 'virtual@', + 'when-empty'), suffix=r'\s') + + builtin_namespaces = words(( + '+@', 'change', 'change-global', 'counter', 'dec', 'get', 'get-global', + 'global', 'inc', 'init-namespaces', 'initialize', 'is-global', 'make-assoc', + 'namespace', 'namestack', 'off', 'on', 'set', 'set-global', 'set-namestack', + 'toggle', 'with-global', 'with-scope', 'with-variable', 'with-variables'), + suffix=r'\s') + + builtin_arrays = words(( + '1array', '2array', '3array', '4array', '', '>array', 'array', + 'array?', 'pair', 'pair?', 'resize-array'), suffix=r'\s') + + builtin_io = words(( + '(each-stream-block-slice)', '(each-stream-block)', + '(stream-contents-by-block)', '(stream-contents-by-element)', + '(stream-contents-by-length-or-block)', + '(stream-contents-by-length)', '+byte+', '+character+', + 'bad-seek-type', 'bad-seek-type?', 'bl', 'contents', 'each-block', + 'each-block-size', 'each-block-slice', 'each-line', 'each-morsel', + 'each-stream-block', 'each-stream-block-slice', 'each-stream-line', + 'error-stream', 'flush', 'input-stream', 'input-stream?', + 'invalid-read-buffer', 'invalid-read-buffer?', 'lines', 'nl', + 'output-stream', 'output-stream?', 'print', 'read', 'read-into', + 'read-partial', 'read-partial-into', 'read-until', 'read1', 'readln', + 'seek-absolute', 'seek-absolute?', 'seek-end', 'seek-end?', + 'seek-input', 'seek-output', 'seek-relative', 'seek-relative?', + 'stream-bl', 'stream-contents', 'stream-contents*', 'stream-copy', + 'stream-copy*', 'stream-element-type', 'stream-flush', + 'stream-length', 'stream-lines', 'stream-nl', 'stream-print', + 'stream-read', 'stream-read-into', 'stream-read-partial', + 'stream-read-partial-into', 'stream-read-partial-unsafe', + 'stream-read-unsafe', 'stream-read-until', 'stream-read1', + 'stream-readln', 'stream-seek', 'stream-seekable?', 'stream-tell', + 'stream-write', 'stream-write1', 'tell-input', 'tell-output', + 
'with-error-stream', 'with-error-stream*', 'with-error>output', + 'with-input-output+error-streams', + 'with-input-output+error-streams*', 'with-input-stream', + 'with-input-stream*', 'with-output-stream', 'with-output-stream*', + 'with-output>error', 'with-output+error-stream', + 'with-output+error-stream*', 'with-streams', 'with-streams*', + 'write', 'write1'), suffix=r'\s') + + builtin_strings = words(( + '1string', '', '>string', 'resize-string', 'string', + 'string?'), suffix=r'\s') + + builtin_vectors = words(( + '1vector', '', '>vector', '?push', 'vector', 'vector?'), + suffix=r'\s') + + builtin_continuations = words(( + '', '', '', 'attempt-all', + 'attempt-all-error', 'attempt-all-error?', 'callback-error-hook', + 'callcc0', 'callcc1', 'cleanup', 'compute-restarts', 'condition', + 'condition?', 'continuation', 'continuation?', 'continue', + 'continue-restart', 'continue-with', 'current-continuation', + 'error', 'error-continuation', 'error-in-thread', 'error-thread', + 'ifcc', 'ignore-errors', 'in-callback?', 'original-error', 'recover', + 'restart', 'restart?', 'restarts', 'rethrow', 'rethrow-restarts', + 'return', 'return-continuation', 'thread-error-hook', 'throw-continue', + 'throw-restarts', 'with-datastack', 'with-return'), suffix=r'\s') tokens = { 'root': [ diff --git a/pygments/lexers/perl.py b/pygments/lexers/perl.py index fbd1c47f..56c5e0ec 100644 --- a/pygments/lexers/perl.py +++ b/pygments/lexers/perl.py @@ -12,7 +12,7 @@ import re from pygments.lexer import RegexLexer, ExtendedRegexLexer, include, bygroups, \ - using, this, default + using, this, default, words from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ Number, Punctuation from pygments.util import shebang_matches @@ -48,9 +48,12 @@ class PerlLexer(RegexLexer): 'root': [ (r'\#.*?$', Comment.Single), (r'^=[a-zA-Z0-9]+\s+.*?\n=cut', Comment.Multiline), - (r'(case|continue|do|else|elsif|for|foreach|if|last|my|' - r'next|our|redo|reset|then|unless|until|while|use|' - r'print|new|BEGIN|CHECK|INIT|END|return)\b', Keyword), + (words(( + 'case', 'continue', 'do', 'else', 'elsif', 'for', 'foreach', + 'if', 'last', 'my', 'next', 'our', 'redo', 'reset', 'then', + 'unless', 'until', 'while', 'use', 'print', 'new', 'BEGIN', + 'CHECK', 'INIT', 'END', 'return'), suffix=r'\b'), + Keyword), (r'(format)(\s+)(\w+)(\s*)(=)(\s*\n)', bygroups(Keyword, Text, Name, Text, Punctuation, Text), 'format'), (r'(eq|lt|gt|le|ge|ne|not|and|or|cmp)\b', Operator.Word), @@ -76,33 +79,34 @@ class PerlLexer(RegexLexer): (r'((?<==~)|(?<=\())\s*/(\\\\|\\[^\\]|[^\\/])*/[gcimosx]*', String.Regex), (r'\s+', Text), - (r'(abs|accept|alarm|atan2|bind|binmode|bless|caller|chdir|' - r'chmod|chomp|chop|chown|chr|chroot|close|closedir|connect|' - r'continue|cos|crypt|dbmclose|dbmopen|defined|delete|die|' - r'dump|each|endgrent|endhostent|endnetent|endprotoent|' - r'endpwent|endservent|eof|eval|exec|exists|exit|exp|fcntl|' - r'fileno|flock|fork|format|formline|getc|getgrent|getgrgid|' - r'getgrnam|gethostbyaddr|gethostbyname|gethostent|getlogin|' - r'getnetbyaddr|getnetbyname|getnetent|getpeername|getpgrp|' - r'getppid|getpriority|getprotobyname|getprotobynumber|' - r'getprotoent|getpwent|getpwnam|getpwuid|getservbyname|' - r'getservbyport|getservent|getsockname|getsockopt|glob|gmtime|' - r'goto|grep|hex|import|index|int|ioctl|join|keys|kill|last|' - r'lc|lcfirst|length|link|listen|local|localtime|log|lstat|' - r'map|mkdir|msgctl|msgget|msgrcv|msgsnd|my|next|no|oct|open|' - r'opendir|ord|our|pack|package|pipe|pop|pos|printf|' - 
r'prototype|push|quotemeta|rand|read|readdir|' - r'readline|readlink|readpipe|recv|redo|ref|rename|require|' - r'reverse|rewinddir|rindex|rmdir|scalar|seek|seekdir|' - r'select|semctl|semget|semop|send|setgrent|sethostent|setnetent|' - r'setpgrp|setpriority|setprotoent|setpwent|setservent|' - r'setsockopt|shift|shmctl|shmget|shmread|shmwrite|shutdown|' - r'sin|sleep|socket|socketpair|sort|splice|split|sprintf|sqrt|' - r'srand|stat|study|substr|symlink|syscall|sysopen|sysread|' - r'sysseek|system|syswrite|tell|telldir|tie|tied|time|times|tr|' - r'truncate|uc|ucfirst|umask|undef|unlink|unpack|unshift|untie|' - r'utime|values|vec|wait|waitpid|wantarray|warn|write' - r')\b', Name.Builtin), + (words(( + 'abs', 'accept', 'alarm', 'atan2', 'bind', 'binmode', 'bless', 'caller', 'chdir', + 'chmod', 'chomp', 'chop', 'chown', 'chr', 'chroot', 'close', 'closedir', 'connect', + 'continue', 'cos', 'crypt', 'dbmclose', 'dbmopen', 'defined', 'delete', 'die', + 'dump', 'each', 'endgrent', 'endhostent', 'endnetent', 'endprotoent', + 'endpwent', 'endservent', 'eof', 'eval', 'exec', 'exists', 'exit', 'exp', 'fcntl', + 'fileno', 'flock', 'fork', 'format', 'formline', 'getc', 'getgrent', 'getgrgid', + 'getgrnam', 'gethostbyaddr', 'gethostbyname', 'gethostent', 'getlogin', + 'getnetbyaddr', 'getnetbyname', 'getnetent', 'getpeername', 'getpgrp', + 'getppid', 'getpriority', 'getprotobyname', 'getprotobynumber', + 'getprotoent', 'getpwent', 'getpwnam', 'getpwuid', 'getservbyname', + 'getservbyport', 'getservent', 'getsockname', 'getsockopt', 'glob', 'gmtime', + 'goto', 'grep', 'hex', 'import', 'index', 'int', 'ioctl', 'join', 'keys', 'kill', 'last', + 'lc', 'lcfirst', 'length', 'link', 'listen', 'local', 'localtime', 'log', 'lstat', + 'map', 'mkdir', 'msgctl', 'msgget', 'msgrcv', 'msgsnd', 'my', 'next', 'no', 'oct', 'open', + 'opendir', 'ord', 'our', 'pack', 'package', 'pipe', 'pop', 'pos', 'printf', + 'prototype', 'push', 'quotemeta', 'rand', 'read', 'readdir', + 'readline', 'readlink', 'readpipe', 'recv', 'redo', 'ref', 'rename', 'require', + 'reverse', 'rewinddir', 'rindex', 'rmdir', 'scalar', 'seek', 'seekdir', + 'select', 'semctl', 'semget', 'semop', 'send', 'setgrent', 'sethostent', 'setnetent', + 'setpgrp', 'setpriority', 'setprotoent', 'setpwent', 'setservent', + 'setsockopt', 'shift', 'shmctl', 'shmget', 'shmread', 'shmwrite', 'shutdown', + 'sin', 'sleep', 'socket', 'socketpair', 'sort', 'splice', 'split', 'sprintf', 'sqrt', + 'srand', 'stat', 'study', 'substr', 'symlink', 'syscall', 'sysopen', 'sysread', + 'sysseek', 'system', 'syswrite', 'tell', 'telldir', 'tie', 'tied', 'time', 'times', 'tr', + 'truncate', 'uc', 'ucfirst', 'umask', 'undef', 'unlink', 'unpack', 'unshift', 'untie', + 'utime', 'values', 'vec', 'wait', 'waitpid', 'wantarray', 'warn', 'write'), suffix=r'\b'), + Name.Builtin), (r'((__(DATA|DIE|WARN)__)|(STD(IN|OUT|ERR)))\b', Name.Builtin.Pseudo), (r'<<([\'"]?)([a-zA-Z_]\w*)\1;?\n.*?\n\2\n', String), (r'__END__', Comment.Preproc, 'end-part'), diff --git a/pygments/lexers/ruby.py b/pygments/lexers/ruby.py index 291674d0..eadb670d 100644 --- a/pygments/lexers/ruby.py +++ b/pygments/lexers/ruby.py @@ -12,7 +12,7 @@ import re from pygments.lexer import Lexer, RegexLexer, ExtendedRegexLexer, include, \ - bygroups, default, LexerContext, do_insertions + bygroups, default, LexerContext, do_insertions, words from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ Number, Punctuation, Error, Generic from pygments.util import shebang_matches @@ -186,10 +186,12 @@ class 
RubyLexer(ExtendedRegexLexer): (r'#.*?$', Comment.Single), (r'=begin\s.*?\n=end.*?$', Comment.Multiline), # keywords - (r'(BEGIN|END|alias|begin|break|case|defined\?|' - r'do|else|elsif|end|ensure|for|if|in|next|redo|' - r'rescue|raise|retry|return|super|then|undef|unless|until|when|' - r'while|yield)\b', Keyword), + (words(( + 'BEGIN', 'END', 'alias', 'begin', 'break', 'case', 'defined?', + 'do', 'else', 'elsif', 'end', 'ensure', 'for', 'if', 'in', 'next', 'redo', + 'rescue', 'raise', 'retry', 'return', 'super', 'then', 'undef', + 'unless', 'until', 'when', 'while', 'yield'), suffix=r'\b'), + Keyword), # start of function, class and module names (r'(module)(\s+)([a-zA-Z_]\w*' r'(?:::[a-zA-Z_]\w*)*)', @@ -198,37 +200,43 @@ class RubyLexer(ExtendedRegexLexer): (r'def(?=[*%&^`~+-/\[<>=])', Keyword, 'funcname'), (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'), # special methods - (r'(initialize|new|loop|include|extend|raise|attr_reader|' - r'attr_writer|attr_accessor|attr|catch|throw|private|' - r'module_function|public|protected|true|false|nil)\b', + (words(( + 'initialize', 'new', 'loop', 'include', 'extend', 'raise', 'attr_reader', + 'attr_writer', 'attr_accessor', 'attr', 'catch', 'throw', 'private', + 'module_function', 'public', 'protected', 'true', 'false', 'nil'), + suffix=r'\b'), Keyword.Pseudo), (r'(not|and|or)\b', Operator.Word), - (r'(autoload|block_given|const_defined|eql|equal|frozen|include|' - r'instance_of|is_a|iterator|kind_of|method_defined|nil|' - r'private_method_defined|protected_method_defined|' - r'public_method_defined|respond_to|tainted)\?', Name.Builtin), + (words(( + 'autoload', 'block_given', 'const_defined', 'eql', 'equal', 'frozen', 'include', + 'instance_of', 'is_a', 'iterator', 'kind_of', 'method_defined', 'nil', + 'private_method_defined', 'protected_method_defined', + 'public_method_defined', 'respond_to', 'tainted'), suffix=r'\?'), + Name.Builtin), (r'(chomp|chop|exit|gsub|sub)!', Name.Builtin), - (r'(?<%])*:', Name.Function), # operators, must be below functions diff --git a/pygments/lexers/scripting.py b/pygments/lexers/scripting.py index d5ec2405..2d8a58a4 100644 --- a/pygments/lexers/scripting.py +++ b/pygments/lexers/scripting.py @@ -11,7 +11,8 @@ import re -from pygments.lexer import RegexLexer, include, bygroups, default, combined +from pygments.lexer import RegexLexer, include, bygroups, default, combined, \ + words from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ Number, Punctuation from pygments.util import get_bool_opt, get_list_opt, iteritems @@ -174,9 +175,12 @@ class MoonScriptLexer(LuaLexer): (r'[;,]', Punctuation), (r'[\[\]\{\}\(\)]', Keyword.Type), (r'[a-zA-Z_]\w*:', Name.Variable), - (r"(class|extends|if|then|super|do|with|import|export|" - r"while|elseif|return|for|in|from|when|using|else|" - r"and|or|not|switch|break)\b", Keyword), + (words(( + 'class', 'extends', 'if', 'then', 'super', 'do', 'with', + 'import', 'export', 'while', 'elseif', 'return', 'for', 'in', + 'from', 'when', 'using', 'else', 'and', 'or', 'not', 'switch', + 'break'), suffix=r'\b'), + Keyword), (r'(true|false|nil)\b', Keyword.Constant), (r'(and|or|not)\b', Operator.Word), (r'(self)\b', Name.Builtin.Pseudo), -- cgit v1.2.1 From d1a7453489a4c29d3613e738ecec83bee816d3f2 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Fri, 19 Sep 2014 18:48:02 +0200 Subject: reorganization of other.py, part 1 --- CHANGES | 1 + pygments/lexers/_mapping.py | 40 +- pygments/lexers/_robotframeworklexer.py | 558 -------- pygments/lexers/business.py | 479 
+++++++ pygments/lexers/cobol.py | 231 ---- pygments/lexers/compiled.py | 2 +- pygments/lexers/configs.py | 105 ++ pygments/lexers/console.py | 41 + pygments/lexers/esoteric.py | 82 ++ pygments/lexers/graphics.py | 484 ++++++- pygments/lexers/misc/rebol.py | 246 ++++ pygments/lexers/misc/snobol.py | 83 ++ pygments/lexers/modeling.py | 114 ++ pygments/lexers/other.py | 2193 ++----------------------------- pygments/lexers/prolog.py | 221 +++- pygments/lexers/robotframework.py | 560 ++++++++ pygments/lexers/scripting.py | 412 +++++- pygments/lexers/testing.py | 135 ++ 18 files changed, 3074 insertions(+), 2913 deletions(-) delete mode 100644 pygments/lexers/_robotframeworklexer.py create mode 100644 pygments/lexers/business.py delete mode 100644 pygments/lexers/cobol.py create mode 100644 pygments/lexers/configs.py create mode 100644 pygments/lexers/console.py create mode 100644 pygments/lexers/esoteric.py create mode 100644 pygments/lexers/misc/rebol.py create mode 100644 pygments/lexers/misc/snobol.py create mode 100644 pygments/lexers/modeling.py create mode 100644 pygments/lexers/robotframework.py create mode 100644 pygments/lexers/testing.py diff --git a/CHANGES b/CHANGES index 08c50ee2..8db78964 100644 --- a/CHANGES +++ b/CHANGES @@ -42,6 +42,7 @@ Version 2.0 * MQL (PR#285) * APL (#969) * Nit (PR#375) + * LSL (PR#296) - Added a helper to "optimize" regular expressions that match one of many literal words; this can save 20% and more lexing time with lexers that diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index 068edc4f..fee4096d 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -16,7 +16,7 @@ from __future__ import print_function LEXERS = { - 'ABAPLexer': ('pygments.lexers.other', 'ABAP', ('abap',), ('*.abap',), ('text/x-abap',)), + 'ABAPLexer': ('pygments.lexers.business', 'ABAP', ('abap',), ('*.abap',), ('text/x-abap',)), 'APLLexer': ('pygments.lexers.other', 'APL', ('apl',), ('*.apl',), ()), 'ActionScript3Lexer': ('pygments.lexers.web', 'ActionScript 3', ('as3', 'actionscript3'), ('*.as',), ('application/x-actionscript3', 'text/x-actionscript3', 'text/actionscript3')), 'ActionScriptLexer': ('pygments.lexers.web', 'ActionScript', ('as', 'actionscript'), ('*.as',), ('application/x-actionscript', 'text/x-actionscript', 'text/actionscript')), @@ -34,9 +34,9 @@ LEXERS = { 'AntlrPythonLexer': ('pygments.lexers.parsers', 'ANTLR With Python Target', ('antlr-python',), ('*.G', '*.g'), ()), 'AntlrRubyLexer': ('pygments.lexers.parsers', 'ANTLR With Ruby Target', ('antlr-ruby', 'antlr-rb'), ('*.G', '*.g'), ()), 'ApacheConfLexer': ('pygments.lexers.text', 'ApacheConf', ('apacheconf', 'aconf', 'apache'), ('.htaccess', 'apache.conf', 'apache2.conf'), ('text/x-apacheconf',)), - 'AppleScriptLexer': ('pygments.lexers.other', 'AppleScript', ('applescript',), ('*.applescript',), ()), + 'AppleScriptLexer': ('pygments.lexers.scripting', 'AppleScript', ('applescript',), ('*.applescript',), ()), 'AspectJLexer': ('pygments.lexers.jvm', 'AspectJ', ('aspectj',), ('*.aj',), ('text/x-aspectj',)), - 'AsymptoteLexer': ('pygments.lexers.other', 'Asymptote', ('asy', 'asymptote'), ('*.asy',), ('text/x-asymptote',)), + 'AsymptoteLexer': ('pygments.lexers.graphics', 'Asymptote', ('asy', 'asymptote'), ('*.asy',), ('text/x-asymptote',)), 'AutoItLexer': ('pygments.lexers.other', 'AutoIt', ('autoit',), ('*.au3',), ('text/x-autoit',)), 'AutohotkeyLexer': ('pygments.lexers.other', 'autohotkey', ('ahk', 'autohotkey'), ('*.ahk', '*.ahkl'), ('text/x-autohotkey',)), 
'AwkLexer': ('pygments.lexers.other', 'Awk', ('awk', 'gawk', 'mawk', 'nawk'), ('*.awk',), ('application/x-awk',)), @@ -45,11 +45,11 @@ LEXERS = { 'BashLexer': ('pygments.lexers.shell', 'Bash', ('bash', 'sh', 'ksh'), ('*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass', '.bashrc', 'bashrc', '.bash_*', 'bash_*', 'PKGBUILD'), ('application/x-sh', 'application/x-shellscript')), 'BashSessionLexer': ('pygments.lexers.shell', 'Bash Session', ('console',), ('*.sh-session',), ('application/x-shell-session',)), 'BatchLexer': ('pygments.lexers.shell', 'Batchfile', ('bat', 'batch', 'dosbatch', 'winbatch'), ('*.bat', '*.cmd'), ('application/x-dos-batch',)), - 'BefungeLexer': ('pygments.lexers.other', 'Befunge', ('befunge',), ('*.befunge',), ('application/x-befunge',)), + 'BefungeLexer': ('pygments.lexers.esoteric', 'Befunge', ('befunge',), ('*.befunge',), ('application/x-befunge',)), 'BlitzBasicLexer': ('pygments.lexers.misc.blitz', 'BlitzBasic', ('blitzbasic', 'b3d', 'bplus'), ('*.bb', '*.decls'), ('text/x-bb',)), 'BlitzMaxLexer': ('pygments.lexers.misc.blitz', 'BlitzMax', ('blitzmax', 'bmax'), ('*.bmx',), ('text/x-bmx',)), 'BooLexer': ('pygments.lexers.dotnet', 'Boo', ('boo',), ('*.boo',), ('text/x-boo',)), - 'BrainfuckLexer': ('pygments.lexers.other', 'Brainfuck', ('brainfuck', 'bf'), ('*.bf', '*.b'), ('application/x-brainfuck',)), + 'BrainfuckLexer': ('pygments.lexers.esoteric', 'Brainfuck', ('brainfuck', 'bf'), ('*.bf', '*.b'), ('application/x-brainfuck',)), 'BroLexer': ('pygments.lexers.other', 'Bro', ('bro',), ('*.bro',), ()), 'BugsLexer': ('pygments.lexers.math', 'BUGS', ('bugs', 'winbugs', 'openbugs'), ('*.bug',), ()), 'CLexer': ('pygments.lexers.c_like.c_cpp', 'C', ('c',), ('*.c', '*.h', '*.idc'), ('text/x-chdr', 'text/x-csrc')), @@ -71,8 +71,8 @@ LEXERS = { 'ClayLexer': ('pygments.lexers.c_like.other', 'Clay', ('clay',), ('*.clay',), ('text/x-clay',)), 'ClojureLexer': ('pygments.lexers.jvm', 'Clojure', ('clojure', 'clj'), ('*.clj',), ('text/x-clojure', 'application/x-clojure')), 'ClojureScriptLexer': ('pygments.lexers.jvm', 'ClojureScript', ('clojurescript', 'cljs'), ('*.cljs',), ('text/x-clojurescript', 'application/x-clojurescript')), - 'CobolFreeformatLexer': ('pygments.lexers.cobol', 'COBOLFree', ('cobolfree',), ('*.cbl', '*.CBL'), ()), - 'CobolLexer': ('pygments.lexers.cobol', 'COBOL', ('cobol',), ('*.cob', '*.COB', '*.cpy', '*.CPY'), ('text/x-cobol',)), + 'CobolFreeformatLexer': ('pygments.lexers.business', 'COBOLFree', ('cobolfree',), ('*.cbl', '*.CBL'), ()), + 'CobolLexer': ('pygments.lexers.business', 'COBOL', ('cobol',), ('*.cob', '*.COB', '*.cpy', '*.CPY'), ('text/x-cobol',)), 'CoffeeScriptLexer': ('pygments.lexers.web', 'CoffeeScript', ('coffee-script', 'coffeescript', 'coffee'), ('*.coffee',), ('text/coffeescript',)), 'ColdfusionCFCLexer': ('pygments.lexers.templates', 'Coldfusion CFC', ('cfc',), ('*.cfc',), ()), 'ColdfusionHtmlLexer': ('pygments.lexers.templates', 'Coldfusion HTML', ('cfm',), ('*.cfm', '*.cfml'), ('application/x-coldfusion',)), @@ -132,8 +132,8 @@ LEXERS = { 'GenshiLexer': ('pygments.lexers.templates', 'Genshi', ('genshi', 'kid', 'xml+genshi', 'xml+kid'), ('*.kid',), ('application/x-genshi', 'application/x-kid')), 'GenshiTextLexer': ('pygments.lexers.templates', 'Genshi Text', ('genshitext',), (), ('application/x-genshi-text', 'text/x-genshi')), 'GettextLexer': ('pygments.lexers.text', 'Gettext Catalog', ('pot', 'po'), ('*.pot', '*.po'), ('application/x-gettext', 'text/x-gettext', 'text/gettext')), - 'GherkinLexer': ('pygments.lexers.other', 'Gherkin', 
('cucumber', 'gherkin'), ('*.feature',), ('text/x-gherkin',)), - 'GnuplotLexer': ('pygments.lexers.other', 'Gnuplot', ('gnuplot',), ('*.plot', '*.plt'), ('text/x-gnuplot',)), + 'GherkinLexer': ('pygments.lexers.testing', 'Gherkin', ('cucumber', 'gherkin'), ('*.feature',), ('text/x-gherkin',)), + 'GnuplotLexer': ('pygments.lexers.graphics', 'Gnuplot', ('gnuplot',), ('*.plot', '*.plt'), ('text/x-gnuplot',)), 'GoLexer': ('pygments.lexers.c_like.go', 'Go', ('go',), ('*.go',), ('text/x-gosrc',)), 'GoloLexer': ('pygments.lexers.jvm', 'Golo', ('golo',), ('*.golo',), ()), 'GoodDataCLLexer': ('pygments.lexers.other', 'GoodData-CL', ('gooddata-cl',), ('*.gdc',), ('text/x-gooddata-cl',)), @@ -180,10 +180,10 @@ LEXERS = { 'JuliaConsoleLexer': ('pygments.lexers.math', 'Julia console', ('jlcon',), (), ()), 'JuliaLexer': ('pygments.lexers.math', 'Julia', ('julia', 'jl'), ('*.jl',), ('text/x-julia', 'application/x-julia')), 'KalLexer': ('pygments.lexers.web', 'Kal', ('kal',), ('*.kal',), ('text/kal', 'application/kal')), - 'KconfigLexer': ('pygments.lexers.other', 'Kconfig', ('kconfig', 'menuconfig', 'linux-config', 'kernel-config'), ('Kconfig', '*Config.in*', 'external.in*', 'standard-modules.in'), ('text/x-kconfig',)), + 'KconfigLexer': ('pygments.lexers.configs', 'Kconfig', ('kconfig', 'menuconfig', 'linux-config', 'kernel-config'), ('Kconfig', '*Config.in*', 'external.in*', 'standard-modules.in'), ('text/x-kconfig',)), 'KokaLexer': ('pygments.lexers.functional', 'Koka', ('koka',), ('*.kk', '*.kki'), ('text/x-koka',)), 'KotlinLexer': ('pygments.lexers.jvm', 'Kotlin', ('kotlin',), ('*.kt',), ('text/x-kotlin',)), - 'LSLLexer': ('pygments.lexers.other', 'LSL', ('lsl',), ('*.lsl',), ('text/x-lsl',)), + 'LSLLexer': ('pygments.lexers.scripting', 'LSL', ('lsl',), ('*.lsl',), ('text/x-lsl',)), 'LassoCssLexer': ('pygments.lexers.templates', 'CSS+Lasso', ('css+lasso',), (), ('text/css+lasso',)), 'LassoHtmlLexer': ('pygments.lexers.templates', 'HTML+Lasso', ('html+lasso',), (), ('text/html+lasso', 'application/x-httpd-lasso', 'application/x-httpd-lasso[89]')), 'LassoJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Lasso', ('js+lasso', 'javascript+lasso'), (), ('application/x-javascript+lasso', 'text/x-javascript+lasso', 'text/javascript+lasso')), @@ -199,7 +199,7 @@ LEXERS = { 'LiveScriptLexer': ('pygments.lexers.web', 'LiveScript', ('live-script', 'livescript'), ('*.ls',), ('text/livescript',)), 'LlvmLexer': ('pygments.lexers.asm', 'LLVM', ('llvm',), ('*.ll',), ('text/x-llvm',)), 'LogosLexer': ('pygments.lexers.c_like.objective', 'Logos', ('logos',), ('*.x', '*.xi', '*.xm', '*.xmi'), ('text/x-logos',)), - 'LogtalkLexer': ('pygments.lexers.other', 'Logtalk', ('logtalk',), ('*.lgt', '*.logtalk'), ('text/x-logtalk',)), + 'LogtalkLexer': ('pygments.lexers.prolog', 'Logtalk', ('logtalk',), ('*.lgt', '*.logtalk'), ('text/x-logtalk',)), 'LuaLexer': ('pygments.lexers.scripting', 'Lua', ('lua',), ('*.lua', '*.wlua'), ('text/x-lua', 'application/x-lua')), 'MOOCodeLexer': ('pygments.lexers.other', 'MOOCode', ('moocode', 'moo'), ('*.moo',), ('text/x-moocode',)), 'MakefileLexer': ('pygments.lexers.text', 'Makefile', ('make', 'makefile', 'mf', 'bsdmake'), ('*.mak', '*.mk', 'Makefile', 'makefile', 'Makefile.*', 'GNUmakefile'), ('text/x-makefile',)), @@ -215,7 +215,7 @@ LEXERS = { 'MatlabLexer': ('pygments.lexers.math', 'Matlab', ('matlab',), ('*.m',), ('text/matlab',)), 'MatlabSessionLexer': ('pygments.lexers.math', 'Matlab session', ('matlabsession',), (), ()), 'MiniDLexer': ('pygments.lexers.c_like.d', 'MiniD', 
('minid',), ('*.md',), ('text/x-minidsrc',)), - 'ModelicaLexer': ('pygments.lexers.other', 'Modelica', ('modelica',), ('*.mo',), ('text/x-modelica',)), + 'ModelicaLexer': ('pygments.lexers.modeling', 'Modelica', ('modelica',), ('*.mo',), ('text/x-modelica',)), 'Modula2Lexer': ('pygments.lexers.pascal', 'Modula-2', ('modula2', 'm2'), ('*.def', '*.mod'), ('text/x-modula2',)), 'MoinWikiLexer': ('pygments.lexers.text', 'MoinMoin/Trac Wiki markup', ('trac-wiki', 'moin'), (), ('text/x-trac-wiki',)), 'MonkeyLexer': ('pygments.lexers.misc.blitz', 'Monkey', ('monkey',), ('*.monkey',), ('text/x-monkey',)), @@ -250,7 +250,7 @@ LEXERS = { 'OctaveLexer': ('pygments.lexers.math', 'Octave', ('octave',), ('*.m',), ('text/octave',)), 'OocLexer': ('pygments.lexers.misc.ooc', 'Ooc', ('ooc',), ('*.ooc',), ('text/x-ooc',)), 'OpaLexer': ('pygments.lexers.functional', 'Opa', ('opa',), ('*.opa',), ('text/x-opa',)), - 'OpenEdgeLexer': ('pygments.lexers.other', 'OpenEdge ABL', ('openedge', 'abl', 'progress'), ('*.p', '*.cls'), ('text/x-openedge', 'application/x-openedge')), + 'OpenEdgeLexer': ('pygments.lexers.business', 'OpenEdge ABL', ('openedge', 'abl', 'progress'), ('*.p', '*.cls'), ('text/x-openedge', 'application/x-openedge')), 'PanLexer': ('pygments.lexers.other', 'Pan', ('pan',), ('*.pan',), ()), 'PawnLexer': ('pygments.lexers.other', 'Pawn', ('pawn',), ('*.p', '*.pwn', '*.inc'), ('text/x-pawn',)), 'Perl6Lexer': ('pygments.lexers.perl', 'Perl6', ('perl6', 'pl6'), ('*.pl', '*.pm', '*.nqp', '*.p6', '*.6pl', '*.p6l', '*.pl6', '*.6pm', '*.p6m', '*.pm6', '*.t'), ('text/x-perl6', 'application/x-perl6')), @@ -259,10 +259,10 @@ LEXERS = { 'PigLexer': ('pygments.lexers.jvm', 'Pig', ('pig',), ('*.pig',), ('text/x-pig',)), 'PikeLexer': ('pygments.lexers.c_like.other', 'Pike', ('pike',), ('*.pike', '*.pmod'), ('text/x-pike',)), 'PlPgsqlLexer': ('pygments.lexers.sql', 'PL/pgSQL', ('plpgsql',), (), ('text/x-plpgsql',)), - 'PostScriptLexer': ('pygments.lexers.other', 'PostScript', ('postscript', 'postscr'), ('*.ps', '*.eps'), ('application/postscript',)), + 'PostScriptLexer': ('pygments.lexers.graphics', 'PostScript', ('postscript', 'postscr'), ('*.ps', '*.eps'), ('application/postscript',)), 'PostgresConsoleLexer': ('pygments.lexers.sql', 'PostgreSQL console (psql)', ('psql', 'postgresql-console', 'postgres-console'), (), ('text/x-postgresql-psql',)), 'PostgresLexer': ('pygments.lexers.sql', 'PostgreSQL SQL dialect', ('postgresql', 'postgres'), (), ('text/x-postgresql',)), - 'PovrayLexer': ('pygments.lexers.other', 'POVRay', ('pov',), ('*.pov', '*.inc'), ('text/x-povray',)), + 'PovrayLexer': ('pygments.lexers.graphics', 'POVRay', ('pov',), ('*.pov', '*.inc'), ('text/x-povray',)), 'PowerShellLexer': ('pygments.lexers.shell', 'PowerShell', ('powershell', 'posh', 'ps1', 'psm1'), ('*.ps1', '*.psm1'), ('text/x-powershell',)), 'PrologLexer': ('pygments.lexers.prolog', 'Prolog', ('prolog',), ('*.prolog', '*.pro', '*.pl'), ('text/x-prolog',)), 'PropertiesLexer': ('pygments.lexers.text', 'Properties', ('properties', 'jproperties'), ('*.properties',), ('text/x-java-properties',)), @@ -289,13 +289,13 @@ LEXERS = { 'RagelRubyLexer': ('pygments.lexers.parsers', 'Ragel in Ruby Host', ('ragel-ruby', 'ragel-rb'), ('*.rl',), ()), 'RawTokenLexer': ('pygments.lexers.special', 'Raw token data', ('raw',), (), ('application/x-pygments-tokens',)), 'RdLexer': ('pygments.lexers.math', 'Rd', ('rd',), ('*.Rd',), ('text/x-r-doc',)), - 'RebolLexer': ('pygments.lexers.other', 'REBOL', ('rebol',), ('*.r', '*.r3', '*.reb'), ('text/x-rebol',)), + 
'RebolLexer': ('pygments.lexers.misc.rebol', 'REBOL', ('rebol',), ('*.r', '*.r3', '*.reb'), ('text/x-rebol',)), 'RedLexer': ('pygments.lexers.other', 'Red', ('red', 'red/system'), ('*.red', '*.reds'), ('text/x-red', 'text/x-red-system')), 'RedcodeLexer': ('pygments.lexers.other', 'Redcode', ('redcode',), ('*.cw',), ()), 'RegeditLexer': ('pygments.lexers.text', 'reg', ('registry',), ('*.reg',), ('text/x-windows-registry',)), 'RexxLexer': ('pygments.lexers.other', 'Rexx', ('rexx', 'arexx'), ('*.rexx', '*.rex', '*.rx', '*.arexx'), ('text/x-rexx',)), 'RhtmlLexer': ('pygments.lexers.templates', 'RHTML', ('rhtml', 'html+erb', 'html+ruby'), ('*.rhtml',), ('text/html+ruby',)), - 'RobotFrameworkLexer': ('pygments.lexers.other', 'RobotFramework', ('robotframework',), ('*.txt', '*.robot'), ('text/x-robotframework',)), + 'RobotFrameworkLexer': ('pygments.lexers.robotframework', 'RobotFramework', ('robotframework',), ('*.txt', '*.robot'), ('text/x-robotframework',)), 'RqlLexer': ('pygments.lexers.sql', 'RQL', ('rql',), ('*.rql',), ('text/x-rql',)), 'RslLexer': ('pygments.lexers.other', 'RSL', ('rsl',), ('*.rsl',), ('text/rsl',)), 'RstLexer': ('pygments.lexers.text', 'reStructuredText', ('rst', 'rest', 'restructuredtext'), ('*.rst', '*.rest'), ('text/x-rst', 'text/prs.fallenstein.rst')), @@ -315,7 +315,7 @@ LEXERS = { 'SmaliLexer': ('pygments.lexers.dalvik', 'Smali', ('smali',), ('*.smali',), ('text/smali',)), 'SmalltalkLexer': ('pygments.lexers.other', 'Smalltalk', ('smalltalk', 'squeak', 'st'), ('*.st',), ('text/x-smalltalk',)), 'SmartyLexer': ('pygments.lexers.templates', 'Smarty', ('smarty',), ('*.tpl',), ('application/x-smarty',)), - 'SnobolLexer': ('pygments.lexers.other', 'Snobol', ('snobol',), ('*.snobol',), ('text/x-snobol',)), + 'SnobolLexer': ('pygments.lexers.misc.snobol', 'Snobol', ('snobol',), ('*.snobol',), ('text/x-snobol',)), 'SourcePawnLexer': ('pygments.lexers.other', 'SourcePawn', ('sp',), ('*.sp',), ('text/x-sourcepawn',)), 'SourcesListLexer': ('pygments.lexers.text', 'Debian Sourcelist', ('sourceslist', 'sources.list', 'debsources'), ('sources.list',), ()), 'SparqlLexer': ('pygments.lexers.rdf', 'SPARQL', ('sparql',), ('*.rq', '*.sparql'), ('application/sparql-query',)), @@ -336,7 +336,7 @@ LEXERS = { 'TreetopLexer': ('pygments.lexers.parsers', 'Treetop', ('treetop',), ('*.treetop', '*.tt'), ()), 'TypeScriptLexer': ('pygments.lexers.web', 'TypeScript', ('ts',), ('*.ts',), ('text/x-typescript',)), 'UrbiscriptLexer': ('pygments.lexers.other', 'UrbiScript', ('urbiscript',), ('*.u',), ('application/x-urbiscript',)), - 'VCTreeStatusLexer': ('pygments.lexers.other', 'VCTreeStatus', ('vctreestatus',), (), ()), + 'VCTreeStatusLexer': ('pygments.lexers.console', 'VCTreeStatus', ('vctreestatus',), (), ()), 'VGLLexer': ('pygments.lexers.other', 'VGL', ('vgl',), ('*.rpf',), ()), 'ValaLexer': ('pygments.lexers.c_like.other', 'Vala', ('vala', 'vapi'), ('*.vala', '*.vapi'), ('text/x-vala',)), 'VbNetAspxLexer': ('pygments.lexers.dotnet', 'aspx-vb', ('aspx-vb',), ('*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd'), ()), diff --git a/pygments/lexers/_robotframeworklexer.py b/pygments/lexers/_robotframeworklexer.py deleted file mode 100644 index 2889e1b8..00000000 --- a/pygments/lexers/_robotframeworklexer.py +++ /dev/null @@ -1,558 +0,0 @@ -# -*- coding: utf-8 -*- -""" - pygments.lexers._robotframeworklexer - ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - - Lexer for Robot Framework. - - :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. - :license: BSD, see LICENSE for details. 
-""" - -# Copyright 2012 Nokia Siemens Networks Oyj -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import re - -from pygments.lexer import Lexer -from pygments.token import Token -from pygments.util import text_type - - -HEADING = Token.Generic.Heading -SETTING = Token.Keyword.Namespace -IMPORT = Token.Name.Namespace -TC_KW_NAME = Token.Generic.Subheading -KEYWORD = Token.Name.Function -ARGUMENT = Token.String -VARIABLE = Token.Name.Variable -COMMENT = Token.Comment -SEPARATOR = Token.Punctuation -SYNTAX = Token.Punctuation -GHERKIN = Token.Generic.Emph -ERROR = Token.Error - - -def normalize(string, remove=''): - string = string.lower() - for char in remove + ' ': - if char in string: - string = string.replace(char, '') - return string - - -class RobotFrameworkLexer(Lexer): - """ - For `Robot Framework `_ test data. - - Supports both space and pipe separated plain text formats. - - .. versionadded:: 1.6 - """ - name = 'RobotFramework' - aliases = ['robotframework'] - filenames = ['*.txt', '*.robot'] - mimetypes = ['text/x-robotframework'] - - def __init__(self, **options): - options['tabsize'] = 2 - options['encoding'] = 'UTF-8' - Lexer.__init__(self, **options) - - def get_tokens_unprocessed(self, text): - row_tokenizer = RowTokenizer() - var_tokenizer = VariableTokenizer() - index = 0 - for row in text.splitlines(): - for value, token in row_tokenizer.tokenize(row): - for value, token in var_tokenizer.tokenize(value, token): - if value: - yield index, token, text_type(value) - index += len(value) - - -class VariableTokenizer(object): - - def tokenize(self, string, token): - var = VariableSplitter(string, identifiers='$@%') - if var.start < 0 or token in (COMMENT, ERROR): - yield string, token - return - for value, token in self._tokenize(var, string, token): - if value: - yield value, token - - def _tokenize(self, var, string, orig_token): - before = string[:var.start] - yield before, orig_token - yield var.identifier + '{', SYNTAX - for value, token in self.tokenize(var.base, VARIABLE): - yield value, token - yield '}', SYNTAX - if var.index: - yield '[', SYNTAX - for value, token in self.tokenize(var.index, VARIABLE): - yield value, token - yield ']', SYNTAX - for value, token in self.tokenize(string[var.end:], orig_token): - yield value, token - - -class RowTokenizer(object): - - def __init__(self): - self._table = UnknownTable() - self._splitter = RowSplitter() - testcases = TestCaseTable() - settings = SettingTable(testcases.set_default_template) - variables = VariableTable() - keywords = KeywordTable() - self._tables = {'settings': settings, 'setting': settings, - 'metadata': settings, - 'variables': variables, 'variable': variables, - 'testcases': testcases, 'testcase': testcases, - 'keywords': keywords, 'keyword': keywords, - 'userkeywords': keywords, 'userkeyword': keywords} - - def tokenize(self, row): - commented = False - heading = False - for index, value in enumerate(self._splitter.split(row)): - # First value, and every second after that, is a separator. 
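# For example, with the leading pseudo-separator yielded by RowSplitter, the
# enumerate index maps onto (cell index, is-separator) via the divmod below
# (worked values only; nothing beyond the code in this module is assumed):
#   divmod(0 - 1, 2) == (-1, 1)  -> separator
#   divmod(1 - 1, 2) == (0, 0)   -> data cell 0
#   divmod(2 - 1, 2) == (0, 1)   -> separator
#   divmod(3 - 1, 2) == (1, 0)   -> data cell 1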
- index, separator = divmod(index-1, 2) - if value.startswith('#'): - commented = True - elif index == 0 and value.startswith('*'): - self._table = self._start_table(value) - heading = True - for value, token in self._tokenize(value, index, commented, - separator, heading): - yield value, token - self._table.end_row() - - def _start_table(self, header): - name = normalize(header, remove='*') - return self._tables.get(name, UnknownTable()) - - def _tokenize(self, value, index, commented, separator, heading): - if commented: - yield value, COMMENT - elif separator: - yield value, SEPARATOR - elif heading: - yield value, HEADING - else: - for value, token in self._table.tokenize(value, index): - yield value, token - - -class RowSplitter(object): - _space_splitter = re.compile('( {2,})') - _pipe_splitter = re.compile('((?:^| +)\|(?: +|$))') - - def split(self, row): - splitter = (row.startswith('| ') and self._split_from_pipes - or self._split_from_spaces) - for value in splitter(row): - yield value - yield '\n' - - def _split_from_spaces(self, row): - yield '' # Start with (pseudo)separator similarly as with pipes - for value in self._space_splitter.split(row): - yield value - - def _split_from_pipes(self, row): - _, separator, rest = self._pipe_splitter.split(row, 1) - yield separator - while self._pipe_splitter.search(rest): - cell, separator, rest = self._pipe_splitter.split(rest, 1) - yield cell - yield separator - yield rest - - -class Tokenizer(object): - _tokens = None - - def __init__(self): - self._index = 0 - - def tokenize(self, value): - values_and_tokens = self._tokenize(value, self._index) - self._index += 1 - if isinstance(values_and_tokens, type(Token)): - values_and_tokens = [(value, values_and_tokens)] - return values_and_tokens - - def _tokenize(self, value, index): - index = min(index, len(self._tokens) - 1) - return self._tokens[index] - - def _is_assign(self, value): - if value.endswith('='): - value = value[:-1].strip() - var = VariableSplitter(value, identifiers='$@') - return var.start == 0 and var.end == len(value) - - -class Comment(Tokenizer): - _tokens = (COMMENT,) - - -class Setting(Tokenizer): - _tokens = (SETTING, ARGUMENT) - _keyword_settings = ('suitesetup', 'suiteprecondition', 'suiteteardown', - 'suitepostcondition', 'testsetup', 'testprecondition', - 'testteardown', 'testpostcondition', 'testtemplate') - _import_settings = ('library', 'resource', 'variables') - _other_settings = ('documentation', 'metadata', 'forcetags', 'defaulttags', - 'testtimeout') - _custom_tokenizer = None - - def __init__(self, template_setter=None): - Tokenizer.__init__(self) - self._template_setter = template_setter - - def _tokenize(self, value, index): - if index == 1 and self._template_setter: - self._template_setter(value) - if index == 0: - normalized = normalize(value) - if normalized in self._keyword_settings: - self._custom_tokenizer = KeywordCall(support_assign=False) - elif normalized in self._import_settings: - self._custom_tokenizer = ImportSetting() - elif normalized not in self._other_settings: - return ERROR - elif self._custom_tokenizer: - return self._custom_tokenizer.tokenize(value) - return Tokenizer._tokenize(self, value, index) - - -class ImportSetting(Tokenizer): - _tokens = (IMPORT, ARGUMENT) - - -class TestCaseSetting(Setting): - _keyword_settings = ('setup', 'precondition', 'teardown', 'postcondition', - 'template') - _import_settings = () - _other_settings = ('documentation', 'tags', 'timeout') - - def _tokenize(self, value, index): - if index == 0: - 
type = Setting._tokenize(self, value[1:-1], index) - return [('[', SYNTAX), (value[1:-1], type), (']', SYNTAX)] - return Setting._tokenize(self, value, index) - - -class KeywordSetting(TestCaseSetting): - _keyword_settings = ('teardown',) - _other_settings = ('documentation', 'arguments', 'return', 'timeout') - - -class Variable(Tokenizer): - _tokens = (SYNTAX, ARGUMENT) - - def _tokenize(self, value, index): - if index == 0 and not self._is_assign(value): - return ERROR - return Tokenizer._tokenize(self, value, index) - - -class KeywordCall(Tokenizer): - _tokens = (KEYWORD, ARGUMENT) - - def __init__(self, support_assign=True): - Tokenizer.__init__(self) - self._keyword_found = not support_assign - self._assigns = 0 - - def _tokenize(self, value, index): - if not self._keyword_found and self._is_assign(value): - self._assigns += 1 - return SYNTAX # VariableTokenizer tokenizes this later. - if self._keyword_found: - return Tokenizer._tokenize(self, value, index - self._assigns) - self._keyword_found = True - return GherkinTokenizer().tokenize(value, KEYWORD) - - -class GherkinTokenizer(object): - _gherkin_prefix = re.compile('^(Given|When|Then|And) ', re.IGNORECASE) - - def tokenize(self, value, token): - match = self._gherkin_prefix.match(value) - if not match: - return [(value, token)] - end = match.end() - return [(value[:end], GHERKIN), (value[end:], token)] - - -class TemplatedKeywordCall(Tokenizer): - _tokens = (ARGUMENT,) - - -class ForLoop(Tokenizer): - - def __init__(self): - Tokenizer.__init__(self) - self._in_arguments = False - - def _tokenize(self, value, index): - token = self._in_arguments and ARGUMENT or SYNTAX - if value.upper() in ('IN', 'IN RANGE'): - self._in_arguments = True - return token - - -class _Table(object): - _tokenizer_class = None - - def __init__(self, prev_tokenizer=None): - self._tokenizer = self._tokenizer_class() - self._prev_tokenizer = prev_tokenizer - self._prev_values_on_row = [] - - def tokenize(self, value, index): - if self._continues(value, index): - self._tokenizer = self._prev_tokenizer - yield value, SYNTAX - else: - for value_and_token in self._tokenize(value, index): - yield value_and_token - self._prev_values_on_row.append(value) - - def _continues(self, value, index): - return value == '...' 
and all(self._is_empty(t) - for t in self._prev_values_on_row) - - def _is_empty(self, value): - return value in ('', '\\') - - def _tokenize(self, value, index): - return self._tokenizer.tokenize(value) - - def end_row(self): - self.__init__(prev_tokenizer=self._tokenizer) - - -class UnknownTable(_Table): - _tokenizer_class = Comment - - def _continues(self, value, index): - return False - - -class VariableTable(_Table): - _tokenizer_class = Variable - - -class SettingTable(_Table): - _tokenizer_class = Setting - - def __init__(self, template_setter, prev_tokenizer=None): - _Table.__init__(self, prev_tokenizer) - self._template_setter = template_setter - - def _tokenize(self, value, index): - if index == 0 and normalize(value) == 'testtemplate': - self._tokenizer = Setting(self._template_setter) - return _Table._tokenize(self, value, index) - - def end_row(self): - self.__init__(self._template_setter, prev_tokenizer=self._tokenizer) - - -class TestCaseTable(_Table): - _setting_class = TestCaseSetting - _test_template = None - _default_template = None - - @property - def _tokenizer_class(self): - if self._test_template or (self._default_template and - self._test_template is not False): - return TemplatedKeywordCall - return KeywordCall - - def _continues(self, value, index): - return index > 0 and _Table._continues(self, value, index) - - def _tokenize(self, value, index): - if index == 0: - if value: - self._test_template = None - return GherkinTokenizer().tokenize(value, TC_KW_NAME) - if index == 1 and self._is_setting(value): - if self._is_template(value): - self._test_template = False - self._tokenizer = self._setting_class(self.set_test_template) - else: - self._tokenizer = self._setting_class() - if index == 1 and self._is_for_loop(value): - self._tokenizer = ForLoop() - if index == 1 and self._is_empty(value): - return [(value, SYNTAX)] - return _Table._tokenize(self, value, index) - - def _is_setting(self, value): - return value.startswith('[') and value.endswith(']') - - def _is_template(self, value): - return normalize(value) == '[template]' - - def _is_for_loop(self, value): - return value.startswith(':') and normalize(value, remove=':') == 'for' - - def set_test_template(self, template): - self._test_template = self._is_template_set(template) - - def set_default_template(self, template): - self._default_template = self._is_template_set(template) - - def _is_template_set(self, template): - return normalize(template) not in ('', '\\', 'none', '${empty}') - - -class KeywordTable(TestCaseTable): - _tokenizer_class = KeywordCall - _setting_class = KeywordSetting - - def _is_template(self, value): - return False - - -# Following code copied directly from Robot Framework 2.7.5. 
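# The splitter below picks out the first variable reference in a cell; roughly,
# as a doctest-style sketch using only the classes defined in this module:
#
#   >>> var = VariableSplitter('Hello, ${name}!', identifiers='$@%')
#   >>> var.identifier, var.start, var.end, var.base
#   ('$', 7, 14, 'name')
#
# VariableTokenizer above uses those offsets to emit the surrounding text, the
# ${ } syntax and the variable name as separate tokens.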
- -class VariableSplitter: - - def __init__(self, string, identifiers): - self.identifier = None - self.base = None - self.index = None - self.start = -1 - self.end = -1 - self._identifiers = identifiers - self._may_have_internal_variables = False - try: - self._split(string) - except ValueError: - pass - else: - self._finalize() - - def get_replaced_base(self, variables): - if self._may_have_internal_variables: - return variables.replace_string(self.base) - return self.base - - def _finalize(self): - self.identifier = self._variable_chars[0] - self.base = ''.join(self._variable_chars[2:-1]) - self.end = self.start + len(self._variable_chars) - if self._has_list_variable_index(): - self.index = ''.join(self._list_variable_index_chars[1:-1]) - self.end += len(self._list_variable_index_chars) - - def _has_list_variable_index(self): - return self._list_variable_index_chars\ - and self._list_variable_index_chars[-1] == ']' - - def _split(self, string): - start_index, max_index = self._find_variable(string) - self.start = start_index - self._open_curly = 1 - self._state = self._variable_state - self._variable_chars = [string[start_index], '{'] - self._list_variable_index_chars = [] - self._string = string - start_index += 2 - for index, char in enumerate(string[start_index:]): - index += start_index # Giving start to enumerate only in Py 2.6+ - try: - self._state(char, index) - except StopIteration: - return - if index == max_index and not self._scanning_list_variable_index(): - return - - def _scanning_list_variable_index(self): - return self._state in [self._waiting_list_variable_index_state, - self._list_variable_index_state] - - def _find_variable(self, string): - max_end_index = string.rfind('}') - if max_end_index == -1: - raise ValueError('No variable end found') - if self._is_escaped(string, max_end_index): - return self._find_variable(string[:max_end_index]) - start_index = self._find_start_index(string, 1, max_end_index) - if start_index == -1: - raise ValueError('No variable start found') - return start_index, max_end_index - - def _find_start_index(self, string, start, end): - index = string.find('{', start, end) - 1 - if index < 0: - return -1 - if self._start_index_is_ok(string, index): - return index - return self._find_start_index(string, index+2, end) - - def _start_index_is_ok(self, string, index): - return string[index] in self._identifiers\ - and not self._is_escaped(string, index) - - def _is_escaped(self, string, index): - escaped = False - while index > 0 and string[index-1] == '\\': - index -= 1 - escaped = not escaped - return escaped - - def _variable_state(self, char, index): - self._variable_chars.append(char) - if char == '}' and not self._is_escaped(self._string, index): - self._open_curly -= 1 - if self._open_curly == 0: - if not self._is_list_variable(): - raise StopIteration - self._state = self._waiting_list_variable_index_state - elif char in self._identifiers: - self._state = self._internal_variable_start_state - - def _is_list_variable(self): - return self._variable_chars[0] == '@' - - def _internal_variable_start_state(self, char, index): - self._state = self._variable_state - if char == '{': - self._variable_chars.append(char) - self._open_curly += 1 - self._may_have_internal_variables = True - else: - self._variable_state(char, index) - - def _waiting_list_variable_index_state(self, char, index): - if char != '[': - raise StopIteration - self._list_variable_index_chars.append(char) - self._state = self._list_variable_index_state - - def 
_list_variable_index_state(self, char, index): - self._list_variable_index_chars.append(char) - if char == ']': - raise StopIteration diff --git a/pygments/lexers/business.py b/pygments/lexers/business.py new file mode 100644 index 00000000..01a15eaa --- /dev/null +++ b/pygments/lexers/business.py @@ -0,0 +1,479 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.business + ~~~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for "business-oriented" languages. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import re + +from pygments.lexer import RegexLexer, include, words, bygroups +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation, Error + +from pygments.lexers._openedgebuiltins import OPENEDGEKEYWORDS + +__all__ = ['CobolLexer', 'CobolFreeformatLexer', 'ABAPLexer', 'OpenEdgeLexer'] + + +class CobolLexer(RegexLexer): + """ + Lexer for OpenCOBOL code. + + .. versionadded:: 1.6 + """ + name = 'COBOL' + aliases = ['cobol'] + filenames = ['*.cob', '*.COB', '*.cpy', '*.CPY'] + mimetypes = ['text/x-cobol'] + flags = re.IGNORECASE | re.MULTILINE + + # Data Types: by PICTURE and USAGE + # Operators: **, *, +, -, /, <, >, <=, >=, =, <> + # Logical (?): NOT, AND, OR + + # Reserved words: + # http://opencobol.add1tocobol.com/#reserved-words + # Intrinsics: + # http://opencobol.add1tocobol.com/#does-opencobol-implement-any-intrinsic-functions + + tokens = { + 'root': [ + include('comment'), + include('strings'), + include('core'), + include('nums'), + (r'[a-z0-9]([_a-z0-9\-]*[a-z0-9]+)?', Name.Variable), + # (r'[\s]+', Text), + (r'[ \t]+', Text), + ], + 'comment': [ + (r'(^.{6}[*/].*\n|^.{6}|\*>.*\n)', Comment), + ], + 'core': [ + # Figurative constants + (r'(^|(?<=[^0-9a-z_\-]))(ALL\s+)?' 
+ r'((ZEROES)|(HIGH-VALUE|LOW-VALUE|QUOTE|SPACE|ZERO)(S)?)' + r'\s*($|(?=[^0-9a-z_\-]))', + Name.Constant), + + # Reserved words STATEMENTS and other bolds + (words(( + 'ACCEPT', 'ADD', 'ALLOCATE', 'CALL', 'CANCEL', 'CLOSE', 'COMPUTE', + 'CONFIGURATION', 'CONTINUE', 'DATA', 'DELETE', 'DISPLAY', 'DIVIDE', + 'DIVISION', 'ELSE', 'END', 'END-ACCEPT', + 'END-ADD', 'END-CALL', 'END-COMPUTE', 'END-DELETE', 'END-DISPLAY', + 'END-DIVIDE', 'END-EVALUATE', 'END-IF', 'END-MULTIPLY', 'END-OF-PAGE', + 'END-PERFORM', 'END-READ', 'END-RETURN', 'END-REWRITE', 'END-SEARCH', + 'END-START', 'END-STRING', 'END-SUBTRACT', 'END-UNSTRING', 'END-WRITE', + 'ENVIRONMENT', 'EVALUATE', 'EXIT', 'FD', 'FILE', 'FILE-CONTROL', 'FOREVER', + 'FREE', 'GENERATE', 'GO', 'GOBACK', 'IDENTIFICATION', 'IF', 'INITIALIZE', + 'INITIATE', 'INPUT-OUTPUT', 'INSPECT', 'INVOKE', 'I-O-CONTROL', 'LINKAGE', + 'LOCAL-STORAGE', 'MERGE', 'MOVE', 'MULTIPLY', 'OPEN', 'PERFORM', + 'PROCEDURE', 'PROGRAM-ID', 'RAISE', 'READ', 'RELEASE', 'RESUME', + 'RETURN', 'REWRITE', 'SCREEN', 'SD', 'SEARCH', 'SECTION', 'SET', + 'SORT', 'START', 'STOP', 'STRING', 'SUBTRACT', 'SUPPRESS', + 'TERMINATE', 'THEN', 'UNLOCK', 'UNSTRING', 'USE', 'VALIDATE', + 'WORKING-STORAGE', 'WRITE'), prefix=r'(^|(?<=[^0-9a-z_\-]))', + suffix=r'\s*($|(?=[^0-9a-z_\-]))'), + Keyword.Reserved), + + # Reserved words + (words(( + 'ACCESS', 'ADDRESS', 'ADVANCING', 'AFTER', 'ALL', + 'ALPHABET', 'ALPHABETIC', 'ALPHABETIC-LOWER', 'ALPHABETIC-UPPER', + 'ALPHANUMERIC', 'ALPHANUMERIC-EDITED', 'ALSO', 'ALTER', 'ALTERNATE' + 'ANY', 'ARE', 'AREA', 'AREAS', 'ARGUMENT-NUMBER', 'ARGUMENT-VALUE', 'AS', + 'ASCENDING', 'ASSIGN', 'AT', 'AUTO', 'AUTO-SKIP', 'AUTOMATIC', 'AUTOTERMINATE', + 'BACKGROUND-COLOR', 'BASED', 'BEEP', 'BEFORE', 'BELL', + 'BLANK', 'BLINK', 'BLOCK', 'BOTTOM', 'BY', 'BYTE-LENGTH', 'CHAINING', + 'CHARACTER', 'CHARACTERS', 'CLASS', 'CODE', 'CODE-SET', 'COL', 'COLLATING', + 'COLS', 'COLUMN', 'COLUMNS', 'COMMA', 'COMMAND-LINE', 'COMMIT', 'COMMON', + 'CONSTANT', 'CONTAINS', 'CONTENT', 'CONTROL', + 'CONTROLS', 'CONVERTING', 'COPY', 'CORR', 'CORRESPONDING', 'COUNT', 'CRT', + 'CURRENCY', 'CURSOR', 'CYCLE', 'DATE', 'DAY', 'DAY-OF-WEEK', 'DE', 'DEBUGGING', + 'DECIMAL-POINT', 'DECLARATIVES', 'DEFAULT', 'DELIMITED', + 'DELIMITER', 'DEPENDING', 'DESCENDING', 'DETAIL', 'DISK', + 'DOWN', 'DUPLICATES', 'DYNAMIC', 'EBCDIC', + 'ENTRY', 'ENVIRONMENT-NAME', 'ENVIRONMENT-VALUE', 'EOL', 'EOP', + 'EOS', 'ERASE', 'ERROR', 'ESCAPE', 'EXCEPTION', + 'EXCLUSIVE', 'EXTEND', 'EXTERNAL', + 'FILE-ID', 'FILLER', 'FINAL', 'FIRST', 'FIXED', 'FLOAT-LONG', 'FLOAT-SHORT', + 'FOOTING', 'FOR', 'FOREGROUND-COLOR', 'FORMAT', 'FROM', 'FULL', 'FUNCTION', + 'FUNCTION-ID', 'GIVING', 'GLOBAL', 'GROUP', + 'HEADING', 'HIGHLIGHT', 'I-O', 'ID', + 'IGNORE', 'IGNORING', 'IN', 'INDEX', 'INDEXED', 'INDICATE', + 'INITIAL', 'INITIALIZED', 'INPUT', + 'INTO', 'INTRINSIC', 'INVALID', 'IS', 'JUST', 'JUSTIFIED', 'KEY', 'LABEL', + 'LAST', 'LEADING', 'LEFT', 'LENGTH', 'LIMIT', 'LIMITS', 'LINAGE', + 'LINAGE-COUNTER', 'LINE', 'LINES', 'LOCALE', 'LOCK', + 'LOWLIGHT', 'MANUAL', 'MEMORY', 'MINUS', 'MODE', + 'MULTIPLE', 'NATIONAL', 'NATIONAL-EDITED', 'NATIVE', + 'NEGATIVE', 'NEXT', 'NO', 'NULL', 'NULLS', 'NUMBER', 'NUMBERS', 'NUMERIC', + 'NUMERIC-EDITED', 'OBJECT-COMPUTER', 'OCCURS', 'OF', 'OFF', 'OMITTED', 'ON', 'ONLY', + 'OPTIONAL', 'ORDER', 'ORGANIZATION', 'OTHER', 'OUTPUT', 'OVERFLOW', + 'OVERLINE', 'PACKED-DECIMAL', 'PADDING', 'PAGE', 'PARAGRAPH', + 'PLUS', 'POINTER', 'POSITION', 'POSITIVE', 'PRESENT', 'PREVIOUS', + 'PRINTER', 'PRINTING', 
'PROCEDURE-POINTER', 'PROCEDURES', + 'PROCEED', 'PROGRAM', 'PROGRAM-POINTER', 'PROMPT', 'QUOTE', + 'QUOTES', 'RANDOM', 'RD', 'RECORD', 'RECORDING', 'RECORDS', 'RECURSIVE', + 'REDEFINES', 'REEL', 'REFERENCE', 'RELATIVE', 'REMAINDER', 'REMOVAL', + 'RENAMES', 'REPLACING', 'REPORT', 'REPORTING', 'REPORTS', 'REPOSITORY', + 'REQUIRED', 'RESERVE', 'RETURNING', 'REVERSE-VIDEO', 'REWIND', + 'RIGHT', 'ROLLBACK', 'ROUNDED', 'RUN', 'SAME', 'SCROLL', + 'SECURE', 'SEGMENT-LIMIT', 'SELECT', 'SENTENCE', 'SEPARATE', + 'SEQUENCE', 'SEQUENTIAL', 'SHARING', 'SIGN', 'SIGNED', 'SIGNED-INT', + 'SIGNED-LONG', 'SIGNED-SHORT', 'SIZE', 'SORT-MERGE', 'SOURCE', + 'SOURCE-COMPUTER', 'SPECIAL-NAMES', 'STANDARD', + 'STANDARD-1', 'STANDARD-2', 'STATUS', 'SUM', + 'SYMBOLIC', 'SYNC', 'SYNCHRONIZED', 'TALLYING', 'TAPE', + 'TEST', 'THROUGH', 'THRU', 'TIME', 'TIMES', 'TO', 'TOP', 'TRAILING', + 'TRANSFORM', 'TYPE', 'UNDERLINE', 'UNIT', 'UNSIGNED', + 'UNSIGNED-INT', 'UNSIGNED-LONG', 'UNSIGNED-SHORT', 'UNTIL', 'UP', + 'UPDATE', 'UPON', 'USAGE', 'USING', 'VALUE', 'VALUES', 'VARYING', + 'WAIT', 'WHEN', 'WITH', 'WORDS', 'YYYYDDD', 'YYYYMMDD'), + prefix=r'(^|(?<=[^0-9a-z_\-]))', suffix=r'\s*($|(?=[^0-9a-z_\-]))'), + Keyword.Pseudo), + + # inactive reserved words + (words(( + 'ACTIVE-CLASS', 'ALIGNED', 'ANYCASE', 'ARITHMETIC', 'ATTRIBUTE', 'B-AND', + 'B-NOT', 'B-OR', 'B-XOR', 'BIT', 'BOOLEAN', 'CD', 'CENTER', 'CF', 'CH', 'CHAIN', 'CLASS-ID', + 'CLASSIFICATION', 'COMMUNICATION', 'CONDITION', 'DATA-POINTER', + 'DESTINATION', 'DISABLE', 'EC', 'EGI', 'EMI', 'ENABLE', 'END-RECEIVE', + 'ENTRY-CONVENTION', 'EO', 'ESI', 'EXCEPTION-OBJECT', 'EXPANDS', 'FACTORY', + 'FLOAT-BINARY-16', 'FLOAT-BINARY-34', 'FLOAT-BINARY-7', + 'FLOAT-DECIMAL-16', 'FLOAT-DECIMAL-34', 'FLOAT-EXTENDED', 'FORMAT', + 'FUNCTION-POINTER', 'GET', 'GROUP-USAGE', 'IMPLEMENTS', 'INFINITY', + 'INHERITS', 'INTERFACE', 'INTERFACE-ID', 'INVOKE', 'LC_ALL', 'LC_COLLATE', + 'LC_CTYPE', 'LC_MESSAGES', 'LC_MONETARY', 'LC_NUMERIC', 'LC_TIME', + 'LINE-COUNTER', 'MESSAGE', 'METHOD', 'METHOD-ID', 'NESTED', 'NONE', 'NORMAL', + 'OBJECT', 'OBJECT-REFERENCE', 'OPTIONS', 'OVERRIDE', 'PAGE-COUNTER', 'PF', 'PH', + 'PROPERTY', 'PROTOTYPE', 'PURGE', 'QUEUE', 'RAISE', 'RAISING', 'RECEIVE', + 'RELATION', 'REPLACE', 'REPRESENTS-NOT-A-NUMBER', 'RESET', 'RESUME', 'RETRY', + 'RF', 'RH', 'SECONDS', 'SEGMENT', 'SELF', 'SEND', 'SOURCES', 'STATEMENT', 'STEP', + 'STRONG', 'SUB-QUEUE-1', 'SUB-QUEUE-2', 'SUB-QUEUE-3', 'SUPER', 'SYMBOL', + 'SYSTEM-DEFAULT', 'TABLE', 'TERMINAL', 'TEXT', 'TYPEDEF', 'UCS-4', 'UNIVERSAL', + 'USER-DEFAULT', 'UTF-16', 'UTF-8', 'VAL-STATUS', 'VALID', 'VALIDATE', + 'VALIDATE-STATUS'), + prefix=r'(^|(?<=[^0-9a-z_\-]))', suffix=r'\s*($|(?=[^0-9a-z_\-]))'), + Error), + + # Data Types + (r'(^|(?<=[^0-9a-z_\-]))' + r'(PIC\s+.+?(?=(\s|\.\s))|PICTURE\s+.+?(?=(\s|\.\s))|' + r'(COMPUTATIONAL)(-[1-5X])?|(COMP)(-[1-5X])?|' + r'BINARY-C-LONG|' + r'BINARY-CHAR|BINARY-DOUBLE|BINARY-LONG|BINARY-SHORT|' + r'BINARY)\s*($|(?=[^0-9a-z_\-]))', Keyword.Type), + + # Operators + (r'(\*\*|\*|\+|-|/|<=|>=|<|>|==|/=|=)', Operator), + + # (r'(::)', Keyword.Declaration), + + (r'([(),;:&%.])', Punctuation), + + # Intrinsics + (r'(^|(?<=[^0-9a-z_\-]))(ABS|ACOS|ANNUITY|ASIN|ATAN|BYTE-LENGTH|' + r'CHAR|COMBINED-DATETIME|CONCATENATE|COS|CURRENT-DATE|' + r'DATE-OF-INTEGER|DATE-TO-YYYYMMDD|DAY-OF-INTEGER|DAY-TO-YYYYDDD|' + r'EXCEPTION-(?:FILE|LOCATION|STATEMENT|STATUS)|EXP10|EXP|E|' + r'FACTORIAL|FRACTION-PART|INTEGER-OF-(?:DATE|DAY|PART)|INTEGER|' + r'LENGTH|LOCALE-(?:DATE|TIME(?:-FROM-SECONDS)?)|LOG(?:10)?|' + 
r'LOWER-CASE|MAX|MEAN|MEDIAN|MIDRANGE|MIN|MOD|NUMVAL(?:-C)?|' + r'ORD(?:-MAX|-MIN)?|PI|PRESENT-VALUE|RANDOM|RANGE|REM|REVERSE|' + r'SECONDS-FROM-FORMATTED-TIME|SECONDS-PAST-MIDNIGHT|SIGN|SIN|SQRT|' + r'STANDARD-DEVIATION|STORED-CHAR-LENGTH|SUBSTITUTE(?:-CASE)?|' + r'SUM|TAN|TEST-DATE-YYYYMMDD|TEST-DAY-YYYYDDD|TRIM|' + r'UPPER-CASE|VARIANCE|WHEN-COMPILED|YEAR-TO-YYYY)\s*' + r'($|(?=[^0-9a-z_\-]))', Name.Function), + + # Booleans + (r'(^|(?<=[^0-9a-z_\-]))(true|false)\s*($|(?=[^0-9a-z_\-]))', Name.Builtin), + # Comparing Operators + (r'(^|(?<=[^0-9a-z_\-]))(equal|equals|ne|lt|le|gt|ge|' + r'greater|less|than|not|and|or)\s*($|(?=[^0-9a-z_\-]))', Operator.Word), + ], + + # \"[^\"\n]*\"|\'[^\'\n]*\' + 'strings': [ + # apparently strings can be delimited by EOL if they are continued + # in the next line + (r'"[^"\n]*("|\n)', String.Double), + (r"'[^'\n]*('|\n)", String.Single), + ], + + 'nums': [ + (r'\d+(\s*|\.$|$)', Number.Integer), + (r'[+-]?\d*\.\d+([eE][-+]?\d+)?', Number.Float), + (r'[+-]?\d+\.\d*([eE][-+]?\d+)?', Number.Float), + ], + } + + +class CobolFreeformatLexer(CobolLexer): + """ + Lexer for Free format OpenCOBOL code. + + .. versionadded:: 1.6 + """ + name = 'COBOLFree' + aliases = ['cobolfree'] + filenames = ['*.cbl', '*.CBL'] + mimetypes = [] + flags = re.IGNORECASE | re.MULTILINE + + tokens = { + 'comment': [ + (r'(\*>.*\n|^\w*\*.*$)', Comment), + ], + } + + +class ABAPLexer(RegexLexer): + """ + Lexer for ABAP, SAP's integrated language. + + .. versionadded:: 1.1 + """ + name = 'ABAP' + aliases = ['abap'] + filenames = ['*.abap'] + mimetypes = ['text/x-abap'] + + flags = re.IGNORECASE | re.MULTILINE + + tokens = { + 'common': [ + (r'\s+', Text), + (r'^\*.*$', Comment.Single), + (r'\".*?\n', Comment.Single), + ], + 'variable-names': [ + (r'<\S+>', Name.Variable), + (r'\w[\w~]*(?:(\[\])|->\*)?', Name.Variable), + ], + 'root': [ + include('common'), + # function calls + (r'(CALL\s+(?:BADI|CUSTOMER-FUNCTION|FUNCTION))(\s+)(\'?\S+\'?)', + bygroups(Keyword, Text, Name.Function)), + (r'(CALL\s+(?:DIALOG|SCREEN|SUBSCREEN|SELECTION-SCREEN|' + r'TRANSACTION|TRANSFORMATION))\b', + Keyword), + (r'(FORM|PERFORM)(\s+)(\w+)', + bygroups(Keyword, Text, Name.Function)), + (r'(PERFORM)(\s+)(\()(\w+)(\))', + bygroups(Keyword, Text, Punctuation, Name.Variable, Punctuation)), + (r'(MODULE)(\s+)(\S+)(\s+)(INPUT|OUTPUT)', + bygroups(Keyword, Text, Name.Function, Text, Keyword)), + + # method implementation + (r'(METHOD)(\s+)([\w~]+)', + bygroups(Keyword, Text, Name.Function)), + # method calls + (r'(\s+)([\w\-]+)([=\-]>)([\w\-~]+)', + bygroups(Text, Name.Variable, Operator, Name.Function)), + # call methodnames returning style + (r'(?<=(=|-)>)([\w\-~]+)(?=\()', Name.Function), + + # keywords with dashes in them. + # these need to be first, because for instance the -ID part + # of MESSAGE-ID wouldn't get highlighted if MESSAGE was + # first in the list of keywords. 
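# A quick illustration of why ordering matters: both RegexLexer rules and
# regular-expression alternation commit to the first branch that matches, not
# the longest one (sketch using only the standard re module):
#
#   >>> import re
#   >>> re.match(r'MESSAGE|MESSAGE-ID', 'MESSAGE-ID').group()
#   'MESSAGE'
#   >>> re.match(r'MESSAGE-ID|MESSAGE', 'MESSAGE-ID').group()
#   'MESSAGE-ID'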
+ (r'(ADD-CORRESPONDING|AUTHORITY-CHECK|' + r'CLASS-DATA|CLASS-EVENTS|CLASS-METHODS|CLASS-POOL|' + r'DELETE-ADJACENT|DIVIDE-CORRESPONDING|' + r'EDITOR-CALL|ENHANCEMENT-POINT|ENHANCEMENT-SECTION|EXIT-COMMAND|' + r'FIELD-GROUPS|FIELD-SYMBOLS|FUNCTION-POOL|' + r'INTERFACE-POOL|INVERTED-DATE|' + r'LOAD-OF-PROGRAM|LOG-POINT|' + r'MESSAGE-ID|MOVE-CORRESPONDING|MULTIPLY-CORRESPONDING|' + r'NEW-LINE|NEW-PAGE|NEW-SECTION|NO-EXTENSION|' + r'OUTPUT-LENGTH|PRINT-CONTROL|' + r'SELECT-OPTIONS|START-OF-SELECTION|SUBTRACT-CORRESPONDING|' + r'SYNTAX-CHECK|SYSTEM-EXCEPTIONS|' + r'TYPE-POOL|TYPE-POOLS' + r')\b', Keyword), + + # keyword combinations + (r'CREATE\s+(PUBLIC|PRIVATE|DATA|OBJECT)|' + r'((PUBLIC|PRIVATE|PROTECTED)\s+SECTION|' + r'(TYPE|LIKE)(\s+(LINE\s+OF|REF\s+TO|' + r'(SORTED|STANDARD|HASHED)\s+TABLE\s+OF))?|' + r'FROM\s+(DATABASE|MEMORY)|CALL\s+METHOD|' + r'(GROUP|ORDER) BY|HAVING|SEPARATED BY|' + r'GET\s+(BADI|BIT|CURSOR|DATASET|LOCALE|PARAMETER|' + r'PF-STATUS|(PROPERTY|REFERENCE)\s+OF|' + r'RUN\s+TIME|TIME\s+(STAMP)?)?|' + r'SET\s+(BIT|BLANK\s+LINES|COUNTRY|CURSOR|DATASET|EXTENDED\s+CHECK|' + r'HANDLER|HOLD\s+DATA|LANGUAGE|LEFT\s+SCROLL-BOUNDARY|' + r'LOCALE|MARGIN|PARAMETER|PF-STATUS|PROPERTY\s+OF|' + r'RUN\s+TIME\s+(ANALYZER|CLOCK\s+RESOLUTION)|SCREEN|' + r'TITLEBAR|UPADTE\s+TASK\s+LOCAL|USER-COMMAND)|' + r'CONVERT\s+((INVERTED-)?DATE|TIME|TIME\s+STAMP|TEXT)|' + r'(CLOSE|OPEN)\s+(DATASET|CURSOR)|' + r'(TO|FROM)\s+(DATA BUFFER|INTERNAL TABLE|MEMORY ID|' + r'DATABASE|SHARED\s+(MEMORY|BUFFER))|' + r'DESCRIBE\s+(DISTANCE\s+BETWEEN|FIELD|LIST|TABLE)|' + r'FREE\s(MEMORY|OBJECT)?|' + r'PROCESS\s+(BEFORE\s+OUTPUT|AFTER\s+INPUT|' + r'ON\s+(VALUE-REQUEST|HELP-REQUEST))|' + r'AT\s+(LINE-SELECTION|USER-COMMAND|END\s+OF|NEW)|' + r'AT\s+SELECTION-SCREEN(\s+(ON(\s+(BLOCK|(HELP|VALUE)-REQUEST\s+FOR|' + r'END\s+OF|RADIOBUTTON\s+GROUP))?|OUTPUT))?|' + r'SELECTION-SCREEN:?\s+((BEGIN|END)\s+OF\s+((TABBED\s+)?BLOCK|LINE|' + r'SCREEN)|COMMENT|FUNCTION\s+KEY|' + r'INCLUDE\s+BLOCKS|POSITION|PUSHBUTTON|' + r'SKIP|ULINE)|' + r'LEAVE\s+(LIST-PROCESSING|PROGRAM|SCREEN|' + r'TO LIST-PROCESSING|TO TRANSACTION)' + r'(ENDING|STARTING)\s+AT|' + r'FORMAT\s+(COLOR|INTENSIFIED|INVERSE|HOTSPOT|INPUT|FRAMES|RESET)|' + r'AS\s+(CHECKBOX|SUBSCREEN|WINDOW)|' + r'WITH\s+(((NON-)?UNIQUE)?\s+KEY|FRAME)|' + r'(BEGIN|END)\s+OF|' + r'DELETE(\s+ADJACENT\s+DUPLICATES\sFROM)?|' + r'COMPARING(\s+ALL\s+FIELDS)?|' + r'INSERT(\s+INITIAL\s+LINE\s+INTO|\s+LINES\s+OF)?|' + r'IN\s+((BYTE|CHARACTER)\s+MODE|PROGRAM)|' + r'END-OF-(DEFINITION|PAGE|SELECTION)|' + r'WITH\s+FRAME(\s+TITLE)|' + + # simple combinations + r'AND\s+(MARK|RETURN)|CLIENT\s+SPECIFIED|CORRESPONDING\s+FIELDS\s+OF|' + r'IF\s+FOUND|FOR\s+EVENT|INHERITING\s+FROM|LEAVE\s+TO\s+SCREEN|' + r'LOOP\s+AT\s+(SCREEN)?|LOWER\s+CASE|MATCHCODE\s+OBJECT|MODIF\s+ID|' + r'MODIFY\s+SCREEN|NESTING\s+LEVEL|NO\s+INTERVALS|OF\s+STRUCTURE|' + r'RADIOBUTTON\s+GROUP|RANGE\s+OF|REF\s+TO|SUPPRESS DIALOG|' + r'TABLE\s+OF|UPPER\s+CASE|TRANSPORTING\s+NO\s+FIELDS|' + r'VALUE\s+CHECK|VISIBLE\s+LENGTH|HEADER\s+LINE)\b', Keyword), + + # single word keywords.
+ (r'(^|(?<=(\s|\.)))(ABBREVIATED|ADD|ALIASES|APPEND|ASSERT|' + r'ASSIGN(ING)?|AT(\s+FIRST)?|' + r'BACK|BLOCK|BREAK-POINT|' + r'CASE|CATCH|CHANGING|CHECK|CLASS|CLEAR|COLLECT|COLOR|COMMIT|' + r'CREATE|COMMUNICATION|COMPONENTS?|COMPUTE|CONCATENATE|CONDENSE|' + r'CONSTANTS|CONTEXTS|CONTINUE|CONTROLS|' + r'DATA|DECIMALS|DEFAULT|DEFINE|DEFINITION|DEFERRED|DEMAND|' + r'DETAIL|DIRECTORY|DIVIDE|DO|' + r'ELSE(IF)?|ENDAT|ENDCASE|ENDCLASS|ENDDO|ENDFORM|ENDFUNCTION|' + r'ENDIF|ENDLOOP|ENDMETHOD|ENDMODULE|ENDSELECT|ENDTRY|' + r'ENHANCEMENT|EVENTS|EXCEPTIONS|EXIT|EXPORT|EXPORTING|EXTRACT|' + r'FETCH|FIELDS?|FIND|FOR|FORM|FORMAT|FREE|FROM|' + r'HIDE|' + r'ID|IF|IMPORT|IMPLEMENTATION|IMPORTING|IN|INCLUDE|INCLUDING|' + r'INDEX|INFOTYPES|INITIALIZATION|INTERFACE|INTERFACES|INTO|' + r'LENGTH|LINES|LOAD|LOCAL|' + r'JOIN|' + r'KEY|' + r'MAXIMUM|MESSAGE|METHOD[S]?|MINIMUM|MODULE|MODIFY|MOVE|MULTIPLY|' + r'NODES|' + r'OBLIGATORY|OF|OFF|ON|OVERLAY|' + r'PACK|PARAMETERS|PERCENTAGE|POSITION|PROGRAM|PROVIDE|PUBLIC|PUT|' + r'RAISE|RAISING|RANGES|READ|RECEIVE|REFRESH|REJECT|REPORT|RESERVE|' + r'RESUME|RETRY|RETURN|RETURNING|RIGHT|ROLLBACK|' + r'SCROLL|SEARCH|SELECT|SHIFT|SINGLE|SKIP|SORT|SPLIT|STATICS|STOP|' + r'SUBMIT|SUBTRACT|SUM|SUMMARY|SUMMING|SUPPLY|' + r'TABLE|TABLES|TIMES|TITLE|TO|TOP-OF-PAGE|TRANSFER|TRANSLATE|TRY|TYPES|' + r'ULINE|UNDER|UNPACK|UPDATE|USING|' + r'VALUE|VALUES|VIA|' + r'WAIT|WHEN|WHERE|WHILE|WITH|WINDOW|WRITE)\b', Keyword), + + # builtins + (r'(abs|acos|asin|atan|' + r'boolc|boolx|bit_set|' + r'char_off|charlen|ceil|cmax|cmin|condense|contains|' + r'contains_any_of|contains_any_not_of|concat_lines_of|cos|cosh|' + r'count|count_any_of|count_any_not_of|' + r'dbmaxlen|distance|' + r'escape|exp|' + r'find|find_end|find_any_of|find_any_not_of|floor|frac|from_mixed|' + r'insert|' + r'lines|log|log10|' + r'match|matches|' + r'nmax|nmin|numofchar|' + r'repeat|replace|rescale|reverse|round|' + r'segment|shift_left|shift_right|sign|sin|sinh|sqrt|strlen|' + r'substring|substring_after|substring_from|substring_before|substring_to|' + r'tan|tanh|to_upper|to_lower|to_mixed|translate|trunc|' + r'xstrlen)(\()\b', bygroups(Name.Builtin, Punctuation)), + + (r'&[0-9]', Name), + (r'[0-9]+', Number.Integer), + + # operators which look like variable names before + # parsing variable names. + (r'(?<=(\s|.))(AND|EQ|NE|GT|LT|GE|LE|CO|CN|CA|NA|CS|NOT|NS|CP|NP|' + r'BYTE-CO|BYTE-CN|BYTE-CA|BYTE-NA|BYTE-CS|BYTE-NS|' + r'IS\s+(NOT\s+)?(INITIAL|ASSIGNED|REQUESTED|BOUND))\b', Operator), + + include('variable-names'), + + # standard operators after variable names, + # because < and > are part of field symbols. + (r'[?*<>=\-+]', Operator), + (r"'(''|[^'])*'", String.Single), + (r"`([^`])*`", String.Single), + (r'[/;:()\[\],\.]', Punctuation) + ], + } + + +class OpenEdgeLexer(RegexLexer): + """ + Lexer for `OpenEdge ABL (formerly Progress) + `_ source code. + + ..
versionadded:: 1.5 + """ + name = 'OpenEdge ABL' + aliases = ['openedge', 'abl', 'progress'] + filenames = ['*.p', '*.cls'] + mimetypes = ['text/x-openedge', 'application/x-openedge'] + + types = (r'(?i)(^|(?<=[^0-9a-z_\-]))(CHARACTER|CHAR|CHARA|CHARAC|CHARACT|CHARACTE|' + r'COM-HANDLE|DATE|DATETIME|DATETIME-TZ|' + r'DECIMAL|DEC|DECI|DECIM|DECIMA|HANDLE|' + r'INT64|INTEGER|INT|INTE|INTEG|INTEGE|' + r'LOGICAL|LONGCHAR|MEMPTR|RAW|RECID|ROWID)\s*($|(?=[^0-9a-z_\-]))') + + keywords = (r'(?i)(^|(?<=[^0-9a-z_\-]))(' + + r'|'.join(OPENEDGEKEYWORDS) + + r')\s*($|(?=[^0-9a-z_\-]))') + tokens = { + 'root': [ + (r'/\*', Comment.Multiline, 'comment'), + (r'\{', Comment.Preproc, 'preprocessor'), + (r'\s*&.*', Comment.Preproc), + (r'0[xX][0-9a-fA-F]+[LlUu]*', Number.Hex), + (r'(?i)(DEFINE|DEF|DEFI|DEFIN)\b', Keyword.Declaration), + (types, Keyword.Type), + (keywords, Name.Builtin), + (r'"(\\\\|\\"|[^"])*"', String.Double), + (r"'(\\\\|\\'|[^'])*'", String.Single), + (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float), + (r'[0-9]+', Number.Integer), + (r'\s+', Text), + (r'[+*/=-]', Operator), + (r'[.:()]', Punctuation), + (r'.', Name.Variable), # Lazy catch-all + ], + 'comment': [ + (r'[^*/]', Comment.Multiline), + (r'/\*', Comment.Multiline, '#push'), + (r'\*/', Comment.Multiline, '#pop'), + (r'[*/]', Comment.Multiline) + ], + 'preprocessor': [ + (r'[^{}]', Comment.Preproc), + (r'{', Comment.Preproc, '#push'), + (r'}', Comment.Preproc, '#pop'), + ], + } diff --git a/pygments/lexers/cobol.py b/pygments/lexers/cobol.py deleted file mode 100644 index 55c8e959..00000000 --- a/pygments/lexers/cobol.py +++ /dev/null @@ -1,231 +0,0 @@ -# -*- coding: utf-8 -*- -""" - pygments.lexers.cobol - ~~~~~~~~~~~~~~~~~~~~~ - - Lexers for COBOL languages. - - :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. - :license: BSD, see LICENSE for details. -""" - -import re - -from pygments.lexer import RegexLexer, include -from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ - Number, Punctuation, Error - -__all__ = ['CobolLexer', 'CobolFreeformatLexer'] - - -class CobolLexer(RegexLexer): - """ - Lexer for OpenCOBOL code. - - .. versionadded:: 1.6 - """ - name = 'COBOL' - aliases = ['cobol'] - filenames = ['*.cob', '*.COB', '*.cpy', '*.CPY'] - mimetypes = ['text/x-cobol'] - flags = re.IGNORECASE | re.MULTILINE - - # Data Types: by PICTURE and USAGE - # Operators: **, *, +, -, /, <, >, <=, >=, =, <> - # Logical (?): NOT, AND, OR - - # Reserved words: - # http://opencobol.add1tocobol.com/#reserved-words - # Intrinsics: - # http://opencobol.add1tocobol.com/#does-opencobol-implement-any-intrinsic-functions - - tokens = { - 'root': [ - include('comment'), - include('strings'), - include('core'), - include('nums'), - (r'[a-z0-9]([_a-z0-9\-]*[a-z0-9]+)?', Name.Variable), - # (r'[\s]+', Text), - (r'[ \t]+', Text), - ], - 'comment': [ - (r'(^.{6}[*/].*\n|^.{6}|\*>.*\n)', Comment), - ], - 'core': [ - # Figurative constants - (r'(^|(?<=[^0-9a-z_\-]))(ALL\s+)?' 
- r'((ZEROES)|(HIGH-VALUE|LOW-VALUE|QUOTE|SPACE|ZERO)(S)?)' - r'\s*($|(?=[^0-9a-z_\-]))', - Name.Constant), - - # Reserved words STATEMENTS and other bolds - (r'(^|(?<=[^0-9a-z_\-]))' - r'(ACCEPT|ADD|ALLOCATE|CALL|CANCEL|CLOSE|COMPUTE|' - r'CONFIGURATION|CONTINUE|' - r'DATA|DELETE|DISPLAY|DIVIDE|DIVISION|ELSE|END|END-ACCEPT|' - r'END-ADD|END-CALL|END-COMPUTE|END-DELETE|END-DISPLAY|' - r'END-DIVIDE|END-EVALUATE|END-IF|END-MULTIPLY|END-OF-PAGE|' - r'END-PERFORM|END-READ|END-RETURN|END-REWRITE|END-SEARCH|' - r'END-START|END-STRING|END-SUBTRACT|END-UNSTRING|END-WRITE|' - r'ENVIRONMENT|EVALUATE|EXIT|FD|FILE|FILE-CONTROL|FOREVER|' - r'FREE|GENERATE|GO|GOBACK|' - r'IDENTIFICATION|IF|INITIALIZE|' - r'INITIATE|INPUT-OUTPUT|INSPECT|INVOKE|I-O-CONTROL|LINKAGE|' - r'LOCAL-STORAGE|MERGE|MOVE|MULTIPLY|OPEN|' - r'PERFORM|PROCEDURE|PROGRAM-ID|RAISE|READ|RELEASE|RESUME|' - r'RETURN|REWRITE|SCREEN|' - r'SD|SEARCH|SECTION|SET|SORT|START|STOP|STRING|SUBTRACT|' - r'SUPPRESS|TERMINATE|THEN|UNLOCK|UNSTRING|USE|VALIDATE|' - r'WORKING-STORAGE|WRITE)' - r'\s*($|(?=[^0-9a-z_\-]))', Keyword.Reserved), - - # Reserved words - (r'(^|(?<=[^0-9a-z_\-]))' - r'(ACCESS|ADDRESS|ADVANCING|AFTER|ALL|' - r'ALPHABET|ALPHABETIC|ALPHABETIC-LOWER|ALPHABETIC-UPPER|' - r'ALPHANUMERIC|ALPHANUMERIC-EDITED|ALSO|ALTER|ALTERNATE' - r'ANY|ARE|AREA|AREAS|ARGUMENT-NUMBER|ARGUMENT-VALUE|AS|' - r'ASCENDING|ASSIGN|AT|AUTO|AUTO-SKIP|AUTOMATIC|AUTOTERMINATE|' - r'BACKGROUND-COLOR|BASED|BEEP|BEFORE|BELL|' - r'BLANK|' - r'BLINK|BLOCK|BOTTOM|BY|BYTE-LENGTH|CHAINING|' - r'CHARACTER|CHARACTERS|CLASS|CODE|CODE-SET|COL|COLLATING|' - r'COLS|COLUMN|COLUMNS|COMMA|COMMAND-LINE|COMMIT|COMMON|' - r'CONSTANT|CONTAINS|CONTENT|CONTROL|' - r'CONTROLS|CONVERTING|COPY|CORR|CORRESPONDING|COUNT|CRT|' - r'CURRENCY|CURSOR|CYCLE|DATE|DAY|DAY-OF-WEEK|DE|DEBUGGING|' - r'DECIMAL-POINT|DECLARATIVES|DEFAULT|DELIMITED|' - r'DELIMITER|DEPENDING|DESCENDING|DETAIL|DISK|' - r'DOWN|DUPLICATES|DYNAMIC|EBCDIC|' - r'ENTRY|ENVIRONMENT-NAME|ENVIRONMENT-VALUE|EOL|EOP|' - r'EOS|ERASE|ERROR|ESCAPE|EXCEPTION|' - r'EXCLUSIVE|EXTEND|EXTERNAL|' - r'FILE-ID|FILLER|FINAL|FIRST|FIXED|FLOAT-LONG|FLOAT-SHORT|' - r'FOOTING|FOR|FOREGROUND-COLOR|FORMAT|FROM|FULL|FUNCTION|' - r'FUNCTION-ID|GIVING|GLOBAL|GROUP|' - r'HEADING|HIGHLIGHT|I-O|ID|' - r'IGNORE|IGNORING|IN|INDEX|INDEXED|INDICATE|' - r'INITIAL|INITIALIZED|INPUT|' - r'INTO|INTRINSIC|INVALID|IS|JUST|JUSTIFIED|KEY|LABEL|' - r'LAST|LEADING|LEFT|LENGTH|LIMIT|LIMITS|LINAGE|' - r'LINAGE-COUNTER|LINE|LINES|LOCALE|LOCK|' - r'LOWLIGHT|MANUAL|MEMORY|MINUS|MODE|' - r'MULTIPLE|NATIONAL|NATIONAL-EDITED|NATIVE|' - r'NEGATIVE|NEXT|NO|NULL|NULLS|NUMBER|NUMBERS|NUMERIC|' - r'NUMERIC-EDITED|OBJECT-COMPUTER|OCCURS|OF|OFF|OMITTED|ON|ONLY|' - r'OPTIONAL|ORDER|ORGANIZATION|OTHER|OUTPUT|OVERFLOW|' - r'OVERLINE|PACKED-DECIMAL|PADDING|PAGE|PARAGRAPH|' - r'PLUS|POINTER|POSITION|POSITIVE|PRESENT|PREVIOUS|' - r'PRINTER|PRINTING|PROCEDURE-POINTER|PROCEDURES|' - r'PROCEED|PROGRAM|PROGRAM-POINTER|PROMPT|QUOTE|' - r'QUOTES|RANDOM|RD|RECORD|RECORDING|RECORDS|RECURSIVE|' - r'REDEFINES|REEL|REFERENCE|RELATIVE|REMAINDER|REMOVAL|' - r'RENAMES|REPLACING|REPORT|REPORTING|REPORTS|REPOSITORY|' - r'REQUIRED|RESERVE|RETURNING|REVERSE-VIDEO|REWIND|' - r'RIGHT|ROLLBACK|ROUNDED|RUN|SAME|SCROLL|' - r'SECURE|SEGMENT-LIMIT|SELECT|SENTENCE|SEPARATE|' - r'SEQUENCE|SEQUENTIAL|SHARING|SIGN|SIGNED|SIGNED-INT|' - r'SIGNED-LONG|SIGNED-SHORT|SIZE|SORT-MERGE|SOURCE|' - r'SOURCE-COMPUTER|SPECIAL-NAMES|STANDARD|' - r'STANDARD-1|STANDARD-2|STATUS|SUM|' - r'SYMBOLIC|SYNC|SYNCHRONIZED|TALLYING|TAPE|' - 
r'TEST|THROUGH|THRU|TIME|TIMES|TO|TOP|TRAILING|' - r'TRANSFORM|TYPE|UNDERLINE|UNIT|UNSIGNED|' - r'UNSIGNED-INT|UNSIGNED-LONG|UNSIGNED-SHORT|UNTIL|UP|' - r'UPDATE|UPON|USAGE|USING|VALUE|VALUES|VARYING|WAIT|WHEN|' - r'WITH|WORDS|YYYYDDD|YYYYMMDD)' - r'\s*($|(?=[^0-9a-z_\-]))', Keyword.Pseudo), - - # inactive reserved words - (r'(^|(?<=[^0-9a-z_\-]))' - r'(ACTIVE-CLASS|ALIGNED|ANYCASE|ARITHMETIC|ATTRIBUTE|B-AND|' - r'B-NOT|B-OR|B-XOR|BIT|BOOLEAN|CD|CENTER|CF|CH|CHAIN|CLASS-ID|' - r'CLASSIFICATION|COMMUNICATION|CONDITION|DATA-POINTER|' - r'DESTINATION|DISABLE|EC|EGI|EMI|ENABLE|END-RECEIVE|' - r'ENTRY-CONVENTION|EO|ESI|EXCEPTION-OBJECT|EXPANDS|FACTORY|' - r'FLOAT-BINARY-16|FLOAT-BINARY-34|FLOAT-BINARY-7|' - r'FLOAT-DECIMAL-16|FLOAT-DECIMAL-34|FLOAT-EXTENDED|FORMAT|' - r'FUNCTION-POINTER|GET|GROUP-USAGE|IMPLEMENTS|INFINITY|' - r'INHERITS|INTERFACE|INTERFACE-ID|INVOKE|LC_ALL|LC_COLLATE|' - r'LC_CTYPE|LC_MESSAGES|LC_MONETARY|LC_NUMERIC|LC_TIME|' - r'LINE-COUNTER|MESSAGE|METHOD|METHOD-ID|NESTED|NONE|NORMAL|' - r'OBJECT|OBJECT-REFERENCE|OPTIONS|OVERRIDE|PAGE-COUNTER|PF|PH|' - r'PROPERTY|PROTOTYPE|PURGE|QUEUE|RAISE|RAISING|RECEIVE|' - r'RELATION|REPLACE|REPRESENTS-NOT-A-NUMBER|RESET|RESUME|RETRY|' - r'RF|RH|SECONDS|SEGMENT|SELF|SEND|SOURCES|STATEMENT|STEP|' - r'STRONG|SUB-QUEUE-1|SUB-QUEUE-2|SUB-QUEUE-3|SUPER|SYMBOL|' - r'SYSTEM-DEFAULT|TABLE|TERMINAL|TEXT|TYPEDEF|UCS-4|UNIVERSAL|' - r'USER-DEFAULT|UTF-16|UTF-8|VAL-STATUS|VALID|VALIDATE|' - r'VALIDATE-STATUS)\s*($|(?=[^0-9a-z_\-]))', Error), - - # Data Types - (r'(^|(?<=[^0-9a-z_\-]))' - r'(PIC\s+.+?(?=(\s|\.\s))|PICTURE\s+.+?(?=(\s|\.\s))|' - r'(COMPUTATIONAL)(-[1-5X])?|(COMP)(-[1-5X])?|' - r'BINARY-C-LONG|' - r'BINARY-CHAR|BINARY-DOUBLE|BINARY-LONG|BINARY-SHORT|' - r'BINARY)\s*($|(?=[^0-9a-z_\-]))', Keyword.Type), - - # Operators - (r'(\*\*|\*|\+|-|/|<=|>=|<|>|==|/=|=)', Operator), - - # (r'(::)', Keyword.Declaration), - - (r'([(),;:&%.])', Punctuation), - - # Intrinsics - (r'(^|(?<=[^0-9a-z_\-]))(ABS|ACOS|ANNUITY|ASIN|ATAN|BYTE-LENGTH|' - r'CHAR|COMBINED-DATETIME|CONCATENATE|COS|CURRENT-DATE|' - r'DATE-OF-INTEGER|DATE-TO-YYYYMMDD|DAY-OF-INTEGER|DAY-TO-YYYYDDD|' - r'EXCEPTION-(?:FILE|LOCATION|STATEMENT|STATUS)|EXP10|EXP|E|' - r'FACTORIAL|FRACTION-PART|INTEGER-OF-(?:DATE|DAY|PART)|INTEGER|' - r'LENGTH|LOCALE-(?:DATE|TIME(?:-FROM-SECONDS)?)|LOG(?:10)?|' - r'LOWER-CASE|MAX|MEAN|MEDIAN|MIDRANGE|MIN|MOD|NUMVAL(?:-C)?|' - r'ORD(?:-MAX|-MIN)?|PI|PRESENT-VALUE|RANDOM|RANGE|REM|REVERSE|' - r'SECONDS-FROM-FORMATTED-TIME|SECONDS-PAST-MIDNIGHT|SIGN|SIN|SQRT|' - r'STANDARD-DEVIATION|STORED-CHAR-LENGTH|SUBSTITUTE(?:-CASE)?|' - r'SUM|TAN|TEST-DATE-YYYYMMDD|TEST-DAY-YYYYDDD|TRIM|' - r'UPPER-CASE|VARIANCE|WHEN-COMPILED|YEAR-TO-YYYY)\s*' - r'($|(?=[^0-9a-z_\-]))', Name.Function), - - # Booleans - (r'(^|(?<=[^0-9a-z_\-]))(true|false)\s*($|(?=[^0-9a-z_\-]))', Name.Builtin), - # Comparing Operators - (r'(^|(?<=[^0-9a-z_\-]))(equal|equals|ne|lt|le|gt|ge|' - r'greater|less|than|not|and|or)\s*($|(?=[^0-9a-z_\-]))', Operator.Word), - ], - - # \"[^\"\n]*\"|\'[^\'\n]*\' - 'strings': [ - # apparently strings can be delimited by EOL if they are continued - # in the next line - (r'"[^"\n]*("|\n)', String.Double), - (r"'[^'\n]*('|\n)", String.Single), - ], - - 'nums': [ - (r'\d+(\s*|\.$|$)', Number.Integer), - (r'[+-]?\d*\.\d+([eE][-+]?\d+)?', Number.Float), - (r'[+-]?\d+\.\d*([eE][-+]?\d+)?', Number.Float), - ], - } - - -class CobolFreeformatLexer(CobolLexer): - """ - Lexer for Free format OpenCOBOL code. - - .. 
versionadded:: 1.6 - """ - name = 'COBOLFree' - aliases = ['cobolfree'] - filenames = ['*.cbl', '*.CBL'] - mimetypes = [] - flags = re.IGNORECASE | re.MULTILINE - - tokens = { - 'comment': [ - (r'(\*>.*\n|^\w*\*.*$)', Comment), - ], - } diff --git a/pygments/lexers/compiled.py b/pygments/lexers/compiled.py index a5601b17..e72bd9c7 100644 --- a/pygments/lexers/compiled.py +++ b/pygments/lexers/compiled.py @@ -19,7 +19,7 @@ from pygments.lexers.c_like.go import GoLexer from pygments.lexers.c_like.rust import RustLexer from pygments.lexers.c_like.other import ECLexer, ValaLexer, CudaLexer from pygments.lexers.pascal import DelphiLexer, Modula2Lexer, AdaLexer -from pygments.lexers.cobol import CobolLexer, CobolFreeformatLexer +from pygments.lexers.business import CobolLexer, CobolFreeformatLexer from pygments.lexers.fortran import FortranLexer from pygments.lexers.prolog import PrologLexer from pygments.lexers.python import CythonLexer diff --git a/pygments/lexers/configs.py b/pygments/lexers/configs.py new file mode 100644 index 00000000..e4f18803 --- /dev/null +++ b/pygments/lexers/configs.py @@ -0,0 +1,105 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.configs + ~~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for configuration file formats. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +from pygments.lexer import RegexLexer, default, words +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation + +__all__ = ['KconfigLexer'] + + +def _rx_indent(level): + # Kconfig *always* interprets a tab as 8 spaces, so this is the default. + # Edit this if you are in an environment where KconfigLexer gets expanded + # input (tabs expanded to spaces) and the expansion tab width is != 8, + # e.g. in connection with Trac (trac.ini, [mimeviewer], tab_width). + # Value range here is 2 <= {tab_width} <= 8. + tab_width = 8 + # Regex matching a given indentation {level}, assuming that indentation is + # a multiple of {tab_width}. In other cases there might be problems. + return r'(?:\t| {1,%s}\t| {%s}){%s}.*\n' % (tab_width-1, tab_width, level) + + +class KconfigLexer(RegexLexer): + """ + For Linux-style Kconfig files. + + .. versionadded:: 1.6 + """ + + name = 'Kconfig' + aliases = ['kconfig', 'menuconfig', 'linux-config', 'kernel-config'] + # Adjust this if new kconfig file names appear in your environment + filenames = ['Kconfig', '*Config.in*', 'external.in*', + 'standard-modules.in'] + mimetypes = ['text/x-kconfig'] + # No re.MULTILINE, indentation-aware help text needs line-by-line handling + flags = 0 + + def call_indent(level): + # If indentation >= {level} is detected, enter state 'indent{level}' + return (_rx_indent(level), String.Doc, 'indent%s' % level) + + def do_indent(level): + # Print paragraphs of indentation level >= {level} as String.Doc, + # ignoring blank lines. Then return to 'root' state. 
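# As a concrete example (with the default tab_width of 8 above), _rx_indent(2)
# should expand to the pattern
#   (?:\t| {1,7}\t| {8}){2}.*\n
# i.e. two indentation units (each a tab, spaces then a tab, or eight plain
# spaces) followed by the rest of the help line.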
+ return [ + (_rx_indent(level), String.Doc), + (r'\s*\n', Text), + default('#pop:2') + ] + + tokens = { + 'root': [ + (r'\s+', Text), + (r'#.*?\n', Comment.Single), + (words(( + 'mainmenu', 'config', 'menuconfig', 'choice', 'endchoice', + 'comment', 'menu', 'endmenu', 'visible if', 'if', 'endif', + 'source', 'prompt', 'select', 'depends on', 'default', + 'range', 'option'), suffix=r'\b'), + Keyword), + (r'(---help---|help)[\t ]*\n', Keyword, 'help'), + (r'(bool|tristate|string|hex|int|defconfig_list|modules|env)\b', + Name.Builtin), + (r'[!=&|]', Operator), + (r'[()]', Punctuation), + (r'[0-9]+', Number.Integer), + (r"'(''|[^'])*'", String.Single), + (r'"(""|[^"])*"', String.Double), + (r'\S+', Text), + ], + # Help text is indented, multi-line and ends when a lower indentation + # level is detected. + 'help': [ + # Skip blank lines after help token, if any + (r'\s*\n', Text), + # Determine the first help line's indentation level heuristically(!). + # Attention: this is not perfect, but works for 99% of "normal" + # indentation schemes up to a max. indentation level of 7. + call_indent(7), + call_indent(6), + call_indent(5), + call_indent(4), + call_indent(3), + call_indent(2), + call_indent(1), + ('', Text, '#pop'), # for incomplete help sections without text + ], + # Handle text for indentation levels 7 to 1 + 'indent7': do_indent(7), + 'indent6': do_indent(6), + 'indent5': do_indent(5), + 'indent4': do_indent(4), + 'indent3': do_indent(3), + 'indent2': do_indent(2), + 'indent1': do_indent(1), + } diff --git a/pygments/lexers/console.py b/pygments/lexers/console.py new file mode 100644 index 00000000..f259ab9f --- /dev/null +++ b/pygments/lexers/console.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.console + ~~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for misc console output. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +from pygments.lexer import RegexLexer +from pygments.token import Generic, Comment, String, Text + +__all__ = ['VCTreeStatusLexer'] + + +class VCTreeStatusLexer(RegexLexer): + """ + For colorizing output of version control status commands, like "hg + status" or "svn status". + + .. versionadded:: 2.0 + """ + name = 'VCTreeStatus' + aliases = ['vctreestatus'] + filenames = [] + mimetypes = [] + + tokens = { + 'root': [ + (r'^A \+ C\s+', Generic.Error), + (r'^A\s+\+?\s+', String), + (r'^M\s+', Generic.Inserted), + (r'^C\s+', Generic.Error), + (r'^D\s+', Generic.Deleted), + (r'^[\?!]\s+', Comment.Preproc), + (r' >\s+.*\n', Comment.Preproc), + (r'.*\n', Text) + ] + } diff --git a/pygments/lexers/esoteric.py b/pygments/lexers/esoteric.py new file mode 100644 index 00000000..775945fd --- /dev/null +++ b/pygments/lexers/esoteric.py @@ -0,0 +1,82 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.esoteric + ~~~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for esoteric languages. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import re + +from pygments.lexer import RegexLexer, include, bygroups, using, \ + this, inherit, default, words +from pygments.util import get_bool_opt +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation, Error + +__all__ = ['BrainfuckLexer', 'BefungeLexer'] + + +class BrainfuckLexer(RegexLexer): + """ + Lexer for the esoteric `BrainFuck `_ + language.
+ """ + + name = 'Brainfuck' + aliases = ['brainfuck', 'bf'] + filenames = ['*.bf', '*.b'] + mimetypes = ['application/x-brainfuck'] + + tokens = { + 'common': [ + # use different colors for different instruction types + (r'[.,]+', Name.Tag), + (r'[+-]+', Name.Builtin), + (r'[<>]+', Name.Variable), + (r'[^.,+\-<>\[\]]+', Comment), + ], + 'root': [ + (r'\[', Keyword, 'loop'), + (r'\]', Error), + include('common'), + ], + 'loop': [ + (r'\[', Keyword, '#push'), + (r'\]', Keyword, '#pop'), + include('common'), + ] + } + + +class BefungeLexer(RegexLexer): + """ + Lexer for the esoteric `Befunge `_ + language. + + .. versionadded:: 0.7 + """ + name = 'Befunge' + aliases = ['befunge'] + filenames = ['*.befunge'] + mimetypes = ['application/x-befunge'] + + tokens = { + 'root': [ + (r'[0-9a-f]', Number), + (r'[\+\*/%!`-]', Operator), # Traditional math + (r'[<>^v?\[\]rxjk]', Name.Variable), # Move, imperatives + (r'[:\\$.,n]', Name.Builtin), # Stack ops, imperatives + (r'[|_mw]', Keyword), + (r'[{}]', Name.Tag), # Befunge-98 stack ops + (r'".*?"', String.Double), # Strings don't appear to allow escapes + (r'\'.', String.Single), # Single character + (r'[#;]', Comment), # Trampoline... depends on direction hit + (r'[pg&~=@iotsy]', Keyword), # Misc + (r'[()A-Z]', Comment), # Fingerprints + (r'\s+', Text), # Whitespace doesn't matter + ], + } diff --git a/pygments/lexers/graphics.py b/pygments/lexers/graphics.py index de7db6ba..7591cf55 100644 --- a/pygments/lexers/graphics.py +++ b/pygments/lexers/graphics.py @@ -3,17 +3,18 @@ pygments.lexers.graphics ~~~~~~~~~~~~~~~~~~~~~~~~ - Lexers for computer graphics related languages. + Lexers for computer graphics and plotting related languages. :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ -from pygments.lexer import RegexLexer, words +from pygments.lexer import RegexLexer, words, include, bygroups, using, this from pygments.token import Text, Comment, Operator, Keyword, Name, \ - Number, Punctuation + Number, Punctuation, String -__all__ = ['GLShaderLexer'] +__all__ = ['GLShaderLexer', 'PostScriptLexer', 'AsymptoteLexer', 'GnuplotLexer', + 'PovrayLexer'] class GLShaderLexer(RegexLexer): @@ -71,3 +72,478 @@ class GLShaderLexer(RegexLexer): (r'\s+', Text), ], } + + +class PostScriptLexer(RegexLexer): + """ + Lexer for PostScript files. + + The PostScript Language Reference published by Adobe at + + is the authority for this. + + .. versionadded:: 1.4 + """ + name = 'PostScript' + aliases = ['postscript', 'postscr'] + filenames = ['*.ps', '*.eps'] + mimetypes = ['application/postscript'] + + delimiter = r'\(\)\<\>\[\]\{\}\/\%\s' + delimiter_end = r'(?=[%s])' % delimiter + + valid_name_chars = r'[^%s]' % delimiter + valid_name = r"%s+%s" % (valid_name_chars, delimiter_end) + + tokens = { + 'root': [ + # All comment types + (r'^%!.+\n', Comment.Preproc), + (r'%%.*\n', Comment.Special), + (r'(^%.*\n){2,}', Comment.Multiline), + (r'%.*\n', Comment.Single), + + # String literals are awkward; enter separate state. + (r'\(', String, 'stringliteral'), + + (r'[\{\}(\<\<)(\>\>)\[\]]', Punctuation), + + # Numbers + (r'<[0-9A-Fa-f]+>' + delimiter_end, Number.Hex), + # Slight abuse: use Oct to signify any explicit base system + (r'[0-9]+\#(\-|\+)?([0-9]+\.?|[0-9]*\.[0-9]+|[0-9]+\.[0-9]*)' + r'((e|E)[0-9]+)?' + delimiter_end, Number.Oct), + (r'(\-|\+)?([0-9]+\.?|[0-9]*\.[0-9]+|[0-9]+\.[0-9]*)((e|E)[0-9]+)?' 
+ + delimiter_end, Number.Float), + (r'(\-|\+)?[0-9]+' + delimiter_end, Number.Integer), + + # References + (r'\/%s' % valid_name, Name.Variable), + + # Names + (valid_name, Name.Function), # Anything else is executed + + # These keywords taken from + # + # Is there an authoritative list anywhere that doesn't involve + # trawling documentation? + + (r'(false|true)' + delimiter_end, Keyword.Constant), + + # Conditionals / flow control + (r'(eq|ne|g[et]|l[et]|and|or|not|if(?:else)?|for(?:all)?)' + + delimiter_end, Keyword.Reserved), + + (words(( + 'abs', 'add', 'aload', 'arc', 'arcn', 'array', 'atan', 'begin', + 'bind', 'ceiling', 'charpath', 'clip', 'closepath', 'concat', + 'concatmatrix', 'copy', 'cos', 'currentlinewidth', 'currentmatrix', + 'currentpoint', 'curveto', 'cvi', 'cvs', 'def', 'defaultmatrix', + 'dict', 'dictstackoverflow', 'div', 'dtransform', 'dup', 'end', + 'exch', 'exec', 'exit', 'exp', 'fill', 'findfont', 'floor', 'get', + 'getinterval', 'grestore', 'gsave', 'gt', 'identmatrix', 'idiv', + 'idtransform', 'index', 'invertmatrix', 'itransform', 'length', + 'lineto', 'ln', 'load', 'log', 'loop', 'matrix', 'mod', 'moveto', + 'mul', 'neg', 'newpath', 'pathforall', 'pathbbox', 'pop', 'print', + 'pstack', 'put', 'quit', 'rand', 'rangecheck', 'rcurveto', 'repeat', + 'restore', 'rlineto', 'rmoveto', 'roll', 'rotate', 'round', 'run', + 'save', 'scale', 'scalefont', 'setdash', 'setfont', 'setgray', + 'setlinecap', 'setlinejoin', 'setlinewidth', 'setmatrix', + 'setrgbcolor', 'shfill', 'show', 'showpage', 'sin', 'sqrt', + 'stack', 'stringwidth', 'stroke', 'strokepath', 'sub', 'syntaxerror', + 'transform', 'translate', 'truncate', 'typecheck', 'undefined', + 'undefinedfilename', 'undefinedresult'), suffix=delimiter_end), + Name.Builtin), + + (r'\s+', Text), + ], + + 'stringliteral': [ + (r'[^\(\)\\]+', String), + (r'\\', String.Escape, 'escape'), + (r'\(', String, '#push'), + (r'\)', String, '#pop'), + ], + + 'escape': [ + (r'([0-8]{3}|n|r|t|b|f|\\|\(|\))?', String.Escape, '#pop'), + ], + } + + +class AsymptoteLexer(RegexLexer): + """ + For `Asymptote `_ source code. + + .. versionadded:: 1.2 + """ + name = 'Asymptote' + aliases = ['asy', 'asymptote'] + filenames = ['*.asy'] + mimetypes = ['text/x-asymptote'] + + #: optional Comment or Whitespace + _ws = r'(?:\s|//.*?\n|/\*.*?\*/)+' + + tokens = { + 'whitespace': [ + (r'\n', Text), + (r'\s+', Text), + (r'\\\n', Text), # line continuation + (r'//(\n|(.|\n)*?[^\\]\n)', Comment), + (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment), + ], + 'statements': [ + # simple string (TeX friendly) + (r'"(\\\\|\\"|[^"])*"', String), + # C style string (with character escapes) + (r"'", String, 'string'), + (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float), + (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float), + (r'0x[0-9a-fA-F]+[Ll]?', Number.Hex), + (r'0[0-7]+[Ll]?', Number.Oct), + (r'\d+[Ll]?', Number.Integer), + (r'[~!%^&*+=|?:<>/-]', Operator), + (r'[()\[\],.]', Punctuation), + (r'\b(case)(.+?)(:)', bygroups(Keyword, using(this), Text)), + (r'(and|controls|tension|atleast|curl|if|else|while|for|do|' + r'return|break|continue|struct|typedef|new|access|import|' + r'unravel|from|include|quote|static|public|private|restricted|' + r'this|explicit|true|false|null|cycle|newframe|operator)\b', Keyword), + # Since an asy-type-name can be also an asy-function-name, + # in the following we test if the string " [a-zA-Z]" follows + # the Keyword.Type. + # Of course it is not perfect ! 
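+            # A small illustration of the heuristic: in "path p=(0,0)--(1,0);"
+            # the identifier "path" is followed by a space and a letter, so it
+            # is tagged Keyword.Type; in "path(...)" the lookahead fails and
+            # the word is left to the generic identifier rules further down.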
+ (r'(Braid|FitResult|Label|Legend|TreeNode|abscissa|arc|arrowhead|' + r'binarytree|binarytreeNode|block|bool|bool3|bounds|bqe|circle|' + r'conic|coord|coordsys|cputime|ellipse|file|filltype|frame|grid3|' + r'guide|horner|hsv|hyperbola|indexedTransform|int|inversion|key|' + r'light|line|linefit|marginT|marker|mass|object|pair|parabola|path|' + r'path3|pen|picture|point|position|projection|real|revolution|' + r'scaleT|scientific|segment|side|slice|splitface|string|surface|' + r'tensionSpecifier|ticklocate|ticksgridT|tickvalues|transform|' + r'transformation|tree|triangle|trilinear|triple|vector|' + r'vertex|void)(?=([ ]{1,}[a-zA-Z]))', Keyword.Type), + # Now the asy-type-name which are not asy-function-name + # except yours ! + # Perhaps useless + (r'(Braid|FitResult|TreeNode|abscissa|arrowhead|block|bool|bool3|' + r'bounds|coord|frame|guide|horner|int|linefit|marginT|pair|pen|' + r'picture|position|real|revolution|slice|splitface|ticksgridT|' + r'tickvalues|tree|triple|vertex|void)\b', Keyword.Type), + ('[a-zA-Z_]\w*:(?!:)', Name.Label), + ('[a-zA-Z_]\w*', Name), + ], + 'root': [ + include('whitespace'), + # functions + (r'((?:[\w*\s])+?(?:\s|\*))' # return arguments + r'([a-zA-Z_]\w*)' # method name + r'(\s*\([^;]*?\))' # signature + r'(' + _ws + r')({)', + bygroups(using(this), Name.Function, using(this), using(this), + Punctuation), + 'function'), + # function declarations + (r'((?:[\w*\s])+?(?:\s|\*))' # return arguments + r'([a-zA-Z_]\w*)' # method name + r'(\s*\([^;]*?\))' # signature + r'(' + _ws + r')(;)', + bygroups(using(this), Name.Function, using(this), using(this), + Punctuation)), + ('', Text, 'statement'), + ], + 'statement': [ + include('whitespace'), + include('statements'), + ('[{}]', Punctuation), + (';', Punctuation, '#pop'), + ], + 'function': [ + include('whitespace'), + include('statements'), + (';', Punctuation), + ('{', Punctuation, '#push'), + ('}', Punctuation, '#pop'), + ], + 'string': [ + (r"'", String, '#pop'), + (r'\\([\\abfnrtv"\'?]|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape), + (r'\n', String), + (r"[^\\'\n]+", String), # all other characters + (r'\\\n', String), + (r'\\n', String), # line continuation + (r'\\', String), # stray backslash + ], + } + + def get_tokens_unprocessed(self, text): + from pygments.lexers._asybuiltins import ASYFUNCNAME, ASYVARNAME + for index, token, value in \ + RegexLexer.get_tokens_unprocessed(self, text): + if token is Name and value in ASYFUNCNAME: + token = Name.Function + elif token is Name and value in ASYVARNAME: + token = Name.Variable + yield index, token, value + + +def _shortened(word): + dpos = word.find('$') + return '|'.join(word[:dpos] + word[dpos+1:i] + r'\b' + for i in range(len(word), dpos, -1)) + + +def _shortened_many(*words): + return '|'.join(map(_shortened, words)) + + +class GnuplotLexer(RegexLexer): + """ + For `Gnuplot `_ plotting scripts. + + .. 
versionadded:: 0.11 + """ + + name = 'Gnuplot' + aliases = ['gnuplot'] + filenames = ['*.plot', '*.plt'] + mimetypes = ['text/x-gnuplot'] + + tokens = { + 'root': [ + include('whitespace'), + (_shortened('bi$nd'), Keyword, 'bind'), + (_shortened_many('ex$it', 'q$uit'), Keyword, 'quit'), + (_shortened('f$it'), Keyword, 'fit'), + (r'(if)(\s*)(\()', bygroups(Keyword, Text, Punctuation), 'if'), + (r'else\b', Keyword), + (_shortened('pa$use'), Keyword, 'pause'), + (_shortened_many('p$lot', 'rep$lot', 'sp$lot'), Keyword, 'plot'), + (_shortened('sa$ve'), Keyword, 'save'), + (_shortened('se$t'), Keyword, ('genericargs', 'optionarg')), + (_shortened_many('sh$ow', 'uns$et'), + Keyword, ('noargs', 'optionarg')), + (_shortened_many('low$er', 'ra$ise', 'ca$ll', 'cd$', 'cl$ear', + 'h$elp', '\\?$', 'hi$story', 'l$oad', 'pr$int', + 'pwd$', 're$read', 'res$et', 'scr$eendump', + 'she$ll', 'sy$stem', 'up$date'), + Keyword, 'genericargs'), + (_shortened_many('pwd$', 're$read', 'res$et', 'scr$eendump', + 'she$ll', 'test$'), + Keyword, 'noargs'), + ('([a-zA-Z_]\w*)(\s*)(=)', + bygroups(Name.Variable, Text, Operator), 'genericargs'), + ('([a-zA-Z_]\w*)(\s*\(.*?\)\s*)(=)', + bygroups(Name.Function, Text, Operator), 'genericargs'), + (r'@[a-zA-Z_]\w*', Name.Constant), # macros + (r';', Keyword), + ], + 'comment': [ + (r'[^\\\n]', Comment), + (r'\\\n', Comment), + (r'\\', Comment), + # don't add the newline to the Comment token + ('', Comment, '#pop'), + ], + 'whitespace': [ + ('#', Comment, 'comment'), + (r'[ \t\v\f]+', Text), + ], + 'noargs': [ + include('whitespace'), + # semicolon and newline end the argument list + (r';', Punctuation, '#pop'), + (r'\n', Text, '#pop'), + ], + 'dqstring': [ + (r'"', String, '#pop'), + (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape), + (r'[^\\"\n]+', String), # all other characters + (r'\\\n', String), # line continuation + (r'\\', String), # stray backslash + (r'\n', String, '#pop'), # newline ends the string too + ], + 'sqstring': [ + (r"''", String), # escaped single quote + (r"'", String, '#pop'), + (r"[^\\'\n]+", String), # all other characters + (r'\\\n', String), # line continuation + (r'\\', String), # normal backslash + (r'\n', String, '#pop'), # newline ends the string too + ], + 'genericargs': [ + include('noargs'), + (r'"', String, 'dqstring'), + (r"'", String, 'sqstring'), + (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+', Number.Float), + (r'(\d+\.\d*|\.\d+)', Number.Float), + (r'-?\d+', Number.Integer), + ('[,.~!%^&*+=|?:<>/-]', Operator), + ('[{}()\[\]]', Punctuation), + (r'(eq|ne)\b', Operator.Word), + (r'([a-zA-Z_]\w*)(\s*)(\()', + bygroups(Name.Function, Text, Punctuation)), + (r'[a-zA-Z_]\w*', Name), + (r'@[a-zA-Z_]\w*', Name.Constant), # macros + (r'\\\n', Text), + ], + 'optionarg': [ + include('whitespace'), + (_shortened_many( + "a$ll", "an$gles", "ar$row", "au$toscale", "b$ars", "bor$der", + "box$width", "cl$abel", "c$lip", "cn$trparam", "co$ntour", "da$ta", + "data$file", "dg$rid3d", "du$mmy", "enc$oding", "dec$imalsign", + "fit$", "font$path", "fo$rmat", "fu$nction", "fu$nctions", "g$rid", + "hid$den3d", "his$torysize", "is$osamples", "k$ey", "keyt$itle", + "la$bel", "li$nestyle", "ls$", "loa$dpath", "loc$ale", "log$scale", + "mac$ros", "map$ping", "map$ping3d", "mar$gin", "lmar$gin", + "rmar$gin", "tmar$gin", "bmar$gin", "mo$use", "multi$plot", + "mxt$ics", "nomxt$ics", "mx2t$ics", "nomx2t$ics", "myt$ics", + "nomyt$ics", "my2t$ics", "nomy2t$ics", "mzt$ics", "nomzt$ics", + "mcbt$ics", "nomcbt$ics", "of$fsets", "or$igin", "o$utput", + 
"pa$rametric", "pm$3d", "pal$ette", "colorb$ox", "p$lot", + "poi$ntsize", "pol$ar", "pr$int", "obj$ect", "sa$mples", "si$ze", + "st$yle", "su$rface", "table$", "t$erminal", "termo$ptions", "ti$cs", + "ticsc$ale", "ticsl$evel", "timef$mt", "tim$estamp", "tit$le", + "v$ariables", "ve$rsion", "vi$ew", "xyp$lane", "xda$ta", "x2da$ta", + "yda$ta", "y2da$ta", "zda$ta", "cbda$ta", "xl$abel", "x2l$abel", + "yl$abel", "y2l$abel", "zl$abel", "cbl$abel", "xti$cs", "noxti$cs", + "x2ti$cs", "nox2ti$cs", "yti$cs", "noyti$cs", "y2ti$cs", "noy2ti$cs", + "zti$cs", "nozti$cs", "cbti$cs", "nocbti$cs", "xdti$cs", "noxdti$cs", + "x2dti$cs", "nox2dti$cs", "ydti$cs", "noydti$cs", "y2dti$cs", + "noy2dti$cs", "zdti$cs", "nozdti$cs", "cbdti$cs", "nocbdti$cs", + "xmti$cs", "noxmti$cs", "x2mti$cs", "nox2mti$cs", "ymti$cs", + "noymti$cs", "y2mti$cs", "noy2mti$cs", "zmti$cs", "nozmti$cs", + "cbmti$cs", "nocbmti$cs", "xr$ange", "x2r$ange", "yr$ange", + "y2r$ange", "zr$ange", "cbr$ange", "rr$ange", "tr$ange", "ur$ange", + "vr$ange", "xzeroa$xis", "x2zeroa$xis", "yzeroa$xis", "y2zeroa$xis", + "zzeroa$xis", "zeroa$xis", "z$ero"), Name.Builtin, '#pop'), + ], + 'bind': [ + ('!', Keyword, '#pop'), + (_shortened('all$windows'), Name.Builtin), + include('genericargs'), + ], + 'quit': [ + (r'gnuplot\b', Keyword), + include('noargs'), + ], + 'fit': [ + (r'via\b', Name.Builtin), + include('plot'), + ], + 'if': [ + (r'\)', Punctuation, '#pop'), + include('genericargs'), + ], + 'pause': [ + (r'(mouse|any|button1|button2|button3)\b', Name.Builtin), + (_shortened('key$press'), Name.Builtin), + include('genericargs'), + ], + 'plot': [ + (_shortened_many('ax$es', 'axi$s', 'bin$ary', 'ev$ery', 'i$ndex', + 'mat$rix', 's$mooth', 'thru$', 't$itle', + 'not$itle', 'u$sing', 'w$ith'), + Name.Builtin), + include('genericargs'), + ], + 'save': [ + (_shortened_many('f$unctions', 's$et', 't$erminal', 'v$ariables'), + Name.Builtin), + include('genericargs'), + ], + } + + +class PovrayLexer(RegexLexer): + """ + For `Persistence of Vision Raytracer `_ files. + + .. 
versionadded:: 0.11 + """ + name = 'POVRay' + aliases = ['pov'] + filenames = ['*.pov', '*.inc'] + mimetypes = ['text/x-povray'] + + tokens = { + 'root': [ + (r'/\*[\w\W]*?\*/', Comment.Multiline), + (r'//.*\n', Comment.Single), + (r'(?s)"(?:\\.|[^"\\])+"', String.Double), + (words(( + 'debug', 'default', 'else', 'end', 'error', 'fclose', 'fopen', 'ifdef', + 'ifndef', 'include', 'range', 'read', 'render', 'statistics', 'switch', + 'undef', 'version', 'warning', 'while', 'write', 'define', 'macro', + 'local', 'declare'), prefix=r'#', suffix=r'\b'), + Comment.Preproc), + (words(( + 'aa_level', 'aa_threshold', 'abs', 'acos', 'acosh', 'adaptive', 'adc_bailout', + 'agate', 'agate_turb', 'all', 'alpha', 'ambient', 'ambient_light', 'angle', + 'aperture', 'arc_angle', 'area_light', 'asc', 'asin', 'asinh', 'assumed_gamma', + 'atan', 'atan2', 'atanh', 'atmosphere', 'atmospheric_attenuation', + 'attenuating', 'average', 'background', 'black_hole', 'blue', 'blur_samples', + 'bounded_by', 'box_mapping', 'bozo', 'break', 'brick', 'brick_size', + 'brightness', 'brilliance', 'bumps', 'bumpy1', 'bumpy2', 'bumpy3', 'bump_map', + 'bump_size', 'case', 'caustics', 'ceil', 'checker', 'chr', 'clipped_by', 'clock', + 'color', 'color_map', 'colour', 'colour_map', 'component', 'composite', 'concat', + 'confidence', 'conic_sweep', 'constant', 'control0', 'control1', 'cos', 'cosh', + 'count', 'crackle', 'crand', 'cube', 'cubic_spline', 'cylindrical_mapping', + 'debug', 'declare', 'default', 'degrees', 'dents', 'diffuse', 'direction', + 'distance', 'distance_maximum', 'div', 'dust', 'dust_type', 'eccentricity', + 'else', 'emitting', 'end', 'error', 'error_bound', 'exp', 'exponent', + 'fade_distance', 'fade_power', 'falloff', 'falloff_angle', 'false', + 'file_exists', 'filter', 'finish', 'fisheye', 'flatness', 'flip', 'floor', + 'focal_point', 'fog', 'fog_alt', 'fog_offset', 'fog_type', 'frequency', 'gif', + 'global_settings', 'glowing', 'gradient', 'granite', 'gray_threshold', + 'green', 'halo', 'hexagon', 'hf_gray_16', 'hierarchy', 'hollow', 'hypercomplex', + 'if', 'ifdef', 'iff', 'image_map', 'incidence', 'include', 'int', 'interpolate', + 'inverse', 'ior', 'irid', 'irid_wavelength', 'jitter', 'lambda', 'leopard', + 'linear', 'linear_spline', 'linear_sweep', 'location', 'log', 'looks_like', + 'look_at', 'low_error_factor', 'mandel', 'map_type', 'marble', 'material_map', + 'matrix', 'max', 'max_intersections', 'max_iteration', 'max_trace_level', + 'max_value', 'metallic', 'min', 'minimum_reuse', 'mod', 'mortar', + 'nearest_count', 'no', 'normal', 'normal_map', 'no_shadow', 'number_of_waves', + 'octaves', 'off', 'offset', 'omega', 'omnimax', 'on', 'once', 'onion', 'open', + 'orthographic', 'panoramic', 'pattern1', 'pattern2', 'pattern3', + 'perspective', 'pgm', 'phase', 'phong', 'phong_size', 'pi', 'pigment', + 'pigment_map', 'planar_mapping', 'png', 'point_at', 'pot', 'pow', 'ppm', + 'precision', 'pwr', 'quadratic_spline', 'quaternion', 'quick_color', + 'quick_colour', 'quilted', 'radial', 'radians', 'radiosity', 'radius', 'rainbow', + 'ramp_wave', 'rand', 'range', 'reciprocal', 'recursion_limit', 'red', + 'reflection', 'refraction', 'render', 'repeat', 'rgb', 'rgbf', 'rgbft', 'rgbt', + 'right', 'ripples', 'rotate', 'roughness', 'samples', 'scale', 'scallop_wave', + 'scattering', 'seed', 'shadowless', 'sin', 'sine_wave', 'sinh', 'sky', 'sky_sphere', + 'slice', 'slope_map', 'smooth', 'specular', 'spherical_mapping', 'spiral', + 'spiral1', 'spiral2', 'spotlight', 'spotted', 'sqr', 'sqrt', 'statistics', 'str', + 
'strcmp', 'strength', 'strlen', 'strlwr', 'strupr', 'sturm', 'substr', 'switch', 'sys', + 't', 'tan', 'tanh', 'test_camera_1', 'test_camera_2', 'test_camera_3', + 'test_camera_4', 'texture', 'texture_map', 'tga', 'thickness', 'threshold', + 'tightness', 'tile2', 'tiles', 'track', 'transform', 'translate', 'transmit', + 'triangle_wave', 'true', 'ttf', 'turbulence', 'turb_depth', 'type', + 'ultra_wide_angle', 'up', 'use_color', 'use_colour', 'use_index', 'u_steps', + 'val', 'variance', 'vaxis_rotate', 'vcross', 'vdot', 'version', 'vlength', + 'vnormalize', 'volume_object', 'volume_rendered', 'vol_with_light', + 'vrotate', 'v_steps', 'warning', 'warp', 'water_level', 'waves', 'while', 'width', + 'wood', 'wrinkles', 'yes'), prefix=r'\b', suffix=r'\b'), + Keyword), + (words(( + 'bicubic_patch', 'blob', 'box', 'camera', 'cone', 'cubic', 'cylinder', 'difference', + 'disc', 'height_field', 'intersection', 'julia_fractal', 'lathe', + 'light_source', 'merge', 'mesh', 'object', 'plane', 'poly', 'polygon', 'prism', + 'quadric', 'quartic', 'smooth_triangle', 'sor', 'sphere', 'superellipsoid', + 'text', 'torus', 'triangle', 'union'), suffix=r'\b'), + Name.Builtin), + # TODO: <=, etc + (r'[\[\](){}<>;,]', Punctuation), + (r'[-+*/=]', Operator), + (r'\b(x|y|z|u|v)\b', Name.Builtin.Pseudo), + (r'[a-zA-Z_][a-zA-Z_0-9]*', Name), + (r'[0-9]+\.[0-9]*', Number.Float), + (r'\.[0-9]+', Number.Float), + (r'[0-9]+', Number.Integer), + (r'\s+', Text), + ] + } diff --git a/pygments/lexers/misc/rebol.py b/pygments/lexers/misc/rebol.py new file mode 100644 index 00000000..565f26bd --- /dev/null +++ b/pygments/lexers/misc/rebol.py @@ -0,0 +1,246 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.misc.rebol + ~~~~~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for the REBOL language. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import re + +from pygments.lexer import RegexLexer +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Generic + +__all__ = ['RebolLexer'] + + +class RebolLexer(RegexLexer): + """ + A `REBOL `_ lexer. + + .. 
versionadded:: 1.1 + """ + name = 'REBOL' + aliases = ['rebol'] + filenames = ['*.r', '*.r3', '*.reb'] + mimetypes = ['text/x-rebol'] + + flags = re.IGNORECASE | re.MULTILINE + + re.IGNORECASE + + escape_re = r'(?:\^\([0-9a-f]{1,4}\)*)' + + def word_callback(lexer, match): + word = match.group() + + if re.match(".*:$", word): + yield match.start(), Generic.Subheading, word + elif re.match( + r'(native|alias|all|any|as-string|as-binary|bind|bound\?|case|' + r'catch|checksum|comment|debase|dehex|exclude|difference|disarm|' + r'either|else|enbase|foreach|remove-each|form|free|get|get-env|if|' + r'in|intersect|loop|minimum-of|maximum-of|mold|new-line|' + r'new-line\?|not|now|prin|print|reduce|compose|construct|repeat|' + r'reverse|save|script\?|set|shift|switch|throw|to-hex|trace|try|' + r'type\?|union|unique|unless|unprotect|unset|until|use|value\?|' + r'while|compress|decompress|secure|open|close|read|read-io|' + r'write-io|write|update|query|wait|input\?|exp|log-10|log-2|' + r'log-e|square-root|cosine|sine|tangent|arccosine|arcsine|' + r'arctangent|protect|lowercase|uppercase|entab|detab|connected\?|' + r'browse|launch|stats|get-modes|set-modes|to-local-file|' + r'to-rebol-file|encloak|decloak|create-link|do-browser|bind\?|' + r'hide|draw|show|size-text|textinfo|offset-to-caret|' + r'caret-to-offset|local-request-file|rgb-to-hsv|hsv-to-rgb|' + r'crypt-strength\?|dh-make-key|dh-generate-key|dh-compute-key|' + r'dsa-make-key|dsa-generate-key|dsa-make-signature|' + r'dsa-verify-signature|rsa-make-key|rsa-generate-key|' + r'rsa-encrypt)$', word): + yield match.start(), Name.Builtin, word + elif re.match( + r'(add|subtract|multiply|divide|remainder|power|and~|or~|xor~|' + r'minimum|maximum|negate|complement|absolute|random|head|tail|' + r'next|back|skip|at|pick|first|second|third|fourth|fifth|sixth|' + r'seventh|eighth|ninth|tenth|last|path|find|select|make|to|copy\*|' + r'insert|remove|change|poke|clear|trim|sort|min|max|abs|cp|' + r'copy)$', word): + yield match.start(), Name.Function, word + elif re.match( + r'(error|source|input|license|help|install|echo|Usage|with|func|' + r'throw-on-error|function|does|has|context|probe|\?\?|as-pair|' + r'mod|modulo|round|repend|about|set-net|append|join|rejoin|reform|' + r'remold|charset|array|replace|move|extract|forskip|forall|alter|' + r'first+|also|take|for|forever|dispatch|attempt|what-dir|' + r'change-dir|clean-path|list-dir|dirize|rename|split-path|delete|' + r'make-dir|delete-dir|in-dir|confirm|dump-obj|upgrade|what|' + r'build-tag|process-source|build-markup|decode-cgi|read-cgi|' + r'write-user|save-user|set-user-name|protect-system|parse-xml|' + r'cvs-date|cvs-version|do-boot|get-net-info|desktop|layout|' + r'scroll-para|get-face|alert|set-face|uninstall|unfocus|' + r'request-dir|center-face|do-events|net-error|decode-url|' + r'parse-header|parse-header-date|parse-email-addrs|import-email|' + r'send|build-attach-body|resend|show-popup|hide-popup|open-events|' + r'find-key-face|do-face|viewtop|confine|find-window|' + r'insert-event-func|remove-event-func|inform|dump-pane|dump-face|' + r'flag-face|deflag-face|clear-fields|read-net|vbug|path-thru|' + r'read-thru|load-thru|do-thru|launch-thru|load-image|' + r'request-download|do-face-alt|set-font|set-para|get-style|' + r'set-style|make-face|stylize|choose|hilight-text|hilight-all|' + r'unlight-text|focus|scroll-drag|clear-face|reset-face|scroll-face|' + r'resize-face|load-stock|load-stock-block|notify|request|flash|' + r'request-color|request-pass|request-text|request-list|' + 
r'request-date|request-file|dbug|editor|link-relative-path|' + r'emailer|parse-error)$', word): + yield match.start(), Keyword.Namespace, word + elif re.match( + r'(halt|quit|do|load|q|recycle|call|run|ask|parse|view|unview|' + r'return|exit|break)$', word): + yield match.start(), Name.Exception, word + elif re.match('REBOL$', word): + yield match.start(), Generic.Heading, word + elif re.match("to-.*", word): + yield match.start(), Keyword, word + elif re.match('(\+|-|\*|/|//|\*\*|and|or|xor|=\?|=|==|<>|<|>|<=|>=)$', + word): + yield match.start(), Operator, word + elif re.match(".*\?$", word): + yield match.start(), Keyword, word + elif re.match(".*\!$", word): + yield match.start(), Keyword.Type, word + elif re.match("'.*", word): + yield match.start(), Name.Variable.Instance, word # lit-word + elif re.match("#.*", word): + yield match.start(), Name.Label, word # issue + elif re.match("%.*", word): + yield match.start(), Name.Decorator, word # file + else: + yield match.start(), Name.Variable, word + + tokens = { + 'root': [ + (r'[^R]+', Comment), + (r'REBOL\s+\[', Generic.Strong, 'script'), + (r'R', Comment) + ], + 'script': [ + (r'\s+', Text), + (r'#"', String.Char, 'char'), + (r'#{[0-9a-f]*}', Number.Hex), + (r'2#{', Number.Hex, 'bin2'), + (r'64#{[0-9a-z+/=\s]*}', Number.Hex), + (r'"', String, 'string'), + (r'{', String, 'string2'), + (r';#+.*\n', Comment.Special), + (r';\*+.*\n', Comment.Preproc), + (r';.*\n', Comment), + (r'%"', Name.Decorator, 'stringFile'), + (r'%[^(\^{^")\s\[\]]+', Name.Decorator), + (r'[+-]?([a-z]{1,3})?\$\d+(\.\d+)?', Number.Float), # money + (r'[+-]?\d+\:\d+(\:\d+)?(\.\d+)?', String.Other), # time + (r'\d+[\-\/][0-9a-z]+[\-\/]\d+(\/\d+\:\d+((\:\d+)?' + r'([\.\d+]?([+-]?\d+:\d+)?)?)?)?', String.Other), # date + (r'\d+(\.\d+)+\.\d+', Keyword.Constant), # tuple + (r'\d+[xX]\d+', Keyword.Constant), # pair + (r'[+-]?\d+(\'\d+)?([\.,]\d*)?[eE][+-]?\d+', Number.Float), + (r'[+-]?\d+(\'\d+)?[\.,]\d*', Number.Float), + (r'[+-]?\d+(\'\d+)?', Number), + (r'[\[\]\(\)]', Generic.Strong), + (r'[a-z]+[^(\^{"\s:)]*://[^(\^{"\s)]*', Name.Decorator), # url + (r'mailto:[^(\^{"@\s)]+@[^(\^{"@\s)]+', Name.Decorator), # url + (r'[^(\^{"@\s)]+@[^(\^{"@\s)]+', Name.Decorator), # email + (r'comment\s"', Comment, 'commentString1'), + (r'comment\s{', Comment, 'commentString2'), + (r'comment\s\[', Comment, 'commentBlock'), + (r'comment\s[^(\s{\"\[]+', Comment), + (r'/[^(\^{^")\s/[\]]*', Name.Attribute), + (r'([^(\^{^")\s/[\]]+)(?=[:({"\s/\[\]])', word_callback), + (r'<[\w:.-]*>', Name.Tag), + (r'<[^(<>\s")]+', Name.Tag, 'tag'), + (r'([^(\^{^")\s]+)', Text), + ], + 'string': [ + (r'[^(\^")]+', String), + (escape_re, String.Escape), + (r'[\(|\)]+', String), + (r'\^.', String.Escape), + (r'"', String, '#pop'), + ], + 'string2': [ + (r'[^(\^{^})]+', String), + (escape_re, String.Escape), + (r'[\(|\)]+', String), + (r'\^.', String.Escape), + (r'{', String, '#push'), + (r'}', String, '#pop'), + ], + 'stringFile': [ + (r'[^(\^")]+', Name.Decorator), + (escape_re, Name.Decorator), + (r'\^.', Name.Decorator), + (r'"', Name.Decorator, '#pop'), + ], + 'char': [ + (escape_re + '"', String.Char, '#pop'), + (r'\^."', String.Char, '#pop'), + (r'."', String.Char, '#pop'), + ], + 'tag': [ + (escape_re, Name.Tag), + (r'"', Name.Tag, 'tagString'), + (r'[^(<>\r\n")]+', Name.Tag), + (r'>', Name.Tag, '#pop'), + ], + 'tagString': [ + (r'[^(\^")]+', Name.Tag), + (escape_re, Name.Tag), + (r'[\(|\)]+', Name.Tag), + (r'\^.', Name.Tag), + (r'"', Name.Tag, '#pop'), + ], + 'tuple': [ + (r'(\d+\.)+', 
Keyword.Constant), + (r'\d+', Keyword.Constant, '#pop'), + ], + 'bin2': [ + (r'\s+', Number.Hex), + (r'([0-1]\s*){8}', Number.Hex), + (r'}', Number.Hex, '#pop'), + ], + 'commentString1': [ + (r'[^(\^")]+', Comment), + (escape_re, Comment), + (r'[\(|\)]+', Comment), + (r'\^.', Comment), + (r'"', Comment, '#pop'), + ], + 'commentString2': [ + (r'[^(\^{^})]+', Comment), + (escape_re, Comment), + (r'[\(|\)]+', Comment), + (r'\^.', Comment), + (r'{', Comment, '#push'), + (r'}', Comment, '#pop'), + ], + 'commentBlock': [ + (r'\[', Comment, '#push'), + (r'\]', Comment, '#pop'), + (r'"', Comment, "commentString1"), + (r'{', Comment, "commentString2"), + (r'[^(\[\]\"{)]+', Comment), + ], + } + + def analyse_text(text): + """ + Check if code contains REBOL header and so it probably not R code + """ + if re.match(r'^\s*REBOL\s*\[', text, re.IGNORECASE): + # The code starts with REBOL header + return 1.0 + elif re.search(r'\s*REBOL\s*[', text, re.IGNORECASE): + # The code contains REBOL header but also some text before it + return 0.5 diff --git a/pygments/lexers/misc/snobol.py b/pygments/lexers/misc/snobol.py new file mode 100644 index 00000000..97f614bd --- /dev/null +++ b/pygments/lexers/misc/snobol.py @@ -0,0 +1,83 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.snobol + ~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for the SNOBOL language. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +from pygments.lexer import RegexLexer, bygroups +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation + +__all__ = ['SnobolLexer'] + + +class SnobolLexer(RegexLexer): + """ + Lexer for the SNOBOL4 programming language. + + Recognizes the common ASCII equivalents of the original SNOBOL4 operators. + Does not require spaces around binary operators. + + .. versionadded:: 1.5 + """ + + name = "Snobol" + aliases = ["snobol"] + filenames = ['*.snobol'] + mimetypes = ['text/x-snobol'] + + tokens = { + # root state, start of line + # comments, continuation lines, and directives start in column 1 + # as do labels + 'root': [ + (r'\*.*\n', Comment), + (r'[\+\.] ', Punctuation, 'statement'), + (r'-.*\n', Comment), + (r'END\s*\n', Name.Label, 'heredoc'), + (r'[A-Za-z\$][\w$]*', Name.Label, 'statement'), + (r'\s+', Text, 'statement'), + ], + # statement state, line after continuation or label + 'statement': [ + (r'\s*\n', Text, '#pop'), + (r'\s+', Text), + (r'(?<=[^\w.])(LT|LE|EQ|NE|GE|GT|INTEGER|IDENT|DIFFER|LGT|SIZE|' + r'REPLACE|TRIM|DUPL|REMDR|DATE|TIME|EVAL|APPLY|OPSYN|LOAD|UNLOAD|' + r'LEN|SPAN|BREAK|ANY|NOTANY|TAB|RTAB|REM|POS|RPOS|FAIL|FENCE|' + r'ABORT|ARB|ARBNO|BAL|SUCCEED|INPUT|OUTPUT|TERMINAL)(?=[^\w.])', + Name.Builtin), + (r'[A-Za-z][\w\.]*', Name), + # ASCII equivalents of original operators + # | for the EBCDIC equivalent, ! likewise + # \ for EBCDIC negation + (r'\*\*|[\?\$\.!%\*/#+\-@\|&\\=]', Operator), + (r'"[^"]*"', String), + (r"'[^']*'", String), + # Accept SPITBOL syntax for real numbers + # as well as Macro SNOBOL4 + (r'[0-9]+(?=[^\.EeDd])', Number.Integer), + (r'[0-9]+(\.[0-9]*)?([EDed][-+]?[0-9]+)?', Number.Float), + # Goto + (r':', Punctuation, 'goto'), + (r'[\(\)<>,;]', Punctuation), + ], + # Goto block + 'goto': [ + (r'\s*\n', Text, "#pop:2"), + (r'\s+', Text), + (r'F|S', Keyword), + (r'(\()([A-Za-z][\w.]*)(\))', + bygroups(Punctuation, Name.Label, Punctuation)) + ], + # everything after the END statement is basically one + # big heredoc. 
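+        # For instance, with a source deck ending in
+        #     END
+        #     some input data
+        # every line after END is emitted verbatim as String.Heredoc.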
+ 'heredoc': [ + (r'.*\n', String.Heredoc) + ] + } diff --git a/pygments/lexers/modeling.py b/pygments/lexers/modeling.py new file mode 100644 index 00000000..3cebebe6 --- /dev/null +++ b/pygments/lexers/modeling.py @@ -0,0 +1,114 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.modeling + ~~~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for modeling languages. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import re + +from pygments.lexer import RegexLexer, include, bygroups, using +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation + +from pygments.lexers.web import HtmlLexer + +__all__ = ['ModelicaLexer'] + + +class ModelicaLexer(RegexLexer): + """ + For `Modelica `_ source code. + + .. versionadded:: 1.1 + """ + name = 'Modelica' + aliases = ['modelica'] + filenames = ['*.mo'] + mimetypes = ['text/x-modelica'] + + flags = re.IGNORECASE | re.DOTALL + + tokens = { + 'whitespace': [ + (r'\n', Text), + (r'\s+', Text), + (r'\\\n', Text), # line continuation + (r'//(\n|(.|\n)*?[^\\]\n)', Comment), + (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment), + ], + 'statements': [ + (r'"', String, 'string'), + (r'(\d+\.\d*|\.\d+|\d+|\d.)[eE][+-]?\d+[lL]?', Number.Float), + (r'(\d+\.\d*|\.\d+)', Number.Float), + (r'\d+[Ll]?', Number.Integer), + (r'[~!%^&*+=|?:<>/-]', Operator), + (r'(true|false|NULL|Real|Integer|Boolean)\b', Name.Builtin), + (r'([a-z_][\w]*|\'[^\']+\')' + r'([\[\d,:\]]*)' + r'(\.([a-z_][\w]*|\'[^\']+\'))+' + r'([\[\d,:\]]*)', Name.Class), + (r'(\'[\w\+\-\*\/\^]+\'|\w+)', Name), + (r'[()\[\]{},.;]', Punctuation), + (r'\'', Name, 'quoted_ident'), + ], + 'root': [ + include('whitespace'), + include('classes'), + include('functions'), + include('keywords'), + include('operators'), + (r'("|)', Name.Tag, 'html-content'), + include('statements'), + ], + 'keywords': [ + (r'(algorithm|annotation|break|connect|constant|constrainedby|' + r'discrete|each|end|else|elseif|elsewhen|encapsulated|enumeration|' + r'equation|exit|expandable|extends|' + r'external|false|final|flow|for|if|import|impure|in|initial\sequation|' + r'inner|input|loop|nondiscrete|outer|output|parameter|partial|' + r'protected|public|pure|redeclare|replaceable|stream|time|then|true|' + r'when|while|within)\b', Keyword), + ], + 'functions': [ + (r'(abs|acos|acosh|asin|asinh|atan|atan2|atan3|ceil|cos|cosh|' + r'cross|diagonal|div|exp|fill|floor|getInstanceName|identity|' + r'linspace|log|log10|matrix|mod|max|min|ndims|ones|outerProduct|' + r'product|rem|scalar|semiLinear|skew|sign|sin|sinh|size|' + r'spatialDistribution|sum|sqrt|symmetric|tan|tanh|transpose|' + r'vector|zeros)\b', Name.Function), + ], + 'operators': [ + (r'(actualStream|and|assert|backSample|cardinality|change|Clock|' + r'delay|der|edge|hold|homotopy|initial|inStream|noClock|noEvent|' + r'not|or|pre|previous|reinit|return|sample|smooth|' + r'spatialDistribution|shiftSample|subSample|superSample|terminal|' + r'terminate)\b', Name.Builtin), + ], + 'classes': [ + (r'(operator)?(\s+)?(block|class|connector|end|function|model|' + r'operator|package|record|type)(\s+)' + r'((?!if|for|when|while)[a-z_]\w*|\'[^\']+\')([;]?)', + bygroups(Keyword, Text, Keyword, Text, Name.Class, Text)) + ], + 'quoted_ident': [ + (r'\'', Name, '#pop'), + (r'[^\']+', Name), # all other characters + ], + 'string': [ + (r'"', String, '#pop'), + (r'\\([\\abfnrtv"\']|x[a-f0-9]{2,4}|[0-7]{1,3})', + String.Escape), + (r'[^\\"\n]+', String), # all other characters + (r'\\\n', String), # line 
continuation + (r'\\', String), # stray backslash + ], + 'html-content': [ + (r'<\s*/\s*html\s*>"', Name.Tag, '#pop'), + (r'.+?(?=<\s*/\s*html\s*>)', using(HtmlLexer)), + ] + } diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py index 37deca6e..834f0b27 100644 --- a/pygments/lexers/other.py +++ b/pygments/lexers/other.py @@ -10,117 +10,37 @@ """ import re - -from pygments.lexer import RegexLexer, include, bygroups, using, \ - this, combined, ExtendedRegexLexer, default -from pygments.token import Error, Punctuation, Literal, Token, \ - Text, Comment, Operator, Keyword, Name, String, Number, Generic, \ - Whitespace -from pygments.util import get_bool_opt -from pygments.lexers.web import HtmlLexer - -from pygments.lexers._openedgebuiltins import OPENEDGEKEYWORDS -from pygments.lexers._robotframeworklexer import RobotFrameworkLexer +from pygments.lexer import ExtendedRegexLexer, RegexLexer, include, bygroups, \ + default, using, this, combined +from pygments.token import Generic, Comment, String, Text, Number, Keyword, Name, \ + Error, Operator, Punctuation, Literal, Whitespace # backwards compatibility from pygments.lexers.sql import SqlLexer, MySqlLexer, SqliteConsoleLexer from pygments.lexers.shell import BashLexer, BashSessionLexer, BatchLexer, \ - TcshLexer - -__all__ = ['BrainfuckLexer', 'BefungeLexer', 'RedcodeLexer', 'MOOCodeLexer', - 'SmalltalkLexer', 'LogtalkLexer', 'GnuplotLexer', 'PovrayLexer', - 'AppleScriptLexer', 'ModelicaLexer', 'RebolLexer', 'ABAPLexer', - 'NewspeakLexer', 'GherkinLexer', 'AsymptoteLexer', 'PostScriptLexer', + TcshLexer +from pygments.lexers.robotframework import RobotFrameworkLexer +from pygments.lexers.testing import GherkinLexer +from pygments.lexers.esoteric import BrainfuckLexer, BefungeLexer +from pygments.lexers.prolog import LogtalkLexer +from pygments.lexers.misc.snobol import SnobolLexer +from pygments.lexers.misc.rebol import RebolLexer +from pygments.lexers.configs import KconfigLexer +from pygments.lexers.modeling import ModelicaLexer +from pygments.lexers.scripting import AppleScriptLexer +from pygments.lexers.graphics import PostScriptLexer, GnuplotLexer, \ + AsymptoteLexer, PovrayLexer +from pygments.lexers.business import ABAPLexer, OpenEdgeLexer + + +__all__ = ['RedcodeLexer', 'MOOCodeLexer', 'SmalltalkLexer', 'NewspeakLexer', 'AutohotkeyLexer', 'GoodDataCLLexer', 'MaqlLexer', 'ProtoBufLexer', - 'HybrisLexer', 'AwkLexer', 'Cfengine3Lexer', 'SnobolLexer', - 'ECLLexer', 'UrbiscriptLexer', 'OpenEdgeLexer', 'BroLexer', - 'MscgenLexer', 'KconfigLexer', 'VGLLexer', 'SourcePawnLexer', - 'RobotFrameworkLexer', 'PuppetLexer', 'NSISLexer', 'RPMSpecLexer', + 'HybrisLexer', 'AwkLexer', 'Cfengine3Lexer', 'ECLLexer', + 'UrbiscriptLexer', 'BroLexer', 'MscgenLexer', 'VGLLexer', + 'SourcePawnLexer', 'PuppetLexer', 'NSISLexer', 'RPMSpecLexer', 'CbmBasicV2Lexer', 'AutoItLexer', 'RexxLexer', 'APLLexer', - 'LSLLexer', 'AmbientTalkLexer', 'PawnLexer', 'VCTreeStatusLexer', - 'RslLexer', 'PanLexer', 'RedLexer', 'AlloyLexer'] - - -class LSLLexer(RegexLexer): - """ - For Second Life's Linden Scripting Language source code. 
- """ - - name = 'LSL' - aliases = ['lsl'] - filenames = ['*.lsl'] - mimetypes = ['text/x-lsl'] - - flags = re.MULTILINE - - lsl_keywords = r'\b(?:do|else|for|if|jump|return|while)\b' - lsl_types = r'\b(?:float|integer|key|list|quaternion|rotation|string|vector)\b' - lsl_states = r'\b(?:(?:state)\s+\w+|default)\b' - lsl_events = r'\b(?:state_(?:entry|exit)|touch(?:_(?:start|end))?|(?:land_)?collision(?:_(?:start|end))?|timer|listen|(?:no_)?sensor|control|(?:not_)?at_(?:rot_)?target|money|email|run_time_permissions|changed|attach|dataserver|moving_(?:start|end)|link_message|(?:on|object)_rez|remote_data|http_re(?:sponse|quest)|path_update|transaction_result)\b' - lsl_functions_builtin = r'\b(?:ll(?:ReturnObjectsBy(?:ID|Owner)|Json(?:2List|[GS]etValue|ValueType)|Sin|Cos|Tan|Atan2|Sqrt|Pow|Abs|Fabs|Frand|Floor|Ceil|Round|Vec(?:Mag|Norm|Dist)|Rot(?:Between|2(?:Euler|Fwd|Left|Up))|(?:Euler|Axes)2Rot|Whisper|(?:Region|Owner)?Say|Shout|Listen(?:Control|Remove)?|Sensor(?:Repeat|Remove)?|Detected(?:Name|Key|Owner|Type|Pos|Vel|Grab|Rot|Group|LinkNumber)|Die|Ground|Wind|(?:[GS]et)(?:AnimationOverride|MemoryLimit|PrimMediaParams|ParcelMusicURL|Object(?:Desc|Name)|PhysicsMaterial|Status|Scale|Color|Alpha|Texture|Pos|Rot|Force|Torque)|ResetAnimationOverride|(?:Scale|Offset|Rotate)Texture|(?:Rot)?Target(?:Remove)?|(?:Stop)?MoveToTarget|Apply(?:Rotational)?Impulse|Set(?:KeyframedMotion|ContentType|RegionPos|(?:Angular)?Velocity|Buoyancy|HoverHeight|ForceAndTorque|TimerEvent|ScriptState|Damage|TextureAnim|Sound(?:Queueing|Radius)|Vehicle(?:Type|(?:Float|Vector|Rotation)Param)|(?:Touch|Sit)?Text|Camera(?:Eye|At)Offset|PrimitiveParams|ClickAction|Link(?:Alpha|Color|PrimitiveParams(?:Fast)?|Texture(?:Anim)?|Camera|Media)|RemoteScriptAccessPin|PayPrice|LocalRot)|ScaleByFactor|Get(?:(?:Max|Min)ScaleFactor|ClosestNavPoint|StaticPath|SimStats|Env|PrimitiveParams|Link(?:PrimitiveParams|Number(?:OfSides)?|Key|Name|Media)|HTTPHeader|FreeURLs|Object(?:Details|PermMask|PrimCount)|Parcel(?:MaxPrims|Details|Prim(?:Count|Owners))|Attached|(?:SPMax|Free|Used)Memory|Region(?:Name|TimeDilation|FPS|Corner|AgentCount)|Root(?:Position|Rotation)|UnixTime|(?:Parcel|Region)Flags|(?:Wall|GMT)clock|SimulatorHostname|BoundingBox|GeometricCenter|Creator|NumberOf(?:Prims|NotecardLines|Sides)|Animation(?:List)?|(?:Camera|Local)(?:Pos|Rot)|Vel|Accel|Omega|Time(?:stamp|OfDay)|(?:Object|CenterOf)?Mass|MassMKS|Energy|Owner|(?:Owner)?Key|SunDirection|Texture(?:Offset|Scale|Rot)|Inventory(?:Number|Name|Key|Type|Creator|PermMask)|Permissions(?:Key)?|StartParameter|List(?:Length|EntryType)|Date|Agent(?:Size|Info|Language|List)|LandOwnerAt|NotecardLine|Script(?:Name|State))|(?:Get|Reset|GetAndReset)Time|PlaySound(?:Slave)?|LoopSound(?:Master|Slave)?|(?:Trigger|Stop|Preload)Sound|(?:(?:Get|Delete)Sub|Insert)String|To(?:Upper|Lower)|Give(?:InventoryList|Money)|RezObject|(?:Stop)?LookAt|Sleep|CollisionFilter|(?:Take|Release)Controls|DetachFromAvatar|AttachToAvatar(?:Temp)?|InstantMessage|(?:GetNext)?Email|StopHover|MinEventDelay|RotLookAt|String(?:Length|Trim)|(?:Start|Stop)Animation|TargetOmega|RequestPermissions|(?:Create|Break)Link|BreakAllLinks|(?:Give|Remove)Inventory|Water|PassTouches|Request(?:Agent|Inventory)Data|TeleportAgent(?:Home|GlobalCoords)?|ModifyLand|CollisionSound|ResetScript|MessageLinked|PushObject|PassCollisions|AxisAngle2Rot|Rot2(?:Axis|Angle)|A(?:cos|sin)|AngleBetween|AllowInventoryDrop|SubStringIndex|List2(?:CSV|Integer|Json|Float|String|Key|Vector|Rot|List(?:Strided)?)|DeleteSubList|List(?:Statistics|Sort|Randomize|(?:Inser
t|Find|Replace)List)|EdgeOfWorld|AdjustSoundVolume|Key2Name|TriggerSoundLimited|EjectFromLand|(?:CSV|ParseString)2List|OverMyLand|SameGroup|UnSit|Ground(?:Slope|Normal|Contour)|GroundRepel|(?:Set|Remove)VehicleFlags|(?:AvatarOn)?(?:Link)?SitTarget|Script(?:Danger|Profiler)|Dialog|VolumeDetect|ResetOtherScript|RemoteLoadScriptPin|(?:Open|Close)RemoteDataChannel|SendRemoteData|RemoteDataReply|(?:Integer|String)ToBase64|XorBase64|Log(?:10)?|Base64To(?:String|Integer)|ParseStringKeepNulls|RezAtRoot|RequestSimulatorData|ForceMouselook|(?:Load|Release|(?:E|Une)scape)URL|ParcelMedia(?:CommandList|Query)|ModPow|MapDestination|(?:RemoveFrom|AddTo|Reset)Land(?:Pass|Ban)List|(?:Set|Clear)CameraParams|HTTP(?:Request|Response)|TextBox|DetectedTouch(?:UV|Face|Pos|(?:N|Bin)ormal|ST)|(?:MD5|SHA1|DumpList2)String|Request(?:Secure)?URL|Clear(?:Prim|Link)Media|(?:Link)?ParticleSystem|(?:Get|Request)(?:Username|DisplayName)|RegionSayTo|CastRay|GenerateKey|TransferLindenDollars|ManageEstateAccess|(?:Create|Delete)Character|ExecCharacterCmd|Evade|FleeFrom|NavigateTo|PatrolPoints|Pursue|UpdateCharacter|WanderWithin))\b' - lsl_constants_float = r'\b(?:DEG_TO_RAD|PI(?:_BY_TWO)?|RAD_TO_DEG|SQRT2|TWO_PI)\b' - lsl_constants_integer = r'\b(?:JSON_APPEND|STATUS_(?:PHYSICS|ROTATE_[XYZ]|PHANTOM|SANDBOX|BLOCK_GRAB(?:_OBJECT)?|(?:DIE|RETURN)_AT_EDGE|CAST_SHADOWS|OK|MALFORMED_PARAMS|TYPE_MISMATCH|BOUNDS_ERROR|NOT_(?:FOUND|SUPPORTED)|INTERNAL_ERROR|WHITELIST_FAILED)|AGENT(?:_(?:BY_(?:LEGACY_|USER)NAME|FLYING|ATTACHMENTS|SCRIPTED|MOUSELOOK|SITTING|ON_OBJECT|AWAY|WALKING|IN_AIR|TYPING|CROUCHING|BUSY|ALWAYS_RUN|AUTOPILOT|LIST_(?:PARCEL(?:_OWNER)?|REGION)))?|CAMERA_(?:PITCH|DISTANCE|BEHINDNESS_(?:ANGLE|LAG)|(?:FOCUS|POSITION)(?:_(?:THRESHOLD|LOCKED|LAG))?|FOCUS_OFFSET|ACTIVE)|ANIM_ON|LOOP|REVERSE|PING_PONG|SMOOTH|ROTATE|SCALE|ALL_SIDES|LINK_(?:ROOT|SET|ALL_(?:OTHERS|CHILDREN)|THIS)|ACTIVE|PASSIVE|SCRIPTED|CONTROL_(?:FWD|BACK|(?:ROT_)?(?:LEFT|RIGHT)|UP|DOWN|(?:ML_)?LBUTTON)|PERMISSION_(?:RETURN_OBJECTS|DEBIT|OVERRIDE_ANIMATIONS|SILENT_ESTATE_MANAGEMENT|TAKE_CONTROLS|TRIGGER_ANIMATION|ATTACH|CHANGE_LINKS|(?:CONTROL|TRACK)_CAMERA|TELEPORT)|INVENTORY_(?:TEXTURE|SOUND|OBJECT|SCRIPT|LANDMARK|CLOTHING|NOTECARD|BODYPART|ANIMATION|GESTURE|ALL|NONE)|CHANGED_(?:INVENTORY|COLOR|SHAPE|SCALE|TEXTURE|LINK|ALLOWED_DROP|OWNER|REGION(?:_START)?|TELEPORT|MEDIA)|OBJECT_(?:(?:PHYSICS|SERVER|STREAMING)_COST|UNKNOWN_DETAIL|CHARACTER_TIME|PHANTOM|PHYSICS|TEMP_ON_REZ|NAME|DESC|POS|PRIM_EQUIVALENCE|RETURN_(?:PARCEL(?:_OWNER)?|REGION)|ROO?T|VELOCITY|OWNER|GROUP|CREATOR|ATTACHED_POINT|RENDER_WEIGHT|PATHFINDING_TYPE|(?:RUNNING|TOTAL)_SCRIPT_COUNT|SCRIPT_(?:MEMORY|TIME))|TYPE_(?:INTEGER|FLOAT|STRING|KEY|VECTOR|ROTATION|INVALID)|(?:DEBUG|PUBLIC)_CHANNEL|ATTACH_(?:AVATAR_CENTER|CHEST|HEAD|BACK|PELVIS|MOUTH|CHIN|NECK|NOSE|BELLY|[LR](?:SHOULDER|HAND|FOOT|EAR|EYE|[UL](?:ARM|LEG)|HIP)|(?:LEFT|RIGHT)_PEC|HUD_(?:CENTER_[12]|TOP_(?:RIGHT|CENTER|LEFT)|BOTTOM(?:_(?:RIGHT|LEFT))?))|LAND_(?:LEVEL|RAISE|LOWER|SMOOTH|NOISE|REVERT)|DATA_(?:ONLINE|NAME|BORN|SIM_(?:POS|STATUS|RATING)|PAYINFO)|PAYMENT_INFO_(?:ON_FILE|USED)|REMOTE_DATA_(?:CHANNEL|REQUEST|REPLY)|PSYS_(?:PART_(?:BF_(?:ZERO|ONE(?:_MINUS_(?:DEST_COLOR|SOURCE_(ALPHA|COLOR)))?|DEST_COLOR|SOURCE_(ALPHA|COLOR))|BLEND_FUNC_(DEST|SOURCE)|FLAGS|(?:START|END)_(?:COLOR|ALPHA|SCALE|GLOW)|MAX_AGE|(?:RIBBON|WIND|INTERP_(?:COLOR|SCALE)|BOUNCE|FOLLOW_(?:SRC|VELOCITY)|TARGET_(?:POS|LINEAR)|EMISSIVE)_MASK)|SRC_(?:MAX_AGE|PATTERN|ANGLE_(?:BEGIN|END)|BURST_(?:RATE|PART_COUNT|RADIUS|SPEED_(?:MIN|MAX))|ACCEL|TEXTURE|TARGET_KEY|OMEGA|PA
TTERN_(?:DROP|EXPLODE|ANGLE(?:_CONE(?:_EMPTY)?)?)))|VEHICLE_(?:REFERENCE_FRAME|TYPE_(?:NONE|SLED|CAR|BOAT|AIRPLANE|BALLOON)|(?:LINEAR|ANGULAR)_(?:FRICTION_TIMESCALE|MOTOR_DIRECTION)|LINEAR_MOTOR_OFFSET|HOVER_(?:HEIGHT|EFFICIENCY|TIMESCALE)|BUOYANCY|(?:LINEAR|ANGULAR)_(?:DEFLECTION_(?:EFFICIENCY|TIMESCALE)|MOTOR_(?:DECAY_)?TIMESCALE)|VERTICAL_ATTRACTION_(?:EFFICIENCY|TIMESCALE)|BANKING_(?:EFFICIENCY|MIX|TIMESCALE)|FLAG_(?:NO_DEFLECTION_UP|LIMIT_(?:ROLL_ONLY|MOTOR_UP)|HOVER_(?:(?:WATER|TERRAIN|UP)_ONLY|GLOBAL_HEIGHT)|MOUSELOOK_(?:STEER|BANK)|CAMERA_DECOUPLED))|PRIM_(?:TYPE(?:_(?:BOX|CYLINDER|PRISM|SPHERE|TORUS|TUBE|RING|SCULPT))?|HOLE_(?:DEFAULT|CIRCLE|SQUARE|TRIANGLE)|MATERIAL(?:_(?:STONE|METAL|GLASS|WOOD|FLESH|PLASTIC|RUBBER))?|SHINY_(?:NONE|LOW|MEDIUM|HIGH)|BUMP_(?:NONE|BRIGHT|DARK|WOOD|BARK|BRICKS|CHECKER|CONCRETE|TILE|STONE|DISKS|GRAVEL|BLOBS|SIDING|LARGETILE|STUCCO|SUCTION|WEAVE)|TEXGEN_(?:DEFAULT|PLANAR)|SCULPT_(?:TYPE_(?:SPHERE|TORUS|PLANE|CYLINDER|MASK)|FLAG_(?:MIRROR|INVERT))|PHYSICS(?:_(?:SHAPE_(?:CONVEX|NONE|PRIM|TYPE)))?|(?:POS|ROT)_LOCAL|SLICE|TEXT|FLEXIBLE|POINT_LIGHT|TEMP_ON_REZ|PHANTOM|POSITION|SIZE|ROTATION|TEXTURE|NAME|OMEGA|DESC|LINK_TARGET|COLOR|BUMP_SHINY|FULLBRIGHT|TEXGEN|GLOW|MEDIA_(?:ALT_IMAGE_ENABLE|CONTROLS|(?:CURRENT|HOME)_URL|AUTO_(?:LOOP|PLAY|SCALE|ZOOM)|FIRST_CLICK_INTERACT|(?:WIDTH|HEIGHT)_PIXELS|WHITELIST(?:_ENABLE)?|PERMS_(?:INTERACT|CONTROL)|PARAM_MAX|CONTROLS_(?:STANDARD|MINI)|PERM_(?:NONE|OWNER|GROUP|ANYONE)|MAX_(?:URL_LENGTH|WHITELIST_(?:SIZE|COUNT)|(?:WIDTH|HEIGHT)_PIXELS)))|MASK_(?:BASE|OWNER|GROUP|EVERYONE|NEXT)|PERM_(?:TRANSFER|MODIFY|COPY|MOVE|ALL)|PARCEL_(?:MEDIA_COMMAND_(?:STOP|PAUSE|PLAY|LOOP|TEXTURE|URL|TIME|AGENT|UNLOAD|AUTO_ALIGN|TYPE|SIZE|DESC|LOOP_SET)|FLAG_(?:ALLOW_(?:FLY|(?:GROUP_)?SCRIPTS|LANDMARK|TERRAFORM|DAMAGE|CREATE_(?:GROUP_)?OBJECTS)|USE_(?:ACCESS_(?:GROUP|LIST)|BAN_LIST|LAND_PASS_LIST)|LOCAL_SOUND_ONLY|RESTRICT_PUSHOBJECT|ALLOW_(?:GROUP|ALL)_OBJECT_ENTRY)|COUNT_(?:TOTAL|OWNER|GROUP|OTHER|SELECTED|TEMP)|DETAILS_(?:NAME|DESC|OWNER|GROUP|AREA|ID|SEE_AVATARS))|LIST_STAT_(?:MAX|MIN|MEAN|MEDIAN|STD_DEV|SUM(?:_SQUARES)?|NUM_COUNT|GEOMETRIC_MEAN|RANGE)|PAY_(?:HIDE|DEFAULT)|REGION_FLAG_(?:ALLOW_DAMAGE|FIXED_SUN|BLOCK_TERRAFORM|SANDBOX|DISABLE_(?:COLLISIONS|PHYSICS)|BLOCK_FLY|ALLOW_DIRECT_TELEPORT|RESTRICT_PUSHOBJECT)|HTTP_(?:METHOD|MIMETYPE|BODY_(?:MAXLENGTH|TRUNCATED)|CUSTOM_HEADER|PRAGMA_NO_CACHE|VERBOSE_THROTTLE|VERIFY_CERT)|STRING_(?:TRIM(?:_(?:HEAD|TAIL))?)|CLICK_ACTION_(?:NONE|TOUCH|SIT|BUY|PAY|OPEN(?:_MEDIA)?|PLAY|ZOOM)|TOUCH_INVALID_FACE|PROFILE_(?:NONE|SCRIPT_MEMORY)|RC_(?:DATA_FLAGS|DETECT_PHANTOM|GET_(?:LINK_NUM|NORMAL|ROOT_KEY)|MAX_HITS|REJECT_(?:TYPES|AGENTS|(?:NON)?PHYSICAL|LAND))|RCERR_(?:CAST_TIME_EXCEEDED|SIM_PERF_LOW|UNKNOWN)|ESTATE_ACCESS_(?:ALLOWED_(?:AGENT|GROUP)_(?:ADD|REMOVE)|BANNED_AGENT_(?:ADD|REMOVE))|DENSITY|FRICTION|RESTITUTION|GRAVITY_MULTIPLIER|KFM_(?:COMMAND|CMD_(?:PLAY|STOP|PAUSE|SET_MODE)|MODE|FORWARD|LOOP|PING_PONG|REVERSE|DATA|ROTATION|TRANSLATION)|ERR_(?:GENERIC|PARCEL_PERMISSIONS|MALFORMED_PARAMS|RUNTIME_PERMISSIONS|THROTTLED)|CHARACTER_(?:CMD_(?:(?:SMOOTH_)?STOP|JUMP)|DESIRED_(?:TURN_)?SPEED|RADIUS|STAY_WITHIN_PARCEL|LENGTH|ORIENTATION|ACCOUNT_FOR_SKIPPED_FRAMES|AVOIDANCE_MODE|TYPE(?:_(?:[ABCD]|NONE))?|MAX_(?:DECEL|TURN_RADIUS|(?:ACCEL|SPEED)))|PURSUIT_(?:OFFSET|FUZZ_FACTOR|GOAL_TOLERANCE|INTERCEPT)|REQUIRE_LINE_OF_SIGHT|FORCE_DIRECT_PATH|VERTICAL|HORIZONTAL|AVOID_(?:CHARACTERS|DYNAMIC_OBSTACLES|NONE)|PU_(?:EVADE_(?:HIDDEN|SPOTTED)|FAILURE_(?:DYNAMIC_PATHFINDING_DISABLED|INVALID_(?:GOAL|START)|NO_(?:NAV
MESH|VALID_DESTINATION)|OTHER|TARGET_GONE|(?:PARCEL_)?UNREACHABLE)|(?:GOAL|SLOWDOWN_DISTANCE)_REACHED)|TRAVERSAL_TYPE(?:_(?:FAST|NONE|SLOW))?|CONTENT_TYPE_(?:ATOM|FORM|HTML|JSON|LLSD|RSS|TEXT|XHTML|XML)|GCNP_(?:RADIUS|STATIC)|(?:PATROL|WANDER)_PAUSE_AT_WAYPOINTS|OPT_(?:AVATAR|CHARACTER|EXCLUSION_VOLUME|LEGACY_LINKSET|MATERIAL_VOLUME|OTHER|STATIC_OBSTACLE|WALKABLE)|SIM_STAT_PCT_CHARS_STEPPED)\b' - lsl_constants_integer_boolean = r'\b(?:FALSE|TRUE)\b' - lsl_constants_rotation = r'\b(?:ZERO_ROTATION)\b' - lsl_constants_string = r'\b(?:EOF|JSON_(?:ARRAY|DELETE|FALSE|INVALID|NULL|NUMBER|OBJECT|STRING|TRUE)|NULL_KEY|TEXTURE_(?:BLANK|DEFAULT|MEDIA|PLYWOOD|TRANSPARENT)|URL_REQUEST_(?:GRANTED|DENIED))\b' - lsl_constants_vector = r'\b(?:TOUCH_INVALID_(?:TEXCOORD|VECTOR)|ZERO_VECTOR)\b' - lsl_invalid_broken = r'\b(?:LAND_(?:LARGE|MEDIUM|SMALL)_BRUSH)\b' - lsl_invalid_deprecated = r'\b(?:ATTACH_[LR]PEC|DATA_RATING|OBJECT_ATTACHMENT_(?:GEOMETRY_BYTES|SURFACE_AREA)|PRIM_(?:CAST_SHADOWS|MATERIAL_LIGHT|TYPE_LEGACY)|PSYS_SRC_(?:INNER|OUTER)ANGLE|VEHICLE_FLAG_NO_FLY_UP|ll(?:Cloud|Make(?:Explosion|Fountain|Smoke|Fire)|RemoteDataSetRegion|Sound(?:Preload)?|XorBase64Strings(?:Correct)?))\b' - lsl_invalid_illegal = r'\b(?:event)\b' - lsl_invalid_unimplemented = r'\b(?:CHARACTER_(?:MAX_ANGULAR_(?:ACCEL|SPEED)|TURN_SPEED_MULTIPLIER)|PERMISSION_(?:CHANGE_(?:JOINTS|PERMISSIONS)|RELEASE_OWNERSHIP|REMAP_CONTROLS)|PRIM_PHYSICS_MATERIAL|PSYS_SRC_OBJ_REL_MASK|ll(?:CollisionSprite|(?:Stop)?PointAt|(?:(?:Refresh|Set)Prim)URL|(?:Take|Release)Camera|RemoteLoadScript))\b' - lsl_reserved_godmode = r'\b(?:ll(?:GodLikeRezObject|Set(?:Inventory|Object)PermMask))\b' - lsl_reserved_log = r'\b(?:print)\b' - lsl_operators = r'\+\+|\-\-|<<|>>|&&?|\|\|?|\^|~|[!%<>=*+\-\/]=?' - - tokens = { - 'root': - [ - (r'//.*?\n', Comment.Single), - (r'/\*', Comment.Multiline, 'comment'), - (r'"', String.Double, 'string'), - (lsl_keywords, Keyword), - (lsl_types, Keyword.Type), - (lsl_states, Name.Class), - (lsl_events, Name.Builtin), - (lsl_functions_builtin, Name.Function), - (lsl_constants_float, Keyword.Constant), - (lsl_constants_integer, Keyword.Constant), - (lsl_constants_integer_boolean, Keyword.Constant), - (lsl_constants_rotation, Keyword.Constant), - (lsl_constants_string, Keyword.Constant), - (lsl_constants_vector, Keyword.Constant), - (lsl_invalid_broken, Error), - (lsl_invalid_deprecated, Error), - (lsl_invalid_illegal, Error), - (lsl_invalid_unimplemented, Error), - (lsl_reserved_godmode, Keyword.Reserved), - (lsl_reserved_log, Keyword.Reserved), - (r'\b([a-zA-Z_]\w*)\b', Name.Variable), - (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d*', Number.Float), - (r'(\d+\.\d*|\.\d+)', Number.Float), - (r'0[xX][0-9a-fA-F]+', Number.Hex), - (r'\d+', Number.Integer), - (lsl_operators, Operator), - (r':=?', Error), - (r'[,;{}\(\)\[\]]', Punctuation), - (r'\n+', Whitespace), - (r'\s+', Whitespace) - ], - 'comment': - [ - (r'[^*/]+', Comment.Multiline), - (r'/\*', Comment.Multiline, '#push'), - (r'\*/', Comment.Multiline, '#pop'), - (r'[*/]', Comment.Multiline) - ], - 'string': - [ - (r'\\([nt"\\])', String.Escape), - (r'"', String.Double, '#pop'), - (r'\\.', Error), - (r'[^"\\]+', String.Double), - ] - } + 'AmbientTalkLexer', 'PawnLexer', 'RslLexer', 'PanLexer', 'RedLexer', + 'AlloyLexer'] class ECLLexer(RegexLexer): @@ -223,68 +143,6 @@ class ECLLexer(RegexLexer): } -class BrainfuckLexer(RegexLexer): - """ - Lexer for the esoteric `BrainFuck `_ - language. 
- """ - - name = 'Brainfuck' - aliases = ['brainfuck', 'bf'] - filenames = ['*.bf', '*.b'] - mimetypes = ['application/x-brainfuck'] - - tokens = { - 'common': [ - # use different colors for different instruction types - (r'[.,]+', Name.Tag), - (r'[+-]+', Name.Builtin), - (r'[<>]+', Name.Variable), - (r'[^.,+\-<>\[\]]+', Comment), - ], - 'root': [ - (r'\[', Keyword, 'loop'), - (r'\]', Error), - include('common'), - ], - 'loop': [ - (r'\[', Keyword, '#push'), - (r'\]', Keyword, '#pop'), - include('common'), - ] - } - - -class BefungeLexer(RegexLexer): - """ - Lexer for the esoteric `Befunge `_ - language. - - .. versionadded:: 0.7 - """ - name = 'Befunge' - aliases = ['befunge'] - filenames = ['*.befunge'] - mimetypes = ['application/x-befunge'] - - tokens = { - 'root': [ - (r'[0-9a-f]', Number), - (r'[\+\*/%!`-]', Operator), # Traditional math - (r'[<>^v?\[\]rxjk]', Name.Variable), # Move, imperatives - (r'[:\\$.,n]', Name.Builtin), # Stack ops, imperatives - (r'[|_mw]', Keyword), - (r'[{}]', Name.Tag), # Befunge-98 stack ops - (r'".*?"', String.Double), # Strings don't appear to allow escapes - (r'\'.', String.Single), # Single character - (r'[#;]', Comment), # Trampoline... depends on direction hit - (r'[pg&~=@iotsy]', Keyword), # Misc - (r'[()A-Z]', Comment), # Fingerprints - (r'\s+', Text), # Whitespace doesn't matter - ], - } - - class RedcodeLexer(RegexLexer): """ A simple Redcode lexer based on ICWS'94. @@ -418,1372 +276,69 @@ class SmalltalkLexer(RegexLexer): (r'[-+*/\\~<>=|&#!?,@%\w:]+', String.Symbol), # literals (r"'(''|[^'])*'", String), - (r'\$.', String.Char), - (r'#*\(', String.Symbol, 'inner_parenth'), - ], - 'parenth' : [ - # This state is a bit tricky since - # we can't just pop this state - (r'\)', String.Symbol, ('root', 'afterobject')), - include('_parenth_helper'), - ], - 'inner_parenth': [ - (r'\)', String.Symbol, '#pop'), - include('_parenth_helper'), - ], - 'whitespaces' : [ - # skip whitespace and comments - (r'\s+', Text), - (r'"(""|[^"])*"', Comment), - ], - 'objects' : [ - (r'\[', Text, 'blockvariables'), - (r'\]', Text, 'afterobject'), - (r'\b(self|super|true|false|nil|thisContext)\b', - Name.Builtin.Pseudo, 'afterobject'), - (r'\b[A-Z]\w*(?!:)\b', Name.Class, 'afterobject'), - (r'\b[a-z]\w*(?!:)\b', Name.Variable, 'afterobject'), - (r'#("(""|[^"])*"|[-+*/\\~<>=|&!?,@%]+|[\w:]+)', - String.Symbol, 'afterobject'), - include('literals'), - ], - 'afterobject' : [ - (r'! 
!$', Keyword , '#pop'), # squeak chunk delimiter - include('whitespaces'), - (r'\b(ifTrue:|ifFalse:|whileTrue:|whileFalse:|timesRepeat:)', - Name.Builtin, '#pop'), - (r'\b(new\b(?!:))', Name.Builtin), - (r'\:=|\_', Operator, '#pop'), - (r'\b[a-zA-Z]+\w*:', Name.Function, '#pop'), - (r'\b[a-zA-Z]+\w*', Name.Function), - (r'\w+:?|[-+*/\\~<>=|&!?,@%]+', Name.Function, '#pop'), - (r'\.', Punctuation, '#pop'), - (r';', Punctuation), - (r'[\])}]', Text), - (r'[\[({]', Text, '#pop'), - ], - 'squeak fileout' : [ - # Squeak fileout format (optional) - (r'^"(""|[^"])*"!', Keyword), - (r"^'(''|[^'])*'!", Keyword), - (r'^(!)(\w+)( commentStamp: )(.*?)( prior: .*?!\n)(.*?)(!)', - bygroups(Keyword, Name.Class, Keyword, String, Keyword, Text, Keyword)), - (r"^(!)(\w+(?: class)?)( methodsFor: )('(?:''|[^'])*')(.*?!)", - bygroups(Keyword, Name.Class, Keyword, String, Keyword)), - (r'^(\w+)( subclass: )(#\w+)' - r'(\s+instanceVariableNames: )(.*?)' - r'(\s+classVariableNames: )(.*?)' - r'(\s+poolDictionaries: )(.*?)' - r'(\s+category: )(.*?)(!)', - bygroups(Name.Class, Keyword, String.Symbol, Keyword, String, Keyword, - String, Keyword, String, Keyword, String, Keyword)), - (r'^(\w+(?: class)?)(\s+instanceVariableNames: )(.*?)(!)', - bygroups(Name.Class, Keyword, String, Keyword)), - (r'(!\n)(\].*)(! !)$', bygroups(Keyword, Text, Keyword)), - (r'! !$', Keyword), - ], - } - - -class LogtalkLexer(RegexLexer): - """ - For `Logtalk `_ source code. - - .. versionadded:: 0.10 - """ - - name = 'Logtalk' - aliases = ['logtalk'] - filenames = ['*.lgt', '*.logtalk'] - mimetypes = ['text/x-logtalk'] - - tokens = { - 'root': [ - # Directives - (r'^\s*:-\s',Punctuation,'directive'), - # Comments - (r'%.*?\n', Comment), - (r'/\*(.|\n)*?\*/',Comment), - # Whitespace - (r'\n', Text), - (r'\s+', Text), - # Numbers - (r"0'.", Number), - (r'0b[01]+', Number.Bin), - (r'0o[0-7]+', Number.Oct), - (r'0x[0-9a-fA-F]+', Number.Hex), - (r'\d+\.?\d*((e|E)(\+|-)?\d+)?', Number), - # Variables - (r'([A-Z_][a-zA-Z0-9_]*)', Name.Variable), - # Event handlers - (r'(after|before)(?=[(])', Keyword), - # Message forwarding handler - (r'forward(?=[(])', Keyword), - # Execution-context methods - (r'(parameter|this|se(lf|nder))(?=[(])', Keyword), - # Reflection - (r'(current_predicate|predicate_property)(?=[(])', Keyword), - # DCGs and term expansion - (r'(expand_(goal|term)|(goal|term)_expansion|phrase)(?=[(])', Keyword), - # Entity - (r'(abolish|c(reate|urrent))_(object|protocol|category)(?=[(])', Keyword), - (r'(object|protocol|category)_property(?=[(])', Keyword), - # Entity relations - (r'co(mplements_object|nforms_to_protocol)(?=[(])', Keyword), - (r'extends_(object|protocol|category)(?=[(])', Keyword), - (r'imp(lements_protocol|orts_category)(?=[(])', Keyword), - (r'(instantiat|specializ)es_class(?=[(])', Keyword), - # Events - (r'(current_event|(abolish|define)_events)(?=[(])', Keyword), - # Flags - (r'(current|set)_logtalk_flag(?=[(])', Keyword), - # Compiling, loading, and library paths - (r'logtalk_(compile|l(ibrary_path|oad|oad_context)|make)(?=[(])', Keyword), - (r'\blogtalk_make\b', Keyword), - # Database - (r'(clause|retract(all)?)(?=[(])', Keyword), - (r'a(bolish|ssert(a|z))(?=[(])', Keyword), - # Control constructs - (r'(ca(ll|tch)|throw)(?=[(])', Keyword), - (r'(fa(il|lse)|true)\b', Keyword), - # All solutions - (r'((bag|set)of|f(ind|or)all)(?=[(])', Keyword), - # Multi-threading meta-predicates - (r'threaded(_(call|once|ignore|exit|peek|wait|notify))?(?=[(])', Keyword), - # Term unification - 
(r'(subsumes_term|unify_with_occurs_check)(?=[(])', Keyword), - # Term creation and decomposition - (r'(functor|arg|copy_term|numbervars|term_variables)(?=[(])', Keyword), - # Evaluable functors - (r'(rem|m(ax|in|od)|abs|sign)(?=[(])', Keyword), - (r'float(_(integer|fractional)_part)?(?=[(])', Keyword), - (r'(floor|truncate|round|ceiling)(?=[(])', Keyword), - # Other arithmetic functors - (r'(cos|a(cos|sin|tan)|exp|log|s(in|qrt))(?=[(])', Keyword), - # Term testing - (r'(var|atom(ic)?|integer|float|c(allable|ompound)|n(onvar|umber)|ground|acyclic_term)(?=[(])', Keyword), - # Term comparison - (r'compare(?=[(])', Keyword), - # Stream selection and control - (r'(curren|se)t_(in|out)put(?=[(])', Keyword), - (r'(open|close)(?=[(])', Keyword), - (r'flush_output(?=[(])', Keyword), - (r'(at_end_of_stream|flush_output)\b', Keyword), - (r'(stream_property|at_end_of_stream|set_stream_position)(?=[(])', Keyword), - # Character and byte input/output - (r'(nl|(get|peek|put)_(byte|c(har|ode)))(?=[(])', Keyword), - (r'\bnl\b', Keyword), - # Term input/output - (r'read(_term)?(?=[(])', Keyword), - (r'write(q|_(canonical|term))?(?=[(])', Keyword), - (r'(current_)?op(?=[(])', Keyword), - (r'(current_)?char_conversion(?=[(])', Keyword), - # Atomic term processing - (r'atom_(length|c(hars|o(ncat|des)))(?=[(])', Keyword), - (r'(char_code|sub_atom)(?=[(])', Keyword), - (r'number_c(har|ode)s(?=[(])', Keyword), - # Implementation defined hooks functions - (r'(se|curren)t_prolog_flag(?=[(])', Keyword), - (r'\bhalt\b', Keyword), - (r'halt(?=[(])', Keyword), - # Message sending operators - (r'(::|:|\^\^)', Operator), - # External call - (r'[{}]', Keyword), - # Logic and control - (r'(ignore|once)(?=[(])', Keyword), - (r'\brepeat\b', Keyword), - # Sorting - (r'(key)?sort(?=[(])', Keyword), - # Bitwise functors - (r'(>>|<<|/\\|\\\\|\\)', Operator), - # Predicate aliases - (r'\bas\b', Operator), - # Arithemtic evaluation - (r'\bis\b', Keyword), - # Arithemtic comparison - (r'(=:=|=\\=|<|=<|>=|>)', Operator), - # Term creation and decomposition - (r'=\.\.', Operator), - # Term unification - (r'(=|\\=)', Operator), - # Term comparison - (r'(==|\\==|@=<|@<|@>=|@>)', Operator), - # Evaluable functors - (r'(//|[-+*/])', Operator), - (r'\b(e|pi|mod|rem)\b', Operator), - # Other arithemtic functors - (r'\b\*\*\b', Operator), - # DCG rules - (r'-->', Operator), - # Control constructs - (r'([!;]|->)', Operator), - # Logic and control - (r'\\+', Operator), - # Mode operators - (r'[?@]', Operator), - # Existential quantifier - (r'\^', Operator), - # Strings - (r'"(\\\\|\\"|[^"])*"', String), - # Ponctuation - (r'[()\[\],.|]', Text), - # Atoms - (r"[a-z][a-zA-Z0-9_]*", Text), - (r"[']", String, 'quoted_atom'), - ], - - 'quoted_atom': [ - (r"['][']", String), - (r"[']", String, '#pop'), - (r'\\([\\abfnrtv"\']|(x[a-fA-F0-9]+|[0-7]+)\\)', String.Escape), - (r"[^\\'\n]+", String), - (r'\\', String), - ], - - 'directive': [ - # Conditional compilation directives - (r'(el)?if(?=[(])', Keyword, 'root'), - (r'(e(lse|ndif))[.]', Keyword, 'root'), - # Entity directives - (r'(category|object|protocol)(?=[(])', Keyword, 'entityrelations'), - (r'(end_(category|object|protocol))[.]',Keyword, 'root'), - # Predicate scope directives - (r'(public|protected|private)(?=[(])', Keyword, 'root'), - # Other directives - (r'e(n(coding|sure_loaded)|xport)(?=[(])', Keyword, 'root'), - (r'in(clude|itialization|fo)(?=[(])', Keyword, 'root'), - (r'(built_in|dynamic|synchronized|threaded)[.]', Keyword, 'root'), - 
(r'(alias|d(ynamic|iscontiguous)|m(eta_(non_terminal|predicate)|ode|ultifile)|s(et_(logtalk|prolog)_flag|ynchronized))(?=[(])', Keyword, 'root'), - (r'op(?=[(])', Keyword, 'root'), - (r'(c(alls|oinductive)|module|reexport|use(s|_module))(?=[(])', Keyword, 'root'), - (r'[a-z][a-zA-Z0-9_]*(?=[(])', Text, 'root'), - (r'[a-z][a-zA-Z0-9_]*[.]', Text, 'root'), - ], - - 'entityrelations': [ - (r'(complements|extends|i(nstantiates|mp(lements|orts))|specializes)(?=[(])', Keyword), - # Numbers - (r"0'.", Number), - (r'0b[01]+', Number.Bin), - (r'0o[0-7]+', Number.Oct), - (r'0x[0-9a-fA-F]+', Number.Hex), - (r'\d+\.?\d*((e|E)(\+|-)?\d+)?', Number), - # Variables - (r'([A-Z_][a-zA-Z0-9_]*)', Name.Variable), - # Atoms - (r"[a-z][a-zA-Z0-9_]*", Text), - (r"[']", String, 'quoted_atom'), - # Strings - (r'"(\\\\|\\"|[^"])*"', String), - # End of entity-opening directive - (r'([)]\.)', Text, 'root'), - # Scope operator - (r'(::)', Operator), - # Ponctuation - (r'[()\[\],.|]', Text), - # Comments - (r'%.*?\n', Comment), - (r'/\*(.|\n)*?\*/',Comment), - # Whitespace - (r'\n', Text), - (r'\s+', Text), - ] - } - - def analyse_text(text): - if ':- object(' in text: - return 1.0 - elif ':- protocol(' in text: - return 1.0 - elif ':- category(' in text: - return 1.0 - elif re.search('^:-\s[a-z]', text, re.M): - return 0.9 - else: - return 0.0 - - -def _shortened(word): - dpos = word.find('$') - return '|'.join([word[:dpos] + word[dpos+1:i] + r'\b' - for i in range(len(word), dpos, -1)]) -def _shortened_many(*words): - return '|'.join(map(_shortened, words)) - -class GnuplotLexer(RegexLexer): - """ - For `Gnuplot `_ plotting scripts. - - .. versionadded:: 0.11 - """ - - name = 'Gnuplot' - aliases = ['gnuplot'] - filenames = ['*.plot', '*.plt'] - mimetypes = ['text/x-gnuplot'] - - tokens = { - 'root': [ - include('whitespace'), - (_shortened('bi$nd'), Keyword, 'bind'), - (_shortened_many('ex$it', 'q$uit'), Keyword, 'quit'), - (_shortened('f$it'), Keyword, 'fit'), - (r'(if)(\s*)(\()', bygroups(Keyword, Text, Punctuation), 'if'), - (r'else\b', Keyword), - (_shortened('pa$use'), Keyword, 'pause'), - (_shortened_many('p$lot', 'rep$lot', 'sp$lot'), Keyword, 'plot'), - (_shortened('sa$ve'), Keyword, 'save'), - (_shortened('se$t'), Keyword, ('genericargs', 'optionarg')), - (_shortened_many('sh$ow', 'uns$et'), - Keyword, ('noargs', 'optionarg')), - (_shortened_many('low$er', 'ra$ise', 'ca$ll', 'cd$', 'cl$ear', - 'h$elp', '\\?$', 'hi$story', 'l$oad', 'pr$int', - 'pwd$', 're$read', 'res$et', 'scr$eendump', - 'she$ll', 'sy$stem', 'up$date'), - Keyword, 'genericargs'), - (_shortened_many('pwd$', 're$read', 'res$et', 'scr$eendump', - 'she$ll', 'test$'), - Keyword, 'noargs'), - ('([a-zA-Z_]\w*)(\s*)(=)', - bygroups(Name.Variable, Text, Operator), 'genericargs'), - ('([a-zA-Z_]\w*)(\s*\(.*?\)\s*)(=)', - bygroups(Name.Function, Text, Operator), 'genericargs'), - (r'@[a-zA-Z_]\w*', Name.Constant), # macros - (r';', Keyword), - ], - 'comment': [ - (r'[^\\\n]', Comment), - (r'\\\n', Comment), - (r'\\', Comment), - # don't add the newline to the Comment token - ('', Comment, '#pop'), - ], - 'whitespace': [ - ('#', Comment, 'comment'), - (r'[ \t\v\f]+', Text), - ], - 'noargs': [ - include('whitespace'), - # semicolon and newline end the argument list - (r';', Punctuation, '#pop'), - (r'\n', Text, '#pop'), - ], - 'dqstring': [ - (r'"', String, '#pop'), - (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape), - (r'[^\\"\n]+', String), # all other characters - (r'\\\n', String), # line continuation - (r'\\', String), # stray 
backslash - (r'\n', String, '#pop'), # newline ends the string too - ], - 'sqstring': [ - (r"''", String), # escaped single quote - (r"'", String, '#pop'), - (r"[^\\'\n]+", String), # all other characters - (r'\\\n', String), # line continuation - (r'\\', String), # normal backslash - (r'\n', String, '#pop'), # newline ends the string too - ], - 'genericargs': [ - include('noargs'), - (r'"', String, 'dqstring'), - (r"'", String, 'sqstring'), - (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+', Number.Float), - (r'(\d+\.\d*|\.\d+)', Number.Float), - (r'-?\d+', Number.Integer), - ('[,.~!%^&*+=|?:<>/-]', Operator), - ('[{}()\[\]]', Punctuation), - (r'(eq|ne)\b', Operator.Word), - (r'([a-zA-Z_]\w*)(\s*)(\()', - bygroups(Name.Function, Text, Punctuation)), - (r'[a-zA-Z_]\w*', Name), - (r'@[a-zA-Z_]\w*', Name.Constant), # macros - (r'\\\n', Text), - ], - 'optionarg': [ - include('whitespace'), - (_shortened_many( - "a$ll","an$gles","ar$row","au$toscale","b$ars","bor$der", - "box$width","cl$abel","c$lip","cn$trparam","co$ntour","da$ta", - "data$file","dg$rid3d","du$mmy","enc$oding","dec$imalsign", - "fit$","font$path","fo$rmat","fu$nction","fu$nctions","g$rid", - "hid$den3d","his$torysize","is$osamples","k$ey","keyt$itle", - "la$bel","li$nestyle","ls$","loa$dpath","loc$ale","log$scale", - "mac$ros","map$ping","map$ping3d","mar$gin","lmar$gin", - "rmar$gin","tmar$gin","bmar$gin","mo$use","multi$plot", - "mxt$ics","nomxt$ics","mx2t$ics","nomx2t$ics","myt$ics", - "nomyt$ics","my2t$ics","nomy2t$ics","mzt$ics","nomzt$ics", - "mcbt$ics","nomcbt$ics","of$fsets","or$igin","o$utput", - "pa$rametric","pm$3d","pal$ette","colorb$ox","p$lot", - "poi$ntsize","pol$ar","pr$int","obj$ect","sa$mples","si$ze", - "st$yle","su$rface","table$","t$erminal","termo$ptions","ti$cs", - "ticsc$ale","ticsl$evel","timef$mt","tim$estamp","tit$le", - "v$ariables","ve$rsion","vi$ew","xyp$lane","xda$ta","x2da$ta", - "yda$ta","y2da$ta","zda$ta","cbda$ta","xl$abel","x2l$abel", - "yl$abel","y2l$abel","zl$abel","cbl$abel","xti$cs","noxti$cs", - "x2ti$cs","nox2ti$cs","yti$cs","noyti$cs","y2ti$cs","noy2ti$cs", - "zti$cs","nozti$cs","cbti$cs","nocbti$cs","xdti$cs","noxdti$cs", - "x2dti$cs","nox2dti$cs","ydti$cs","noydti$cs","y2dti$cs", - "noy2dti$cs","zdti$cs","nozdti$cs","cbdti$cs","nocbdti$cs", - "xmti$cs","noxmti$cs","x2mti$cs","nox2mti$cs","ymti$cs", - "noymti$cs","y2mti$cs","noy2mti$cs","zmti$cs","nozmti$cs", - "cbmti$cs","nocbmti$cs","xr$ange","x2r$ange","yr$ange", - "y2r$ange","zr$ange","cbr$ange","rr$ange","tr$ange","ur$ange", - "vr$ange","xzeroa$xis","x2zeroa$xis","yzeroa$xis","y2zeroa$xis", - "zzeroa$xis","zeroa$xis","z$ero"), Name.Builtin, '#pop'), - ], - 'bind': [ - ('!', Keyword, '#pop'), - (_shortened('all$windows'), Name.Builtin), - include('genericargs'), - ], - 'quit': [ - (r'gnuplot\b', Keyword), - include('noargs'), - ], - 'fit': [ - (r'via\b', Name.Builtin), - include('plot'), - ], - 'if': [ - (r'\)', Punctuation, '#pop'), - include('genericargs'), - ], - 'pause': [ - (r'(mouse|any|button1|button2|button3)\b', Name.Builtin), - (_shortened('key$press'), Name.Builtin), - include('genericargs'), - ], - 'plot': [ - (_shortened_many('ax$es', 'axi$s', 'bin$ary', 'ev$ery', 'i$ndex', - 'mat$rix', 's$mooth', 'thru$', 't$itle', - 'not$itle', 'u$sing', 'w$ith'), - Name.Builtin), - include('genericargs'), - ], - 'save': [ - (_shortened_many('f$unctions', 's$et', 't$erminal', 'v$ariables'), - Name.Builtin), - include('genericargs'), - ], - } - - -class PovrayLexer(RegexLexer): - """ - For `Persistence of Vision Raytracer `_ files. - - .. 
versionadded:: 0.11 - """ - name = 'POVRay' - aliases = ['pov'] - filenames = ['*.pov', '*.inc'] - mimetypes = ['text/x-povray'] - - tokens = { - 'root': [ - (r'/\*[\w\W]*?\*/', Comment.Multiline), - (r'//.*\n', Comment.Single), - (r'(?s)"(?:\\.|[^"\\])+"', String.Double), - (r'#(debug|default|else|end|error|fclose|fopen|ifdef|ifndef|' - r'include|range|read|render|statistics|switch|undef|version|' - r'warning|while|write|define|macro|local|declare)\b', - Comment.Preproc), - (r'\b(aa_level|aa_threshold|abs|acos|acosh|adaptive|adc_bailout|' - r'agate|agate_turb|all|alpha|ambient|ambient_light|angle|' - r'aperture|arc_angle|area_light|asc|asin|asinh|assumed_gamma|' - r'atan|atan2|atanh|atmosphere|atmospheric_attenuation|' - r'attenuating|average|background|black_hole|blue|blur_samples|' - r'bounded_by|box_mapping|bozo|break|brick|brick_size|' - r'brightness|brilliance|bumps|bumpy1|bumpy2|bumpy3|bump_map|' - r'bump_size|case|caustics|ceil|checker|chr|clipped_by|clock|' - r'color|color_map|colour|colour_map|component|composite|concat|' - r'confidence|conic_sweep|constant|control0|control1|cos|cosh|' - r'count|crackle|crand|cube|cubic_spline|cylindrical_mapping|' - r'debug|declare|default|degrees|dents|diffuse|direction|' - r'distance|distance_maximum|div|dust|dust_type|eccentricity|' - r'else|emitting|end|error|error_bound|exp|exponent|' - r'fade_distance|fade_power|falloff|falloff_angle|false|' - r'file_exists|filter|finish|fisheye|flatness|flip|floor|' - r'focal_point|fog|fog_alt|fog_offset|fog_type|frequency|gif|' - r'global_settings|glowing|gradient|granite|gray_threshold|' - r'green|halo|hexagon|hf_gray_16|hierarchy|hollow|hypercomplex|' - r'if|ifdef|iff|image_map|incidence|include|int|interpolate|' - r'inverse|ior|irid|irid_wavelength|jitter|lambda|leopard|' - r'linear|linear_spline|linear_sweep|location|log|looks_like|' - r'look_at|low_error_factor|mandel|map_type|marble|material_map|' - r'matrix|max|max_intersections|max_iteration|max_trace_level|' - r'max_value|metallic|min|minimum_reuse|mod|mortar|' - r'nearest_count|no|normal|normal_map|no_shadow|number_of_waves|' - r'octaves|off|offset|omega|omnimax|on|once|onion|open|' - r'orthographic|panoramic|pattern1|pattern2|pattern3|' - r'perspective|pgm|phase|phong|phong_size|pi|pigment|' - r'pigment_map|planar_mapping|png|point_at|pot|pow|ppm|' - r'precision|pwr|quadratic_spline|quaternion|quick_color|' - r'quick_colour|quilted|radial|radians|radiosity|radius|rainbow|' - r'ramp_wave|rand|range|reciprocal|recursion_limit|red|' - r'reflection|refraction|render|repeat|rgb|rgbf|rgbft|rgbt|' - r'right|ripples|rotate|roughness|samples|scale|scallop_wave|' - r'scattering|seed|shadowless|sin|sine_wave|sinh|sky|sky_sphere|' - r'slice|slope_map|smooth|specular|spherical_mapping|spiral|' - r'spiral1|spiral2|spotlight|spotted|sqr|sqrt|statistics|str|' - r'strcmp|strength|strlen|strlwr|strupr|sturm|substr|switch|sys|' - r't|tan|tanh|test_camera_1|test_camera_2|test_camera_3|' - r'test_camera_4|texture|texture_map|tga|thickness|threshold|' - r'tightness|tile2|tiles|track|transform|translate|transmit|' - r'triangle_wave|true|ttf|turbulence|turb_depth|type|' - r'ultra_wide_angle|up|use_color|use_colour|use_index|u_steps|' - r'val|variance|vaxis_rotate|vcross|vdot|version|vlength|' - r'vnormalize|volume_object|volume_rendered|vol_with_light|' - r'vrotate|v_steps|warning|warp|water_level|waves|while|width|' - r'wood|wrinkles|yes)\b', Keyword), - (r'(bicubic_patch|blob|box|camera|cone|cubic|cylinder|difference|' - 
r'disc|height_field|intersection|julia_fractal|lathe|' - r'light_source|merge|mesh|object|plane|poly|polygon|prism|' - r'quadric|quartic|smooth_triangle|sor|sphere|superellipsoid|' - r'text|torus|triangle|union)\b', Name.Builtin), - # TODO: <=, etc - (r'[\[\](){}<>;,]', Punctuation), - (r'[-+*/=]', Operator), - (r'\b(x|y|z|u|v)\b', Name.Builtin.Pseudo), - (r'[a-zA-Z_][a-zA-Z_0-9]*', Name), - (r'[0-9]+\.[0-9]*', Number.Float), - (r'\.[0-9]+', Number.Float), - (r'[0-9]+', Number.Integer), - (r'\s+', Text), - ] - } - - -class AppleScriptLexer(RegexLexer): - """ - For `AppleScript source code - `_, - including `AppleScript Studio - `_. - Contributed by Andreas Amann . - """ - - name = 'AppleScript' - aliases = ['applescript'] - filenames = ['*.applescript'] - - flags = re.MULTILINE | re.DOTALL - - Identifiers = r'[a-zA-Z]\w*' - Literals = ['AppleScript', 'current application', 'false', 'linefeed', - 'missing value', 'pi','quote', 'result', 'return', 'space', - 'tab', 'text item delimiters', 'true', 'version'] - Classes = ['alias ', 'application ', 'boolean ', 'class ', 'constant ', - 'date ', 'file ', 'integer ', 'list ', 'number ', 'POSIX file ', - 'real ', 'record ', 'reference ', 'RGB color ', 'script ', - 'text ', 'unit types', '(?:Unicode )?text', 'string'] - BuiltIn = ['attachment', 'attribute run', 'character', 'day', 'month', - 'paragraph', 'word', 'year'] - HandlerParams = ['about', 'above', 'against', 'apart from', 'around', - 'aside from', 'at', 'below', 'beneath', 'beside', - 'between', 'for', 'given', 'instead of', 'on', 'onto', - 'out of', 'over', 'since'] - Commands = ['ASCII (character|number)', 'activate', 'beep', 'choose URL', - 'choose application', 'choose color', 'choose file( name)?', - 'choose folder', 'choose from list', - 'choose remote application', 'clipboard info', - 'close( access)?', 'copy', 'count', 'current date', 'delay', - 'delete', 'display (alert|dialog)', 'do shell script', - 'duplicate', 'exists', 'get eof', 'get volume settings', - 'info for', 'launch', 'list (disks|folder)', 'load script', - 'log', 'make', 'mount volume', 'new', 'offset', - 'open( (for access|location))?', 'path to', 'print', 'quit', - 'random number', 'read', 'round', 'run( script)?', - 'say', 'scripting components', - 'set (eof|the clipboard to|volume)', 'store script', - 'summarize', 'system attribute', 'system info', - 'the clipboard', 'time to GMT', 'write', 'quoted form'] - References = ['(in )?back of', '(in )?front of', '[0-9]+(st|nd|rd|th)', - 'first', 'second', 'third', 'fourth', 'fifth', 'sixth', - 'seventh', 'eighth', 'ninth', 'tenth', 'after', 'back', - 'before', 'behind', 'every', 'front', 'index', 'last', - 'middle', 'some', 'that', 'through', 'thru', 'where', 'whose'] - Operators = ["and", "or", "is equal", "equals", "(is )?equal to", "is not", - "isn't", "isn't equal( to)?", "is not equal( to)?", - "doesn't equal", "does not equal", "(is )?greater than", - "comes after", "is not less than or equal( to)?", - "isn't less than or equal( to)?", "(is )?less than", - "comes before", "is not greater than or equal( to)?", - "isn't greater than or equal( to)?", - "(is )?greater than or equal( to)?", "is not less than", - "isn't less than", "does not come before", - "doesn't come before", "(is )?less than or equal( to)?", - "is not greater than", "isn't greater than", - "does not come after", "doesn't come after", "starts? with", - "begins? with", "ends? 
with", "contains?", "does not contain", - "doesn't contain", "is in", "is contained by", "is not in", - "is not contained by", "isn't contained by", "div", "mod", - "not", "(a )?(ref( to)?|reference to)", "is", "does"] - Control = ['considering', 'else', 'error', 'exit', 'from', 'if', - 'ignoring', 'in', 'repeat', 'tell', 'then', 'times', 'to', - 'try', 'until', 'using terms from', 'while', 'whith', - 'with timeout( of)?', 'with transaction', 'by', 'continue', - 'end', 'its?', 'me', 'my', 'return', 'of' , 'as'] - Declarations = ['global', 'local', 'prop(erty)?', 'set', 'get'] - Reserved = ['but', 'put', 'returning', 'the'] - StudioClasses = ['action cell', 'alert reply', 'application', 'box', - 'browser( cell)?', 'bundle', 'button( cell)?', 'cell', - 'clip view', 'color well', 'color-panel', - 'combo box( item)?', 'control', - 'data( (cell|column|item|row|source))?', 'default entry', - 'dialog reply', 'document', 'drag info', 'drawer', - 'event', 'font(-panel)?', 'formatter', - 'image( (cell|view))?', 'matrix', 'menu( item)?', 'item', - 'movie( view)?', 'open-panel', 'outline view', 'panel', - 'pasteboard', 'plugin', 'popup button', - 'progress indicator', 'responder', 'save-panel', - 'scroll view', 'secure text field( cell)?', 'slider', - 'sound', 'split view', 'stepper', 'tab view( item)?', - 'table( (column|header cell|header view|view))', - 'text( (field( cell)?|view))?', 'toolbar( item)?', - 'user-defaults', 'view', 'window'] - StudioEvents = ['accept outline drop', 'accept table drop', 'action', - 'activated', 'alert ended', 'awake from nib', 'became key', - 'became main', 'begin editing', 'bounds changed', - 'cell value', 'cell value changed', 'change cell value', - 'change item value', 'changed', 'child of item', - 'choose menu item', 'clicked', 'clicked toolbar item', - 'closed', 'column clicked', 'column moved', - 'column resized', 'conclude drop', 'data representation', - 'deminiaturized', 'dialog ended', 'document nib name', - 'double clicked', 'drag( (entered|exited|updated))?', - 'drop', 'end editing', 'exposed', 'idle', 'item expandable', - 'item value', 'item value changed', 'items changed', - 'keyboard down', 'keyboard up', 'launched', - 'load data representation', 'miniaturized', 'mouse down', - 'mouse dragged', 'mouse entered', 'mouse exited', - 'mouse moved', 'mouse up', 'moved', - 'number of browser rows', 'number of items', - 'number of rows', 'open untitled', 'opened', 'panel ended', - 'parameters updated', 'plugin loaded', 'prepare drop', - 'prepare outline drag', 'prepare outline drop', - 'prepare table drag', 'prepare table drop', - 'read from file', 'resigned active', 'resigned key', - 'resigned main', 'resized( sub views)?', - 'right mouse down', 'right mouse dragged', - 'right mouse up', 'rows changed', 'scroll wheel', - 'selected tab view item', 'selection changed', - 'selection changing', 'should begin editing', - 'should close', 'should collapse item', - 'should end editing', 'should expand item', - 'should open( untitled)?', - 'should quit( after last window closed)?', - 'should select column', 'should select item', - 'should select row', 'should select tab view item', - 'should selection change', 'should zoom', 'shown', - 'update menu item', 'update parameters', - 'update toolbar item', 'was hidden', 'was miniaturized', - 'will become active', 'will close', 'will dismiss', - 'will display browser cell', 'will display cell', - 'will display item cell', 'will display outline cell', - 'will finish launching', 'will hide', 'will miniaturize', - 'will move', 
'will open', 'will pop up', 'will quit', - 'will resign active', 'will resize( sub views)?', - 'will select tab view item', 'will show', 'will zoom', - 'write to file', 'zoomed'] - StudioCommands = ['animate', 'append', 'call method', 'center', - 'close drawer', 'close panel', 'display', - 'display alert', 'display dialog', 'display panel', 'go', - 'hide', 'highlight', 'increment', 'item for', - 'load image', 'load movie', 'load nib', 'load panel', - 'load sound', 'localized string', 'lock focus', 'log', - 'open drawer', 'path for', 'pause', 'perform action', - 'play', 'register', 'resume', 'scroll', 'select( all)?', - 'show', 'size to fit', 'start', 'step back', - 'step forward', 'stop', 'synchronize', 'unlock focus', - 'update'] - StudioProperties = ['accepts arrow key', 'action method', 'active', - 'alignment', 'allowed identifiers', - 'allows branch selection', 'allows column reordering', - 'allows column resizing', 'allows column selection', - 'allows customization', - 'allows editing text attributes', - 'allows empty selection', 'allows mixed state', - 'allows multiple selection', 'allows reordering', - 'allows undo', 'alpha( value)?', 'alternate image', - 'alternate increment value', 'alternate title', - 'animation delay', 'associated file name', - 'associated object', 'auto completes', 'auto display', - 'auto enables items', 'auto repeat', - 'auto resizes( outline column)?', - 'auto save expanded items', 'auto save name', - 'auto save table columns', 'auto saves configuration', - 'auto scroll', 'auto sizes all columns to fit', - 'auto sizes cells', 'background color', 'bezel state', - 'bezel style', 'bezeled', 'border rect', 'border type', - 'bordered', 'bounds( rotation)?', 'box type', - 'button returned', 'button type', - 'can choose directories', 'can choose files', - 'can draw', 'can hide', - 'cell( (background color|size|type))?', 'characters', - 'class', 'click count', 'clicked( data)? column', - 'clicked data item', 'clicked( data)? row', - 'closeable', 'collating', 'color( (mode|panel))', - 'command key down', 'configuration', - 'content(s| (size|view( margins)?))?', 'context', - 'continuous', 'control key down', 'control size', - 'control tint', 'control view', - 'controller visible', 'coordinate system', - 'copies( on scroll)?', 'corner view', 'current cell', - 'current column', 'current( field)? editor', - 'current( menu)? item', 'current row', - 'current tab view item', 'data source', - 'default identifiers', 'delta (x|y|z)', - 'destination window', 'directory', 'display mode', - 'displayed cell', 'document( (edited|rect|view))?', - 'double value', 'dragged column', 'dragged distance', - 'dragged items', 'draws( cell)? background', - 'draws grid', 'dynamically scrolls', 'echos bullets', - 'edge', 'editable', 'edited( data)? column', - 'edited data item', 'edited( data)? 
row', 'enabled', - 'enclosing scroll view', 'ending page', - 'error handling', 'event number', 'event type', - 'excluded from windows menu', 'executable path', - 'expanded', 'fax number', 'field editor', 'file kind', - 'file name', 'file type', 'first responder', - 'first visible column', 'flipped', 'floating', - 'font( panel)?', 'formatter', 'frameworks path', - 'frontmost', 'gave up', 'grid color', 'has data items', - 'has horizontal ruler', 'has horizontal scroller', - 'has parent data item', 'has resize indicator', - 'has shadow', 'has sub menu', 'has vertical ruler', - 'has vertical scroller', 'header cell', 'header view', - 'hidden', 'hides when deactivated', 'highlights by', - 'horizontal line scroll', 'horizontal page scroll', - 'horizontal ruler view', 'horizontally resizable', - 'icon image', 'id', 'identifier', - 'ignores multiple clicks', - 'image( (alignment|dims when disabled|frame style|' - 'scaling))?', - 'imports graphics', 'increment value', - 'indentation per level', 'indeterminate', 'index', - 'integer value', 'intercell spacing', 'item height', - 'key( (code|equivalent( modifier)?|window))?', - 'knob thickness', 'label', 'last( visible)? column', - 'leading offset', 'leaf', 'level', 'line scroll', - 'loaded', 'localized sort', 'location', 'loop mode', - 'main( (bunde|menu|window))?', 'marker follows cell', - 'matrix mode', 'maximum( content)? size', - 'maximum visible columns', - 'menu( form representation)?', 'miniaturizable', - 'miniaturized', 'minimized image', 'minimized title', - 'minimum column width', 'minimum( content)? size', - 'modal', 'modified', 'mouse down state', - 'movie( (controller|file|rect))?', 'muted', 'name', - 'needs display', 'next state', 'next text', - 'number of tick marks', 'only tick mark values', - 'opaque', 'open panel', 'option key down', - 'outline table column', 'page scroll', 'pages across', - 'pages down', 'palette label', 'pane splitter', - 'parent data item', 'parent window', 'pasteboard', - 'path( (names|separator))?', 'playing', - 'plays every frame', 'plays selection only', 'position', - 'preferred edge', 'preferred type', 'pressure', - 'previous text', 'prompt', 'properties', - 'prototype cell', 'pulls down', 'rate', - 'released when closed', 'repeated', - 'requested print time', 'required file type', - 'resizable', 'resized column', 'resource path', - 'returns records', 'reuses columns', 'rich text', - 'roll over', 'row height', 'rulers visible', - 'save panel', 'scripts path', 'scrollable', - 'selectable( identifiers)?', 'selected cell', - 'selected( data)? columns?', 'selected data items?', - 'selected( data)? 
rows?', 'selected item identifier', - 'selection by rect', 'send action on arrow key', - 'sends action when done editing', 'separates columns', - 'separator item', 'sequence number', 'services menu', - 'shared frameworks path', 'shared support path', - 'sheet', 'shift key down', 'shows alpha', - 'shows state by', 'size( mode)?', - 'smart insert delete enabled', 'sort case sensitivity', - 'sort column', 'sort order', 'sort type', - 'sorted( data rows)?', 'sound', 'source( mask)?', - 'spell checking enabled', 'starting page', 'state', - 'string value', 'sub menu', 'super menu', 'super view', - 'tab key traverses cells', 'tab state', 'tab type', - 'tab view', 'table view', 'tag', 'target( printer)?', - 'text color', 'text container insert', - 'text container origin', 'text returned', - 'tick mark position', 'time stamp', - 'title(d| (cell|font|height|position|rect))?', - 'tool tip', 'toolbar', 'trailing offset', 'transparent', - 'treat packages as directories', 'truncated labels', - 'types', 'unmodified characters', 'update views', - 'use sort indicator', 'user defaults', - 'uses data source', 'uses ruler', - 'uses threaded animation', - 'uses title from previous column', 'value wraps', - 'version', - 'vertical( (line scroll|page scroll|ruler view))?', - 'vertically resizable', 'view', - 'visible( document rect)?', 'volume', 'width', 'window', - 'windows menu', 'wraps', 'zoomable', 'zoomed'] - - tokens = { - 'root': [ - (r'\s+', Text), - (u'¬\\n', String.Escape), - (r"'s\s+", Text), # This is a possessive, consider moving - (r'(--|#).*?$', Comment), - (r'\(\*', Comment.Multiline, 'comment'), - (r'[\(\){}!,.:]', Punctuation), - (u'(«)([^»]+)(»)', - bygroups(Text, Name.Builtin, Text)), - (r'\b((?:considering|ignoring)\s*)' - r'(application responses|case|diacriticals|hyphens|' - r'numeric strings|punctuation|white space)', - bygroups(Keyword, Name.Builtin)), - (u'(-|\\*|\\+|&|≠|>=?|<=?|=|≥|≤|/|÷|\\^)', Operator), - (r"\b(%s)\b" % '|'.join(Operators), Operator.Word), - (r'^(\s*(?:on|end)\s+)' - r'(%s)' % '|'.join(StudioEvents[::-1]), - bygroups(Keyword, Name.Function)), - (r'^(\s*)(in|on|script|to)(\s+)', bygroups(Text, Keyword, Text)), - (r'\b(as )(%s)\b' % '|'.join(Classes), - bygroups(Keyword, Name.Class)), - (r'\b(%s)\b' % '|'.join(Literals), Name.Constant), - (r'\b(%s)\b' % '|'.join(Commands), Name.Builtin), - (r'\b(%s)\b' % '|'.join(Control), Keyword), - (r'\b(%s)\b' % '|'.join(Declarations), Keyword), - (r'\b(%s)\b' % '|'.join(Reserved), Name.Builtin), - (r'\b(%s)s?\b' % '|'.join(BuiltIn), Name.Builtin), - (r'\b(%s)\b' % '|'.join(HandlerParams), Name.Builtin), - (r'\b(%s)\b' % '|'.join(StudioProperties), Name.Attribute), - (r'\b(%s)s?\b' % '|'.join(StudioClasses), Name.Builtin), - (r'\b(%s)\b' % '|'.join(StudioCommands), Name.Builtin), - (r'\b(%s)\b' % '|'.join(References), Name.Builtin), - (r'"(\\\\|\\"|[^"])*"', String.Double), - (r'\b(%s)\b' % Identifiers, Name.Variable), - (r'[-+]?(\d+\.\d*|\d*\.\d+)(E[-+][0-9]+)?', Number.Float), - (r'[-+]?\d+', Number.Integer), - ], - 'comment': [ - ('\(\*', Comment.Multiline, '#push'), - ('\*\)', Comment.Multiline, '#pop'), - ('[^*(]+', Comment.Multiline), - ('[*(]', Comment.Multiline), - ], - } - - -class ModelicaLexer(RegexLexer): - """ - For `Modelica `_ source code. - - .. 
versionadded:: 1.1 - """ - name = 'Modelica' - aliases = ['modelica'] - filenames = ['*.mo'] - mimetypes = ['text/x-modelica'] - - flags = re.IGNORECASE | re.DOTALL - - tokens = { - 'whitespace': [ - (r'\n', Text), - (r'\s+', Text), - (r'\\\n', Text), # line continuation - (r'//(\n|(.|\n)*?[^\\]\n)', Comment), - (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment), - ], - 'statements': [ - (r'"', String, 'string'), - (r'(\d+\.\d*|\.\d+|\d+|\d.)[eE][+-]?\d+[lL]?', Number.Float), - (r'(\d+\.\d*|\.\d+)', Number.Float), - (r'\d+[Ll]?', Number.Integer), - (r'[~!%^&*+=|?:<>/-]', Operator), - (r'(true|false|NULL|Real|Integer|Boolean)\b', Name.Builtin), - (r'([a-z_][\w]*|\'[^\']+\')' - r'([\[\d,:\]]*)' - r'(\.([a-z_][\w]*|\'[^\']+\'))+' - r'([\[\d,:\]]*)', Name.Class), - (r'(\'[\w\+\-\*\/\^]+\'|\w+)', Name), - (r'[()\[\]{},.;]', Punctuation), - (r'\'', Name, 'quoted_ident'), - ], - 'root': [ - include('whitespace'), - include('classes'), - include('functions'), - include('keywords'), - include('operators'), - (r'("|)', Name.Tag, 'html-content'), - include('statements'), - ], - 'keywords': [ - (r'(algorithm|annotation|break|connect|constant|constrainedby|' - r'discrete|each|end|else|elseif|elsewhen|encapsulated|enumeration|' - r'equation|exit|expandable|extends|' - r'external|false|final|flow|for|if|import|impure|in|initial\sequation|' - r'inner|input|loop|nondiscrete|outer|output|parameter|partial|' - r'protected|public|pure|redeclare|replaceable|stream|time|then|true|' - r'when|while|within)\b', Keyword), - ], - 'functions': [ - (r'(abs|acos|acosh|asin|asinh|atan|atan2|atan3|ceil|cos|cosh|' - r'cross|diagonal|div|exp|fill|floor|getInstanceName|identity|' - r'linspace|log|log10|matrix|mod|max|min|ndims|ones|outerProduct|' - r'product|rem|scalar|semiLinear|skew|sign|sin|sinh|size|' - r'spatialDistribution|sum|sqrt|symmetric|tan|tanh|transpose|' - r'vector|zeros)\b', Name.Function), - ], - 'operators': [ - (r'(actualStream|and|assert|backSample|cardinality|change|Clock|' - r'delay|der|edge|hold|homotopy|initial|inStream|noClock|noEvent|' - r'not|or|pre|previous|reinit|return|sample|smooth|' - r'spatialDistribution|shiftSample|subSample|superSample|terminal|' - r'terminate)\b', Name.Builtin), - ], - 'classes': [ - (r'(operator)?(\s+)?(block|class|connector|end|function|model|' - r'operator|package|record|type)(\s+)' - r'((?!if|for|when|while)[a-z_]\w*|\'[^\']+\')([;]?)', - bygroups(Keyword, Text, Keyword, Text, Name.Class, Text)) - ], - 'quoted_ident': [ - (r'\'', Name, '#pop'), - (r'[^\']+', Name), # all other characters - ], - 'string': [ - (r'"', String, '#pop'), - (r'\\([\\abfnrtv"\']|x[a-f0-9]{2,4}|[0-7]{1,3})', - String.Escape), - (r'[^\\"\n]+', String), # all other characters - (r'\\\n', String), # line continuation - (r'\\', String), # stray backslash - ], - 'html-content': [ - (r'<\s*/\s*html\s*>"', Name.Tag, '#pop'), - (r'.+?(?=<\s*/\s*html\s*>)', using(HtmlLexer)), - ] - } - - -class RebolLexer(RegexLexer): - """ - A `REBOL `_ lexer. - - .. 
versionadded:: 1.1 - """ - name = 'REBOL' - aliases = ['rebol'] - filenames = ['*.r', '*.r3', '*.reb'] - mimetypes = ['text/x-rebol'] - - flags = re.IGNORECASE | re.MULTILINE - - re.IGNORECASE - - escape_re = r'(?:\^\([0-9a-f]{1,4}\)*)' - - def word_callback(lexer, match): - word = match.group() - - if re.match(".*:$", word): - yield match.start(), Generic.Subheading, word - elif re.match( - r'(native|alias|all|any|as-string|as-binary|bind|bound\?|case|' - r'catch|checksum|comment|debase|dehex|exclude|difference|disarm|' - r'either|else|enbase|foreach|remove-each|form|free|get|get-env|if|' - r'in|intersect|loop|minimum-of|maximum-of|mold|new-line|' - r'new-line\?|not|now|prin|print|reduce|compose|construct|repeat|' - r'reverse|save|script\?|set|shift|switch|throw|to-hex|trace|try|' - r'type\?|union|unique|unless|unprotect|unset|until|use|value\?|' - r'while|compress|decompress|secure|open|close|read|read-io|' - r'write-io|write|update|query|wait|input\?|exp|log-10|log-2|' - r'log-e|square-root|cosine|sine|tangent|arccosine|arcsine|' - r'arctangent|protect|lowercase|uppercase|entab|detab|connected\?|' - r'browse|launch|stats|get-modes|set-modes|to-local-file|' - r'to-rebol-file|encloak|decloak|create-link|do-browser|bind\?|' - r'hide|draw|show|size-text|textinfo|offset-to-caret|' - r'caret-to-offset|local-request-file|rgb-to-hsv|hsv-to-rgb|' - r'crypt-strength\?|dh-make-key|dh-generate-key|dh-compute-key|' - r'dsa-make-key|dsa-generate-key|dsa-make-signature|' - r'dsa-verify-signature|rsa-make-key|rsa-generate-key|' - r'rsa-encrypt)$', word): - yield match.start(), Name.Builtin, word - elif re.match( - r'(add|subtract|multiply|divide|remainder|power|and~|or~|xor~|' - r'minimum|maximum|negate|complement|absolute|random|head|tail|' - r'next|back|skip|at|pick|first|second|third|fourth|fifth|sixth|' - r'seventh|eighth|ninth|tenth|last|path|find|select|make|to|copy\*|' - r'insert|remove|change|poke|clear|trim|sort|min|max|abs|cp|' - r'copy)$', word): - yield match.start(), Name.Function, word - elif re.match( - r'(error|source|input|license|help|install|echo|Usage|with|func|' - r'throw-on-error|function|does|has|context|probe|\?\?|as-pair|' - r'mod|modulo|round|repend|about|set-net|append|join|rejoin|reform|' - r'remold|charset|array|replace|move|extract|forskip|forall|alter|' - r'first+|also|take|for|forever|dispatch|attempt|what-dir|' - r'change-dir|clean-path|list-dir|dirize|rename|split-path|delete|' - r'make-dir|delete-dir|in-dir|confirm|dump-obj|upgrade|what|' - r'build-tag|process-source|build-markup|decode-cgi|read-cgi|' - r'write-user|save-user|set-user-name|protect-system|parse-xml|' - r'cvs-date|cvs-version|do-boot|get-net-info|desktop|layout|' - r'scroll-para|get-face|alert|set-face|uninstall|unfocus|' - r'request-dir|center-face|do-events|net-error|decode-url|' - r'parse-header|parse-header-date|parse-email-addrs|import-email|' - r'send|build-attach-body|resend|show-popup|hide-popup|open-events|' - r'find-key-face|do-face|viewtop|confine|find-window|' - r'insert-event-func|remove-event-func|inform|dump-pane|dump-face|' - r'flag-face|deflag-face|clear-fields|read-net|vbug|path-thru|' - r'read-thru|load-thru|do-thru|launch-thru|load-image|' - r'request-download|do-face-alt|set-font|set-para|get-style|' - r'set-style|make-face|stylize|choose|hilight-text|hilight-all|' - r'unlight-text|focus|scroll-drag|clear-face|reset-face|scroll-face|' - r'resize-face|load-stock|load-stock-block|notify|request|flash|' - r'request-color|request-pass|request-text|request-list|' - 
r'request-date|request-file|dbug|editor|link-relative-path|' - r'emailer|parse-error)$', word): - yield match.start(), Keyword.Namespace, word - elif re.match( - r'(halt|quit|do|load|q|recycle|call|run|ask|parse|view|unview|' - r'return|exit|break)$', word): - yield match.start(), Name.Exception, word - elif re.match('REBOL$', word): - yield match.start(), Generic.Heading, word - elif re.match("to-.*", word): - yield match.start(), Keyword, word - elif re.match('(\+|-|\*|/|//|\*\*|and|or|xor|=\?|=|==|<>|<|>|<=|>=)$', - word): - yield match.start(), Operator, word - elif re.match(".*\?$", word): - yield match.start(), Keyword, word - elif re.match(".*\!$", word): - yield match.start(), Keyword.Type, word - elif re.match("'.*", word): - yield match.start(), Name.Variable.Instance, word # lit-word - elif re.match("#.*", word): - yield match.start(), Name.Label, word # issue - elif re.match("%.*", word): - yield match.start(), Name.Decorator, word # file - else: - yield match.start(), Name.Variable, word - - tokens = { - 'root': [ - (r'[^R]+', Comment), - (r'REBOL\s+\[', Generic.Strong, 'script'), - (r'R', Comment) - ], - 'script': [ - (r'\s+', Text), - (r'#"', String.Char, 'char'), - (r'#{[0-9a-f]*}', Number.Hex), - (r'2#{', Number.Hex, 'bin2'), - (r'64#{[0-9a-z+/=\s]*}', Number.Hex), - (r'"', String, 'string'), - (r'{', String, 'string2'), - (r';#+.*\n', Comment.Special), - (r';\*+.*\n', Comment.Preproc), - (r';.*\n', Comment), - (r'%"', Name.Decorator, 'stringFile'), - (r'%[^(\^{^")\s\[\]]+', Name.Decorator), - (r'[+-]?([a-z]{1,3})?\$\d+(\.\d+)?', Number.Float), # money - (r'[+-]?\d+\:\d+(\:\d+)?(\.\d+)?', String.Other), # time - (r'\d+[\-\/][0-9a-z]+[\-\/]\d+(\/\d+\:\d+((\:\d+)?' - r'([\.\d+]?([+-]?\d+:\d+)?)?)?)?', String.Other), # date - (r'\d+(\.\d+)+\.\d+', Keyword.Constant), # tuple - (r'\d+[xX]\d+', Keyword.Constant), # pair - (r'[+-]?\d+(\'\d+)?([\.,]\d*)?[eE][+-]?\d+', Number.Float), - (r'[+-]?\d+(\'\d+)?[\.,]\d*', Number.Float), - (r'[+-]?\d+(\'\d+)?', Number), - (r'[\[\]\(\)]', Generic.Strong), - (r'[a-z]+[^(\^{"\s:)]*://[^(\^{"\s)]*', Name.Decorator), # url - (r'mailto:[^(\^{"@\s)]+@[^(\^{"@\s)]+', Name.Decorator), # url - (r'[^(\^{"@\s)]+@[^(\^{"@\s)]+', Name.Decorator), # email - (r'comment\s"', Comment, 'commentString1'), - (r'comment\s{', Comment, 'commentString2'), - (r'comment\s\[', Comment, 'commentBlock'), - (r'comment\s[^(\s{\"\[]+', Comment), - (r'/[^(\^{^")\s/[\]]*', Name.Attribute), - (r'([^(\^{^")\s/[\]]+)(?=[:({"\s/\[\]])', word_callback), - (r'<[\w:.-]*>', Name.Tag), - (r'<[^(<>\s")]+', Name.Tag, 'tag'), - (r'([^(\^{^")\s]+)', Text), - ], - 'string': [ - (r'[^(\^")]+', String), - (escape_re, String.Escape), - (r'[\(|\)]+', String), - (r'\^.', String.Escape), - (r'"', String, '#pop'), - ], - 'string2': [ - (r'[^(\^{^})]+', String), - (escape_re, String.Escape), - (r'[\(|\)]+', String), - (r'\^.', String.Escape), - (r'{', String, '#push'), - (r'}', String, '#pop'), - ], - 'stringFile': [ - (r'[^(\^")]+', Name.Decorator), - (escape_re, Name.Decorator), - (r'\^.', Name.Decorator), - (r'"', Name.Decorator, '#pop'), - ], - 'char': [ - (escape_re + '"', String.Char, '#pop'), - (r'\^."', String.Char, '#pop'), - (r'."', String.Char, '#pop'), - ], - 'tag': [ - (escape_re, Name.Tag), - (r'"', Name.Tag, 'tagString'), - (r'[^(<>\r\n")]+', Name.Tag), - (r'>', Name.Tag, '#pop'), - ], - 'tagString': [ - (r'[^(\^")]+', Name.Tag), - (escape_re, Name.Tag), - (r'[\(|\)]+', Name.Tag), - (r'\^.', Name.Tag), - (r'"', Name.Tag, '#pop'), - ], - 'tuple': [ - (r'(\d+\.)+', 
Keyword.Constant), - (r'\d+', Keyword.Constant, '#pop'), - ], - 'bin2': [ - (r'\s+', Number.Hex), - (r'([0-1]\s*){8}', Number.Hex), - (r'}', Number.Hex, '#pop'), - ], - 'commentString1': [ - (r'[^(\^")]+', Comment), - (escape_re, Comment), - (r'[\(|\)]+', Comment), - (r'\^.', Comment), - (r'"', Comment, '#pop'), - ], - 'commentString2': [ - (r'[^(\^{^})]+', Comment), - (escape_re, Comment), - (r'[\(|\)]+', Comment), - (r'\^.', Comment), - (r'{', Comment, '#push'), - (r'}', Comment, '#pop'), + (r'\$.', String.Char), + (r'#*\(', String.Symbol, 'inner_parenth'), ], - 'commentBlock': [ - (r'\[', Comment, '#push'), - (r'\]', Comment, '#pop'), - (r'"', Comment, "commentString1"), - (r'{', Comment, "commentString2"), - (r'[^(\[\]\"{)]+', Comment), + 'parenth' : [ + # This state is a bit tricky since + # we can't just pop this state + (r'\)', String.Symbol, ('root', 'afterobject')), + include('_parenth_helper'), ], - } - def analyse_text(text): - """ - Check if code contains REBOL header and so it probably not R code - """ - if re.match(r'^\s*REBOL\s*\[', text, re.IGNORECASE): - # The code starts with REBOL header - return 1.0 - elif re.search(r'\s*REBOL\s*[', text, re.IGNORECASE): - # The code contains REBOL header but also some text before it - return 0.5 - - -class ABAPLexer(RegexLexer): - """ - Lexer for ABAP, SAP's integrated language. - - .. versionadded:: 1.1 - """ - name = 'ABAP' - aliases = ['abap'] - filenames = ['*.abap'] - mimetypes = ['text/x-abap'] - - flags = re.IGNORECASE | re.MULTILINE - - tokens = { - 'common': [ + 'inner_parenth': [ + (r'\)', String.Symbol, '#pop'), + include('_parenth_helper'), + ], + 'whitespaces' : [ + # skip whitespace and comments (r'\s+', Text), - (r'^\*.*$', Comment.Single), - (r'\".*?\n', Comment.Single), - ], - 'variable-names': [ - (r'<\S+>', Name.Variable), - (r'\w[\w~]*(?:(\[\])|->\*)?', Name.Variable), - ], - 'root': [ - include('common'), - #function calls - (r'(CALL\s+(?:BADI|CUSTOMER-FUNCTION|FUNCTION))(\s+)(\'?\S+\'?)', - bygroups(Keyword, Text, Name.Function)), - (r'(CALL\s+(?:DIALOG|SCREEN|SUBSCREEN|SELECTION-SCREEN|' - r'TRANSACTION|TRANSFORMATION))\b', - Keyword), - (r'(FORM|PERFORM)(\s+)(\w+)', - bygroups(Keyword, Text, Name.Function)), - (r'(PERFORM)(\s+)(\()(\w+)(\))', - bygroups(Keyword, Text, Punctuation, Name.Variable, Punctuation )), - (r'(MODULE)(\s+)(\S+)(\s+)(INPUT|OUTPUT)', - bygroups(Keyword, Text, Name.Function, Text, Keyword)), - - # method implementation - (r'(METHOD)(\s+)([\w~]+)', - bygroups(Keyword, Text, Name.Function)), - # method calls - (r'(\s+)([\w\-]+)([=\-]>)([\w\-~]+)', - bygroups(Text, Name.Variable, Operator, Name.Function)), - # call methodnames returning style - (r'(?<=(=|-)>)([\w\-~]+)(?=\()', Name.Function), - - # keywords with dashes in them. - # these need to be first, because for instance the -ID part - # of MESSAGE-ID wouldn't get highlighted if MESSAGE was - # first in the list of keywords. 
- (r'(ADD-CORRESPONDING|AUTHORITY-CHECK|' - r'CLASS-DATA|CLASS-EVENTS|CLASS-METHODS|CLASS-POOL|' - r'DELETE-ADJACENT|DIVIDE-CORRESPONDING|' - r'EDITOR-CALL|ENHANCEMENT-POINT|ENHANCEMENT-SECTION|EXIT-COMMAND|' - r'FIELD-GROUPS|FIELD-SYMBOLS|FUNCTION-POOL|' - r'INTERFACE-POOL|INVERTED-DATE|' - r'LOAD-OF-PROGRAM|LOG-POINT|' - r'MESSAGE-ID|MOVE-CORRESPONDING|MULTIPLY-CORRESPONDING|' - r'NEW-LINE|NEW-PAGE|NEW-SECTION|NO-EXTENSION|' - r'OUTPUT-LENGTH|PRINT-CONTROL|' - r'SELECT-OPTIONS|START-OF-SELECTION|SUBTRACT-CORRESPONDING|' - r'SYNTAX-CHECK|SYSTEM-EXCEPTIONS|' - r'TYPE-POOL|TYPE-POOLS' - r')\b', Keyword), - - # keyword kombinations - (r'CREATE\s+(PUBLIC|PRIVATE|DATA|OBJECT)|' - r'((PUBLIC|PRIVATE|PROTECTED)\s+SECTION|' - r'(TYPE|LIKE)(\s+(LINE\s+OF|REF\s+TO|' - r'(SORTED|STANDARD|HASHED)\s+TABLE\s+OF))?|' - r'FROM\s+(DATABASE|MEMORY)|CALL\s+METHOD|' - r'(GROUP|ORDER) BY|HAVING|SEPARATED BY|' - r'GET\s+(BADI|BIT|CURSOR|DATASET|LOCALE|PARAMETER|' - r'PF-STATUS|(PROPERTY|REFERENCE)\s+OF|' - r'RUN\s+TIME|TIME\s+(STAMP)?)?|' - r'SET\s+(BIT|BLANK\s+LINES|COUNTRY|CURSOR|DATASET|EXTENDED\s+CHECK|' - r'HANDLER|HOLD\s+DATA|LANGUAGE|LEFT\s+SCROLL-BOUNDARY|' - r'LOCALE|MARGIN|PARAMETER|PF-STATUS|PROPERTY\s+OF|' - r'RUN\s+TIME\s+(ANALYZER|CLOCK\s+RESOLUTION)|SCREEN|' - r'TITLEBAR|UPADTE\s+TASK\s+LOCAL|USER-COMMAND)|' - r'CONVERT\s+((INVERTED-)?DATE|TIME|TIME\s+STAMP|TEXT)|' - r'(CLOSE|OPEN)\s+(DATASET|CURSOR)|' - r'(TO|FROM)\s+(DATA BUFFER|INTERNAL TABLE|MEMORY ID|' - r'DATABASE|SHARED\s+(MEMORY|BUFFER))|' - r'DESCRIBE\s+(DISTANCE\s+BETWEEN|FIELD|LIST|TABLE)|' - r'FREE\s(MEMORY|OBJECT)?|' - r'PROCESS\s+(BEFORE\s+OUTPUT|AFTER\s+INPUT|' - r'ON\s+(VALUE-REQUEST|HELP-REQUEST))|' - r'AT\s+(LINE-SELECTION|USER-COMMAND|END\s+OF|NEW)|' - r'AT\s+SELECTION-SCREEN(\s+(ON(\s+(BLOCK|(HELP|VALUE)-REQUEST\s+FOR|' - r'END\s+OF|RADIOBUTTON\s+GROUP))?|OUTPUT))?|' - r'SELECTION-SCREEN:?\s+((BEGIN|END)\s+OF\s+((TABBED\s+)?BLOCK|LINE|' - r'SCREEN)|COMMENT|FUNCTION\s+KEY|' - r'INCLUDE\s+BLOCKS|POSITION|PUSHBUTTON|' - r'SKIP|ULINE)|' - r'LEAVE\s+(LIST-PROCESSING|PROGRAM|SCREEN|' - r'TO LIST-PROCESSING|TO TRANSACTION)' - r'(ENDING|STARTING)\s+AT|' - r'FORMAT\s+(COLOR|INTENSIFIED|INVERSE|HOTSPOT|INPUT|FRAMES|RESET)|' - r'AS\s+(CHECKBOX|SUBSCREEN|WINDOW)|' - r'WITH\s+(((NON-)?UNIQUE)?\s+KEY|FRAME)|' - r'(BEGIN|END)\s+OF|' - r'DELETE(\s+ADJACENT\s+DUPLICATES\sFROM)?|' - r'COMPARING(\s+ALL\s+FIELDS)?|' - r'INSERT(\s+INITIAL\s+LINE\s+INTO|\s+LINES\s+OF)?|' - r'IN\s+((BYTE|CHARACTER)\s+MODE|PROGRAM)|' - r'END-OF-(DEFINITION|PAGE|SELECTION)|' - r'WITH\s+FRAME(\s+TITLE)|' - - # simple kombinations - r'AND\s+(MARK|RETURN)|CLIENT\s+SPECIFIED|CORRESPONDING\s+FIELDS\s+OF|' - r'IF\s+FOUND|FOR\s+EVENT|INHERITING\s+FROM|LEAVE\s+TO\s+SCREEN|' - r'LOOP\s+AT\s+(SCREEN)?|LOWER\s+CASE|MATCHCODE\s+OBJECT|MODIF\s+ID|' - r'MODIFY\s+SCREEN|NESTING\s+LEVEL|NO\s+INTERVALS|OF\s+STRUCTURE|' - r'RADIOBUTTON\s+GROUP|RANGE\s+OF|REF\s+TO|SUPPRESS DIALOG|' - r'TABLE\s+OF|UPPER\s+CASE|TRANSPORTING\s+NO\s+FIELDS|' - r'VALUE\s+CHECK|VISIBLE\s+LENGTH|HEADER\s+LINE)\b', Keyword), - - # single word keywords. 
- (r'(^|(?<=(\s|\.)))(ABBREVIATED|ADD|ALIASES|APPEND|ASSERT|' - r'ASSIGN(ING)?|AT(\s+FIRST)?|' - r'BACK|BLOCK|BREAK-POINT|' - r'CASE|CATCH|CHANGING|CHECK|CLASS|CLEAR|COLLECT|COLOR|COMMIT|' - r'CREATE|COMMUNICATION|COMPONENTS?|COMPUTE|CONCATENATE|CONDENSE|' - r'CONSTANTS|CONTEXTS|CONTINUE|CONTROLS|' - r'DATA|DECIMALS|DEFAULT|DEFINE|DEFINITION|DEFERRED|DEMAND|' - r'DETAIL|DIRECTORY|DIVIDE|DO|' - r'ELSE(IF)?|ENDAT|ENDCASE|ENDCLASS|ENDDO|ENDFORM|ENDFUNCTION|' - r'ENDIF|ENDLOOP|ENDMETHOD|ENDMODULE|ENDSELECT|ENDTRY|' - r'ENHANCEMENT|EVENTS|EXCEPTIONS|EXIT|EXPORT|EXPORTING|EXTRACT|' - r'FETCH|FIELDS?|FIND|FOR|FORM|FORMAT|FREE|FROM|' - r'HIDE|' - r'ID|IF|IMPORT|IMPLEMENTATION|IMPORTING|IN|INCLUDE|INCLUDING|' - r'INDEX|INFOTYPES|INITIALIZATION|INTERFACE|INTERFACES|INTO|' - r'LENGTH|LINES|LOAD|LOCAL|' - r'JOIN|' - r'KEY|' - r'MAXIMUM|MESSAGE|METHOD[S]?|MINIMUM|MODULE|MODIFY|MOVE|MULTIPLY|' - r'NODES|' - r'OBLIGATORY|OF|OFF|ON|OVERLAY|' - r'PACK|PARAMETERS|PERCENTAGE|POSITION|PROGRAM|PROVIDE|PUBLIC|PUT|' - r'RAISE|RAISING|RANGES|READ|RECEIVE|REFRESH|REJECT|REPORT|RESERVE|' - r'RESUME|RETRY|RETURN|RETURNING|RIGHT|ROLLBACK|' - r'SCROLL|SEARCH|SELECT|SHIFT|SINGLE|SKIP|SORT|SPLIT|STATICS|STOP|' - r'SUBMIT|SUBTRACT|SUM|SUMMARY|SUMMING|SUPPLY|' - r'TABLE|TABLES|TIMES|TITLE|TO|TOP-OF-PAGE|TRANSFER|TRANSLATE|TRY|TYPES|' - r'ULINE|UNDER|UNPACK|UPDATE|USING|' - r'VALUE|VALUES|VIA|' - r'WAIT|WHEN|WHERE|WHILE|WITH|WINDOW|WRITE)\b', Keyword), - - # builtins - (r'(abs|acos|asin|atan|' - r'boolc|boolx|bit_set|' - r'char_off|charlen|ceil|cmax|cmin|condense|contains|' - r'contains_any_of|contains_any_not_of|concat_lines_of|cos|cosh|' - r'count|count_any_of|count_any_not_of|' - r'dbmaxlen|distance|' - r'escape|exp|' - r'find|find_end|find_any_of|find_any_not_of|floor|frac|from_mixed|' - r'insert|' - r'lines|log|log10|' - r'match|matches|' - r'nmax|nmin|numofchar|' - r'repeat|replace|rescale|reverse|round|' - r'segment|shift_left|shift_right|sign|sin|sinh|sqrt|strlen|' - r'substring|substring_after|substring_from|substring_before|substring_to|' - r'tan|tanh|to_upper|to_lower|to_mixed|translate|trunc|' - r'xstrlen)(\()\b', bygroups(Name.Builtin, Punctuation)), - - (r'&[0-9]', Name), - (r'[0-9]+', Number.Integer), - - # operators which look like variable names before - # parsing variable names. - (r'(?<=(\s|.))(AND|EQ|NE|GT|LT|GE|LE|CO|CN|CA|NA|CS|NOT|NS|CP|NP|' - r'BYTE-CO|BYTE-CN|BYTE-CA|BYTE-NA|BYTE-CS|BYTE-NS|' - r'IS\s+(NOT\s+)?(INITIAL|ASSIGNED|REQUESTED|BOUND))\b', Operator), - - include('variable-names'), - - # standard oparators after variable names, - # because < and > are part of field symbols. - (r'[?*<>=\-+]', Operator), - (r"'(''|[^'])*'", String.Single), - (r"`([^`])*`", String.Single), - (r'[/;:()\[\],\.]', Punctuation) + (r'"(""|[^"])*"', Comment), + ], + 'objects' : [ + (r'\[', Text, 'blockvariables'), + (r'\]', Text, 'afterobject'), + (r'\b(self|super|true|false|nil|thisContext)\b', + Name.Builtin.Pseudo, 'afterobject'), + (r'\b[A-Z]\w*(?!:)\b', Name.Class, 'afterobject'), + (r'\b[a-z]\w*(?!:)\b', Name.Variable, 'afterobject'), + (r'#("(""|[^"])*"|[-+*/\\~<>=|&!?,@%]+|[\w:]+)', + String.Symbol, 'afterobject'), + include('literals'), + ], + 'afterobject' : [ + (r'! 
!$', Keyword , '#pop'), # squeak chunk delimiter + include('whitespaces'), + (r'\b(ifTrue:|ifFalse:|whileTrue:|whileFalse:|timesRepeat:)', + Name.Builtin, '#pop'), + (r'\b(new\b(?!:))', Name.Builtin), + (r'\:=|\_', Operator, '#pop'), + (r'\b[a-zA-Z]+\w*:', Name.Function, '#pop'), + (r'\b[a-zA-Z]+\w*', Name.Function), + (r'\w+:?|[-+*/\\~<>=|&!?,@%]+', Name.Function, '#pop'), + (r'\.', Punctuation, '#pop'), + (r';', Punctuation), + (r'[\])}]', Text), + (r'[\[({]', Text, '#pop'), + ], + 'squeak fileout' : [ + # Squeak fileout format (optional) + (r'^"(""|[^"])*"!', Keyword), + (r"^'(''|[^'])*'!", Keyword), + (r'^(!)(\w+)( commentStamp: )(.*?)( prior: .*?!\n)(.*?)(!)', + bygroups(Keyword, Name.Class, Keyword, String, Keyword, Text, Keyword)), + (r"^(!)(\w+(?: class)?)( methodsFor: )('(?:''|[^'])*')(.*?!)", + bygroups(Keyword, Name.Class, Keyword, String, Keyword)), + (r'^(\w+)( subclass: )(#\w+)' + r'(\s+instanceVariableNames: )(.*?)' + r'(\s+classVariableNames: )(.*?)' + r'(\s+poolDictionaries: )(.*?)' + r'(\s+category: )(.*?)(!)', + bygroups(Name.Class, Keyword, String.Symbol, Keyword, String, Keyword, + String, Keyword, String, Keyword, String, Keyword)), + (r'^(\w+(?: class)?)(\s+instanceVariableNames: )(.*?)(!)', + bygroups(Name.Class, Keyword, String, Keyword)), + (r'(!\n)(\].*)(! !)$', bygroups(Keyword, Text, Keyword)), + (r'! !$', Keyword), ], } @@ -1845,330 +400,6 @@ class NewspeakLexer(RegexLexer): } -class GherkinLexer(RegexLexer): - """ - For `Gherkin ` syntax. - - .. versionadded:: 1.2 - """ - name = 'Gherkin' - aliases = ['cucumber', 'gherkin'] - filenames = ['*.feature'] - mimetypes = ['text/x-gherkin'] - - feature_keywords = u'^(기능|機能|功能|フィーチャ|خاصية|תכונה|Функціонал|Функционалност|Функционал|Фича|Особина|Могућност|Özellik|Właściwość|Tính năng|Trajto|Savybė|Požiadavka|Požadavek|Osobina|Ominaisuus|Omadus|OH HAI|Mogućnost|Mogucnost|Jellemző|Fīča|Funzionalità|Funktionalität|Funkcionalnost|Funkcionalitāte|Funcționalitate|Functionaliteit|Functionalitate|Funcionalitat|Funcionalidade|Fonctionnalité|Fitur|Feature|Egenskap|Egenskab|Crikey|Característica|Arwedd)(:)(.*)$' - feature_element_keywords = u'^(\\s*)(시나리오 개요|시나리오|배경|背景|場景大綱|場景|场景大纲|场景|劇本大綱|劇本|テンプレ|シナリオテンプレート|シナリオテンプレ|シナリオアウトライン|シナリオ|سيناريو مخطط|سيناريو|الخلفية|תרחיש|תבנית תרחיש|רקע|Тарих|Сценарій|Сценарио|Сценарий структураси|Сценарий|Структура сценарію|Структура сценарија|Структура сценария|Скица|Рамка на сценарий|Пример|Предыстория|Предистория|Позадина|Передумова|Основа|Концепт|Контекст|Założenia|Wharrimean is|Tình huống|The thing of it is|Tausta|Taust|Tapausaihio|Tapaus|Szenariogrundriss|Szenario|Szablon scenariusza|Stsenaarium|Struktura scenarija|Skica|Skenario konsep|Skenario|Situācija|Senaryo taslağı|Senaryo|Scénář|Scénario|Schema dello scenario|Scenārijs pēc parauga|Scenārijs|Scenár|Scenaro|Scenariusz|Scenariul de şablon|Scenariul de sablon|Scenariu|Scenario Outline|Scenario Amlinellol|Scenario|Scenarijus|Scenarijaus šablonas|Scenarij|Scenarie|Rerefons|Raamstsenaarium|Primer|Pozadí|Pozadina|Pozadie|Plan du scénario|Plan du Scénario|Osnova scénáře|Osnova|Náčrt Scénáře|Náčrt Scenáru|Mate|MISHUN SRSLY|MISHUN|Kịch bản|Konturo de la scenaro|Kontext|Konteksts|Kontekstas|Kontekst|Koncept|Khung tình huống|Khung kịch bản|Háttér|Grundlage|Geçmiş|Forgatókönyv vázlat|Forgatókönyv|Fono|Esquema do Cenário|Esquema do Cenario|Esquema del escenario|Esquema de l\'escenari|Escenario|Escenari|Dis is what went down|Dasar|Contexto|Contexte|Contesto|Condiţii|Conditii|Cenário|Cenario|Cefndir|Bối 
cảnh|Blokes|Bakgrunn|Bakgrund|Baggrund|Background|B4|Antecedents|Antecedentes|All y\'all|Achtergrond|Abstrakt Scenario|Abstract Scenario)(:)(.*)$' - examples_keywords = u'^(\\s*)(예|例子|例|サンプル|امثلة|דוגמאות|Сценарији|Примери|Приклади|Мисоллар|Значения|Örnekler|Voorbeelden|Variantai|Tapaukset|Scenarios|Scenariji|Scenarijai|Příklady|Példák|Príklady|Przykłady|Primjeri|Primeri|Piemēri|Pavyzdžiai|Paraugs|Juhtumid|Exemplos|Exemples|Exemplele|Exempel|Examples|Esempi|Enghreifftiau|Ekzemploj|Eksempler|Ejemplos|EXAMPLZ|Dữ liệu|Contoh|Cobber|Beispiele)(:)(.*)$' - step_keywords = u'^(\\s*)(하지만|조건|먼저|만일|만약|단|그리고|그러면|那麼|那么|而且|當|当|前提|假設|假如|但是|但し|並且|もし|ならば|ただし|しかし|かつ|و |متى |لكن |عندما |ثم |بفرض |اذاً |כאשר |וגם |בהינתן |אזי |אז |אבל |Якщо |Унда |То |Припустимо, що |Припустимо |Онда |Но |Нехай |Лекин |Когато |Када |Кад |К тому же |И |Задато |Задати |Задате |Если |Допустим |Дадено |Ва |Бирок |Аммо |Али |Але |Агар |А |І |Și |És |Zatati |Zakładając |Zadato |Zadate |Zadano |Zadani |Zadan |Youse know when youse got |Youse know like when |Yna |Ya know how |Ya gotta |Y |Wun |Wtedy |When y\'all |When |Wenn |WEN |Và |Ve |Und |Un |Thì |Then y\'all |Then |Tapi |Tak |Tada |Tad |Så |Stel |Soit |Siis |Si |Sed |Se |Quando |Quand |Quan |Pryd |Pokud |Pokiaľ |Però |Pero |Pak |Oraz |Onda |Ond |Oletetaan |Og |Och |O zaman |Når |När |Niin |Nhưng |N |Mutta |Men |Mas |Maka |Majd |Mais |Maar |Ma |Lorsque |Lorsqu\'|Kun |Kuid |Kui |Khi |Keď |Ketika |Když |Kaj |Kai |Kada |Kad |Jeżeli |Ja |Ir |I CAN HAZ |I |Ha |Givun |Givet |Given y\'all |Given |Gitt |Gegeven |Gegeben sei |Fakat |Eğer ki |Etant donné |Et |Então |Entonces |Entao |En |Eeldades |E |Duota |Dun |Donitaĵo |Donat |Donada |Do |Diyelim ki |Dengan |Den youse gotta |De |Dato |Dar |Dann |Dan |Dado |Dacă |Daca |DEN |Când |Cuando |Cho |Cept |Cand |Cal |But y\'all |But |Buh |Biết |Bet |BUT |Atès |Atunci |Atesa |Anrhegedig a |Angenommen |And y\'all |And |An |Ama |Als |Alors |Allora |Ali |Aleshores |Ale |Akkor |Aber |AN |A také |A |\* )' - - tokens = { - 'comments': [ - (r'^\s*#.*$', Comment), - ], - 'feature_elements' : [ - (step_keywords, Keyword, "step_content_stack"), - include('comments'), - (r"(\s|.)", Name.Function), - ], - 'feature_elements_on_stack' : [ - (step_keywords, Keyword, "#pop:2"), - include('comments'), - (r"(\s|.)", Name.Function), - ], - 'examples_table': [ - (r"\s+\|", Keyword, 'examples_table_header'), - include('comments'), - (r"(\s|.)", Name.Function), - ], - 'examples_table_header': [ - (r"\s+\|\s*$", Keyword, "#pop:2"), - include('comments'), - (r"\\\|", Name.Variable), - (r"\s*\|", Keyword), - (r"[^\|]", Name.Variable), - ], - 'scenario_sections_on_stack': [ - (feature_element_keywords, - bygroups(Name.Function, Keyword, Keyword, Name.Function), - "feature_elements_on_stack"), - ], - 'narrative': [ - include('scenario_sections_on_stack'), - include('comments'), - (r"(\s|.)", Name.Function), - ], - 'table_vars': [ - (r'(<[^>]+>)', Name.Variable), - ], - 'numbers': [ - (r'(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', String), - ], - 'string': [ - include('table_vars'), - (r'(\s|.)', String), - ], - 'py_string': [ - (r'"""', Keyword, "#pop"), - include('string'), - ], - 'step_content_root':[ - (r"$", Keyword, "#pop"), - include('step_content'), - ], - 'step_content_stack':[ - (r"$", Keyword, "#pop:2"), - include('step_content'), - ], - 'step_content':[ - (r'"', Name.Function, "double_string"), - include('table_vars'), - include('numbers'), - include('comments'), - (r'(\s|.)', Name.Function), - ], - 'table_content': [ - (r"\s+\|\s*$", Keyword, "#pop"), - 
include('comments'), - (r"\\\|", String), - (r"\s*\|", Keyword), - include('string'), - ], - 'double_string': [ - (r'"', Name.Function, "#pop"), - include('string'), - ], - 'root': [ - (r'\n', Name.Function), - include('comments'), - (r'"""', Keyword, "py_string"), - (r'\s+\|', Keyword, 'table_content'), - (r'"', Name.Function, "double_string"), - include('table_vars'), - include('numbers'), - (r'(\s*)(@[^@\r\n\t ]+)', bygroups(Name.Function, Name.Tag)), - (step_keywords, bygroups(Name.Function, Keyword), - 'step_content_root'), - (feature_keywords, bygroups(Keyword, Keyword, Name.Function), - 'narrative'), - (feature_element_keywords, - bygroups(Name.Function, Keyword, Keyword, Name.Function), - 'feature_elements'), - (examples_keywords, - bygroups(Name.Function, Keyword, Keyword, Name.Function), - 'examples_table'), - (r'(\s|.)', Name.Function), - ] - } - -class AsymptoteLexer(RegexLexer): - """ - For `Asymptote `_ source code. - - .. versionadded:: 1.2 - """ - name = 'Asymptote' - aliases = ['asy', 'asymptote'] - filenames = ['*.asy'] - mimetypes = ['text/x-asymptote'] - - #: optional Comment or Whitespace - _ws = r'(?:\s|//.*?\n|/\*.*?\*/)+' - - tokens = { - 'whitespace': [ - (r'\n', Text), - (r'\s+', Text), - (r'\\\n', Text), # line continuation - (r'//(\n|(.|\n)*?[^\\]\n)', Comment), - (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment), - ], - 'statements': [ - # simple string (TeX friendly) - (r'"(\\\\|\\"|[^"])*"', String), - # C style string (with character escapes) - (r"'", String, 'string'), - (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float), - (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float), - (r'0x[0-9a-fA-F]+[Ll]?', Number.Hex), - (r'0[0-7]+[Ll]?', Number.Oct), - (r'\d+[Ll]?', Number.Integer), - (r'[~!%^&*+=|?:<>/-]', Operator), - (r'[()\[\],.]', Punctuation), - (r'\b(case)(.+?)(:)', bygroups(Keyword, using(this), Text)), - (r'(and|controls|tension|atleast|curl|if|else|while|for|do|' - r'return|break|continue|struct|typedef|new|access|import|' - r'unravel|from|include|quote|static|public|private|restricted|' - r'this|explicit|true|false|null|cycle|newframe|operator)\b', Keyword), - # Since an asy-type-name can be also an asy-function-name, - # in the following we test if the string " [a-zA-Z]" follows - # the Keyword.Type. - # Of course it is not perfect ! - (r'(Braid|FitResult|Label|Legend|TreeNode|abscissa|arc|arrowhead|' - r'binarytree|binarytreeNode|block|bool|bool3|bounds|bqe|circle|' - r'conic|coord|coordsys|cputime|ellipse|file|filltype|frame|grid3|' - r'guide|horner|hsv|hyperbola|indexedTransform|int|inversion|key|' - r'light|line|linefit|marginT|marker|mass|object|pair|parabola|path|' - r'path3|pen|picture|point|position|projection|real|revolution|' - r'scaleT|scientific|segment|side|slice|splitface|string|surface|' - r'tensionSpecifier|ticklocate|ticksgridT|tickvalues|transform|' - r'transformation|tree|triangle|trilinear|triple|vector|' - r'vertex|void)(?=([ ]{1,}[a-zA-Z]))', Keyword.Type), - # Now the asy-type-name which are not asy-function-name - # except yours ! 
- # Perhaps useless - (r'(Braid|FitResult|TreeNode|abscissa|arrowhead|block|bool|bool3|' - r'bounds|coord|frame|guide|horner|int|linefit|marginT|pair|pen|' - r'picture|position|real|revolution|slice|splitface|ticksgridT|' - r'tickvalues|tree|triple|vertex|void)\b', Keyword.Type), - ('[a-zA-Z_]\w*:(?!:)', Name.Label), - ('[a-zA-Z_]\w*', Name), - ], - 'root': [ - include('whitespace'), - # functions - (r'((?:[\w*\s])+?(?:\s|\*))' # return arguments - r'([a-zA-Z_]\w*)' # method name - r'(\s*\([^;]*?\))' # signature - r'(' + _ws + r')({)', - bygroups(using(this), Name.Function, using(this), using(this), - Punctuation), - 'function'), - # function declarations - (r'((?:[\w*\s])+?(?:\s|\*))' # return arguments - r'([a-zA-Z_]\w*)' # method name - r'(\s*\([^;]*?\))' # signature - r'(' + _ws + r')(;)', - bygroups(using(this), Name.Function, using(this), using(this), - Punctuation)), - ('', Text, 'statement'), - ], - 'statement' : [ - include('whitespace'), - include('statements'), - ('[{}]', Punctuation), - (';', Punctuation, '#pop'), - ], - 'function': [ - include('whitespace'), - include('statements'), - (';', Punctuation), - ('{', Punctuation, '#push'), - ('}', Punctuation, '#pop'), - ], - 'string': [ - (r"'", String, '#pop'), - (r'\\([\\abfnrtv"\'?]|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape), - (r'\n', String), - (r"[^\\'\n]+", String), # all other characters - (r'\\\n', String), - (r'\\n', String), # line continuation - (r'\\', String), # stray backslash - ] - } - - def get_tokens_unprocessed(self, text): - from pygments.lexers._asybuiltins import ASYFUNCNAME, ASYVARNAME - for index, token, value in \ - RegexLexer.get_tokens_unprocessed(self, text): - if token is Name and value in ASYFUNCNAME: - token = Name.Function - elif token is Name and value in ASYVARNAME: - token = Name.Variable - yield index, token, value - - -class PostScriptLexer(RegexLexer): - """ - Lexer for PostScript files. - - The PostScript Language Reference published by Adobe at - - is the authority for this. - - .. versionadded:: 1.4 - """ - name = 'PostScript' - aliases = ['postscript', 'postscr'] - filenames = ['*.ps', '*.eps'] - mimetypes = ['application/postscript'] - - delimiter = r'\(\)\<\>\[\]\{\}\/\%\s' - delimiter_end = r'(?=[%s])' % delimiter - - valid_name_chars = r'[^%s]' % delimiter - valid_name = r"%s+%s" % (valid_name_chars, delimiter_end) - - tokens = { - 'root': [ - # All comment types - (r'^%!.+\n', Comment.Preproc), - (r'%%.*\n', Comment.Special), - (r'(^%.*\n){2,}', Comment.Multiline), - (r'%.*\n', Comment.Single), - - # String literals are awkward; enter separate state. - (r'\(', String, 'stringliteral'), - - (r'[\{\}(\<\<)(\>\>)\[\]]', Punctuation), - - # Numbers - (r'<[0-9A-Fa-f]+>' + delimiter_end, Number.Hex), - # Slight abuse: use Oct to signify any explicit base system - (r'[0-9]+\#(\-|\+)?([0-9]+\.?|[0-9]*\.[0-9]+|[0-9]+\.[0-9]*)' - r'((e|E)[0-9]+)?' + delimiter_end, Number.Oct), - (r'(\-|\+)?([0-9]+\.?|[0-9]*\.[0-9]+|[0-9]+\.[0-9]*)((e|E)[0-9]+)?' - + delimiter_end, Number.Float), - (r'(\-|\+)?[0-9]+' + delimiter_end, Number.Integer), - - # References - (r'\/%s' % valid_name, Name.Variable), - - # Names - (valid_name, Name.Function), # Anything else is executed - - # These keywords taken from - # - # Is there an authoritative list anywhere that doesn't involve - # trawling documentation? 
- - (r'(false|true)' + delimiter_end, Keyword.Constant), - - # Conditionals / flow control - (r'(eq|ne|ge|gt|le|lt|and|or|not|if|ifelse|for|forall)' - + delimiter_end, Keyword.Reserved), - - ('(abs|add|aload|arc|arcn|array|atan|begin|bind|ceiling|charpath|' - 'clip|closepath|concat|concatmatrix|copy|cos|currentlinewidth|' - 'currentmatrix|currentpoint|curveto|cvi|cvs|def|defaultmatrix|' - 'dict|dictstackoverflow|div|dtransform|dup|end|exch|exec|exit|exp|' - 'fill|findfont|floor|get|getinterval|grestore|gsave|gt|' - 'identmatrix|idiv|idtransform|index|invertmatrix|itransform|' - 'length|lineto|ln|load|log|loop|matrix|mod|moveto|mul|neg|newpath|' - 'pathforall|pathbbox|pop|print|pstack|put|quit|rand|rangecheck|' - 'rcurveto|repeat|restore|rlineto|rmoveto|roll|rotate|round|run|' - 'save|scale|scalefont|setdash|setfont|setgray|setlinecap|' - 'setlinejoin|setlinewidth|setmatrix|setrgbcolor|shfill|show|' - 'showpage|sin|sqrt|stack|stringwidth|stroke|strokepath|sub|' - 'syntaxerror|transform|translate|truncate|typecheck|undefined|' - 'undefinedfilename|undefinedresult)' + delimiter_end, - Name.Builtin), - - (r'\s+', Text), - ], - - 'stringliteral': [ - (r'[^\(\)\\]+', String), - (r'\\', String.Escape, 'escape'), - (r'\(', String, '#push'), - (r'\)', String, '#pop'), - ], - - 'escape': [ - (r'([0-8]{3}|n|r|t|b|f|\\|\(|\))?', String.Escape, '#pop'), - ], - } - - class AutohotkeyLexer(RegexLexer): """ For `autohotkey `_ source code. @@ -2395,7 +626,7 @@ class MaqlLexer(RegexLexer): # Comments (r'#.*', Comment.Single), # Punctuation - (r'[,;\(\)]', Token.Punctuation), + (r'[,;\(\)]', Punctuation), # Space is not significant (r'\s+', Text) ], @@ -2428,15 +659,15 @@ class GoodDataCLLexer(RegexLexer): # Function call (r'[a-z]\w*', Name.Function), # Argument list - (r'\(', Token.Punctuation, 'args-list'), + (r'\(', Punctuation, 'args-list'), # Punctuation - (r';', Token.Punctuation), + (r';', Punctuation), # Space is not significant (r'\s+', Text) ], 'args-list': [ - (r'\)', Token.Punctuation, '#pop'), - (r',', Token.Punctuation), + (r'\)', Punctuation, '#pop'), + (r',', Punctuation), (r'[a-z]\w*', Name.Variable), (r'=', Operator), (r'"', Literal.String, 'string-literal'), @@ -2698,73 +929,6 @@ class Cfengine3Lexer(RegexLexer): } -class SnobolLexer(RegexLexer): - """ - Lexer for the SNOBOL4 programming language. - - Recognizes the common ASCII equivalents of the original SNOBOL4 operators. - Does not require spaces around binary operators. - - .. versionadded:: 1.5 - """ - - name = "Snobol" - aliases = ["snobol"] - filenames = ['*.snobol'] - mimetypes = ['text/x-snobol'] - - tokens = { - # root state, start of line - # comments, continuation lines, and directives start in column 1 - # as do labels - 'root': [ - (r'\*.*\n', Comment), - (r'[\+\.] ', Punctuation, 'statement'), - (r'-.*\n', Comment), - (r'END\s*\n', Name.Label, 'heredoc'), - (r'[A-Za-z\$][\w$]*', Name.Label, 'statement'), - (r'\s+', Text, 'statement'), - ], - # statement state, line after continuation or label - 'statement': [ - (r'\s*\n', Text, '#pop'), - (r'\s+', Text), - (r'(?<=[^\w.])(LT|LE|EQ|NE|GE|GT|INTEGER|IDENT|DIFFER|LGT|SIZE|' - r'REPLACE|TRIM|DUPL|REMDR|DATE|TIME|EVAL|APPLY|OPSYN|LOAD|UNLOAD|' - r'LEN|SPAN|BREAK|ANY|NOTANY|TAB|RTAB|REM|POS|RPOS|FAIL|FENCE|' - r'ABORT|ARB|ARBNO|BAL|SUCCEED|INPUT|OUTPUT|TERMINAL)(?=[^\w.])', - Name.Builtin), - (r'[A-Za-z][\w\.]*', Name), - # ASCII equivalents of original operators - # | for the EBCDIC equivalent, ! 
likewise - # \ for EBCDIC negation - (r'\*\*|[\?\$\.!%\*/#+\-@\|&\\=]', Operator), - (r'"[^"]*"', String), - (r"'[^']*'", String), - # Accept SPITBOL syntax for real numbers - # as well as Macro SNOBOL4 - (r'[0-9]+(?=[^\.EeDd])', Number.Integer), - (r'[0-9]+(\.[0-9]*)?([EDed][-+]?[0-9]+)?', Number.Float), - # Goto - (r':', Punctuation, 'goto'), - (r'[\(\)<>,;]', Punctuation), - ], - # Goto block - 'goto': [ - (r'\s*\n', Text, "#pop:2"), - (r'\s+', Text), - (r'F|S', Keyword), - (r'(\()([A-Za-z][\w.]*)(\))', - bygroups(Punctuation, Name.Label, Punctuation)) - ], - # everything after the END statement is basically one - # big heredoc. - 'heredoc': [ - (r'.*\n', String.Heredoc) - ] - } - - class UrbiscriptLexer(ExtendedRegexLexer): """ For UrbiScript source code. @@ -2871,59 +1035,6 @@ class UrbiscriptLexer(ExtendedRegexLexer): } -class OpenEdgeLexer(RegexLexer): - """ - Lexer for `OpenEdge ABL (formerly Progress) - `_ source code. - - .. versionadded:: 1.5 - """ - name = 'OpenEdge ABL' - aliases = ['openedge', 'abl', 'progress'] - filenames = ['*.p', '*.cls'] - mimetypes = ['text/x-openedge', 'application/x-openedge'] - - types = (r'(?i)(^|(?<=[^0-9a-z_\-]))(CHARACTER|CHAR|CHARA|CHARAC|CHARACT|CHARACTE|' - r'COM-HANDLE|DATE|DATETIME|DATETIME-TZ|' - r'DECIMAL|DEC|DECI|DECIM|DECIMA|HANDLE|' - r'INT64|INTEGER|INT|INTE|INTEG|INTEGE|' - r'LOGICAL|LONGCHAR|MEMPTR|RAW|RECID|ROWID)\s*($|(?=[^0-9a-z_\-]))') - - keywords = (r'(?i)(^|(?<=[^0-9a-z_\-]))(' + - r'|'.join(OPENEDGEKEYWORDS) + - r')\s*($|(?=[^0-9a-z_\-]))') - tokens = { - 'root': [ - (r'/\*', Comment.Multiline, 'comment'), - (r'\{', Comment.Preproc, 'preprocessor'), - (r'\s*&.*', Comment.Preproc), - (r'0[xX][0-9a-fA-F]+[LlUu]*', Number.Hex), - (r'(?i)(DEFINE|DEF|DEFI|DEFIN)\b', Keyword.Declaration), - (types, Keyword.Type), - (keywords, Name.Builtin), - (r'"(\\\\|\\"|[^"])*"', String.Double), - (r"'(\\\\|\\'|[^'])*'", String.Single), - (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float), - (r'[0-9]+', Number.Integer), - (r'\s+', Text), - (r'[+*/=-]', Operator), - (r'[.:()]', Punctuation), - (r'.', Name.Variable), # Lazy catch-all - ], - 'comment': [ - (r'[^*/]', Comment.Multiline), - (r'/\*', Comment.Multiline, '#push'), - (r'\*/', Comment.Multiline, '#pop'), - (r'[*/]', Comment.Multiline) - ], - 'preprocessor': [ - (r'[^{}]', Comment.Preproc), - (r'{', Comment.Preproc, '#push'), - (r'}', Comment.Preproc, '#pop'), - ], - } - - class BroLexer(RegexLexer): """ For `Bro `_ scripts. @@ -3089,92 +1200,6 @@ class MscgenLexer(RegexLexer): } -def _rx_indent(level): - # Kconfig *always* interprets a tab as 8 spaces, so this is the default. - # Edit this if you are in an environment where KconfigLexer gets expanded - # input (tabs expanded to spaces) and the expansion tab width is != 8, - # e.g. in connection with Trac (trac.ini, [mimeviewer], tab_width). - # Value range here is 2 <= {tab_width} <= 8. - tab_width = 8 - # Regex matching a given indentation {level}, assuming that indentation is - # a multiple of {tab_width}. In other cases there might be problems. - return r'(?:\t| {1,%s}\t| {%s}){%s}.*\n' % (tab_width-1, tab_width, level) - - -class KconfigLexer(RegexLexer): - """ - For Linux-style Kconfig files. - - .. 
versionadded:: 1.6 - """ - - name = 'Kconfig' - aliases = ['kconfig', 'menuconfig', 'linux-config', 'kernel-config'] - # Adjust this if new kconfig file names appear in your environment - filenames = ['Kconfig', '*Config.in*', 'external.in*', - 'standard-modules.in'] - mimetypes = ['text/x-kconfig'] - # No re.MULTILINE, indentation-aware help text needs line-by-line handling - flags = 0 - - def call_indent(level): - # If indentation >= {level} is detected, enter state 'indent{level}' - return (_rx_indent(level), String.Doc, 'indent%s' % level) - - def do_indent(level): - # Print paragraphs of indentation level >= {level} as String.Doc, - # ignoring blank lines. Then return to 'root' state. - return [ - (_rx_indent(level), String.Doc), - (r'\s*\n', Text), - default('#pop:2') - ] - - tokens = { - 'root': [ - (r'\s+', Text), - (r'#.*?\n', Comment.Single), - (r'(mainmenu|config|menuconfig|choice|endchoice|comment|menu|' - r'endmenu|visible if|if|endif|source|prompt|select|depends on|' - r'default|range|option)\b', Keyword), - (r'(---help---|help)[\t ]*\n', Keyword, 'help'), - (r'(bool|tristate|string|hex|int|defconfig_list|modules|env)\b', - Name.Builtin), - (r'[!=&|]', Operator), - (r'[()]', Punctuation), - (r'[0-9]+', Number.Integer), - (r"'(''|[^'])*'", String.Single), - (r'"(""|[^"])*"', String.Double), - (r'\S+', Text), - ], - # Help text is indented, multi-line and ends when a lower indentation - # level is detected. - 'help': [ - # Skip blank lines after help token, if any - (r'\s*\n', Text), - # Determine the first help line's indentation level heuristically(!). - # Attention: this is not perfect, but works for 99% of "normal" - # indentation schemes up to a max. indentation level of 7. - call_indent(7), - call_indent(6), - call_indent(5), - call_indent(4), - call_indent(3), - call_indent(2), - call_indent(1), - ('', Text, '#pop'), # for incomplete help sections without text - ], - # Handle text for indentation levels 7 to 1 - 'indent7': do_indent(7), - 'indent6': do_indent(6), - 'indent5': do_indent(5), - 'indent4': do_indent(4), - 'indent3': do_indent(3), - 'indent2': do_indent(2), - 'indent1': do_indent(1), - } - - class VGLLexer(RegexLexer): """ For `SampleManager VGL `_ @@ -3276,7 +1301,7 @@ class SourcePawnLexer(RegexLexer): ] } - SM_TYPES = set(['Action', 'bool', 'Float', 'Plugin', 'String', 'any', + SM_TYPES = set(('Action', 'bool', 'Float', 'Plugin', 'String', 'any', 'AdminFlag', 'OverrideType', 'OverrideRule', 'ImmunityType', 'GroupId', 'AdminId', 'AdmAccessMode', 'AdminCachePart', 'CookieAccess', 'CookieMenu', 'CookieMenuAction', 'NetFlow', @@ -3294,7 +1319,7 @@ class SourcePawnLexer(RegexLexer): 'SortFunc2D', 'APLRes', 'FeatureType', 'FeatureStatus', 'SMCResult', 'SMCError', 'TFClassType', 'TFTeam', 'TFCond', 'TFResourceType', 'Timer', 'TopMenuAction', 'TopMenuObjectType', - 'TopMenuPosition', 'TopMenuObject', 'UserMsg']) + 'TopMenuPosition', 'TopMenuObject', 'UserMsg')) def __init__(self, **options): self.smhighlighting = get_bool_opt(options, @@ -3970,6 +1995,7 @@ class APLLexer(RegexLexer): ], } + class AmbientTalkLexer(RegexLexer): """ Lexer for `AmbientTalk `_ source code. @@ -4095,37 +2121,11 @@ class PawnLexer(RegexLexer): } -class VCTreeStatusLexer(RegexLexer): - """ - For colorizing output of version control status commans, like "hg - status" or "svn status". - - .. 
versionadded:: 2.0 - """ - name = 'VCTreeStatus' - aliases = ['vctreestatus'] - filenames = [] - mimetypes = [] - - tokens = { - 'root' : [ - (r'^A \+ C\s+', Generic.Error), - (r'^A\s+\+?\s+', String), - (r'^M\s+', Generic.Inserted), - (r'^C\s+', Generic.Error), - (r'^D\s+', Generic.Deleted), - (r'^[\?!]\s+', Comment.Preproc), - (r' >\s+.*\n', Comment.Preproc), - (r'.*\n', Text) - ] - } - - class RslLexer(RegexLexer): """ `RSL `_ is the formal specification language used in RAISE (Rigorous Approach to Industrial Software Engineering) - method. + method. .. versionadded:: 2.0 """ @@ -4169,7 +2169,7 @@ class RslLexer(RegexLexer): } def analyse_text(text): - """ + """ Check for the most common text in the beginning of a RSL file. """ if re.search(r'scheme\s*.*?=\s*class\s*type', text, re.I) is not None: @@ -4239,7 +2239,8 @@ class PanLexer(RegexLexer): include('root'), ], } - + + class RedLexer(RegexLexer): """ A `Red-language `_ lexer. diff --git a/pygments/lexers/prolog.py b/pygments/lexers/prolog.py index 5b705a0b..b0338611 100644 --- a/pygments/lexers/prolog.py +++ b/pygments/lexers/prolog.py @@ -17,7 +17,7 @@ from pygments.util import get_bool_opt from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ Number, Punctuation, Error -__all__ = ['PrologLexer'] +__all__ = ['PrologLexer', 'LogtalkLexer'] class PrologLexer(RegexLexer): @@ -85,3 +85,222 @@ class PrologLexer(RegexLexer): def analyse_text(text): return ':-' in text + + +class LogtalkLexer(RegexLexer): + """ + For `Logtalk `_ source code. + + .. versionadded:: 0.10 + """ + + name = 'Logtalk' + aliases = ['logtalk'] + filenames = ['*.lgt', '*.logtalk'] + mimetypes = ['text/x-logtalk'] + + tokens = { + 'root': [ + # Directives + (r'^\s*:-\s',Punctuation,'directive'), + # Comments + (r'%.*?\n', Comment), + (r'/\*(.|\n)*?\*/',Comment), + # Whitespace + (r'\n', Text), + (r'\s+', Text), + # Numbers + (r"0'.", Number), + (r'0b[01]+', Number.Bin), + (r'0o[0-7]+', Number.Oct), + (r'0x[0-9a-fA-F]+', Number.Hex), + (r'\d+\.?\d*((e|E)(\+|-)?\d+)?', Number), + # Variables + (r'([A-Z_][a-zA-Z0-9_]*)', Name.Variable), + # Event handlers + (r'(after|before)(?=[(])', Keyword), + # Message forwarding handler + (r'forward(?=[(])', Keyword), + # Execution-context methods + (r'(parameter|this|se(lf|nder))(?=[(])', Keyword), + # Reflection + (r'(current_predicate|predicate_property)(?=[(])', Keyword), + # DCGs and term expansion + (r'(expand_(goal|term)|(goal|term)_expansion|phrase)(?=[(])', Keyword), + # Entity + (r'(abolish|c(reate|urrent))_(object|protocol|category)(?=[(])', Keyword), + (r'(object|protocol|category)_property(?=[(])', Keyword), + # Entity relations + (r'co(mplements_object|nforms_to_protocol)(?=[(])', Keyword), + (r'extends_(object|protocol|category)(?=[(])', Keyword), + (r'imp(lements_protocol|orts_category)(?=[(])', Keyword), + (r'(instantiat|specializ)es_class(?=[(])', Keyword), + # Events + (r'(current_event|(abolish|define)_events)(?=[(])', Keyword), + # Flags + (r'(current|set)_logtalk_flag(?=[(])', Keyword), + # Compiling, loading, and library paths + (r'logtalk_(compile|l(ibrary_path|oad|oad_context)|make)(?=[(])', Keyword), + (r'\blogtalk_make\b', Keyword), + # Database + (r'(clause|retract(all)?)(?=[(])', Keyword), + (r'a(bolish|ssert(a|z))(?=[(])', Keyword), + # Control constructs + (r'(ca(ll|tch)|throw)(?=[(])', Keyword), + (r'(fa(il|lse)|true)\b', Keyword), + # All solutions + (r'((bag|set)of|f(ind|or)all)(?=[(])', Keyword), + # Multi-threading meta-predicates + 
(r'threaded(_(call|once|ignore|exit|peek|wait|notify))?(?=[(])', Keyword), + # Term unification + (r'(subsumes_term|unify_with_occurs_check)(?=[(])', Keyword), + # Term creation and decomposition + (r'(functor|arg|copy_term|numbervars|term_variables)(?=[(])', Keyword), + # Evaluable functors + (r'(rem|m(ax|in|od)|abs|sign)(?=[(])', Keyword), + (r'float(_(integer|fractional)_part)?(?=[(])', Keyword), + (r'(floor|truncate|round|ceiling)(?=[(])', Keyword), + # Other arithmetic functors + (r'(cos|a(cos|sin|tan)|exp|log|s(in|qrt))(?=[(])', Keyword), + # Term testing + (r'(var|atom(ic)?|integer|float|c(allable|ompound)|n(onvar|umber)|ground|acyclic_term)(?=[(])', Keyword), + # Term comparison + (r'compare(?=[(])', Keyword), + # Stream selection and control + (r'(curren|se)t_(in|out)put(?=[(])', Keyword), + (r'(open|close)(?=[(])', Keyword), + (r'flush_output(?=[(])', Keyword), + (r'(at_end_of_stream|flush_output)\b', Keyword), + (r'(stream_property|at_end_of_stream|set_stream_position)(?=[(])', Keyword), + # Character and byte input/output + (r'(nl|(get|peek|put)_(byte|c(har|ode)))(?=[(])', Keyword), + (r'\bnl\b', Keyword), + # Term input/output + (r'read(_term)?(?=[(])', Keyword), + (r'write(q|_(canonical|term))?(?=[(])', Keyword), + (r'(current_)?op(?=[(])', Keyword), + (r'(current_)?char_conversion(?=[(])', Keyword), + # Atomic term processing + (r'atom_(length|c(hars|o(ncat|des)))(?=[(])', Keyword), + (r'(char_code|sub_atom)(?=[(])', Keyword), + (r'number_c(har|ode)s(?=[(])', Keyword), + # Implementation defined hook functions + (r'(se|curren)t_prolog_flag(?=[(])', Keyword), + (r'\bhalt\b', Keyword), + (r'halt(?=[(])', Keyword), + # Message sending operators + (r'(::|:|\^\^)', Operator), + # External call + (r'[{}]', Keyword), + # Logic and control + (r'(ignore|once)(?=[(])', Keyword), + (r'\brepeat\b', Keyword), + # Sorting + (r'(key)?sort(?=[(])', Keyword), + # Bitwise functors + (r'(>>|<<|/\\|\\\\|\\)', Operator), + # Predicate aliases + (r'\bas\b', Operator), + # Arithmetic evaluation + (r'\bis\b', Keyword), + # Arithmetic comparison + (r'(=:=|=\\=|<|=<|>=|>)', Operator), + # Term creation and decomposition + (r'=\.\.', Operator), + # Term unification + (r'(=|\\=)', Operator), + # Term comparison + (r'(==|\\==|@=<|@<|@>=|@>)', Operator), + # Evaluable functors + (r'(//|[-+*/])', Operator), + (r'\b(e|pi|mod|rem)\b', Operator), + # Other arithmetic functors + (r'\b\*\*\b', Operator), + # DCG rules + (r'-->', Operator), + # Control constructs + (r'([!;]|->)', Operator), + # Logic and control + (r'\\+', Operator), + # Mode operators + (r'[?@]', Operator), + # Existential quantifier + (r'\^', Operator), + # Strings + (r'"(\\\\|\\"|[^"])*"', String), + # Punctuation + (r'[()\[\],.|]', Text), + # Atoms + (r"[a-z][a-zA-Z0-9_]*", Text), + (r"[']", String, 'quoted_atom'), + ], + + 'quoted_atom': [ + (r"['][']", String), + (r"[']", String, '#pop'), + (r'\\([\\abfnrtv"\']|(x[a-fA-F0-9]+|[0-7]+)\\)', String.Escape), + (r"[^\\'\n]+", String), + (r'\\', String), + ], + + 'directive': [ + # Conditional compilation directives + (r'(el)?if(?=[(])', Keyword, 'root'), + (r'(e(lse|ndif))[.]', Keyword, 'root'), + # Entity directives + (r'(category|object|protocol)(?=[(])', Keyword, 'entityrelations'), + (r'(end_(category|object|protocol))[.]',Keyword, 'root'), + # Predicate scope directives + (r'(public|protected|private)(?=[(])', Keyword, 'root'), + # Other directives + (r'e(n(coding|sure_loaded)|xport)(?=[(])', Keyword, 'root'), + (r'in(clude|itialization|fo)(?=[(])', Keyword, 'root'), +
(r'(built_in|dynamic|synchronized|threaded)[.]', Keyword, 'root'), + (r'(alias|d(ynamic|iscontiguous)|m(eta_(non_terminal|predicate)|ode|ultifile)|s(et_(logtalk|prolog)_flag|ynchronized))(?=[(])', Keyword, 'root'), + (r'op(?=[(])', Keyword, 'root'), + (r'(c(alls|oinductive)|module|reexport|use(s|_module))(?=[(])', Keyword, 'root'), + (r'[a-z][a-zA-Z0-9_]*(?=[(])', Text, 'root'), + (r'[a-z][a-zA-Z0-9_]*[.]', Text, 'root'), + ], + + 'entityrelations': [ + (r'(complements|extends|i(nstantiates|mp(lements|orts))|specializes)(?=[(])', Keyword), + # Numbers + (r"0'.", Number), + (r'0b[01]+', Number.Bin), + (r'0o[0-7]+', Number.Oct), + (r'0x[0-9a-fA-F]+', Number.Hex), + (r'\d+\.?\d*((e|E)(\+|-)?\d+)?', Number), + # Variables + (r'([A-Z_][a-zA-Z0-9_]*)', Name.Variable), + # Atoms + (r"[a-z][a-zA-Z0-9_]*", Text), + (r"[']", String, 'quoted_atom'), + # Strings + (r'"(\\\\|\\"|[^"])*"', String), + # End of entity-opening directive + (r'([)]\.)', Text, 'root'), + # Scope operator + (r'(::)', Operator), + # Punctuation + (r'[()\[\],.|]', Text), + # Comments + (r'%.*?\n', Comment), + (r'/\*(.|\n)*?\*/',Comment), + # Whitespace + (r'\n', Text), + (r'\s+', Text), + ] + } + + def analyse_text(text): + if ':- object(' in text: + return 1.0 + elif ':- protocol(' in text: + return 1.0 + elif ':- category(' in text: + return 1.0 + elif re.search('^:-\s[a-z]', text, re.M): + return 0.9 + else: + return 0.0 diff --git a/pygments/lexers/robotframework.py b/pygments/lexers/robotframework.py new file mode 100644 index 00000000..7b6f5564 --- /dev/null +++ b/pygments/lexers/robotframework.py @@ -0,0 +1,560 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.robotframework + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + Lexer for Robot Framework. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +# Copyright 2012 Nokia Siemens Networks Oyj +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import re + +from pygments.lexer import Lexer +from pygments.token import Token +from pygments.util import text_type + +__all__ = ['RobotFrameworkLexer'] + + +HEADING = Token.Generic.Heading +SETTING = Token.Keyword.Namespace +IMPORT = Token.Name.Namespace +TC_KW_NAME = Token.Generic.Subheading +KEYWORD = Token.Name.Function +ARGUMENT = Token.String +VARIABLE = Token.Name.Variable +COMMENT = Token.Comment +SEPARATOR = Token.Punctuation +SYNTAX = Token.Punctuation +GHERKIN = Token.Generic.Emph +ERROR = Token.Error + + +def normalize(string, remove=''): + string = string.lower() + for char in remove + ' ': + if char in string: + string = string.replace(char, '') + return string + + +class RobotFrameworkLexer(Lexer): + """ + For `Robot Framework `_ test data. + + Supports both space and pipe separated plain text formats. + + ..
versionadded:: 1.6 + """ + name = 'RobotFramework' + aliases = ['robotframework'] + filenames = ['*.txt', '*.robot'] + mimetypes = ['text/x-robotframework'] + + def __init__(self, **options): + options['tabsize'] = 2 + options['encoding'] = 'UTF-8' + Lexer.__init__(self, **options) + + def get_tokens_unprocessed(self, text): + row_tokenizer = RowTokenizer() + var_tokenizer = VariableTokenizer() + index = 0 + for row in text.splitlines(): + for value, token in row_tokenizer.tokenize(row): + for value, token in var_tokenizer.tokenize(value, token): + if value: + yield index, token, text_type(value) + index += len(value) + + +class VariableTokenizer(object): + + def tokenize(self, string, token): + var = VariableSplitter(string, identifiers='$@%') + if var.start < 0 or token in (COMMENT, ERROR): + yield string, token + return + for value, token in self._tokenize(var, string, token): + if value: + yield value, token + + def _tokenize(self, var, string, orig_token): + before = string[:var.start] + yield before, orig_token + yield var.identifier + '{', SYNTAX + for value, token in self.tokenize(var.base, VARIABLE): + yield value, token + yield '}', SYNTAX + if var.index: + yield '[', SYNTAX + for value, token in self.tokenize(var.index, VARIABLE): + yield value, token + yield ']', SYNTAX + for value, token in self.tokenize(string[var.end:], orig_token): + yield value, token + + +class RowTokenizer(object): + + def __init__(self): + self._table = UnknownTable() + self._splitter = RowSplitter() + testcases = TestCaseTable() + settings = SettingTable(testcases.set_default_template) + variables = VariableTable() + keywords = KeywordTable() + self._tables = {'settings': settings, 'setting': settings, + 'metadata': settings, + 'variables': variables, 'variable': variables, + 'testcases': testcases, 'testcase': testcases, + 'keywords': keywords, 'keyword': keywords, + 'userkeywords': keywords, 'userkeyword': keywords} + + def tokenize(self, row): + commented = False + heading = False + for index, value in enumerate(self._splitter.split(row)): + # First value, and every second after that, is a separator. 
+ index, separator = divmod(index-1, 2) + if value.startswith('#'): + commented = True + elif index == 0 and value.startswith('*'): + self._table = self._start_table(value) + heading = True + for value, token in self._tokenize(value, index, commented, + separator, heading): + yield value, token + self._table.end_row() + + def _start_table(self, header): + name = normalize(header, remove='*') + return self._tables.get(name, UnknownTable()) + + def _tokenize(self, value, index, commented, separator, heading): + if commented: + yield value, COMMENT + elif separator: + yield value, SEPARATOR + elif heading: + yield value, HEADING + else: + for value, token in self._table.tokenize(value, index): + yield value, token + + +class RowSplitter(object): + _space_splitter = re.compile('( {2,})') + _pipe_splitter = re.compile('((?:^| +)\|(?: +|$))') + + def split(self, row): + splitter = (row.startswith('| ') and self._split_from_pipes + or self._split_from_spaces) + for value in splitter(row): + yield value + yield '\n' + + def _split_from_spaces(self, row): + yield '' # Start with (pseudo)separator similarly as with pipes + for value in self._space_splitter.split(row): + yield value + + def _split_from_pipes(self, row): + _, separator, rest = self._pipe_splitter.split(row, 1) + yield separator + while self._pipe_splitter.search(rest): + cell, separator, rest = self._pipe_splitter.split(rest, 1) + yield cell + yield separator + yield rest + + +class Tokenizer(object): + _tokens = None + + def __init__(self): + self._index = 0 + + def tokenize(self, value): + values_and_tokens = self._tokenize(value, self._index) + self._index += 1 + if isinstance(values_and_tokens, type(Token)): + values_and_tokens = [(value, values_and_tokens)] + return values_and_tokens + + def _tokenize(self, value, index): + index = min(index, len(self._tokens) - 1) + return self._tokens[index] + + def _is_assign(self, value): + if value.endswith('='): + value = value[:-1].strip() + var = VariableSplitter(value, identifiers='$@') + return var.start == 0 and var.end == len(value) + + +class Comment(Tokenizer): + _tokens = (COMMENT,) + + +class Setting(Tokenizer): + _tokens = (SETTING, ARGUMENT) + _keyword_settings = ('suitesetup', 'suiteprecondition', 'suiteteardown', + 'suitepostcondition', 'testsetup', 'testprecondition', + 'testteardown', 'testpostcondition', 'testtemplate') + _import_settings = ('library', 'resource', 'variables') + _other_settings = ('documentation', 'metadata', 'forcetags', 'defaulttags', + 'testtimeout') + _custom_tokenizer = None + + def __init__(self, template_setter=None): + Tokenizer.__init__(self) + self._template_setter = template_setter + + def _tokenize(self, value, index): + if index == 1 and self._template_setter: + self._template_setter(value) + if index == 0: + normalized = normalize(value) + if normalized in self._keyword_settings: + self._custom_tokenizer = KeywordCall(support_assign=False) + elif normalized in self._import_settings: + self._custom_tokenizer = ImportSetting() + elif normalized not in self._other_settings: + return ERROR + elif self._custom_tokenizer: + return self._custom_tokenizer.tokenize(value) + return Tokenizer._tokenize(self, value, index) + + +class ImportSetting(Tokenizer): + _tokens = (IMPORT, ARGUMENT) + + +class TestCaseSetting(Setting): + _keyword_settings = ('setup', 'precondition', 'teardown', 'postcondition', + 'template') + _import_settings = () + _other_settings = ('documentation', 'tags', 'timeout') + + def _tokenize(self, value, index): + if index == 0: + 
type = Setting._tokenize(self, value[1:-1], index) + return [('[', SYNTAX), (value[1:-1], type), (']', SYNTAX)] + return Setting._tokenize(self, value, index) + + +class KeywordSetting(TestCaseSetting): + _keyword_settings = ('teardown',) + _other_settings = ('documentation', 'arguments', 'return', 'timeout') + + +class Variable(Tokenizer): + _tokens = (SYNTAX, ARGUMENT) + + def _tokenize(self, value, index): + if index == 0 and not self._is_assign(value): + return ERROR + return Tokenizer._tokenize(self, value, index) + + +class KeywordCall(Tokenizer): + _tokens = (KEYWORD, ARGUMENT) + + def __init__(self, support_assign=True): + Tokenizer.__init__(self) + self._keyword_found = not support_assign + self._assigns = 0 + + def _tokenize(self, value, index): + if not self._keyword_found and self._is_assign(value): + self._assigns += 1 + return SYNTAX # VariableTokenizer tokenizes this later. + if self._keyword_found: + return Tokenizer._tokenize(self, value, index - self._assigns) + self._keyword_found = True + return GherkinTokenizer().tokenize(value, KEYWORD) + + +class GherkinTokenizer(object): + _gherkin_prefix = re.compile('^(Given|When|Then|And) ', re.IGNORECASE) + + def tokenize(self, value, token): + match = self._gherkin_prefix.match(value) + if not match: + return [(value, token)] + end = match.end() + return [(value[:end], GHERKIN), (value[end:], token)] + + +class TemplatedKeywordCall(Tokenizer): + _tokens = (ARGUMENT,) + + +class ForLoop(Tokenizer): + + def __init__(self): + Tokenizer.__init__(self) + self._in_arguments = False + + def _tokenize(self, value, index): + token = self._in_arguments and ARGUMENT or SYNTAX + if value.upper() in ('IN', 'IN RANGE'): + self._in_arguments = True + return token + + +class _Table(object): + _tokenizer_class = None + + def __init__(self, prev_tokenizer=None): + self._tokenizer = self._tokenizer_class() + self._prev_tokenizer = prev_tokenizer + self._prev_values_on_row = [] + + def tokenize(self, value, index): + if self._continues(value, index): + self._tokenizer = self._prev_tokenizer + yield value, SYNTAX + else: + for value_and_token in self._tokenize(value, index): + yield value_and_token + self._prev_values_on_row.append(value) + + def _continues(self, value, index): + return value == '...' 
and all(self._is_empty(t) + for t in self._prev_values_on_row) + + def _is_empty(self, value): + return value in ('', '\\') + + def _tokenize(self, value, index): + return self._tokenizer.tokenize(value) + + def end_row(self): + self.__init__(prev_tokenizer=self._tokenizer) + + +class UnknownTable(_Table): + _tokenizer_class = Comment + + def _continues(self, value, index): + return False + + +class VariableTable(_Table): + _tokenizer_class = Variable + + +class SettingTable(_Table): + _tokenizer_class = Setting + + def __init__(self, template_setter, prev_tokenizer=None): + _Table.__init__(self, prev_tokenizer) + self._template_setter = template_setter + + def _tokenize(self, value, index): + if index == 0 and normalize(value) == 'testtemplate': + self._tokenizer = Setting(self._template_setter) + return _Table._tokenize(self, value, index) + + def end_row(self): + self.__init__(self._template_setter, prev_tokenizer=self._tokenizer) + + +class TestCaseTable(_Table): + _setting_class = TestCaseSetting + _test_template = None + _default_template = None + + @property + def _tokenizer_class(self): + if self._test_template or (self._default_template and + self._test_template is not False): + return TemplatedKeywordCall + return KeywordCall + + def _continues(self, value, index): + return index > 0 and _Table._continues(self, value, index) + + def _tokenize(self, value, index): + if index == 0: + if value: + self._test_template = None + return GherkinTokenizer().tokenize(value, TC_KW_NAME) + if index == 1 and self._is_setting(value): + if self._is_template(value): + self._test_template = False + self._tokenizer = self._setting_class(self.set_test_template) + else: + self._tokenizer = self._setting_class() + if index == 1 and self._is_for_loop(value): + self._tokenizer = ForLoop() + if index == 1 and self._is_empty(value): + return [(value, SYNTAX)] + return _Table._tokenize(self, value, index) + + def _is_setting(self, value): + return value.startswith('[') and value.endswith(']') + + def _is_template(self, value): + return normalize(value) == '[template]' + + def _is_for_loop(self, value): + return value.startswith(':') and normalize(value, remove=':') == 'for' + + def set_test_template(self, template): + self._test_template = self._is_template_set(template) + + def set_default_template(self, template): + self._default_template = self._is_template_set(template) + + def _is_template_set(self, template): + return normalize(template) not in ('', '\\', 'none', '${empty}') + + +class KeywordTable(TestCaseTable): + _tokenizer_class = KeywordCall + _setting_class = KeywordSetting + + def _is_template(self, value): + return False + + +# Following code copied directly from Robot Framework 2.7.5. 
+ +class VariableSplitter: + + def __init__(self, string, identifiers): + self.identifier = None + self.base = None + self.index = None + self.start = -1 + self.end = -1 + self._identifiers = identifiers + self._may_have_internal_variables = False + try: + self._split(string) + except ValueError: + pass + else: + self._finalize() + + def get_replaced_base(self, variables): + if self._may_have_internal_variables: + return variables.replace_string(self.base) + return self.base + + def _finalize(self): + self.identifier = self._variable_chars[0] + self.base = ''.join(self._variable_chars[2:-1]) + self.end = self.start + len(self._variable_chars) + if self._has_list_variable_index(): + self.index = ''.join(self._list_variable_index_chars[1:-1]) + self.end += len(self._list_variable_index_chars) + + def _has_list_variable_index(self): + return self._list_variable_index_chars\ + and self._list_variable_index_chars[-1] == ']' + + def _split(self, string): + start_index, max_index = self._find_variable(string) + self.start = start_index + self._open_curly = 1 + self._state = self._variable_state + self._variable_chars = [string[start_index], '{'] + self._list_variable_index_chars = [] + self._string = string + start_index += 2 + for index, char in enumerate(string[start_index:]): + index += start_index # Giving start to enumerate only in Py 2.6+ + try: + self._state(char, index) + except StopIteration: + return + if index == max_index and not self._scanning_list_variable_index(): + return + + def _scanning_list_variable_index(self): + return self._state in [self._waiting_list_variable_index_state, + self._list_variable_index_state] + + def _find_variable(self, string): + max_end_index = string.rfind('}') + if max_end_index == -1: + raise ValueError('No variable end found') + if self._is_escaped(string, max_end_index): + return self._find_variable(string[:max_end_index]) + start_index = self._find_start_index(string, 1, max_end_index) + if start_index == -1: + raise ValueError('No variable start found') + return start_index, max_end_index + + def _find_start_index(self, string, start, end): + index = string.find('{', start, end) - 1 + if index < 0: + return -1 + if self._start_index_is_ok(string, index): + return index + return self._find_start_index(string, index+2, end) + + def _start_index_is_ok(self, string, index): + return string[index] in self._identifiers\ + and not self._is_escaped(string, index) + + def _is_escaped(self, string, index): + escaped = False + while index > 0 and string[index-1] == '\\': + index -= 1 + escaped = not escaped + return escaped + + def _variable_state(self, char, index): + self._variable_chars.append(char) + if char == '}' and not self._is_escaped(self._string, index): + self._open_curly -= 1 + if self._open_curly == 0: + if not self._is_list_variable(): + raise StopIteration + self._state = self._waiting_list_variable_index_state + elif char in self._identifiers: + self._state = self._internal_variable_start_state + + def _is_list_variable(self): + return self._variable_chars[0] == '@' + + def _internal_variable_start_state(self, char, index): + self._state = self._variable_state + if char == '{': + self._variable_chars.append(char) + self._open_curly += 1 + self._may_have_internal_variables = True + else: + self._variable_state(char, index) + + def _waiting_list_variable_index_state(self, char, index): + if char != '[': + raise StopIteration + self._list_variable_index_chars.append(char) + self._state = self._list_variable_index_state + + def 
_list_variable_index_state(self, char, index): + self._list_variable_index_chars.append(char) + if char == ']': + raise StopIteration diff --git a/pygments/lexers/scripting.py b/pygments/lexers/scripting.py index 2d8a58a4..0646891c 100644 --- a/pygments/lexers/scripting.py +++ b/pygments/lexers/scripting.py @@ -14,10 +14,11 @@ import re from pygments.lexer import RegexLexer, include, bygroups, default, combined, \ words from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ - Number, Punctuation + Number, Punctuation, Error, Whitespace from pygments.util import get_bool_opt, get_list_opt, iteritems -__all__ = ['LuaLexer', 'MoonScriptLexer', 'ChaiscriptLexer'] +__all__ = ['LuaLexer', 'MoonScriptLexer', 'ChaiscriptLexer', 'LSLLexer', + 'AppleScriptLexer'] class LuaLexer(RegexLexer): @@ -274,3 +275,410 @@ class ChaiscriptLexer(RegexLexer): (r'"', String.Double, '#pop'), ], } + + +class LSLLexer(RegexLexer): + """ + For Second Life's Linden Scripting Language source code. + + .. versionadded:: 2.0 + """ + + name = 'LSL' + aliases = ['lsl'] + filenames = ['*.lsl'] + mimetypes = ['text/x-lsl'] + + flags = re.MULTILINE + + lsl_keywords = r'\b(?:do|else|for|if|jump|return|while)\b' + lsl_types = r'\b(?:float|integer|key|list|quaternion|rotation|string|vector)\b' + lsl_states = r'\b(?:(?:state)\s+\w+|default)\b' + lsl_events = r'\b(?:state_(?:entry|exit)|touch(?:_(?:start|end))?|(?:land_)?collision(?:_(?:start|end))?|timer|listen|(?:no_)?sensor|control|(?:not_)?at_(?:rot_)?target|money|email|run_time_permissions|changed|attach|dataserver|moving_(?:start|end)|link_message|(?:on|object)_rez|remote_data|http_re(?:sponse|quest)|path_update|transaction_result)\b' + lsl_functions_builtin = r'\b(?:ll(?:ReturnObjectsBy(?:ID|Owner)|Json(?:2List|[GS]etValue|ValueType)|Sin|Cos|Tan|Atan2|Sqrt|Pow|Abs|Fabs|Frand|Floor|Ceil|Round|Vec(?:Mag|Norm|Dist)|Rot(?:Between|2(?:Euler|Fwd|Left|Up))|(?:Euler|Axes)2Rot|Whisper|(?:Region|Owner)?Say|Shout|Listen(?:Control|Remove)?|Sensor(?:Repeat|Remove)?|Detected(?:Name|Key|Owner|Type|Pos|Vel|Grab|Rot|Group|LinkNumber)|Die|Ground|Wind|(?:[GS]et)(?:AnimationOverride|MemoryLimit|PrimMediaParams|ParcelMusicURL|Object(?:Desc|Name)|PhysicsMaterial|Status|Scale|Color|Alpha|Texture|Pos|Rot|Force|Torque)|ResetAnimationOverride|(?:Scale|Offset|Rotate)Texture|(?:Rot)?Target(?:Remove)?|(?:Stop)?MoveToTarget|Apply(?:Rotational)?Impulse|Set(?:KeyframedMotion|ContentType|RegionPos|(?:Angular)?Velocity|Buoyancy|HoverHeight|ForceAndTorque|TimerEvent|ScriptState|Damage|TextureAnim|Sound(?:Queueing|Radius)|Vehicle(?:Type|(?:Float|Vector|Rotation)Param)|(?:Touch|Sit)?Text|Camera(?:Eye|At)Offset|PrimitiveParams|ClickAction|Link(?:Alpha|Color|PrimitiveParams(?:Fast)?|Texture(?:Anim)?|Camera|Media)|RemoteScriptAccessPin|PayPrice|LocalRot)|ScaleByFactor|Get(?:(?:Max|Min)ScaleFactor|ClosestNavPoint|StaticPath|SimStats|Env|PrimitiveParams|Link(?:PrimitiveParams|Number(?:OfSides)?|Key|Name|Media)|HTTPHeader|FreeURLs|Object(?:Details|PermMask|PrimCount)|Parcel(?:MaxPrims|Details|Prim(?:Count|Owners))|Attached|(?:SPMax|Free|Used)Memory|Region(?:Name|TimeDilation|FPS|Corner|AgentCount)|Root(?:Position|Rotation)|UnixTime|(?:Parcel|Region)Flags|(?:Wall|GMT)clock|SimulatorHostname|BoundingBox|GeometricCenter|Creator|NumberOf(?:Prims|NotecardLines|Sides)|Animation(?:List)?|(?:Camera|Local)(?:Pos|Rot)|Vel|Accel|Omega|Time(?:stamp|OfDay)|(?:Object|CenterOf)?Mass|MassMKS|Energy|Owner|(?:Owner)?Key|SunDirection|Texture(?:Offset|Scale|Rot)|Inventory(?:Number|Name|Key|Type|Creator|PermMask)|Pe
rmissions(?:Key)?|StartParameter|List(?:Length|EntryType)|Date|Agent(?:Size|Info|Language|List)|LandOwnerAt|NotecardLine|Script(?:Name|State))|(?:Get|Reset|GetAndReset)Time|PlaySound(?:Slave)?|LoopSound(?:Master|Slave)?|(?:Trigger|Stop|Preload)Sound|(?:(?:Get|Delete)Sub|Insert)String|To(?:Upper|Lower)|Give(?:InventoryList|Money)|RezObject|(?:Stop)?LookAt|Sleep|CollisionFilter|(?:Take|Release)Controls|DetachFromAvatar|AttachToAvatar(?:Temp)?|InstantMessage|(?:GetNext)?Email|StopHover|MinEventDelay|RotLookAt|String(?:Length|Trim)|(?:Start|Stop)Animation|TargetOmega|RequestPermissions|(?:Create|Break)Link|BreakAllLinks|(?:Give|Remove)Inventory|Water|PassTouches|Request(?:Agent|Inventory)Data|TeleportAgent(?:Home|GlobalCoords)?|ModifyLand|CollisionSound|ResetScript|MessageLinked|PushObject|PassCollisions|AxisAngle2Rot|Rot2(?:Axis|Angle)|A(?:cos|sin)|AngleBetween|AllowInventoryDrop|SubStringIndex|List2(?:CSV|Integer|Json|Float|String|Key|Vector|Rot|List(?:Strided)?)|DeleteSubList|List(?:Statistics|Sort|Randomize|(?:Insert|Find|Replace)List)|EdgeOfWorld|AdjustSoundVolume|Key2Name|TriggerSoundLimited|EjectFromLand|(?:CSV|ParseString)2List|OverMyLand|SameGroup|UnSit|Ground(?:Slope|Normal|Contour)|GroundRepel|(?:Set|Remove)VehicleFlags|(?:AvatarOn)?(?:Link)?SitTarget|Script(?:Danger|Profiler)|Dialog|VolumeDetect|ResetOtherScript|RemoteLoadScriptPin|(?:Open|Close)RemoteDataChannel|SendRemoteData|RemoteDataReply|(?:Integer|String)ToBase64|XorBase64|Log(?:10)?|Base64To(?:String|Integer)|ParseStringKeepNulls|RezAtRoot|RequestSimulatorData|ForceMouselook|(?:Load|Release|(?:E|Une)scape)URL|ParcelMedia(?:CommandList|Query)|ModPow|MapDestination|(?:RemoveFrom|AddTo|Reset)Land(?:Pass|Ban)List|(?:Set|Clear)CameraParams|HTTP(?:Request|Response)|TextBox|DetectedTouch(?:UV|Face|Pos|(?:N|Bin)ormal|ST)|(?:MD5|SHA1|DumpList2)String|Request(?:Secure)?URL|Clear(?:Prim|Link)Media|(?:Link)?ParticleSystem|(?:Get|Request)(?:Username|DisplayName)|RegionSayTo|CastRay|GenerateKey|TransferLindenDollars|ManageEstateAccess|(?:Create|Delete)Character|ExecCharacterCmd|Evade|FleeFrom|NavigateTo|PatrolPoints|Pursue|UpdateCharacter|WanderWithin))\b' + lsl_constants_float = r'\b(?:DEG_TO_RAD|PI(?:_BY_TWO)?|RAD_TO_DEG|SQRT2|TWO_PI)\b' + lsl_constants_integer = 
r'\b(?:JSON_APPEND|STATUS_(?:PHYSICS|ROTATE_[XYZ]|PHANTOM|SANDBOX|BLOCK_GRAB(?:_OBJECT)?|(?:DIE|RETURN)_AT_EDGE|CAST_SHADOWS|OK|MALFORMED_PARAMS|TYPE_MISMATCH|BOUNDS_ERROR|NOT_(?:FOUND|SUPPORTED)|INTERNAL_ERROR|WHITELIST_FAILED)|AGENT(?:_(?:BY_(?:LEGACY_|USER)NAME|FLYING|ATTACHMENTS|SCRIPTED|MOUSELOOK|SITTING|ON_OBJECT|AWAY|WALKING|IN_AIR|TYPING|CROUCHING|BUSY|ALWAYS_RUN|AUTOPILOT|LIST_(?:PARCEL(?:_OWNER)?|REGION)))?|CAMERA_(?:PITCH|DISTANCE|BEHINDNESS_(?:ANGLE|LAG)|(?:FOCUS|POSITION)(?:_(?:THRESHOLD|LOCKED|LAG))?|FOCUS_OFFSET|ACTIVE)|ANIM_ON|LOOP|REVERSE|PING_PONG|SMOOTH|ROTATE|SCALE|ALL_SIDES|LINK_(?:ROOT|SET|ALL_(?:OTHERS|CHILDREN)|THIS)|ACTIVE|PASSIVE|SCRIPTED|CONTROL_(?:FWD|BACK|(?:ROT_)?(?:LEFT|RIGHT)|UP|DOWN|(?:ML_)?LBUTTON)|PERMISSION_(?:RETURN_OBJECTS|DEBIT|OVERRIDE_ANIMATIONS|SILENT_ESTATE_MANAGEMENT|TAKE_CONTROLS|TRIGGER_ANIMATION|ATTACH|CHANGE_LINKS|(?:CONTROL|TRACK)_CAMERA|TELEPORT)|INVENTORY_(?:TEXTURE|SOUND|OBJECT|SCRIPT|LANDMARK|CLOTHING|NOTECARD|BODYPART|ANIMATION|GESTURE|ALL|NONE)|CHANGED_(?:INVENTORY|COLOR|SHAPE|SCALE|TEXTURE|LINK|ALLOWED_DROP|OWNER|REGION(?:_START)?|TELEPORT|MEDIA)|OBJECT_(?:(?:PHYSICS|SERVER|STREAMING)_COST|UNKNOWN_DETAIL|CHARACTER_TIME|PHANTOM|PHYSICS|TEMP_ON_REZ|NAME|DESC|POS|PRIM_EQUIVALENCE|RETURN_(?:PARCEL(?:_OWNER)?|REGION)|ROO?T|VELOCITY|OWNER|GROUP|CREATOR|ATTACHED_POINT|RENDER_WEIGHT|PATHFINDING_TYPE|(?:RUNNING|TOTAL)_SCRIPT_COUNT|SCRIPT_(?:MEMORY|TIME))|TYPE_(?:INTEGER|FLOAT|STRING|KEY|VECTOR|ROTATION|INVALID)|(?:DEBUG|PUBLIC)_CHANNEL|ATTACH_(?:AVATAR_CENTER|CHEST|HEAD|BACK|PELVIS|MOUTH|CHIN|NECK|NOSE|BELLY|[LR](?:SHOULDER|HAND|FOOT|EAR|EYE|[UL](?:ARM|LEG)|HIP)|(?:LEFT|RIGHT)_PEC|HUD_(?:CENTER_[12]|TOP_(?:RIGHT|CENTER|LEFT)|BOTTOM(?:_(?:RIGHT|LEFT))?))|LAND_(?:LEVEL|RAISE|LOWER|SMOOTH|NOISE|REVERT)|DATA_(?:ONLINE|NAME|BORN|SIM_(?:POS|STATUS|RATING)|PAYINFO)|PAYMENT_INFO_(?:ON_FILE|USED)|REMOTE_DATA_(?:CHANNEL|REQUEST|REPLY)|PSYS_(?:PART_(?:BF_(?:ZERO|ONE(?:_MINUS_(?:DEST_COLOR|SOURCE_(ALPHA|COLOR)))?|DEST_COLOR|SOURCE_(ALPHA|COLOR))|BLEND_FUNC_(DEST|SOURCE)|FLAGS|(?:START|END)_(?:COLOR|ALPHA|SCALE|GLOW)|MAX_AGE|(?:RIBBON|WIND|INTERP_(?:COLOR|SCALE)|BOUNCE|FOLLOW_(?:SRC|VELOCITY)|TARGET_(?:POS|LINEAR)|EMISSIVE)_MASK)|SRC_(?:MAX_AGE|PATTERN|ANGLE_(?:BEGIN|END)|BURST_(?:RATE|PART_COUNT|RADIUS|SPEED_(?:MIN|MAX))|ACCEL|TEXTURE|TARGET_KEY|OMEGA|PATTERN_(?:DROP|EXPLODE|ANGLE(?:_CONE(?:_EMPTY)?)?)))|VEHICLE_(?:REFERENCE_FRAME|TYPE_(?:NONE|SLED|CAR|BOAT|AIRPLANE|BALLOON)|(?:LINEAR|ANGULAR)_(?:FRICTION_TIMESCALE|MOTOR_DIRECTION)|LINEAR_MOTOR_OFFSET|HOVER_(?:HEIGHT|EFFICIENCY|TIMESCALE)|BUOYANCY|(?:LINEAR|ANGULAR)_(?:DEFLECTION_(?:EFFICIENCY|TIMESCALE)|MOTOR_(?:DECAY_)?TIMESCALE)|VERTICAL_ATTRACTION_(?:EFFICIENCY|TIMESCALE)|BANKING_(?:EFFICIENCY|MIX|TIMESCALE)|FLAG_(?:NO_DEFLECTION_UP|LIMIT_(?:ROLL_ONLY|MOTOR_UP)|HOVER_(?:(?:WATER|TERRAIN|UP)_ONLY|GLOBAL_HEIGHT)|MOUSELOOK_(?:STEER|BANK)|CAMERA_DECOUPLED))|PRIM_(?:TYPE(?:_(?:BOX|CYLINDER|PRISM|SPHERE|TORUS|TUBE|RING|SCULPT))?|HOLE_(?:DEFAULT|CIRCLE|SQUARE|TRIANGLE)|MATERIAL(?:_(?:STONE|METAL|GLASS|WOOD|FLESH|PLASTIC|RUBBER))?|SHINY_(?:NONE|LOW|MEDIUM|HIGH)|BUMP_(?:NONE|BRIGHT|DARK|WOOD|BARK|BRICKS|CHECKER|CONCRETE|TILE|STONE|DISKS|GRAVEL|BLOBS|SIDING|LARGETILE|STUCCO|SUCTION|WEAVE)|TEXGEN_(?:DEFAULT|PLANAR)|SCULPT_(?:TYPE_(?:SPHERE|TORUS|PLANE|CYLINDER|MASK)|FLAG_(?:MIRROR|INVERT))|PHYSICS(?:_(?:SHAPE_(?:CONVEX|NONE|PRIM|TYPE)))?|(?:POS|ROT)_LOCAL|SLICE|TEXT|FLEXIBLE|POINT_LIGHT|TEMP_ON_REZ|PHANTOM|POSITION|SIZE|ROTATION|TEXTURE|NAME|OMEGA|DESC|LINK_TARGET|COLOR|BUMP_SHINY|FULLBRIGHT|TEXGEN|GLOW|MEDIA_(?:
ALT_IMAGE_ENABLE|CONTROLS|(?:CURRENT|HOME)_URL|AUTO_(?:LOOP|PLAY|SCALE|ZOOM)|FIRST_CLICK_INTERACT|(?:WIDTH|HEIGHT)_PIXELS|WHITELIST(?:_ENABLE)?|PERMS_(?:INTERACT|CONTROL)|PARAM_MAX|CONTROLS_(?:STANDARD|MINI)|PERM_(?:NONE|OWNER|GROUP|ANYONE)|MAX_(?:URL_LENGTH|WHITELIST_(?:SIZE|COUNT)|(?:WIDTH|HEIGHT)_PIXELS)))|MASK_(?:BASE|OWNER|GROUP|EVERYONE|NEXT)|PERM_(?:TRANSFER|MODIFY|COPY|MOVE|ALL)|PARCEL_(?:MEDIA_COMMAND_(?:STOP|PAUSE|PLAY|LOOP|TEXTURE|URL|TIME|AGENT|UNLOAD|AUTO_ALIGN|TYPE|SIZE|DESC|LOOP_SET)|FLAG_(?:ALLOW_(?:FLY|(?:GROUP_)?SCRIPTS|LANDMARK|TERRAFORM|DAMAGE|CREATE_(?:GROUP_)?OBJECTS)|USE_(?:ACCESS_(?:GROUP|LIST)|BAN_LIST|LAND_PASS_LIST)|LOCAL_SOUND_ONLY|RESTRICT_PUSHOBJECT|ALLOW_(?:GROUP|ALL)_OBJECT_ENTRY)|COUNT_(?:TOTAL|OWNER|GROUP|OTHER|SELECTED|TEMP)|DETAILS_(?:NAME|DESC|OWNER|GROUP|AREA|ID|SEE_AVATARS))|LIST_STAT_(?:MAX|MIN|MEAN|MEDIAN|STD_DEV|SUM(?:_SQUARES)?|NUM_COUNT|GEOMETRIC_MEAN|RANGE)|PAY_(?:HIDE|DEFAULT)|REGION_FLAG_(?:ALLOW_DAMAGE|FIXED_SUN|BLOCK_TERRAFORM|SANDBOX|DISABLE_(?:COLLISIONS|PHYSICS)|BLOCK_FLY|ALLOW_DIRECT_TELEPORT|RESTRICT_PUSHOBJECT)|HTTP_(?:METHOD|MIMETYPE|BODY_(?:MAXLENGTH|TRUNCATED)|CUSTOM_HEADER|PRAGMA_NO_CACHE|VERBOSE_THROTTLE|VERIFY_CERT)|STRING_(?:TRIM(?:_(?:HEAD|TAIL))?)|CLICK_ACTION_(?:NONE|TOUCH|SIT|BUY|PAY|OPEN(?:_MEDIA)?|PLAY|ZOOM)|TOUCH_INVALID_FACE|PROFILE_(?:NONE|SCRIPT_MEMORY)|RC_(?:DATA_FLAGS|DETECT_PHANTOM|GET_(?:LINK_NUM|NORMAL|ROOT_KEY)|MAX_HITS|REJECT_(?:TYPES|AGENTS|(?:NON)?PHYSICAL|LAND))|RCERR_(?:CAST_TIME_EXCEEDED|SIM_PERF_LOW|UNKNOWN)|ESTATE_ACCESS_(?:ALLOWED_(?:AGENT|GROUP)_(?:ADD|REMOVE)|BANNED_AGENT_(?:ADD|REMOVE))|DENSITY|FRICTION|RESTITUTION|GRAVITY_MULTIPLIER|KFM_(?:COMMAND|CMD_(?:PLAY|STOP|PAUSE|SET_MODE)|MODE|FORWARD|LOOP|PING_PONG|REVERSE|DATA|ROTATION|TRANSLATION)|ERR_(?:GENERIC|PARCEL_PERMISSIONS|MALFORMED_PARAMS|RUNTIME_PERMISSIONS|THROTTLED)|CHARACTER_(?:CMD_(?:(?:SMOOTH_)?STOP|JUMP)|DESIRED_(?:TURN_)?SPEED|RADIUS|STAY_WITHIN_PARCEL|LENGTH|ORIENTATION|ACCOUNT_FOR_SKIPPED_FRAMES|AVOIDANCE_MODE|TYPE(?:_(?:[ABCD]|NONE))?|MAX_(?:DECEL|TURN_RADIUS|(?:ACCEL|SPEED)))|PURSUIT_(?:OFFSET|FUZZ_FACTOR|GOAL_TOLERANCE|INTERCEPT)|REQUIRE_LINE_OF_SIGHT|FORCE_DIRECT_PATH|VERTICAL|HORIZONTAL|AVOID_(?:CHARACTERS|DYNAMIC_OBSTACLES|NONE)|PU_(?:EVADE_(?:HIDDEN|SPOTTED)|FAILURE_(?:DYNAMIC_PATHFINDING_DISABLED|INVALID_(?:GOAL|START)|NO_(?:NAVMESH|VALID_DESTINATION)|OTHER|TARGET_GONE|(?:PARCEL_)?UNREACHABLE)|(?:GOAL|SLOWDOWN_DISTANCE)_REACHED)|TRAVERSAL_TYPE(?:_(?:FAST|NONE|SLOW))?|CONTENT_TYPE_(?:ATOM|FORM|HTML|JSON|LLSD|RSS|TEXT|XHTML|XML)|GCNP_(?:RADIUS|STATIC)|(?:PATROL|WANDER)_PAUSE_AT_WAYPOINTS|OPT_(?:AVATAR|CHARACTER|EXCLUSION_VOLUME|LEGACY_LINKSET|MATERIAL_VOLUME|OTHER|STATIC_OBSTACLE|WALKABLE)|SIM_STAT_PCT_CHARS_STEPPED)\b' + lsl_constants_integer_boolean = r'\b(?:FALSE|TRUE)\b' + lsl_constants_rotation = r'\b(?:ZERO_ROTATION)\b' + lsl_constants_string = r'\b(?:EOF|JSON_(?:ARRAY|DELETE|FALSE|INVALID|NULL|NUMBER|OBJECT|STRING|TRUE)|NULL_KEY|TEXTURE_(?:BLANK|DEFAULT|MEDIA|PLYWOOD|TRANSPARENT)|URL_REQUEST_(?:GRANTED|DENIED))\b' + lsl_constants_vector = r'\b(?:TOUCH_INVALID_(?:TEXCOORD|VECTOR)|ZERO_VECTOR)\b' + lsl_invalid_broken = r'\b(?:LAND_(?:LARGE|MEDIUM|SMALL)_BRUSH)\b' + lsl_invalid_deprecated = r'\b(?:ATTACH_[LR]PEC|DATA_RATING|OBJECT_ATTACHMENT_(?:GEOMETRY_BYTES|SURFACE_AREA)|PRIM_(?:CAST_SHADOWS|MATERIAL_LIGHT|TYPE_LEGACY)|PSYS_SRC_(?:INNER|OUTER)ANGLE|VEHICLE_FLAG_NO_FLY_UP|ll(?:Cloud|Make(?:Explosion|Fountain|Smoke|Fire)|RemoteDataSetRegion|Sound(?:Preload)?|XorBase64Strings(?:Correct)?))\b' + lsl_invalid_illegal = 
r'\b(?:event)\b' + lsl_invalid_unimplemented = r'\b(?:CHARACTER_(?:MAX_ANGULAR_(?:ACCEL|SPEED)|TURN_SPEED_MULTIPLIER)|PERMISSION_(?:CHANGE_(?:JOINTS|PERMISSIONS)|RELEASE_OWNERSHIP|REMAP_CONTROLS)|PRIM_PHYSICS_MATERIAL|PSYS_SRC_OBJ_REL_MASK|ll(?:CollisionSprite|(?:Stop)?PointAt|(?:(?:Refresh|Set)Prim)URL|(?:Take|Release)Camera|RemoteLoadScript))\b' + lsl_reserved_godmode = r'\b(?:ll(?:GodLikeRezObject|Set(?:Inventory|Object)PermMask))\b' + lsl_reserved_log = r'\b(?:print)\b' + lsl_operators = r'\+\+|\-\-|<<|>>|&&?|\|\|?|\^|~|[!%<>=*+\-\/]=?' + + tokens = { + 'root': + [ + (r'//.*?\n', Comment.Single), + (r'/\*', Comment.Multiline, 'comment'), + (r'"', String.Double, 'string'), + (lsl_keywords, Keyword), + (lsl_types, Keyword.Type), + (lsl_states, Name.Class), + (lsl_events, Name.Builtin), + (lsl_functions_builtin, Name.Function), + (lsl_constants_float, Keyword.Constant), + (lsl_constants_integer, Keyword.Constant), + (lsl_constants_integer_boolean, Keyword.Constant), + (lsl_constants_rotation, Keyword.Constant), + (lsl_constants_string, Keyword.Constant), + (lsl_constants_vector, Keyword.Constant), + (lsl_invalid_broken, Error), + (lsl_invalid_deprecated, Error), + (lsl_invalid_illegal, Error), + (lsl_invalid_unimplemented, Error), + (lsl_reserved_godmode, Keyword.Reserved), + (lsl_reserved_log, Keyword.Reserved), + (r'\b([a-zA-Z_]\w*)\b', Name.Variable), + (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d*', Number.Float), + (r'(\d+\.\d*|\.\d+)', Number.Float), + (r'0[xX][0-9a-fA-F]+', Number.Hex), + (r'\d+', Number.Integer), + (lsl_operators, Operator), + (r':=?', Error), + (r'[,;{}\(\)\[\]]', Punctuation), + (r'\n+', Whitespace), + (r'\s+', Whitespace) + ], + 'comment': + [ + (r'[^*/]+', Comment.Multiline), + (r'/\*', Comment.Multiline, '#push'), + (r'\*/', Comment.Multiline, '#pop'), + (r'[*/]', Comment.Multiline) + ], + 'string': + [ + (r'\\([nt"\\])', String.Escape), + (r'"', String.Double, '#pop'), + (r'\\.', Error), + (r'[^"\\]+', String.Double), + ] + } + + +class AppleScriptLexer(RegexLexer): + """ + For `AppleScript source code + `_, + including `AppleScript Studio + `_. + Contributed by Andreas Amann . + + .. 
versionadded:: 1.0 + """ + + name = 'AppleScript' + aliases = ['applescript'] + filenames = ['*.applescript'] + + flags = re.MULTILINE | re.DOTALL + + Identifiers = r'[a-zA-Z]\w*' + + # XXX: use words() for all of these + Literals = ('AppleScript', 'current application', 'false', 'linefeed', + 'missing value', 'pi', 'quote', 'result', 'return', 'space', + 'tab', 'text item delimiters', 'true', 'version') + Classes = ('alias ', 'application ', 'boolean ', 'class ', 'constant ', + 'date ', 'file ', 'integer ', 'list ', 'number ', 'POSIX file ', + 'real ', 'record ', 'reference ', 'RGB color ', 'script ', + 'text ', 'unit types', '(?:Unicode )?text', 'string') + BuiltIn = ('attachment', 'attribute run', 'character', 'day', 'month', + 'paragraph', 'word', 'year') + HandlerParams = ('about', 'above', 'against', 'apart from', 'around', + 'aside from', 'at', 'below', 'beneath', 'beside', + 'between', 'for', 'given', 'instead of', 'on', 'onto', + 'out of', 'over', 'since') + Commands = ('ASCII (character|number)', 'activate', 'beep', 'choose URL', + 'choose application', 'choose color', 'choose file( name)?', + 'choose folder', 'choose from list', + 'choose remote application', 'clipboard info', + 'close( access)?', 'copy', 'count', 'current date', 'delay', + 'delete', 'display (alert|dialog)', 'do shell script', + 'duplicate', 'exists', 'get eof', 'get volume settings', + 'info for', 'launch', 'list (disks|folder)', 'load script', + 'log', 'make', 'mount volume', 'new', 'offset', + 'open( (for access|location))?', 'path to', 'print', 'quit', + 'random number', 'read', 'round', 'run( script)?', + 'say', 'scripting components', + 'set (eof|the clipboard to|volume)', 'store script', + 'summarize', 'system attribute', 'system info', + 'the clipboard', 'time to GMT', 'write', 'quoted form') + References = ('(in )?back of', '(in )?front of', '[0-9]+(st|nd|rd|th)', + 'first', 'second', 'third', 'fourth', 'fifth', 'sixth', + 'seventh', 'eighth', 'ninth', 'tenth', 'after', 'back', + 'before', 'behind', 'every', 'front', 'index', 'last', + 'middle', 'some', 'that', 'through', 'thru', 'where', 'whose') + Operators = ("and", "or", "is equal", "equals", "(is )?equal to", "is not", + "isn't", "isn't equal( to)?", "is not equal( to)?", + "doesn't equal", "does not equal", "(is )?greater than", + "comes after", "is not less than or equal( to)?", + "isn't less than or equal( to)?", "(is )?less than", + "comes before", "is not greater than or equal( to)?", + "isn't greater than or equal( to)?", + "(is )?greater than or equal( to)?", "is not less than", + "isn't less than", "does not come before", + "doesn't come before", "(is )?less than or equal( to)?", + "is not greater than", "isn't greater than", + "does not come after", "doesn't come after", "starts? with", + "begins? with", "ends? 
with", "contains?", "does not contain", + "doesn't contain", "is in", "is contained by", "is not in", + "is not contained by", "isn't contained by", "div", "mod", + "not", "(a )?(ref( to)?|reference to)", "is", "does") + Control = ('considering', 'else', 'error', 'exit', 'from', 'if', + 'ignoring', 'in', 'repeat', 'tell', 'then', 'times', 'to', + 'try', 'until', 'using terms from', 'while', 'whith', + 'with timeout( of)?', 'with transaction', 'by', 'continue', + 'end', 'its?', 'me', 'my', 'return', 'of', 'as') + Declarations = ('global', 'local', 'prop(erty)?', 'set', 'get') + Reserved = ('but', 'put', 'returning', 'the') + StudioClasses = ('action cell', 'alert reply', 'application', 'box', + 'browser( cell)?', 'bundle', 'button( cell)?', 'cell', + 'clip view', 'color well', 'color-panel', + 'combo box( item)?', 'control', + 'data( (cell|column|item|row|source))?', 'default entry', + 'dialog reply', 'document', 'drag info', 'drawer', + 'event', 'font(-panel)?', 'formatter', + 'image( (cell|view))?', 'matrix', 'menu( item)?', 'item', + 'movie( view)?', 'open-panel', 'outline view', 'panel', + 'pasteboard', 'plugin', 'popup button', + 'progress indicator', 'responder', 'save-panel', + 'scroll view', 'secure text field( cell)?', 'slider', + 'sound', 'split view', 'stepper', 'tab view( item)?', + 'table( (column|header cell|header view|view))', + 'text( (field( cell)?|view))?', 'toolbar( item)?', + 'user-defaults', 'view', 'window') + StudioEvents = ('accept outline drop', 'accept table drop', 'action', + 'activated', 'alert ended', 'awake from nib', 'became key', + 'became main', 'begin editing', 'bounds changed', + 'cell value', 'cell value changed', 'change cell value', + 'change item value', 'changed', 'child of item', + 'choose menu item', 'clicked', 'clicked toolbar item', + 'closed', 'column clicked', 'column moved', + 'column resized', 'conclude drop', 'data representation', + 'deminiaturized', 'dialog ended', 'document nib name', + 'double clicked', 'drag( (entered|exited|updated))?', + 'drop', 'end editing', 'exposed', 'idle', 'item expandable', + 'item value', 'item value changed', 'items changed', + 'keyboard down', 'keyboard up', 'launched', + 'load data representation', 'miniaturized', 'mouse down', + 'mouse dragged', 'mouse entered', 'mouse exited', + 'mouse moved', 'mouse up', 'moved', + 'number of browser rows', 'number of items', + 'number of rows', 'open untitled', 'opened', 'panel ended', + 'parameters updated', 'plugin loaded', 'prepare drop', + 'prepare outline drag', 'prepare outline drop', + 'prepare table drag', 'prepare table drop', + 'read from file', 'resigned active', 'resigned key', + 'resigned main', 'resized( sub views)?', + 'right mouse down', 'right mouse dragged', + 'right mouse up', 'rows changed', 'scroll wheel', + 'selected tab view item', 'selection changed', + 'selection changing', 'should begin editing', + 'should close', 'should collapse item', + 'should end editing', 'should expand item', + 'should open( untitled)?', + 'should quit( after last window closed)?', + 'should select column', 'should select item', + 'should select row', 'should select tab view item', + 'should selection change', 'should zoom', 'shown', + 'update menu item', 'update parameters', + 'update toolbar item', 'was hidden', 'was miniaturized', + 'will become active', 'will close', 'will dismiss', + 'will display browser cell', 'will display cell', + 'will display item cell', 'will display outline cell', + 'will finish launching', 'will hide', 'will miniaturize', + 'will move', 
'will open', 'will pop up', 'will quit', + 'will resign active', 'will resize( sub views)?', + 'will select tab view item', 'will show', 'will zoom', + 'write to file', 'zoomed') + StudioCommands = ('animate', 'append', 'call method', 'center', + 'close drawer', 'close panel', 'display', + 'display alert', 'display dialog', 'display panel', 'go', + 'hide', 'highlight', 'increment', 'item for', + 'load image', 'load movie', 'load nib', 'load panel', + 'load sound', 'localized string', 'lock focus', 'log', + 'open drawer', 'path for', 'pause', 'perform action', + 'play', 'register', 'resume', 'scroll', 'select( all)?', + 'show', 'size to fit', 'start', 'step back', + 'step forward', 'stop', 'synchronize', 'unlock focus', + 'update') + StudioProperties = ('accepts arrow key', 'action method', 'active', + 'alignment', 'allowed identifiers', + 'allows branch selection', 'allows column reordering', + 'allows column resizing', 'allows column selection', + 'allows customization', + 'allows editing text attributes', + 'allows empty selection', 'allows mixed state', + 'allows multiple selection', 'allows reordering', + 'allows undo', 'alpha( value)?', 'alternate image', + 'alternate increment value', 'alternate title', + 'animation delay', 'associated file name', + 'associated object', 'auto completes', 'auto display', + 'auto enables items', 'auto repeat', + 'auto resizes( outline column)?', + 'auto save expanded items', 'auto save name', + 'auto save table columns', 'auto saves configuration', + 'auto scroll', 'auto sizes all columns to fit', + 'auto sizes cells', 'background color', 'bezel state', + 'bezel style', 'bezeled', 'border rect', 'border type', + 'bordered', 'bounds( rotation)?', 'box type', + 'button returned', 'button type', + 'can choose directories', 'can choose files', + 'can draw', 'can hide', + 'cell( (background color|size|type))?', 'characters', + 'class', 'click count', 'clicked( data)? column', + 'clicked data item', 'clicked( data)? row', + 'closeable', 'collating', 'color( (mode|panel))', + 'command key down', 'configuration', + 'content(s| (size|view( margins)?))?', 'context', + 'continuous', 'control key down', 'control size', + 'control tint', 'control view', + 'controller visible', 'coordinate system', + 'copies( on scroll)?', 'corner view', 'current cell', + 'current column', 'current( field)? editor', + 'current( menu)? item', 'current row', + 'current tab view item', 'data source', + 'default identifiers', 'delta (x|y|z)', + 'destination window', 'directory', 'display mode', + 'displayed cell', 'document( (edited|rect|view))?', + 'double value', 'dragged column', 'dragged distance', + 'dragged items', 'draws( cell)? background', + 'draws grid', 'dynamically scrolls', 'echos bullets', + 'edge', 'editable', 'edited( data)? column', + 'edited data item', 'edited( data)? 
row', 'enabled', + 'enclosing scroll view', 'ending page', + 'error handling', 'event number', 'event type', + 'excluded from windows menu', 'executable path', + 'expanded', 'fax number', 'field editor', 'file kind', + 'file name', 'file type', 'first responder', + 'first visible column', 'flipped', 'floating', + 'font( panel)?', 'formatter', 'frameworks path', + 'frontmost', 'gave up', 'grid color', 'has data items', + 'has horizontal ruler', 'has horizontal scroller', + 'has parent data item', 'has resize indicator', + 'has shadow', 'has sub menu', 'has vertical ruler', + 'has vertical scroller', 'header cell', 'header view', + 'hidden', 'hides when deactivated', 'highlights by', + 'horizontal line scroll', 'horizontal page scroll', + 'horizontal ruler view', 'horizontally resizable', + 'icon image', 'id', 'identifier', + 'ignores multiple clicks', + 'image( (alignment|dims when disabled|frame style|scaling))?', + 'imports graphics', 'increment value', + 'indentation per level', 'indeterminate', 'index', + 'integer value', 'intercell spacing', 'item height', + 'key( (code|equivalent( modifier)?|window))?', + 'knob thickness', 'label', 'last( visible)? column', + 'leading offset', 'leaf', 'level', 'line scroll', + 'loaded', 'localized sort', 'location', 'loop mode', + 'main( (bunde|menu|window))?', 'marker follows cell', + 'matrix mode', 'maximum( content)? size', + 'maximum visible columns', + 'menu( form representation)?', 'miniaturizable', + 'miniaturized', 'minimized image', 'minimized title', + 'minimum column width', 'minimum( content)? size', + 'modal', 'modified', 'mouse down state', + 'movie( (controller|file|rect))?', 'muted', 'name', + 'needs display', 'next state', 'next text', + 'number of tick marks', 'only tick mark values', + 'opaque', 'open panel', 'option key down', + 'outline table column', 'page scroll', 'pages across', + 'pages down', 'palette label', 'pane splitter', + 'parent data item', 'parent window', 'pasteboard', + 'path( (names|separator))?', 'playing', + 'plays every frame', 'plays selection only', 'position', + 'preferred edge', 'preferred type', 'pressure', + 'previous text', 'prompt', 'properties', + 'prototype cell', 'pulls down', 'rate', + 'released when closed', 'repeated', + 'requested print time', 'required file type', + 'resizable', 'resized column', 'resource path', + 'returns records', 'reuses columns', 'rich text', + 'roll over', 'row height', 'rulers visible', + 'save panel', 'scripts path', 'scrollable', + 'selectable( identifiers)?', 'selected cell', + 'selected( data)? columns?', 'selected data items?', + 'selected( data)? 
rows?', 'selected item identifier', + 'selection by rect', 'send action on arrow key', + 'sends action when done editing', 'separates columns', + 'separator item', 'sequence number', 'services menu', + 'shared frameworks path', 'shared support path', + 'sheet', 'shift key down', 'shows alpha', + 'shows state by', 'size( mode)?', + 'smart insert delete enabled', 'sort case sensitivity', + 'sort column', 'sort order', 'sort type', + 'sorted( data rows)?', 'sound', 'source( mask)?', + 'spell checking enabled', 'starting page', 'state', + 'string value', 'sub menu', 'super menu', 'super view', + 'tab key traverses cells', 'tab state', 'tab type', + 'tab view', 'table view', 'tag', 'target( printer)?', + 'text color', 'text container insert', + 'text container origin', 'text returned', + 'tick mark position', 'time stamp', + 'title(d| (cell|font|height|position|rect))?', + 'tool tip', 'toolbar', 'trailing offset', 'transparent', + 'treat packages as directories', 'truncated labels', + 'types', 'unmodified characters', 'update views', + 'use sort indicator', 'user defaults', + 'uses data source', 'uses ruler', + 'uses threaded animation', + 'uses title from previous column', 'value wraps', + 'version', + 'vertical( (line scroll|page scroll|ruler view))?', + 'vertically resizable', 'view', + 'visible( document rect)?', 'volume', 'width', 'window', + 'windows menu', 'wraps', 'zoomable', 'zoomed') + + tokens = { + 'root': [ + (r'\s+', Text), + (u'¬\\n', String.Escape), + (r"'s\s+", Text), # This is a possessive, consider moving + (r'(--|#).*?$', Comment), + (r'\(\*', Comment.Multiline, 'comment'), + (r'[\(\){}!,.:]', Punctuation), + (u'(«)([^»]+)(»)', + bygroups(Text, Name.Builtin, Text)), + (r'\b((?:considering|ignoring)\s*)' + r'(application responses|case|diacriticals|hyphens|' + r'numeric strings|punctuation|white space)', + bygroups(Keyword, Name.Builtin)), + (u'(-|\\*|\\+|&|≠|>=?|<=?|=|≥|≤|/|÷|\\^)', Operator), + (r"\b(%s)\b" % '|'.join(Operators), Operator.Word), + (r'^(\s*(?:on|end)\s+)' + r'(%s)' % '|'.join(StudioEvents[::-1]), + bygroups(Keyword, Name.Function)), + (r'^(\s*)(in|on|script|to)(\s+)', bygroups(Text, Keyword, Text)), + (r'\b(as )(%s)\b' % '|'.join(Classes), + bygroups(Keyword, Name.Class)), + (r'\b(%s)\b' % '|'.join(Literals), Name.Constant), + (r'\b(%s)\b' % '|'.join(Commands), Name.Builtin), + (r'\b(%s)\b' % '|'.join(Control), Keyword), + (r'\b(%s)\b' % '|'.join(Declarations), Keyword), + (r'\b(%s)\b' % '|'.join(Reserved), Name.Builtin), + (r'\b(%s)s?\b' % '|'.join(BuiltIn), Name.Builtin), + (r'\b(%s)\b' % '|'.join(HandlerParams), Name.Builtin), + (r'\b(%s)\b' % '|'.join(StudioProperties), Name.Attribute), + (r'\b(%s)s?\b' % '|'.join(StudioClasses), Name.Builtin), + (r'\b(%s)\b' % '|'.join(StudioCommands), Name.Builtin), + (r'\b(%s)\b' % '|'.join(References), Name.Builtin), + (r'"(\\\\|\\"|[^"])*"', String.Double), + (r'\b(%s)\b' % Identifiers, Name.Variable), + (r'[-+]?(\d+\.\d*|\d*\.\d+)(E[-+][0-9]+)?', Number.Float), + (r'[-+]?\d+', Number.Integer), + ], + 'comment': [ + ('\(\*', Comment.Multiline, '#push'), + ('\*\)', Comment.Multiline, '#pop'), + ('[^*(]+', Comment.Multiline), + ('[*(]', Comment.Multiline), + ], + } diff --git a/pygments/lexers/testing.py b/pygments/lexers/testing.py new file mode 100644 index 00000000..2e769930 --- /dev/null +++ b/pygments/lexers/testing.py @@ -0,0 +1,135 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.testing + ~~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for testing languages. 
+ + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import re + +from pygments.lexer import RegexLexer, include, bygroups, using, \ + this, inherit, default, words +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation, Error + +__all__ = ['GherkinLexer'] + + +class GherkinLexer(RegexLexer): + """ + For `Gherkin ` syntax. + + .. versionadded:: 1.2 + """ + name = 'Gherkin' + aliases = ['cucumber', 'gherkin'] + filenames = ['*.feature'] + mimetypes = ['text/x-gherkin'] + + feature_keywords = u'^(기능|機能|功能|フィーチャ|خاصية|תכונה|Функціонал|Функционалност|Функционал|Фича|Особина|Могућност|Özellik|Właściwość|Tính năng|Trajto|Savybė|Požiadavka|Požadavek|Osobina|Ominaisuus|Omadus|OH HAI|Mogućnost|Mogucnost|Jellemző|Fīča|Funzionalità|Funktionalität|Funkcionalnost|Funkcionalitāte|Funcționalitate|Functionaliteit|Functionalitate|Funcionalitat|Funcionalidade|Fonctionnalité|Fitur|Feature|Egenskap|Egenskab|Crikey|Característica|Arwedd)(:)(.*)$' + feature_element_keywords = u'^(\\s*)(시나리오 개요|시나리오|배경|背景|場景大綱|場景|场景大纲|场景|劇本大綱|劇本|テンプレ|シナリオテンプレート|シナリオテンプレ|シナリオアウトライン|シナリオ|سيناريو مخطط|سيناريو|الخلفية|תרחיש|תבנית תרחיש|רקע|Тарих|Сценарій|Сценарио|Сценарий структураси|Сценарий|Структура сценарію|Структура сценарија|Структура сценария|Скица|Рамка на сценарий|Пример|Предыстория|Предистория|Позадина|Передумова|Основа|Концепт|Контекст|Założenia|Wharrimean is|Tình huống|The thing of it is|Tausta|Taust|Tapausaihio|Tapaus|Szenariogrundriss|Szenario|Szablon scenariusza|Stsenaarium|Struktura scenarija|Skica|Skenario konsep|Skenario|Situācija|Senaryo taslağı|Senaryo|Scénář|Scénario|Schema dello scenario|Scenārijs pēc parauga|Scenārijs|Scenár|Scenaro|Scenariusz|Scenariul de şablon|Scenariul de sablon|Scenariu|Scenario Outline|Scenario Amlinellol|Scenario|Scenarijus|Scenarijaus šablonas|Scenarij|Scenarie|Rerefons|Raamstsenaarium|Primer|Pozadí|Pozadina|Pozadie|Plan du scénario|Plan du Scénario|Osnova scénáře|Osnova|Náčrt Scénáře|Náčrt Scenáru|Mate|MISHUN SRSLY|MISHUN|Kịch bản|Konturo de la scenaro|Kontext|Konteksts|Kontekstas|Kontekst|Koncept|Khung tình huống|Khung kịch bản|Háttér|Grundlage|Geçmiş|Forgatókönyv vázlat|Forgatókönyv|Fono|Esquema do Cenário|Esquema do Cenario|Esquema del escenario|Esquema de l\'escenari|Escenario|Escenari|Dis is what went down|Dasar|Contexto|Contexte|Contesto|Condiţii|Conditii|Cenário|Cenario|Cefndir|Bối cảnh|Blokes|Bakgrunn|Bakgrund|Baggrund|Background|B4|Antecedents|Antecedentes|All y\'all|Achtergrond|Abstrakt Scenario|Abstract Scenario)(:)(.*)$' + examples_keywords = u'^(\\s*)(예|例子|例|サンプル|امثلة|דוגמאות|Сценарији|Примери|Приклади|Мисоллар|Значения|Örnekler|Voorbeelden|Variantai|Tapaukset|Scenarios|Scenariji|Scenarijai|Příklady|Példák|Príklady|Przykłady|Primjeri|Primeri|Piemēri|Pavyzdžiai|Paraugs|Juhtumid|Exemplos|Exemples|Exemplele|Exempel|Examples|Esempi|Enghreifftiau|Ekzemploj|Eksempler|Ejemplos|EXAMPLZ|Dữ liệu|Contoh|Cobber|Beispiele)(:)(.*)$' + step_keywords = u'^(\\s*)(하지만|조건|먼저|만일|만약|단|그리고|그러면|那麼|那么|而且|當|当|前提|假設|假如|但是|但し|並且|もし|ならば|ただし|しかし|かつ|و |متى |لكن |عندما |ثم |بفرض |اذاً |כאשר |וגם |בהינתן |אזי |אז |אבל |Якщо |Унда |То |Припустимо, що |Припустимо |Онда |Но |Нехай |Лекин |Когато |Када |Кад |К тому же |И |Задато |Задати |Задате |Если |Допустим |Дадено |Ва |Бирок |Аммо |Али |Але |Агар |А |І |Și |És |Zatati |Zakładając |Zadato |Zadate |Zadano |Zadani |Zadan |Youse know when youse got |Youse know like when |Yna |Ya know how |Ya gotta |Y |Wun |Wtedy |When y\'all |When |Wenn |WEN |Và 
|Ve |Und |Un |Thì |Then y\'all |Then |Tapi |Tak |Tada |Tad |Så |Stel |Soit |Siis |Si |Sed |Se |Quando |Quand |Quan |Pryd |Pokud |Pokiaľ |Però |Pero |Pak |Oraz |Onda |Ond |Oletetaan |Og |Och |O zaman |Når |När |Niin |Nhưng |N |Mutta |Men |Mas |Maka |Majd |Mais |Maar |Ma |Lorsque |Lorsqu\'|Kun |Kuid |Kui |Khi |Keď |Ketika |Když |Kaj |Kai |Kada |Kad |Jeżeli |Ja |Ir |I CAN HAZ |I |Ha |Givun |Givet |Given y\'all |Given |Gitt |Gegeven |Gegeben sei |Fakat |Eğer ki |Etant donné |Et |Então |Entonces |Entao |En |Eeldades |E |Duota |Dun |Donitaĵo |Donat |Donada |Do |Diyelim ki |Dengan |Den youse gotta |De |Dato |Dar |Dann |Dan |Dado |Dacă |Daca |DEN |Când |Cuando |Cho |Cept |Cand |Cal |But y\'all |But |Buh |Biết |Bet |BUT |Atès |Atunci |Atesa |Anrhegedig a |Angenommen |And y\'all |And |An |Ama |Als |Alors |Allora |Ali |Aleshores |Ale |Akkor |Aber |AN |A také |A |\* )' + + tokens = { + 'comments': [ + (r'^\s*#.*$', Comment), + ], + 'feature_elements' : [ + (step_keywords, Keyword, "step_content_stack"), + include('comments'), + (r"(\s|.)", Name.Function), + ], + 'feature_elements_on_stack' : [ + (step_keywords, Keyword, "#pop:2"), + include('comments'), + (r"(\s|.)", Name.Function), + ], + 'examples_table': [ + (r"\s+\|", Keyword, 'examples_table_header'), + include('comments'), + (r"(\s|.)", Name.Function), + ], + 'examples_table_header': [ + (r"\s+\|\s*$", Keyword, "#pop:2"), + include('comments'), + (r"\\\|", Name.Variable), + (r"\s*\|", Keyword), + (r"[^\|]", Name.Variable), + ], + 'scenario_sections_on_stack': [ + (feature_element_keywords, + bygroups(Name.Function, Keyword, Keyword, Name.Function), + "feature_elements_on_stack"), + ], + 'narrative': [ + include('scenario_sections_on_stack'), + include('comments'), + (r"(\s|.)", Name.Function), + ], + 'table_vars': [ + (r'(<[^>]+>)', Name.Variable), + ], + 'numbers': [ + (r'(\d+\.?\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', String), + ], + 'string': [ + include('table_vars'), + (r'(\s|.)', String), + ], + 'py_string': [ + (r'"""', Keyword, "#pop"), + include('string'), + ], + 'step_content_root':[ + (r"$", Keyword, "#pop"), + include('step_content'), + ], + 'step_content_stack':[ + (r"$", Keyword, "#pop:2"), + include('step_content'), + ], + 'step_content':[ + (r'"', Name.Function, "double_string"), + include('table_vars'), + include('numbers'), + include('comments'), + (r'(\s|.)', Name.Function), + ], + 'table_content': [ + (r"\s+\|\s*$", Keyword, "#pop"), + include('comments'), + (r"\\\|", String), + (r"\s*\|", Keyword), + include('string'), + ], + 'double_string': [ + (r'"', Name.Function, "#pop"), + include('string'), + ], + 'root': [ + (r'\n', Name.Function), + include('comments'), + (r'"""', Keyword, "py_string"), + (r'\s+\|', Keyword, 'table_content'), + (r'"', Name.Function, "double_string"), + include('table_vars'), + include('numbers'), + (r'(\s*)(@[^@\r\n\t ]+)', bygroups(Name.Function, Name.Tag)), + (step_keywords, bygroups(Name.Function, Keyword), + 'step_content_root'), + (feature_keywords, bygroups(Keyword, Keyword, Name.Function), + 'narrative'), + (feature_element_keywords, + bygroups(Name.Function, Keyword, Keyword, Name.Function), + 'feature_elements'), + (examples_keywords, + bygroups(Name.Function, Keyword, Keyword, Name.Function), + 'examples_table'), + (r'(\s|.)', Name.Function), + ] + } -- cgit v1.2.1 From 78a45160ef63837dbaa18f5a49e1bd342354705a Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Fri, 19 Sep 2014 21:14:43 +0200 Subject: reorganization of other.py, part 2 --- CHANGES | 1 + pygments/lexers/_mapping.py | 51 +- 
pygments/lexers/automation.py | 373 +++++++ pygments/lexers/business.py | 112 +- pygments/lexers/dsls.py | 441 ++++++++ pygments/lexers/esoteric.py | 64 +- pygments/lexers/installers.py | 215 ++++ pygments/lexers/misc/apl.py | 101 ++ pygments/lexers/misc/basic.py | 356 +++++++ pygments/lexers/misc/blitz.py | 318 ------ pygments/lexers/misc/pawn.py | 195 ++++ pygments/lexers/misc/rebol.py | 200 +++- pygments/lexers/misc/smalltalk.py | 195 ++++ pygments/lexers/other.py | 2090 ++----------------------------------- pygments/lexers/scripting.py | 158 ++- 15 files changed, 2502 insertions(+), 2368 deletions(-) create mode 100644 pygments/lexers/automation.py create mode 100644 pygments/lexers/dsls.py create mode 100644 pygments/lexers/installers.py create mode 100644 pygments/lexers/misc/apl.py create mode 100644 pygments/lexers/misc/basic.py delete mode 100644 pygments/lexers/misc/blitz.py create mode 100644 pygments/lexers/misc/pawn.py create mode 100644 pygments/lexers/misc/smalltalk.py diff --git a/CHANGES b/CHANGES index 8db78964..f5196bde 100644 --- a/CHANGES +++ b/CHANGES @@ -43,6 +43,7 @@ Version 2.0 * APL (#969) * Nit (PR#375) * LSL (PR#296) + * Alloy (PR#355) - Added a helper to "optimize" regular expressions that match one of many literal words; this can save 20% and more lexing time with lexers that diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index fee4096d..66bc6b74 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -17,12 +17,12 @@ from __future__ import print_function LEXERS = { 'ABAPLexer': ('pygments.lexers.business', 'ABAP', ('abap',), ('*.abap',), ('text/x-abap',)), - 'APLLexer': ('pygments.lexers.other', 'APL', ('apl',), ('*.apl',), ()), + 'APLLexer': ('pygments.lexers.misc.apl', 'APL', ('apl',), ('*.apl',), ()), 'ActionScript3Lexer': ('pygments.lexers.web', 'ActionScript 3', ('as3', 'actionscript3'), ('*.as',), ('application/x-actionscript3', 'text/x-actionscript3', 'text/actionscript3')), 'ActionScriptLexer': ('pygments.lexers.web', 'ActionScript', ('as', 'actionscript'), ('*.as',), ('application/x-actionscript', 'text/x-actionscript', 'text/actionscript')), 'AdaLexer': ('pygments.lexers.pascal', 'Ada', ('ada', 'ada95', 'ada2005'), ('*.adb', '*.ads', '*.ada'), ('text/x-ada',)), 'AgdaLexer': ('pygments.lexers.functional', 'Agda', ('agda',), ('*.agda',), ('text/x-agda',)), - 'AlloyLexer': ('pygments.lexers.other', 'Alloy', ('alloy',), ('*.als',), ('text/x-alloy',)), + 'AlloyLexer': ('pygments.lexers.dsls', 'Alloy', ('alloy',), ('*.als',), ('text/x-alloy',)), 'AmbientTalkLexer': ('pygments.lexers.other', 'AmbientTalk', ('at', 'ambienttalk', 'ambienttalk/2'), ('*.at',), ('text/x-ambienttalk',)), 'AntlrActionScriptLexer': ('pygments.lexers.parsers', 'ANTLR With ActionScript Target', ('antlr-as', 'antlr-actionscript'), ('*.G', '*.g'), ()), 'AntlrCSharpLexer': ('pygments.lexers.parsers', 'ANTLR With C# Target', ('antlr-csharp', 'antlr-c#'), ('*.G', '*.g'), ()), @@ -37,8 +37,8 @@ LEXERS = { 'AppleScriptLexer': ('pygments.lexers.scripting', 'AppleScript', ('applescript',), ('*.applescript',), ()), 'AspectJLexer': ('pygments.lexers.jvm', 'AspectJ', ('aspectj',), ('*.aj',), ('text/x-aspectj',)), 'AsymptoteLexer': ('pygments.lexers.graphics', 'Asymptote', ('asy', 'asymptote'), ('*.asy',), ('text/x-asymptote',)), - 'AutoItLexer': ('pygments.lexers.other', 'AutoIt', ('autoit',), ('*.au3',), ('text/x-autoit',)), - 'AutohotkeyLexer': ('pygments.lexers.other', 'autohotkey', ('ahk', 'autohotkey'), ('*.ahk', '*.ahkl'), 
('text/x-autohotkey',)), + 'AutoItLexer': ('pygments.lexers.automation', 'AutoIt', ('autoit',), ('*.au3',), ('text/x-autoit',)), + 'AutohotkeyLexer': ('pygments.lexers.automation', 'autohotkey', ('ahk', 'autohotkey'), ('*.ahk', '*.ahkl'), ('text/x-autohotkey',)), 'AwkLexer': ('pygments.lexers.other', 'Awk', ('awk', 'gawk', 'mawk', 'nawk'), ('*.awk',), ('application/x-awk',)), 'BBCodeLexer': ('pygments.lexers.text', 'BBCode', ('bbcode',), (), ('text/x-bbcode',)), 'BaseMakefileLexer': ('pygments.lexers.text', 'Base Makefile', ('basemake',), (), ()), @@ -46,11 +46,11 @@ LEXERS = { 'BashSessionLexer': ('pygments.lexers.shell', 'Bash Session', ('console',), ('*.sh-session',), ('application/x-shell-session',)), 'BatchLexer': ('pygments.lexers.shell', 'Batchfile', ('bat', 'batch', 'dosbatch', 'winbatch'), ('*.bat', '*.cmd'), ('application/x-dos-batch',)), 'BefungeLexer': ('pygments.lexers.esoteric', 'Befunge', ('befunge',), ('*.befunge',), ('application/x-befunge',)), - 'BlitzBasicLexer': ('pygments.lexers.misc.blitz', 'BlitzBasic', ('blitzbasic', 'b3d', 'bplus'), ('*.bb', '*.decls'), ('text/x-bb',)), - 'BlitzMaxLexer': ('pygments.lexers.misc.blitz', 'BlitzMax', ('blitzmax', 'bmax'), ('*.bmx',), ('text/x-bmx',)), + 'BlitzBasicLexer': ('pygments.lexers.misc.basic', 'BlitzBasic', ('blitzbasic', 'b3d', 'bplus'), ('*.bb', '*.decls'), ('text/x-bb',)), + 'BlitzMaxLexer': ('pygments.lexers.misc.basic', 'BlitzMax', ('blitzmax', 'bmax'), ('*.bmx',), ('text/x-bmx',)), 'BooLexer': ('pygments.lexers.dotnet', 'Boo', ('boo',), ('*.boo',), ('text/x-boo',)), 'BrainfuckLexer': ('pygments.lexers.esoteric', 'Brainfuck', ('brainfuck', 'bf'), ('*.bf', '*.b'), ('application/x-brainfuck',)), - 'BroLexer': ('pygments.lexers.other', 'Bro', ('bro',), ('*.bro',), ()), + 'BroLexer': ('pygments.lexers.dsls', 'Bro', ('bro',), ('*.bro',), ()), 'BugsLexer': ('pygments.lexers.math', 'BUGS', ('bugs', 'winbugs', 'openbugs'), ('*.bug',), ()), 'CLexer': ('pygments.lexers.c_like.c_cpp', 'C', ('c',), ('*.c', '*.h', '*.idc'), ('text/x-chdr', 'text/x-csrc')), 'CMakeLexer': ('pygments.lexers.text', 'CMake', ('cmake',), ('*.cmake', 'CMakeLists.txt'), ('text/x-cmake',)), @@ -58,7 +58,7 @@ LEXERS = { 'CSharpAspxLexer': ('pygments.lexers.dotnet', 'aspx-cs', ('aspx-cs',), ('*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd'), ()), 'CSharpLexer': ('pygments.lexers.dotnet', 'C#', ('csharp', 'c#'), ('*.cs',), ('text/x-csharp',)), 'Ca65Lexer': ('pygments.lexers.asm', 'ca65', ('ca65',), ('*.s',), ()), - 'CbmBasicV2Lexer': ('pygments.lexers.other', 'CBM BASIC V2', ('cbmbas',), ('*.bas',), ()), + 'CbmBasicV2Lexer': ('pygments.lexers.misc.basic', 'CBM BASIC V2', ('cbmbas',), ('*.bas',), ()), 'CeylonLexer': ('pygments.lexers.jvm', 'Ceylon', ('ceylon',), ('*.ceylon',), ('text/x-ceylon',)), 'Cfengine3Lexer': ('pygments.lexers.other', 'CFEngine3', ('cfengine3', 'cf3'), ('*.cf',), ()), 'ChaiscriptLexer': ('pygments.lexers.scripting', 'ChaiScript', ('chai', 'chaiscript'), ('*.chai',), ('text/x-chaiscript', 'application/x-chaiscript')), @@ -136,7 +136,7 @@ LEXERS = { 'GnuplotLexer': ('pygments.lexers.graphics', 'Gnuplot', ('gnuplot',), ('*.plot', '*.plt'), ('text/x-gnuplot',)), 'GoLexer': ('pygments.lexers.c_like.go', 'Go', ('go',), ('*.go',), ('text/x-gosrc',)), 'GoloLexer': ('pygments.lexers.jvm', 'Golo', ('golo',), ('*.golo',), ()), - 'GoodDataCLLexer': ('pygments.lexers.other', 'GoodData-CL', ('gooddata-cl',), ('*.gdc',), ('text/x-gooddata-cl',)), + 'GoodDataCLLexer': ('pygments.lexers.business', 'GoodData-CL', ('gooddata-cl',), ('*.gdc',), 
('text/x-gooddata-cl',)), 'GosuLexer': ('pygments.lexers.jvm', 'Gosu', ('gosu',), ('*.gs', '*.gsx', '*.gsp', '*.vark'), ('text/x-gosu',)), 'GosuTemplateLexer': ('pygments.lexers.jvm', 'Gosu Template', ('gst',), ('*.gst',), ('text/x-gosu-template',)), 'GroffLexer': ('pygments.lexers.text', 'Groff', ('groff', 'nroff', 'man'), ('*.[1234567]', '*.man'), ('application/x-troff', 'text/troff')), @@ -201,14 +201,14 @@ LEXERS = { 'LogosLexer': ('pygments.lexers.c_like.objective', 'Logos', ('logos',), ('*.x', '*.xi', '*.xm', '*.xmi'), ('text/x-logos',)), 'LogtalkLexer': ('pygments.lexers.prolog', 'Logtalk', ('logtalk',), ('*.lgt', '*.logtalk'), ('text/x-logtalk',)), 'LuaLexer': ('pygments.lexers.scripting', 'Lua', ('lua',), ('*.lua', '*.wlua'), ('text/x-lua', 'application/x-lua')), - 'MOOCodeLexer': ('pygments.lexers.other', 'MOOCode', ('moocode', 'moo'), ('*.moo',), ('text/x-moocode',)), + 'MOOCodeLexer': ('pygments.lexers.scripting', 'MOOCode', ('moocode', 'moo'), ('*.moo',), ('text/x-moocode',)), 'MakefileLexer': ('pygments.lexers.text', 'Makefile', ('make', 'makefile', 'mf', 'bsdmake'), ('*.mak', '*.mk', 'Makefile', 'makefile', 'Makefile.*', 'GNUmakefile'), ('text/x-makefile',)), 'MakoCssLexer': ('pygments.lexers.templates', 'CSS+Mako', ('css+mako',), (), ('text/css+mako',)), 'MakoHtmlLexer': ('pygments.lexers.templates', 'HTML+Mako', ('html+mako',), (), ('text/html+mako',)), 'MakoJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Mako', ('js+mako', 'javascript+mako'), (), ('application/x-javascript+mako', 'text/x-javascript+mako', 'text/javascript+mako')), 'MakoLexer': ('pygments.lexers.templates', 'Mako', ('mako',), ('*.mao',), ('application/x-mako',)), 'MakoXmlLexer': ('pygments.lexers.templates', 'XML+Mako', ('xml+mako',), (), ('application/xml+mako',)), - 'MaqlLexer': ('pygments.lexers.other', 'MAQL', ('maql',), ('*.maql',), ('text/x-gooddata-maql', 'application/x-gooddata-maql')), + 'MaqlLexer': ('pygments.lexers.business', 'MAQL', ('maql',), ('*.maql',), ('text/x-gooddata-maql', 'application/x-gooddata-maql')), 'MaskLexer': ('pygments.lexers.web', 'Mask', ('mask',), ('*.mask',), ('text/x-mask',)), 'MasonLexer': ('pygments.lexers.templates', 'Mason', ('mason',), ('*.m', '*.mhtml', '*.mc', '*.mi', 'autohandler', 'dhandler'), ('application/x-mason',)), 'MathematicaLexer': ('pygments.lexers.math', 'Mathematica', ('mathematica', 'mma', 'nb'), ('*.nb', '*.cdf', '*.nbp', '*.ma'), ('application/mathematica', 'application/vnd.wolfram.mathematica', 'application/vnd.wolfram.mathematica.package', 'application/vnd.wolfram.cdf')), @@ -218,10 +218,10 @@ LEXERS = { 'ModelicaLexer': ('pygments.lexers.modeling', 'Modelica', ('modelica',), ('*.mo',), ('text/x-modelica',)), 'Modula2Lexer': ('pygments.lexers.pascal', 'Modula-2', ('modula2', 'm2'), ('*.def', '*.mod'), ('text/x-modula2',)), 'MoinWikiLexer': ('pygments.lexers.text', 'MoinMoin/Trac Wiki markup', ('trac-wiki', 'moin'), (), ('text/x-trac-wiki',)), - 'MonkeyLexer': ('pygments.lexers.misc.blitz', 'Monkey', ('monkey',), ('*.monkey',), ('text/x-monkey',)), + 'MonkeyLexer': ('pygments.lexers.misc.basic', 'Monkey', ('monkey',), ('*.monkey',), ('text/x-monkey',)), 'MoonScriptLexer': ('pygments.lexers.scripting', 'MoonScript', ('moon', 'moonscript'), ('*.moon',), ('text/x-moonscript', 'application/x-moonscript')), 'MqlLexer': ('pygments.lexers.c_like.other', 'MQL', ('mql', 'mq4', 'mq5', 'mql4', 'mql5'), ('*.mq4', '*.mq5', '*.mqh'), ('text/x-mql',)), - 'MscgenLexer': ('pygments.lexers.other', 'Mscgen', ('mscgen', 'msc'), ('*.msc',), ()), + 
'MscgenLexer': ('pygments.lexers.dsls', 'Mscgen', ('mscgen', 'msc'), ('*.msc',), ()), 'MuPADLexer': ('pygments.lexers.math', 'MuPAD', ('mupad',), ('*.mu',), ()), 'MxmlLexer': ('pygments.lexers.web', 'MXML', ('mxml',), ('*.mxml',), ()), 'MySqlLexer': ('pygments.lexers.sql', 'MySQL', ('mysql',), (), ('text/x-mysql',)), @@ -230,13 +230,13 @@ LEXERS = { 'MyghtyJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Myghty', ('js+myghty', 'javascript+myghty'), (), ('application/x-javascript+myghty', 'text/x-javascript+myghty', 'text/javascript+mygthy')), 'MyghtyLexer': ('pygments.lexers.templates', 'Myghty', ('myghty',), ('*.myt', 'autodelegate'), ('application/x-myghty',)), 'MyghtyXmlLexer': ('pygments.lexers.templates', 'XML+Myghty', ('xml+myghty',), (), ('application/xml+myghty',)), - 'NSISLexer': ('pygments.lexers.other', 'NSIS', ('nsis', 'nsi', 'nsh'), ('*.nsi', '*.nsh'), ('text/x-nsis',)), + 'NSISLexer': ('pygments.lexers.installers', 'NSIS', ('nsis', 'nsi', 'nsh'), ('*.nsi', '*.nsh'), ('text/x-nsis',)), 'NasmLexer': ('pygments.lexers.asm', 'NASM', ('nasm',), ('*.asm', '*.ASM'), ('text/x-nasm',)), 'NasmObjdumpLexer': ('pygments.lexers.asm', 'objdump-nasm', ('objdump-nasm',), ('*.objdump-intel',), ('text/x-nasm-objdump',)), 'NemerleLexer': ('pygments.lexers.dotnet', 'Nemerle', ('nemerle',), ('*.n',), ('text/x-nemerle',)), 'NesCLexer': ('pygments.lexers.c_like.other', 'nesC', ('nesc',), ('*.nc',), ('text/x-nescsrc',)), 'NewLispLexer': ('pygments.lexers.functional', 'NewLisp', ('newlisp',), ('*.lsp', '*.nl'), ('text/x-newlisp', 'application/x-newlisp')), - 'NewspeakLexer': ('pygments.lexers.other', 'Newspeak', ('newspeak',), ('*.ns2',), ('text/x-newspeak',)), + 'NewspeakLexer': ('pygments.lexers.misc.smalltalk', 'Newspeak', ('newspeak',), ('*.ns2',), ('text/x-newspeak',)), 'NginxConfLexer': ('pygments.lexers.text', 'Nginx configuration file', ('nginx',), (), ('text/x-nginx-conf',)), 'NimrodLexer': ('pygments.lexers.misc.nimrod', 'Nimrod', ('nimrod', 'nim'), ('*.nim', '*.nimrod'), ('text/x-nimrod',)), 'NitLexer': ('pygments.lexers.misc.nit', 'Nit', ('nit',), ('*.nit',), ()), @@ -252,6 +252,7 @@ LEXERS = { 'OpaLexer': ('pygments.lexers.functional', 'Opa', ('opa',), ('*.opa',), ('text/x-opa',)), 'OpenEdgeLexer': ('pygments.lexers.business', 'OpenEdge ABL', ('openedge', 'abl', 'progress'), ('*.p', '*.cls'), ('text/x-openedge', 'application/x-openedge')), 'PanLexer': ('pygments.lexers.other', 'Pan', ('pan',), ('*.pan',), ()), + 'PawnLexer': ('pygments.lexers.misc.pawn', 'Pawn', ('pawn',), ('*.p', '*.pwn', '*.inc'), ('text/x-pawn',)), 'PawnLexer': ('pygments.lexers.other', 'Pawn', ('pawn',), ('*.p', '*.pwn', '*.inc'), ('text/x-pawn',)), 'Perl6Lexer': ('pygments.lexers.perl', 'Perl6', ('perl6', 'pl6'), ('*.pl', '*.pm', '*.nqp', '*.p6', '*.6pl', '*.p6l', '*.pl6', '*.6pm', '*.p6m', '*.pm6', '*.t'), ('text/x-perl6', 'application/x-perl6')), 'PerlLexer': ('pygments.lexers.perl', 'Perl', ('perl', 'pl'), ('*.pl', '*.pm', '*.t'), ('text/x-perl', 'application/x-perl')), @@ -266,8 +267,8 @@ LEXERS = { 'PowerShellLexer': ('pygments.lexers.shell', 'PowerShell', ('powershell', 'posh', 'ps1', 'psm1'), ('*.ps1', '*.psm1'), ('text/x-powershell',)), 'PrologLexer': ('pygments.lexers.prolog', 'Prolog', ('prolog',), ('*.prolog', '*.pro', '*.pl'), ('text/x-prolog',)), 'PropertiesLexer': ('pygments.lexers.text', 'Properties', ('properties', 'jproperties'), ('*.properties',), ('text/x-java-properties',)), - 'ProtoBufLexer': ('pygments.lexers.other', 'Protocol Buffer', ('protobuf', 'proto'), ('*.proto',), ()), - 
'PuppetLexer': ('pygments.lexers.other', 'Puppet', ('puppet',), ('*.pp',), ()), + 'ProtoBufLexer': ('pygments.lexers.dsls', 'Protocol Buffer', ('protobuf', 'proto'), ('*.proto',), ()), + 'PuppetLexer': ('pygments.lexers.dsls', 'Puppet', ('puppet',), ('*.pp',), ()), 'PyPyLogLexer': ('pygments.lexers.text', 'PyPy Log', ('pypylog', 'pypy'), ('*.pypylog',), ('application/x-pypylog',)), 'Python3Lexer': ('pygments.lexers.python', 'Python 3', ('python3', 'py3'), (), ('text/x-python3', 'application/x-python3')), 'Python3TracebackLexer': ('pygments.lexers.python', 'Python 3.0 Traceback', ('py3tb',), ('*.py3tb',), ('text/x-python3-traceback',)), @@ -277,7 +278,7 @@ LEXERS = { 'QBasicLexer': ('pygments.lexers.qbasic', 'QBasic', ('qbasic', 'basic'), ('*.BAS', '*.bas'), ('text/basic',)), 'QmlLexer': ('pygments.lexers.web', 'QML', ('qml',), ('*.qml',), ('application/x-qml',)), 'RConsoleLexer': ('pygments.lexers.math', 'RConsole', ('rconsole', 'rout'), ('*.Rout',), ()), - 'RPMSpecLexer': ('pygments.lexers.other', 'RPMSpec', ('spec',), ('*.spec',), ('text/x-rpm-spec',)), + 'RPMSpecLexer': ('pygments.lexers.installers', 'RPMSpec', ('spec',), ('*.spec',), ('text/x-rpm-spec',)), 'RacketLexer': ('pygments.lexers.functional', 'Racket', ('racket', 'rkt'), ('*.rkt', '*.rktd', '*.rktl'), ('text/x-racket', 'application/x-racket')), 'RagelCLexer': ('pygments.lexers.parsers', 'Ragel in C Host', ('ragel-c',), ('*.rl',), ()), 'RagelCppLexer': ('pygments.lexers.parsers', 'Ragel in CPP Host', ('ragel-cpp',), ('*.rl',), ()), @@ -290,14 +291,14 @@ LEXERS = { 'RawTokenLexer': ('pygments.lexers.special', 'Raw token data', ('raw',), (), ('application/x-pygments-tokens',)), 'RdLexer': ('pygments.lexers.math', 'Rd', ('rd',), ('*.Rd',), ('text/x-r-doc',)), 'RebolLexer': ('pygments.lexers.misc.rebol', 'REBOL', ('rebol',), ('*.r', '*.r3', '*.reb'), ('text/x-rebol',)), - 'RedLexer': ('pygments.lexers.other', 'Red', ('red', 'red/system'), ('*.red', '*.reds'), ('text/x-red', 'text/x-red-system')), - 'RedcodeLexer': ('pygments.lexers.other', 'Redcode', ('redcode',), ('*.cw',), ()), + 'RedLexer': ('pygments.lexers.misc.rebol', 'Red', ('red', 'red/system'), ('*.red', '*.reds'), ('text/x-red', 'text/x-red-system')), + 'RedcodeLexer': ('pygments.lexers.esoteric', 'Redcode', ('redcode',), ('*.cw',), ()), 'RegeditLexer': ('pygments.lexers.text', 'reg', ('registry',), ('*.reg',), ('text/x-windows-registry',)), - 'RexxLexer': ('pygments.lexers.other', 'Rexx', ('rexx', 'arexx'), ('*.rexx', '*.rex', '*.rx', '*.arexx'), ('text/x-rexx',)), + 'RexxLexer': ('pygments.lexers.scripting', 'Rexx', ('rexx', 'arexx'), ('*.rexx', '*.rex', '*.rx', '*.arexx'), ('text/x-rexx',)), 'RhtmlLexer': ('pygments.lexers.templates', 'RHTML', ('rhtml', 'html+erb', 'html+ruby'), ('*.rhtml',), ('text/html+ruby',)), 'RobotFrameworkLexer': ('pygments.lexers.robotframework', 'RobotFramework', ('robotframework',), ('*.txt', '*.robot'), ('text/x-robotframework',)), 'RqlLexer': ('pygments.lexers.sql', 'RQL', ('rql',), ('*.rql',), ('text/x-rql',)), - 'RslLexer': ('pygments.lexers.other', 'RSL', ('rsl',), ('*.rsl',), ('text/rsl',)), + 'RslLexer': ('pygments.lexers.dsls', 'RSL', ('rsl',), ('*.rsl',), ('text/rsl',)), 'RstLexer': ('pygments.lexers.text', 'reStructuredText', ('rst', 'rest', 'restructuredtext'), ('*.rst', '*.rest'), ('text/x-rst', 'text/prs.fallenstein.rst')), 'RubyConsoleLexer': ('pygments.lexers.ruby', 'Ruby irb session', ('rbcon', 'irb'), (), ('text/x-ruby-shellsession',)), 'RubyLexer': ('pygments.lexers.ruby', 'Ruby', ('rb', 'ruby', 'duby'), ('*.rb', '*.rbw', 
'Rakefile', '*.rake', '*.gemspec', '*.rbx', '*.duby'), ('text/x-ruby', 'application/x-ruby')), @@ -313,10 +314,10 @@ LEXERS = { 'ShellSessionLexer': ('pygments.lexers.shell', 'Shell Session', ('shell-session',), ('*.shell-session',), ('application/x-sh-session',)), 'SlimLexer': ('pygments.lexers.web', 'Slim', ('slim',), ('*.slim',), ('text/x-slim',)), 'SmaliLexer': ('pygments.lexers.dalvik', 'Smali', ('smali',), ('*.smali',), ('text/smali',)), - 'SmalltalkLexer': ('pygments.lexers.other', 'Smalltalk', ('smalltalk', 'squeak', 'st'), ('*.st',), ('text/x-smalltalk',)), + 'SmalltalkLexer': ('pygments.lexers.misc.smalltalk', 'Smalltalk', ('smalltalk', 'squeak', 'st'), ('*.st',), ('text/x-smalltalk',)), 'SmartyLexer': ('pygments.lexers.templates', 'Smarty', ('smarty',), ('*.tpl',), ('application/x-smarty',)), 'SnobolLexer': ('pygments.lexers.misc.snobol', 'Snobol', ('snobol',), ('*.snobol',), ('text/x-snobol',)), - 'SourcePawnLexer': ('pygments.lexers.other', 'SourcePawn', ('sp',), ('*.sp',), ('text/x-sourcepawn',)), + 'SourcePawnLexer': ('pygments.lexers.misc.pawn', 'SourcePawn', ('sp',), ('*.sp',), ('text/x-sourcepawn',)), 'SourcesListLexer': ('pygments.lexers.text', 'Debian Sourcelist', ('sourceslist', 'sources.list', 'debsources'), ('sources.list',), ()), 'SparqlLexer': ('pygments.lexers.rdf', 'SPARQL', ('sparql',), ('*.rq', '*.sparql'), ('application/sparql-query',)), 'SqlLexer': ('pygments.lexers.sql', 'SQL', ('sql',), ('*.sql',), ('text/x-sql',)), @@ -337,7 +338,7 @@ LEXERS = { 'TypeScriptLexer': ('pygments.lexers.web', 'TypeScript', ('ts',), ('*.ts',), ('text/x-typescript',)), 'UrbiscriptLexer': ('pygments.lexers.other', 'UrbiScript', ('urbiscript',), ('*.u',), ('application/x-urbiscript',)), 'VCTreeStatusLexer': ('pygments.lexers.console', 'VCTreeStatus', ('vctreestatus',), (), ()), - 'VGLLexer': ('pygments.lexers.other', 'VGL', ('vgl',), ('*.rpf',), ()), + 'VGLLexer': ('pygments.lexers.dsls', 'VGL', ('vgl',), ('*.rpf',), ()), 'ValaLexer': ('pygments.lexers.c_like.other', 'Vala', ('vala', 'vapi'), ('*.vala', '*.vapi'), ('text/x-vala',)), 'VbNetAspxLexer': ('pygments.lexers.dotnet', 'aspx-vb', ('aspx-vb',), ('*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd'), ()), 'VbNetLexer': ('pygments.lexers.dotnet', 'VB.net', ('vb.net', 'vbnet'), ('*.vb', '*.bas'), ('text/x-vbnet', 'text/x-vba')), diff --git a/pygments/lexers/automation.py b/pygments/lexers/automation.py new file mode 100644 index 00000000..e012aae4 --- /dev/null +++ b/pygments/lexers/automation.py @@ -0,0 +1,373 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.automation + ~~~~~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for automation scripting languages. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +from pygments.lexer import RegexLexer, include, bygroups, combined +from pygments.token import Text, Comment, Operator, Name, String, \ + Number, Punctuation, Generic + +__all__ = ['AutohotkeyLexer', 'AutoItLexer'] + + +class AutohotkeyLexer(RegexLexer): + """ + For `autohotkey `_ source code. + + .. 
versionadded:: 1.4 + """ + name = 'autohotkey' + aliases = ['ahk', 'autohotkey'] + filenames = ['*.ahk', '*.ahkl'] + mimetypes = ['text/x-autohotkey'] + + tokens = { + 'root': [ + (r'^(\s*)(/\*)', bygroups(Text, Comment.Multiline), 'incomment'), + (r'^(\s*)(\()', bygroups(Text, Generic), 'incontinuation'), + (r'\s+;.*?$', Comment.Singleline), + (r'^;.*?$', Comment.Singleline), + (r'[]{}(),;[]', Punctuation), + (r'(in|is|and|or|not)\b', Operator.Word), + (r'\%[a-zA-Z_#@$][\w#@$]*\%', Name.Variable), + (r'!=|==|:=|\.=|<<|>>|[-~+/*%=<>&^|?:!.]', Operator), + include('commands'), + include('labels'), + include('builtInFunctions'), + include('builtInVariables'), + (r'"', String, combined('stringescape', 'dqs')), + include('numbers'), + (r'[a-zA-Z_#@$][\w#@$]*', Name), + (r'\\|\'', Text), + (r'\`([\,\%\`abfnrtv\-\+;])', String.Escape), + include('garbage'), + ], + 'incomment': [ + (r'^\s*\*/', Comment.Multiline, '#pop'), + (r'[^*/]', Comment.Multiline), + (r'[*/]', Comment.Multiline) + ], + 'incontinuation': [ + (r'^\s*\)', Generic, '#pop'), + (r'[^)]', Generic), + (r'[)]', Generic), + ], + 'commands': [ + (r'(?i)^(\s*)(global|local|static|' + r'#AllowSameLineComments|#ClipboardTimeout|#CommentFlag|' + r'#ErrorStdOut|#EscapeChar|#HotkeyInterval|#HotkeyModifierTimeout|' + r'#Hotstring|#IfWinActive|#IfWinExist|#IfWinNotActive|' + r'#IfWinNotExist|#IncludeAgain|#Include|#InstallKeybdHook|' + r'#InstallMouseHook|#KeyHistory|#LTrim|#MaxHotkeysPerInterval|' + r'#MaxMem|#MaxThreads|#MaxThreadsBuffer|#MaxThreadsPerHotkey|' + r'#NoEnv|#NoTrayIcon|#Persistent|#SingleInstance|#UseHook|' + r'#WinActivateForce|AutoTrim|BlockInput|Break|Click|ClipWait|' + r'Continue|Control|ControlClick|ControlFocus|ControlGetFocus|' + r'ControlGetPos|ControlGetText|ControlGet|ControlMove|ControlSend|' + r'ControlSendRaw|ControlSetText|CoordMode|Critical|' + r'DetectHiddenText|DetectHiddenWindows|Drive|DriveGet|' + r'DriveSpaceFree|Edit|Else|EnvAdd|EnvDiv|EnvGet|EnvMult|EnvSet|' + r'EnvSub|EnvUpdate|Exit|ExitApp|FileAppend|' + r'FileCopy|FileCopyDir|FileCreateDir|FileCreateShortcut|' + r'FileDelete|FileGetAttrib|FileGetShortcut|FileGetSize|' + r'FileGetTime|FileGetVersion|FileInstall|FileMove|FileMoveDir|' + r'FileRead|FileReadLine|FileRecycle|FileRecycleEmpty|' + r'FileRemoveDir|FileSelectFile|FileSelectFolder|FileSetAttrib|' + r'FileSetTime|FormatTime|GetKeyState|Gosub|Goto|GroupActivate|' + r'GroupAdd|GroupClose|GroupDeactivate|Gui|GuiControl|' + r'GuiControlGet|Hotkey|IfEqual|IfExist|IfGreaterOrEqual|IfGreater|' + r'IfInString|IfLess|IfLessOrEqual|IfMsgBox|IfNotEqual|IfNotExist|' + r'IfNotInString|IfWinActive|IfWinExist|IfWinNotActive|' + r'IfWinNotExist|If |ImageSearch|IniDelete|IniRead|IniWrite|' + r'InputBox|Input|KeyHistory|KeyWait|ListHotkeys|ListLines|' + r'ListVars|Loop|Menu|MouseClickDrag|MouseClick|MouseGetPos|' + r'MouseMove|MsgBox|OnExit|OutputDebug|Pause|PixelGetColor|' + r'PixelSearch|PostMessage|Process|Progress|Random|RegDelete|' + r'RegRead|RegWrite|Reload|Repeat|Return|RunAs|RunWait|Run|' + r'SendEvent|SendInput|SendMessage|SendMode|SendPlay|SendRaw|Send|' + r'SetBatchLines|SetCapslockState|SetControlDelay|' + r'SetDefaultMouseSpeed|SetEnv|SetFormat|SetKeyDelay|' + r'SetMouseDelay|SetNumlockState|SetScrollLockState|' + r'SetStoreCapslockMode|SetTimer|SetTitleMatchMode|' + r'SetWinDelay|SetWorkingDir|Shutdown|Sleep|Sort|SoundBeep|' + r'SoundGet|SoundGetWaveVolume|SoundPlay|SoundSet|' + r'SoundSetWaveVolume|SplashImage|SplashTextOff|SplashTextOn|' + 
r'SplitPath|StatusBarGetText|StatusBarWait|StringCaseSense|' + r'StringGetPos|StringLeft|StringLen|StringLower|StringMid|' + r'StringReplace|StringRight|StringSplit|StringTrimLeft|' + r'StringTrimRight|StringUpper|Suspend|SysGet|Thread|ToolTip|' + r'Transform|TrayTip|URLDownloadToFile|While|WinActivate|' + r'WinActivateBottom|WinClose|WinGetActiveStats|WinGetActiveTitle|' + r'WinGetClass|WinGetPos|WinGetText|WinGetTitle|WinGet|WinHide|' + r'WinKill|WinMaximize|WinMenuSelectItem|WinMinimizeAllUndo|' + r'WinMinimizeAll|WinMinimize|WinMove|WinRestore|WinSetTitle|' + r'WinSet|WinShow|WinWaitActive|WinWaitClose|WinWaitNotActive|' + r'WinWait)\b', bygroups(Text, Name.Builtin)), + ], + 'builtInFunctions': [ + (r'(?i)(Abs|ACos|Asc|ASin|ATan|Ceil|Chr|Cos|DllCall|Exp|FileExist|' + r'Floor|GetKeyState|IL_Add|IL_Create|IL_Destroy|InStr|IsFunc|' + r'IsLabel|Ln|Log|LV_Add|LV_Delete|LV_DeleteCol|LV_GetCount|' + r'LV_GetNext|LV_GetText|LV_Insert|LV_InsertCol|LV_Modify|' + r'LV_ModifyCol|LV_SetImageList|Mod|NumGet|NumPut|OnMessage|' + r'RegExMatch|RegExReplace|RegisterCallback|Round|SB_SetIcon|' + r'SB_SetParts|SB_SetText|Sin|Sqrt|StrLen|SubStr|Tan|TV_Add|' + r'TV_Delete|TV_GetChild|TV_GetCount|TV_GetNext|TV_Get|' + r'TV_GetParent|TV_GetPrev|TV_GetSelection|TV_GetText|TV_Modify|' + r'VarSetCapacity|WinActive|WinExist|Object|ComObjActive|' + r'ComObjArray|ComObjEnwrap|ComObjUnwrap|ComObjParameter|' + r'ComObjType|ComObjConnect|ComObjCreate|ComObjGet|ComObjError|' + r'ComObjValue|Insert|MinIndex|MaxIndex|Remove|SetCapacity|' + r'GetCapacity|GetAddress|_NewEnum|FileOpen|Read|Write|ReadLine|' + r'WriteLine|ReadNumType|WriteNumType|RawRead|RawWrite|Seek|Tell|' + r'Close|Next|IsObject|StrPut|StrGet|Trim|LTrim|RTrim)\b', + Name.Function), + ], + 'builtInVariables': [ + (r'(?i)(A_AhkPath|A_AhkVersion|A_AppData|A_AppDataCommon|' + r'A_AutoTrim|A_BatchLines|A_CaretX|A_CaretY|A_ComputerName|' + r'A_ControlDelay|A_Cursor|A_DDDD|A_DDD|A_DD|A_DefaultMouseSpeed|' + r'A_Desktop|A_DesktopCommon|A_DetectHiddenText|' + r'A_DetectHiddenWindows|A_EndChar|A_EventInfo|A_ExitReason|' + r'A_FormatFloat|A_FormatInteger|A_Gui|A_GuiEvent|A_GuiControl|' + r'A_GuiControlEvent|A_GuiHeight|A_GuiWidth|A_GuiX|A_GuiY|A_Hour|' + r'A_IconFile|A_IconHidden|A_IconNumber|A_IconTip|A_Index|' + r'A_IPAddress1|A_IPAddress2|A_IPAddress3|A_IPAddress4|A_ISAdmin|' + r'A_IsCompiled|A_IsCritical|A_IsPaused|A_IsSuspended|A_KeyDelay|' + r'A_Language|A_LastError|A_LineFile|A_LineNumber|A_LoopField|' + r'A_LoopFileAttrib|A_LoopFileDir|A_LoopFileExt|A_LoopFileFullPath|' + r'A_LoopFileLongPath|A_LoopFileName|A_LoopFileShortName|' + r'A_LoopFileShortPath|A_LoopFileSize|A_LoopFileSizeKB|' + r'A_LoopFileSizeMB|A_LoopFileTimeAccessed|A_LoopFileTimeCreated|' + r'A_LoopFileTimeModified|A_LoopReadLine|A_LoopRegKey|' + r'A_LoopRegName|A_LoopRegSubkey|A_LoopRegTimeModified|' + r'A_LoopRegType|A_MDAY|A_Min|A_MM|A_MMM|A_MMMM|A_Mon|A_MouseDelay|' + r'A_MSec|A_MyDocuments|A_Now|A_NowUTC|A_NumBatchLines|A_OSType|' + r'A_OSVersion|A_PriorHotkey|A_ProgramFiles|A_Programs|' + r'A_ProgramsCommon|A_ScreenHeight|A_ScreenWidth|A_ScriptDir|' + r'A_ScriptFullPath|A_ScriptName|A_Sec|A_Space|A_StartMenu|' + r'A_StartMenuCommon|A_Startup|A_StartupCommon|A_StringCaseSense|' + r'A_Tab|A_Temp|A_ThisFunc|A_ThisHotkey|A_ThisLabel|A_ThisMenu|' + r'A_ThisMenuItem|A_ThisMenuItemPos|A_TickCount|A_TimeIdle|' + r'A_TimeIdlePhysical|A_TimeSincePriorHotkey|A_TimeSinceThisHotkey|' + r'A_TitleMatchMode|A_TitleMatchModeSpeed|A_UserName|A_WDay|' + 
r'A_WinDelay|A_WinDir|A_WorkingDir|A_YDay|A_YEAR|A_YWeek|A_YYYY|' + r'Clipboard|ClipboardAll|ComSpec|ErrorLevel|ProgramFiles|True|' + r'False|A_IsUnicode|A_FileEncoding|A_OSVersion|A_PtrSize)\b', + Name.Variable), + ], + 'labels': [ + # hotkeys and labels + # technically, hotkey names are limited to named keys and buttons + (r'(^\s*)([^:\s\(\"]+?:{1,2})', bygroups(Text, Name.Label)), + (r'(^\s*)(::[^:\s]+?::)', bygroups(Text, Name.Label)), + ], + 'numbers': [ + (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float), + (r'\d+[eE][+-]?[0-9]+', Number.Float), + (r'0\d+', Number.Oct), + (r'0[xX][a-fA-F0-9]+', Number.Hex), + (r'\d+L', Number.Integer.Long), + (r'\d+', Number.Integer) + ], + 'stringescape': [ + (r'\"\"|\`([\,\%\`abfnrtv])', String.Escape), + ], + 'strings': [ + (r'[^"\n]+', String), + ], + 'dqs': [ + (r'"', String, '#pop'), + include('strings') + ], + 'garbage': [ + (r'[^\S\n]', Text), + # (r'.', Text), # no cheating + ], + } + + +class AutoItLexer(RegexLexer): + """ + For `AutoIt `_ files. + + AutoIt is a freeware BASIC-like scripting language + designed for automating the Windows GUI and general scripting + + .. versionadded:: 1.6 + """ + name = 'AutoIt' + aliases = ['autoit'] + filenames = ['*.au3'] + mimetypes = ['text/x-autoit'] + + # Keywords, functions, macros from au3.keywords.properties + # which can be found in AutoIt installed directory, e.g. + # c:\Program Files (x86)\AutoIt3\SciTE\au3.keywords.properties + + keywords = """\ + #include-once #include #endregion #forcedef #forceref #region + and byref case continueloop dim do else elseif endfunc endif + endselect exit exitloop for func global + if local next not or return select step + then to until wend while exit""".split() + + functions = """\ + abs acos adlibregister adlibunregister asc ascw asin assign atan + autoitsetoption autoitwingettitle autoitwinsettitle beep binary binarylen + binarymid binarytostring bitand bitnot bitor bitrotate bitshift bitxor + blockinput break call cdtray ceiling chr chrw clipget clipput consoleread + consolewrite consolewriteerror controlclick controlcommand controldisable + controlenable controlfocus controlgetfocus controlgethandle controlgetpos + controlgettext controlhide controllistview controlmove controlsend + controlsettext controlshow controltreeview cos dec dircopy dircreate + dirgetsize dirmove dirremove dllcall dllcalladdress dllcallbackfree + dllcallbackgetptr dllcallbackregister dllclose dllopen dllstructcreate + dllstructgetdata dllstructgetptr dllstructgetsize dllstructsetdata + drivegetdrive drivegetfilesystem drivegetlabel drivegetserial drivegettype + drivemapadd drivemapdel drivemapget drivesetlabel drivespacefree + drivespacetotal drivestatus envget envset envupdate eval execute exp + filechangedir fileclose filecopy filecreatentfslink filecreateshortcut + filedelete fileexists filefindfirstfile filefindnextfile fileflush + filegetattrib filegetencoding filegetlongname filegetpos filegetshortcut + filegetshortname filegetsize filegettime filegetversion fileinstall filemove + fileopen fileopendialog fileread filereadline filerecycle filerecycleempty + filesavedialog fileselectfolder filesetattrib filesetpos filesettime + filewrite filewriteline floor ftpsetproxy guicreate guictrlcreateavi + guictrlcreatebutton guictrlcreatecheckbox guictrlcreatecombo + guictrlcreatecontextmenu guictrlcreatedate guictrlcreatedummy + guictrlcreateedit guictrlcreategraphic guictrlcreategroup guictrlcreateicon + guictrlcreateinput guictrlcreatelabel guictrlcreatelist + 
guictrlcreatelistview guictrlcreatelistviewitem guictrlcreatemenu + guictrlcreatemenuitem guictrlcreatemonthcal guictrlcreateobj + guictrlcreatepic guictrlcreateprogress guictrlcreateradio + guictrlcreateslider guictrlcreatetab guictrlcreatetabitem + guictrlcreatetreeview guictrlcreatetreeviewitem guictrlcreateupdown + guictrldelete guictrlgethandle guictrlgetstate guictrlread guictrlrecvmsg + guictrlregisterlistviewsort guictrlsendmsg guictrlsendtodummy + guictrlsetbkcolor guictrlsetcolor guictrlsetcursor guictrlsetdata + guictrlsetdefbkcolor guictrlsetdefcolor guictrlsetfont guictrlsetgraphic + guictrlsetimage guictrlsetlimit guictrlsetonevent guictrlsetpos + guictrlsetresizing guictrlsetstate guictrlsetstyle guictrlsettip guidelete + guigetcursorinfo guigetmsg guigetstyle guiregistermsg guisetaccelerators + guisetbkcolor guisetcoord guisetcursor guisetfont guisethelp guiseticon + guisetonevent guisetstate guisetstyle guistartgroup guiswitch hex hotkeyset + httpsetproxy httpsetuseragent hwnd inetclose inetget inetgetinfo inetgetsize + inetread inidelete iniread inireadsection inireadsectionnames + inirenamesection iniwrite iniwritesection inputbox int isadmin isarray + isbinary isbool isdeclared isdllstruct isfloat ishwnd isint iskeyword + isnumber isobj isptr isstring log memgetstats mod mouseclick mouseclickdrag + mousedown mousegetcursor mousegetpos mousemove mouseup mousewheel msgbox + number objcreate objcreateinterface objevent objevent objget objname + onautoitexitregister onautoitexitunregister opt ping pixelchecksum + pixelgetcolor pixelsearch pluginclose pluginopen processclose processexists + processgetstats processlist processsetpriority processwait processwaitclose + progressoff progresson progressset ptr random regdelete regenumkey + regenumval regread regwrite round run runas runaswait runwait send + sendkeepactive seterror setextended shellexecute shellexecutewait shutdown + sin sleep soundplay soundsetwavevolume splashimageon splashoff splashtexton + sqrt srandom statusbargettext stderrread stdinwrite stdioclose stdoutread + string stringaddcr stringcompare stringformat stringfromasciiarray + stringinstr stringisalnum stringisalpha stringisascii stringisdigit + stringisfloat stringisint stringislower stringisspace stringisupper + stringisxdigit stringleft stringlen stringlower stringmid stringregexp + stringregexpreplace stringreplace stringright stringsplit stringstripcr + stringstripws stringtoasciiarray stringtobinary stringtrimleft + stringtrimright stringupper tan tcpaccept tcpclosesocket tcpconnect + tcplisten tcpnametoip tcprecv tcpsend tcpshutdown tcpstartup timerdiff + timerinit tooltip traycreateitem traycreatemenu traygetmsg trayitemdelete + trayitemgethandle trayitemgetstate trayitemgettext trayitemsetonevent + trayitemsetstate trayitemsettext traysetclick trayseticon traysetonevent + traysetpauseicon traysetstate traysettooltip traytip ubound udpbind + udpclosesocket udpopen udprecv udpsend udpshutdown udpstartup vargettype + winactivate winactive winclose winexists winflash wingetcaretpos + wingetclasslist wingetclientsize wingethandle wingetpos wingetprocess + wingetstate wingettext wingettitle winkill winlist winmenuselectitem + winminimizeall winminimizeallundo winmove winsetontop winsetstate + winsettitle winsettrans winwait winwaitactive winwaitclose + winwaitnotactive""".split() + + macros = """\ + @appdatacommondir @appdatadir @autoitexe @autoitpid @autoitversion + @autoitx64 @com_eventobj @commonfilesdir @compiled @computername @comspec + @cpuarch 
@cr @crlf @desktopcommondir @desktopdepth @desktopdir + @desktopheight @desktoprefresh @desktopwidth @documentscommondir @error + @exitcode @exitmethod @extended @favoritescommondir @favoritesdir + @gui_ctrlhandle @gui_ctrlid @gui_dragfile @gui_dragid @gui_dropid + @gui_winhandle @homedrive @homepath @homeshare @hotkeypressed @hour + @ipaddress1 @ipaddress2 @ipaddress3 @ipaddress4 @kblayout @lf + @logondnsdomain @logondomain @logonserver @mday @min @mon @msec @muilang + @mydocumentsdir @numparams @osarch @osbuild @oslang @osservicepack @ostype + @osversion @programfilesdir @programscommondir @programsdir @scriptdir + @scriptfullpath @scriptlinenumber @scriptname @sec @startmenucommondir + @startmenudir @startupcommondir @startupdir @sw_disable @sw_enable @sw_hide + @sw_lock @sw_maximize @sw_minimize @sw_restore @sw_show @sw_showdefault + @sw_showmaximized @sw_showminimized @sw_showminnoactive @sw_showna + @sw_shownoactivate @sw_shownormal @sw_unlock @systemdir @tab @tempdir + @tray_id @trayiconflashing @trayiconvisible @username @userprofiledir @wday + @windowsdir @workingdir @yday @year""".split() + + tokens = { + 'root': [ + (r';.*\n', Comment.Single), + (r'(#comments-start|#cs).*?(#comments-end|#ce)', Comment.Multiline), + (r'[\[\]{}(),;]', Punctuation), + (r'(and|or|not)\b', Operator.Word), + (r'[\$|@][a-zA-Z_]\w*', Name.Variable), + (r'!=|==|:=|\.=|<<|>>|[-~+/*%=<>&^|?:!.]', Operator), + include('commands'), + include('labels'), + include('builtInFunctions'), + include('builtInMacros'), + (r'"', String, combined('stringescape', 'dqs')), + include('numbers'), + (r'[a-zA-Z_#@$][\w#@$]*', Name), + (r'\\|\'', Text), + (r'\`([\,\%\`abfnrtv\-\+;])', String.Escape), + (r'_\n', Text), # Line continuation + include('garbage'), + ], + 'commands': [ + (r'(?i)(\s*)(%s)\b' % '|'.join(keywords), + bygroups(Text, Name.Builtin)), + ], + 'builtInFunctions': [ + (r'(?i)(%s)\b' % '|'.join(functions), + Name.Function), + ], + 'builtInMacros': [ + (r'(?i)(%s)\b' % '|'.join(macros), + Name.Variable.Global), + ], + 'labels': [ + # sendkeys + (r'(^\s*)({\S+?})', bygroups(Text, Name.Label)), + ], + 'numbers': [ + (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float), + (r'\d+[eE][+-]?[0-9]+', Number.Float), + (r'0\d+', Number.Oct), + (r'0[xX][a-fA-F0-9]+', Number.Hex), + (r'\d+L', Number.Integer.Long), + (r'\d+', Number.Integer) + ], + 'stringescape': [ + (r'\"\"|\`([\,\%\`abfnrtv])', String.Escape), + ], + 'strings': [ + (r'[^"\n]+', String), + ], + 'dqs': [ + (r'"', String, '#pop'), + include('strings') + ], + 'garbage': [ + (r'[^\S\n]', Text), + ], + } diff --git a/pygments/lexers/business.py b/pygments/lexers/business.py index 01a15eaa..b02a8beb 100644 --- a/pygments/lexers/business.py +++ b/pygments/lexers/business.py @@ -17,7 +17,8 @@ from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ from pygments.lexers._openedgebuiltins import OPENEDGEKEYWORDS -__all__ = ['CobolLexer', 'CobolFreeformatLexer', 'ABAPLexer', 'OpenEdgeLexer'] +__all__ = ['CobolLexer', 'CobolFreeformatLexer', 'ABAPLexer', 'OpenEdgeLexer', + 'GoodDataCLLexer', 'MaqlLexer'] class CobolLexer(RegexLexer): @@ -477,3 +478,112 @@ class OpenEdgeLexer(RegexLexer): (r'}', Comment.Preproc, '#pop'), ], } + + +class GoodDataCLLexer(RegexLexer): + """ + Lexer for `GoodData-CL `_ + script files. + + ..
versionadded:: 1.4 + """ + + name = 'GoodData-CL' + aliases = ['gooddata-cl'] + filenames = ['*.gdc'] + mimetypes = ['text/x-gooddata-cl'] + + flags = re.IGNORECASE + tokens = { + 'root': [ + # Comments + (r'#.*', Comment.Single), + # Function call + (r'[a-z]\w*', Name.Function), + # Argument list + (r'\(', Punctuation, 'args-list'), + # Punctuation + (r';', Punctuation), + # Space is not significant + (r'\s+', Text) + ], + 'args-list': [ + (r'\)', Punctuation, '#pop'), + (r',', Punctuation), + (r'[a-z]\w*', Name.Variable), + (r'=', Operator), + (r'"', String, 'string-literal'), + (r'[0-9]+(?:\.[0-9]+)?(?:[eE][+-]?[0-9]{1,3})?', Number), + # Space is not significant + (r'\s', Text) + ], + 'string-literal': [ + (r'\\[tnrfbae"\\]', String.Escape), + (r'"', String, '#pop'), + (r'[^\\"]+', String) + ] + } + + +class MaqlLexer(RegexLexer): + """ + Lexer for `GoodData MAQL + `_ + scripts. + + .. versionadded:: 1.4 + """ + + name = 'MAQL' + aliases = ['maql'] + filenames = ['*.maql'] + mimetypes = ['text/x-gooddata-maql', 'application/x-gooddata-maql'] + + flags = re.IGNORECASE + tokens = { + 'root': [ + # IDENTITY + (r'IDENTIFIER\b', Name.Builtin), + # IDENTIFIER + (r'\{[^}]+\}', Name.Variable), + # NUMBER + (r'[0-9]+(?:\.[0-9]+)?(?:[eE][+-]?[0-9]{1,3})?', Number), + # STRING + (r'"', String, 'string-literal'), + # RELATION + (r'\<\>|\!\=', Operator), + (r'\=|\>\=|\>|\<\=|\<', Operator), + # := + (r'\:\=', Operator), + # OBJECT + (r'\[[^]]+\]', Name.Variable.Class), + # keywords + (words(( + 'DIMENSION', 'DIMENSIONS', 'BOTTOM', 'METRIC', 'COUNT', 'OTHER', + 'FACT', 'WITH', 'TOP', 'OR', 'ATTRIBUTE', 'CREATE', 'PARENT', + 'FALSE', 'ROW', 'ROWS', 'FROM', 'ALL', 'AS', 'PF', 'COLUMN', + 'COLUMNS', 'DEFINE', 'REPORT', 'LIMIT', 'TABLE', 'LIKE', 'AND', + 'BY', 'BETWEEN', 'EXCEPT', 'SELECT', 'MATCH', 'WHERE', 'TRUE', + 'FOR', 'IN', 'WITHOUT', 'FILTER', 'ALIAS', 'ORDER', 'FACT', + 'WHEN', 'NOT', 'ON', 'KEYS', 'KEY', 'FULLSET', 'PRIMARY', + 'LABELS', 'LABEL', 'VISUAL', 'TITLE', 'DESCRIPTION', 'FOLDER', + 'ALTER', 'DROP', 'ADD', 'DATASET', 'DATATYPE', 'INT', 'BIGINT', + 'DOUBLE', 'DATE', 'VARCHAR', 'DECIMAL', 'SYNCHRONIZE', 'TYPE', + 'DEFAULT', 'ORDER', 'ASC', 'DESC', 'HYPERLINK', 'INCLUDE', + 'TEMPLATE', 'MODIFY'), suffix=r'\b'), + Keyword), + # FUNCNAME + (r'[a-z]\w*\b', Name.Function), + # Comments + (r'#.*', Comment.Single), + # Punctuation + (r'[,;\(\)]', Punctuation), + # Space is not significant + (r'\s+', Text) + ], + 'string-literal': [ + (r'\\[tnrfbae"\\]', String.Escape), + (r'"', String, '#pop'), + (r'[^\\"]+', String) + ], + } diff --git a/pygments/lexers/dsls.py b/pygments/lexers/dsls.py new file mode 100644 index 00000000..dc7a5ae2 --- /dev/null +++ b/pygments/lexers/dsls.py @@ -0,0 +1,441 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.dsls + ~~~~~~~~~~~~~~~~~~~~ + + Lexers for various domain-specific languages. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import re + +from pygments.lexer import RegexLexer, bygroups, words, include +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation, Literal + +__all__ = ['ProtoBufLexer', 'BroLexer', 'PuppetLexer', 'RslLexer', 'MscgenLexer', + 'VGLLexer', 'AlloyLexer'] + + +class ProtoBufLexer(RegexLexer): + """ + Lexer for `Protocol Buffer `_ + definition files. + + .. 
versionadded:: 1.4 + """ + + name = 'Protocol Buffer' + aliases = ['protobuf', 'proto'] + filenames = ['*.proto'] + + tokens = { + 'root': [ + (r'[ \t]+', Text), + (r'[,;{}\[\]\(\)]', Punctuation), + (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single), + (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment.Multiline), + (words(( + 'import', 'option', 'optional', 'required', 'repeated', 'default', + 'packed', 'ctype', 'extensions', 'to', 'max', 'rpc', 'returns', + 'oneof'), prefix=r'\b', suffix=r'\b'), + Keyword), + (words(( + 'int32', 'int64', 'uint32', 'uint64', 'sint32', 'sint64', + 'fixed32', 'fixed64', 'sfixed32', 'sfixed64', + 'float', 'double', 'bool', 'string', 'bytes'), suffix=r'\b'), + Keyword.Type), + (r'(true|false)\b', Keyword.Constant), + (r'(package)(\s+)', bygroups(Keyword.Namespace, Text), 'package'), + (r'(message|extend)(\s+)', + bygroups(Keyword.Declaration, Text), 'message'), + (r'(enum|group|service)(\s+)', + bygroups(Keyword.Declaration, Text), 'type'), + (r'\".*\"', String), + (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float), + (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float), + (r'(\-?(inf|nan))', Number.Float), + (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex), + (r'0[0-7]+[LlUu]*', Number.Oct), + (r'\d+[LlUu]*', Number.Integer), + (r'[+-=]', Operator), + (r'([a-zA-Z_][\w\.]*)([ \t]*)(=)', + bygroups(Name.Attribute, Text, Operator)), + ('[a-zA-Z_][\w\.]*', Name), + ], + 'package': [ + (r'[a-zA-Z_]\w*', Name.Namespace, '#pop') + ], + 'message': [ + (r'[a-zA-Z_]\w*', Name.Class, '#pop') + ], + 'type': [ + (r'[a-zA-Z_]\w*', Name, '#pop') + ], + } + + +class BroLexer(RegexLexer): + """ + For `Bro `_ scripts. + + .. versionadded:: 1.5 + """ + name = 'Bro' + aliases = ['bro'] + filenames = ['*.bro'] + + _hex = r'[0-9a-fA-F_]+' + _float = r'((\d*\.?\d+)|(\d+\.?\d*))([eE][-+]?\d+)?' + _h = r'[A-Za-z0-9][-A-Za-z0-9]*' + + tokens = { + 'root': [ + # Whitespace + (r'^@.*?\n', Comment.Preproc), + (r'#.*?\n', Comment.Single), + (r'\n', Text), + (r'\s+', Text), + (r'\\\n', Text), + # Keywords + (r'(add|alarm|break|case|const|continue|delete|do|else|enum|event' + r'|export|for|function|if|global|hook|local|module|next' + r'|of|print|redef|return|schedule|switch|type|when|while)\b', Keyword), + (r'(addr|any|bool|count|counter|double|file|int|interval|net' + r'|pattern|port|record|set|string|subnet|table|time|timer' + r'|vector)\b', Keyword.Type), + (r'(T|F)\b', Keyword.Constant), + (r'(&)((?:add|delete|expire)_func|attr|(?:create|read|write)_expire' + r'|default|disable_print_hook|raw_output|encrypt|group|log' + r'|mergeable|optional|persistent|priority|redef' + r'|rotate_(?:interval|size)|synchronized)\b', + bygroups(Punctuation, Keyword)), + (r'\s+module\b', Keyword.Namespace), + # Addresses, ports and networks + (r'\d+/(tcp|udp|icmp|unknown)\b', Number), + (r'(\d+\.){3}\d+', Number), + (r'(' + _hex + r'){7}' + _hex, Number), + (r'0x' + _hex + r'(' + _hex + r'|:)*::(' + _hex + r'|:)*', Number), + (r'((\d+|:)(' + _hex + r'|:)*)?::(' + _hex + r'|:)*', Number), + (r'(\d+\.\d+\.|(\d+\.){2}\d+)', Number), + # Hostnames + (_h + r'(\.' 
+ _h + r')+', String), + # Numeric + (_float + r'\s+(day|hr|min|sec|msec|usec)s?\b', Literal.Date), + (r'0[xX]' + _hex, Number.Hex), + (_float, Number.Float), + (r'\d+', Number.Integer), + (r'/', String.Regex, 'regex'), + (r'"', String, 'string'), + # Operators + (r'[!%*/+:<=>?~|-]', Operator), + (r'([-+=&|]{2}|[+=!><-]=)', Operator), + (r'(in|match)\b', Operator.Word), + (r'[{}()\[\]$.,;]', Punctuation), + # Identfier + (r'([_a-zA-Z]\w*)(::)', bygroups(Name, Name.Namespace)), + (r'[a-zA-Z_][a-zA-Z_0-9]*', Name) + ], + 'string': [ + (r'"', String, '#pop'), + (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape), + (r'[^\\"\n]+', String), + (r'\\\n', String), + (r'\\', String) + ], + 'regex': [ + (r'/', String.Regex, '#pop'), + (r'\\[\\nt/]', String.Regex), # String.Escape is too intense here. + (r'[^\\/\n]+', String.Regex), + (r'\\\n', String.Regex), + (r'\\', String.Regex) + ] + } + + +class PuppetLexer(RegexLexer): + """ + For `Puppet `__ configuration DSL. + + .. versionadded:: 1.6 + """ + name = 'Puppet' + aliases = ['puppet'] + filenames = ['*.pp'] + + tokens = { + 'root': [ + include('comments'), + include('keywords'), + include('names'), + include('numbers'), + include('operators'), + include('strings'), + + (r'[]{}:(),;[]', Punctuation), + (r'[^\S\n]+', Text), + ], + + 'comments': [ + (r'\s*#.*$', Comment), + (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline), + ], + + 'operators': [ + (r'(=>|\?|<|>|=|\+|-|/|\*|~|!|\|)', Operator), + (r'(in|and|or|not)\b', Operator.Word), + ], + + 'names': [ + ('[a-zA-Z_]\w*', Name.Attribute), + (r'(\$\S+)(\[)(\S+)(\])', bygroups(Name.Variable, Punctuation, + String, Punctuation)), + (r'\$\S+', Name.Variable), + ], + + 'numbers': [ + # Copypasta from the Python lexer + (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?j?', Number.Float), + (r'\d+[eE][+-]?[0-9]+j?', Number.Float), + (r'0[0-7]+j?', Number.Oct), + (r'0[xX][a-fA-F0-9]+', Number.Hex), + (r'\d+L', Number.Integer.Long), + (r'\d+j?', Number.Integer) + ], + + 'keywords': [ + # Left out 'group' and 'require' + # Since they're often used as attributes + (r'(?i)(absent|alert|alias|audit|augeas|before|case|check|class|' + r'computer|configured|contained|create_resources|crit|cron|debug|' + r'default|define|defined|directory|else|elsif|emerg|err|exec|' + r'extlookup|fail|false|file|filebucket|fqdn_rand|generate|host|if|' + r'import|include|info|inherits|inline_template|installed|' + r'interface|k5login|latest|link|loglevel|macauthorization|' + r'mailalias|maillist|mcx|md5|mount|mounted|nagios_command|' + r'nagios_contact|nagios_contactgroup|nagios_host|' + r'nagios_hostdependency|nagios_hostescalation|nagios_hostextinfo|' + r'nagios_hostgroup|nagios_service|nagios_servicedependency|' + r'nagios_serviceescalation|nagios_serviceextinfo|' + r'nagios_servicegroup|nagios_timeperiod|node|noop|notice|notify|' + r'package|present|purged|realize|regsubst|resources|role|router|' + r'running|schedule|scheduled_task|search|selboolean|selmodule|' + r'service|sha1|shellquote|split|sprintf|ssh_authorized_key|sshkey|' + r'stage|stopped|subscribe|tag|tagged|template|tidy|true|undef|' + r'unmounted|user|versioncmp|vlan|warning|yumrepo|zfs|zone|' + r'zpool)\b', Keyword), + ], + + 'strings': [ + (r'"([^"])*"', String), + (r"'(\\'|[^'])*'", String), + ], + + } + + +class RslLexer(RegexLexer): + """ + `RSL `_ is the formal specification + language used in RAISE (Rigorous Approach to Industrial Software Engineering) + method. + + .. 
versionadded:: 2.0 + """ + name = 'RSL' + aliases = ['rsl'] + filenames = ['*.rsl'] + mimetypes = ['text/rsl'] + + flags = re.MULTILINE | re.DOTALL + + tokens = { + 'root': [ + (words(( + 'Bool', 'Char', 'Int', 'Nat', 'Real', 'Text', 'Unit', 'abs', + 'all', 'always', 'any', 'as', 'axiom', 'card', 'case', 'channel', + 'chaos', 'class', 'devt_relation', 'dom', 'elems', 'else', 'elif', + 'end', 'exists', 'extend', 'false', 'for', 'hd', 'hide', 'if', + 'in', 'is', 'inds', 'initialise', 'int', 'inter', 'isin', 'len', + 'let', 'local', 'ltl_assertion', 'object', 'of', 'out', 'post', + 'pre', 'read', 'real', 'rng', 'scheme', 'skip', 'stop', 'swap', + 'then', 'theory', 'test_case', 'tl', 'transition_system', 'true', + 'type', 'union', 'until', 'use', 'value', 'variable', 'while', + 'with', 'write', '~isin', '-inflist', '-infset', '-list', + '-set'), prefix=r'\b', suffix=r'\b'), + Keyword), + (r'(variable|value)\b', Keyword.Declaration), + (r'--.*?\n', Comment), + (r'<:.*?:>', Comment), + (r'\{!.*?!\}', Comment), + (r'/\*.*?\*/', Comment), + (r'^[ \t]*([\w]+)[ \t]*:[^:]', Name.Function), + (r'(^[ \t]*)([\w]+)([ \t]*\([\w\s,]*\)[ \t]*)(is|as)', + bygroups(Text, Name.Function, Text, Keyword)), + (r'\b[A-Z]\w*\b', Keyword.Type), + (r'(true|false)\b', Keyword.Constant), + (r'".*"', String), + (r'\'.\'', String.Char), + (r'(><|->|-m->|/\\|<=|<<=|<\.|\|\||\|\^\||-~->|-~m->|\\/|>=|>>|' + r'\.>|\+\+|-\\|<->|=>|:-|~=|\*\*|<<|>>=|\+>|!!|\|=\||#)', + Operator), + (r'[0-9]+\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float), + (r'0x[0-9a-f]+', Number.Hex), + (r'[0-9]+', Number.Integer), + (r'.', Text), + ], + } + + def analyse_text(text): + """ + Check for the most common text in the beginning of a RSL file. + """ + if re.search(r'scheme\s*.*?=\s*class\s*type', text, re.I) is not None: + return 1.0 + else: + return 0.01 + + +class MscgenLexer(RegexLexer): + """ + For `Mscgen `_ files. + + .. versionadded:: 1.6 + """ + name = 'Mscgen' + aliases = ['mscgen', 'msc'] + filenames = ['*.msc'] + + _var = r'(\w+|"(?:\\"|[^"])*")' + + tokens = { + 'root': [ + (r'msc\b', Keyword.Type), + # Options + (r'(hscale|HSCALE|width|WIDTH|wordwraparcs|WORDWRAPARCS' + r'|arcgradient|ARCGRADIENT)\b', Name.Property), + # Operators + (r'(abox|ABOX|rbox|RBOX|box|BOX|note|NOTE)\b', Operator.Word), + (r'(\.|-|\|){3}', Keyword), + (r'(?:-|=|\.|:){2}' + r'|<<=>>|<->|<=>|<<>>|<:>' + r'|->|=>>|>>|=>|:>|-x|-X' + r'|<-|<<=|<<|<=|<:|x-|X-|=', Operator), + # Names + (r'\*', Name.Builtin), + (_var, Name.Variable), + # Other + (r'\[', Punctuation, 'attrs'), + (r'\{|\}|,|;', Punctuation), + include('comments') + ], + 'attrs': [ + (r'\]', Punctuation, '#pop'), + (_var + r'(\s*)(=)(\s*)' + _var, + bygroups(Name.Attribute, Text.Whitespace, Operator, Text.Whitespace, + String)), + (r',', Punctuation), + include('comments') + ], + 'comments': [ + (r'(?://|#).*?\n', Comment.Single), + (r'/\*(?:.|\n)*?\*/', Comment.Multiline), + (r'[ \t\r\n]+', Text.Whitespace) + ] + } + + +class VGLLexer(RegexLexer): + """ + For `SampleManager VGL `_ + source code. + + .. versionadded:: 1.6 + """ + name = 'VGL' + aliases = ['vgl'] + filenames = ['*.rpf'] + + flags = re.MULTILINE | re.DOTALL | re.IGNORECASE + + tokens = { + 'root': [ + (r'\{[^\}]*\}', Comment.Multiline), + (r'declare', Keyword.Constant), + (r'(if|then|else|endif|while|do|endwhile|and|or|prompt|object' + r'|create|on|line|with|global|routine|value|endroutine|constant' + r'|global|set|join|library|compile_option|file|exists|create|copy' + r'|delete|enable|windows|name|notprotected)(?! 
*[=<>.,()])', + Keyword), + (r'(true|false|null|empty|error|locked)', Keyword.Constant), + (r'[~\^\*\#!%&\[\]\(\)<>\|+=:;,./?-]', Operator), + (r'"[^"]*"', String), + (r'(\.)([a-z_\$][\w\$]*)', bygroups(Operator, Name.Attribute)), + (r'[0-9][0-9]*(\.[0-9]+(e[+\-]?[0-9]+)?)?', Number), + (r'[a-z_\$][\w\$]*', Name), + (r'[\r\n]+', Text), + (r'\s+', Text) + ] + } + + +class AlloyLexer(RegexLexer): + """ + For `Alloy `_ source code. + + .. versionadded:: 2.0 + """ + + name = 'Alloy' + aliases = ['alloy'] + filenames = ['*.als'] + mimetypes = ['text/x-alloy'] + + flags = re.MULTILINE | re.DOTALL + + iden_rex = r'[a-zA-Z_][a-zA-Z0-9_\']*' + text_tuple = (r'[^\S\n]+', Text) + + tokens = { + 'sig': [ + (r'(extends)\b', Keyword, '#pop'), + (iden_rex, Name), + text_tuple, + (r',', Punctuation), + (r'\{', Operator, '#pop'), + ], + 'module': [ + text_tuple, + (iden_rex, Name, '#pop'), + ], + 'fun': [ + text_tuple, + (r'\{', Operator, '#pop'), + (iden_rex, Name, '#pop'), + ], + 'root': [ + (r'--.*?$', Comment.Single), + (r'//.*?$', Comment.Single), + (r'/\*.*?\*/', Comment.Multiline), + text_tuple, + (r'(module|open)(\s+)', bygroups(Keyword.Namespace, Text), + 'module'), + (r'(sig|enum)(\s+)', bygroups(Keyword.Declaration, Text), 'sig'), + (r'(iden|univ|none)\b', Keyword.Constant), + (r'(int|Int)\b', Keyword.Type), + (r'(this|abstract|extends|set|seq|one|lone|let)\b', Keyword), + (r'(all|some|no|sum|disj|when|else)\b', Keyword), + (r'(run|check|for|but|exactly|expect|as)\b', Keyword), + (r'(and|or|implies|iff|in)\b', Operator.Word), + (r'(fun|pred|fact|assert)(\s+)', bygroups(Keyword, Text), 'fun'), + (r'!|#|&&|\+\+|<<|>>|>=|<=>|<=|\.|->', Operator), + (r'[-+/*%=<>&!^|~\{\}\[\]\(\)\.]', Operator), + (iden_rex, Name), + (r'[:,]', Punctuation), + (r'[0-9]+', Number.Integer), + (r'"(\\\\|\\"|[^"])*"', String), + (r'\n', Text), + ] + } diff --git a/pygments/lexers/esoteric.py b/pygments/lexers/esoteric.py index 775945fd..2efc057b 100644 --- a/pygments/lexers/esoteric.py +++ b/pygments/lexers/esoteric.py @@ -9,15 +9,11 @@ :license: BSD, see LICENSE for details. """ -import re - -from pygments.lexer import RegexLexer, include, bygroups, using, \ - this, inherit, default, words -from pygments.util import get_bool_opt +from pygments.lexer import RegexLexer, include from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ Number, Punctuation, Error -__all__ = ['BrainfuckLexer', 'BefungeLexer'] +__all__ = ['BrainfuckLexer', 'BefungeLexer', 'RedcodeLexer'] class BrainfuckLexer(RegexLexer): @@ -67,16 +63,52 @@ class BefungeLexer(RegexLexer): tokens = { 'root': [ (r'[0-9a-f]', Number), - (r'[\+\*/%!`-]', Operator), # Traditional math - (r'[<>^v?\[\]rxjk]', Name.Variable), # Move, imperatives - (r'[:\\$.,n]', Name.Builtin), # Stack ops, imperatives + (r'[\+\*/%!`-]', Operator), # Traditional math + (r'[<>^v?\[\]rxjk]', Name.Variable), # Move, imperatives + (r'[:\\$.,n]', Name.Builtin), # Stack ops, imperatives (r'[|_mw]', Keyword), - (r'[{}]', Name.Tag), # Befunge-98 stack ops - (r'".*?"', String.Double), # Strings don't appear to allow escapes - (r'\'.', String.Single), # Single character - (r'[#;]', Comment), # Trampoline... depends on direction hit - (r'[pg&~=@iotsy]', Keyword), # Misc - (r'[()A-Z]', Comment), # Fingerprints - (r'\s+', Text), # Whitespace doesn't matter + (r'[{}]', Name.Tag), # Befunge-98 stack ops + (r'".*?"', String.Double), # Strings don't appear to allow escapes + (r'\'.', String.Single), # Single character + (r'[#;]', Comment), # Trampoline... 
depends on direction hit + (r'[pg&~=@iotsy]', Keyword), # Misc + (r'[()A-Z]', Comment), # Fingerprints + (r'\s+', Text), # Whitespace doesn't matter + ], + } + + +class RedcodeLexer(RegexLexer): + """ + A simple Redcode lexer based on ICWS'94. + Contributed by Adam Blinkinsop . + + .. versionadded:: 0.8 + """ + name = 'Redcode' + aliases = ['redcode'] + filenames = ['*.cw'] + + opcodes = ('DAT', 'MOV', 'ADD', 'SUB', 'MUL', 'DIV', 'MOD', + 'JMP', 'JMZ', 'JMN', 'DJN', 'CMP', 'SLT', 'SPL', + 'ORG', 'EQU', 'END') + modifiers = ('A', 'B', 'AB', 'BA', 'F', 'X', 'I') + + tokens = { + 'root': [ + # Whitespace: + (r'\s+', Text), + (r';.*$', Comment.Single), + # Lexemes: + # Identifiers + (r'\b(%s)\b' % '|'.join(opcodes), Name.Function), + (r'\b(%s)\b' % '|'.join(modifiers), Name.Decorator), + (r'[A-Za-z_][A-Za-z_0-9]+', Name), + # Operators + (r'[-+*/%]', Operator), + (r'[#$@<>]', Operator), # mode + (r'[.,]', Punctuation), # mode + # Numbers + (r'[-+]?\d+', Number.Integer), ], } diff --git a/pygments/lexers/installers.py b/pygments/lexers/installers.py new file mode 100644 index 00000000..758e8fbc --- /dev/null +++ b/pygments/lexers/installers.py @@ -0,0 +1,215 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.installers + ~~~~~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for installer/packager DSLs and formats. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import re + +from pygments.lexer import RegexLexer, include, bygroups, using, this +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Punctuation, Generic + +__all__ = ['NSISLexer', 'RPMSpecLexer'] + + +class NSISLexer(RegexLexer): + """ + For `NSIS `_ scripts. + + .. versionadded:: 1.6 + """ + name = 'NSIS' + aliases = ['nsis', 'nsi', 'nsh'] + filenames = ['*.nsi', '*.nsh'] + mimetypes = ['text/x-nsis'] + + flags = re.IGNORECASE + + tokens = { + 'root': [ + (r'[;\#].*\n', Comment), + (r"'.*?'", String.Single), + (r'"', String.Double, 'str_double'), + (r'`', String.Backtick, 'str_backtick'), + include('macro'), + include('interpol'), + include('basic'), + (r'\$\{[a-z_|][\w|]*\}', Keyword.Pseudo), + (r'/[a-z_]\w*', Name.Attribute), + ('.', Text), + ], + 'basic': [ + (r'(\n)(Function)(\s+)([._a-z][.\w]*)\b', + bygroups(Text, Keyword, Text, Name.Function)), + (r'\b([_a-z]\w*)(::)([a-z][a-z0-9]*)\b', + bygroups(Keyword.Namespace, Punctuation, Name.Function)), + (r'\b([_a-z]\w*)(:)', bygroups(Name.Label, Punctuation)), + (r'(\b[ULS]|\B)([\!\<\>=]?=|\<\>?|\>)\B', Operator), + (r'[|+-]', Operator), + (r'\\', Punctuation), + (r'\b(Abort|Add(?:BrandingImage|Size)|' + r'Allow(?:RootDirInstall|SkipFiles)|AutoCloseWindow|' + r'BG(?:Font|Gradient)|BrandingText|BringToFront|Call(?:InstDLL)?|' + r'(?:Sub)?Caption|ChangeUI|CheckBitmap|ClearErrors|CompletedText|' + r'ComponentText|CopyFiles|CRCCheck|' + r'Create(?:Directory|Font|Shortcut)|Delete(?:INI(?:Sec|Str)|' + r'Reg(?:Key|Value))?|DetailPrint|DetailsButtonText|' + r'Dir(?:Show|Text|Var|Verify)|(?:Disabled|Enabled)Bitmap|' + r'EnableWindow|EnumReg(?:Key|Value)|Exch|Exec(?:Shell|Wait)?|' + r'ExpandEnvStrings|File(?:BufSize|Close|ErrorText|Open|' + r'Read(?:Byte)?|Seek|Write(?:Byte)?)?|' + r'Find(?:Close|First|Next|Window)|FlushINI|Function(?:End)?|' + r'Get(?:CurInstType|CurrentAddress|DlgItem|DLLVersion(?:Local)?|' + r'ErrorLevel|FileTime(?:Local)?|FullPathName|FunctionAddress|' + r'InstDirError|LabelAddress|TempFileName)|' + r'Goto|HideWindow|Icon|' + r'If(?:Abort|Errors|FileExists|RebootFlag|Silent)|' + 
r'InitPluginsDir|Install(?:ButtonText|Colors|Dir(?:RegKey)?)|' + r'Inst(?:ProgressFlags|Type(?:[GS]etText)?)|Int(?:CmpU?|Fmt|Op)|' + r'IsWindow|LangString(?:UP)?|' + r'License(?:BkColor|Data|ForceSelection|LangString|Text)|' + r'LoadLanguageFile|LockWindow|Log(?:Set|Text)|MessageBox|' + r'MiscButtonText|Name|Nop|OutFile|(?:Uninst)?Page(?:Ex(?:End)?)?|' + r'PluginDir|Pop|Push|Quit|Read(?:(?:Env|INI|Reg)Str|RegDWORD)|' + r'Reboot|(?:Un)?RegDLL|Rename|RequestExecutionLevel|ReserveFile|' + r'Return|RMDir|SearchPath|Section(?:Divider|End|' + r'(?:(?:Get|Set)(?:Flags|InstTypes|Size|Text))|Group(?:End)?|In)?|' + r'SendMessage|Set(?:AutoClose|BrandingImage|Compress(?:ionLevel|' + r'or(?:DictSize)?)?|CtlColors|CurInstType|DatablockOptimize|' + r'DateSave|Details(?:Print|View)|Error(?:s|Level)|FileAttributes|' + r'Font|OutPath|Overwrite|PluginUnload|RebootFlag|ShellVarContext|' + r'Silent|StaticBkColor)|' + r'Show(?:(?:I|Uni)nstDetails|Window)|Silent(?:Un)?Install|Sleep|' + r'SpaceTexts|Str(?:CmpS?|Cpy|Len)|SubSection(?:End)?|' + r'Uninstall(?:ButtonText|(?:Sub)?Caption|EXEName|Icon|Text)|' + r'UninstPage|Var|VI(?:AddVersionKey|ProductVersion)|WindowIcon|' + r'Write(?:INIStr|Reg(:?Bin|DWORD|(?:Expand)?Str)|Uninstaller)|' + r'XPStyle)\b', Keyword), + (r'\b(CUR|END|(?:FILE_ATTRIBUTE_)?' + r'(?:ARCHIVE|HIDDEN|NORMAL|OFFLINE|READONLY|SYSTEM|TEMPORARY)|' + r'HK(CC|CR|CU|DD|LM|PD|U)|' + r'HKEY_(?:CLASSES_ROOT|CURRENT_(?:CONFIG|USER)|DYN_DATA|' + r'LOCAL_MACHINE|PERFORMANCE_DATA|USERS)|' + r'ID(?:ABORT|CANCEL|IGNORE|NO|OK|RETRY|YES)|' + r'MB_(?:ABORTRETRYIGNORE|DEFBUTTON[1-4]|' + r'ICON(?:EXCLAMATION|INFORMATION|QUESTION|STOP)|' + r'OK(?:CANCEL)?|RETRYCANCEL|RIGHT|SETFOREGROUND|TOPMOST|USERICON|' + r'YESNO(?:CANCEL)?)|SET|SHCTX|' + r'SW_(?:HIDE|SHOW(?:MAXIMIZED|MINIMIZED|NORMAL))|' + r'admin|all|auto|both|bottom|bzip2|checkbox|colored|current|false|' + r'force|hide|highest|if(?:diff|newer)|lastused|leave|left|' + r'listonly|lzma|nevershow|none|normal|off|on|pop|push|' + r'radiobuttons|right|show|silent|silentlog|smooth|textonly|top|' + r'true|try|user|zlib)\b', Name.Constant), + ], + 'macro': [ + (r'\!(addincludedir(?:dir)?|addplugindir|appendfile|cd|define|' + r'delfilefile|echo(?:message)?|else|endif|error|execute|' + r'if(?:macro)?n?(?:def)?|include|insertmacro|macro(?:end)?|packhdr|' + r'search(?:parse|replace)|system|tempfilesymbol|undef|verbose|' + r'warning)\b', Comment.Preproc), + ], + 'interpol': [ + (r'\$(R?[0-9])', Name.Builtin.Pseudo), # registers + (r'\$(ADMINTOOLS|APPDATA|CDBURN_AREA|COOKIES|COMMONFILES(?:32|64)|' + r'DESKTOP|DOCUMENTS|EXE(?:DIR|FILE|PATH)|FAVORITES|FONTS|HISTORY|' + r'HWNDPARENT|INTERNET_CACHE|LOCALAPPDATA|MUSIC|NETHOOD|PICTURES|' + r'PLUGINSDIR|PRINTHOOD|PROFILE|PROGRAMFILES(?:32|64)|QUICKLAUNCH|' + r'RECENT|RESOURCES(?:_LOCALIZED)?|SENDTO|SM(?:PROGRAMS|STARTUP)|' + r'STARTMENU|SYSDIR|TEMP(?:LATES)?|VIDEOS|WINDIR|\{NSISDIR\})', + Name.Builtin), + (r'\$(CMDLINE|INSTDIR|OUTDIR|LANGUAGE)', Name.Variable.Global), + (r'\$[a-z_]\w*', Name.Variable), + ], + 'str_double': [ + (r'"', String, '#pop'), + (r'\$(\\[nrt"]|\$)', String.Escape), + include('interpol'), + (r'.', String.Double), + ], + 'str_backtick': [ + (r'`', String, '#pop'), + (r'\$(\\[nrt"]|\$)', String.Escape), + include('interpol'), + (r'.', String.Double), + ], + } + + +class RPMSpecLexer(RegexLexer): + """ + For RPM ``.spec`` files. + + .. 
versionadded:: 1.6 + """ + + name = 'RPMSpec' + aliases = ['spec'] + filenames = ['*.spec'] + mimetypes = ['text/x-rpm-spec'] + + _directives = ('(?:package|prep|build|install|clean|check|pre[a-z]*|' + 'post[a-z]*|trigger[a-z]*|files)') + + tokens = { + 'root': [ + (r'#.*\n', Comment), + include('basic'), + ], + 'description': [ + (r'^(%' + _directives + ')(.*)$', + bygroups(Name.Decorator, Text), '#pop'), + (r'\n', Text), + (r'.', Text), + ], + 'changelog': [ + (r'\*.*\n', Generic.Subheading), + (r'^(%' + _directives + ')(.*)$', + bygroups(Name.Decorator, Text), '#pop'), + (r'\n', Text), + (r'.', Text), + ], + 'string': [ + (r'"', String.Double, '#pop'), + (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape), + include('interpol'), + (r'.', String.Double), + ], + 'basic': [ + include('macro'), + (r'(?i)^(Name|Version|Release|Epoch|Summary|Group|License|Packager|' + r'Vendor|Icon|URL|Distribution|Prefix|Patch[0-9]*|Source[0-9]*|' + r'Requires\(?[a-z]*\)?|[a-z]+Req|Obsoletes|Suggests|Provides|Conflicts|' + r'Build[a-z]+|[a-z]+Arch|Auto[a-z]+)(:)(.*)$', + bygroups(Generic.Heading, Punctuation, using(this))), + (r'^%description', Name.Decorator, 'description'), + (r'^%changelog', Name.Decorator, 'changelog'), + (r'^(%' + _directives + ')(.*)$', bygroups(Name.Decorator, Text)), + (r'%(attr|defattr|dir|doc(?:dir)?|setup|config(?:ure)?|' + r'make(?:install)|ghost|patch[0-9]+|find_lang|exclude|verify)', + Keyword), + include('interpol'), + (r"'.*?'", String.Single), + (r'"', String.Double, 'string'), + (r'.', Text), + ], + 'macro': [ + (r'%define.*\n', Comment.Preproc), + (r'%\{\!\?.*%define.*\}', Comment.Preproc), + (r'(%(?:if(?:n?arch)?|else(?:if)?|endif))(.*)$', + bygroups(Comment.Preproc, Text)), + ], + 'interpol': [ + (r'%\{?__[a-z_]+\}?', Name.Function), + (r'%\{?_([a-z_]+dir|[a-z_]+path|prefix)\}?', Keyword.Pseudo), + (r'%\{\?\w+\}', Name.Variable), + (r'\$\{?RPM_[A-Z0-9_]+\}?', Name.Variable.Global), + (r'%\{[a-zA-Z]\w+\}', Keyword.Constant), + ] + } diff --git a/pygments/lexers/misc/apl.py b/pygments/lexers/misc/apl.py new file mode 100644 index 00000000..a710721e --- /dev/null +++ b/pygments/lexers/misc/apl.py @@ -0,0 +1,101 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.misc.apl + ~~~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for APL. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +from pygments.lexer import RegexLexer +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation + +__all__ = ['APLLexer'] + + +class APLLexer(RegexLexer): + """ + A simple APL lexer. + + .. 
versionadded:: 2.0 + """ + name = 'APL' + aliases = ['apl'] + filenames = ['*.apl'] + + tokens = { + 'root': [ + # Whitespace + # ========== + (r'\s+', Text), + # + # Comment + # ======= + # '⍝' is traditional; '#' is supported by GNU APL and NGN (but not Dyalog) + (u'[⍝#].*$', Comment.Single), + # + # Strings + # ======= + (r'\'((\'\')|[^\'])*\'', String.Single), + (r'"(("")|[^"])*"', String.Double), # supported by NGN APL + # + # Punctuation + # =========== + # This token type is used for diamond and parenthesis + # but not for bracket and ; (see below) + (u'[⋄◇()]', Punctuation), + # + # Array indexing + # ============== + # Since this token type is very important in APL, it is not included in + # the punctuation token type but rather in the following one + (r'[\[\];]', String.Regex), + # + # Distinguished names + # =================== + # following IBM APL2 standard + (u'⎕[A-Za-zΔ∆⍙][A-Za-zΔ∆⍙_¯0-9]*', Name.Function), + # + # Labels + # ====== + # following IBM APL2 standard + # (u'[A-Za-zΔ∆⍙][A-Za-zΔ∆⍙_¯0-9]*:', Name.Label), + # + # Variables + # ========= + # following IBM APL2 standard + (u'[A-Za-zΔ∆⍙][A-Za-zΔ∆⍙_¯0-9]*', Name.Variable), + # + # Numbers + # ======= + (u'¯?(0[Xx][0-9A-Fa-f]+|[0-9]*\.?[0-9]+([Ee][+¯]?[0-9]+)?|¯|∞)' + u'([Jj]¯?(0[Xx][0-9A-Fa-f]+|[0-9]*\.?[0-9]+([Ee][+¯]?[0-9]+)?|¯|∞))?', + Number), + # + # Operators + # ========== + (u'[\.\\\/⌿⍀¨⍣⍨⍠⍤∘]', Name.Attribute), # closest token type + (u'[+\-×÷⌈⌊∣|⍳?*⍟○!⌹<≤=>≥≠≡≢∊⍷∪∩~∨∧⍱⍲⍴,⍪⌽⊖⍉↑↓⊂⊃⌷⍋⍒⊤⊥⍕⍎⊣⊢⍁⍂≈⌸⍯↗]', + Operator), + # + # Constant + # ======== + (u'⍬', Name.Constant), + # + # Quad symbol + # =========== + (u'[⎕⍞]', Name.Variable.Global), + # + # Arrows left/right + # ================= + (u'[←→]', Keyword.Declaration), + # + # D-Fn + # ==== + (u'[⍺⍵⍶⍹∇:]', Name.Builtin.Pseudo), + (r'[{}]', Keyword.Type), + ], + } diff --git a/pygments/lexers/misc/basic.py b/pygments/lexers/misc/basic.py new file mode 100644 index 00000000..104ca47b --- /dev/null +++ b/pygments/lexers/misc/basic.py @@ -0,0 +1,356 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.misc.basic + ~~~~~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for BASIC like languages (other than VB.net). + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import re + +from pygments.lexer import RegexLexer, bygroups, default, words +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation + +__all__ = ['BlitzBasicLexer', 'BlitzMaxLexer', 'MonkeyLexer', 'CbmBasicV2Lexer'] + + +class BlitzMaxLexer(RegexLexer): + """ + For `BlitzMax `_ source code. + + .. 
versionadded:: 1.4 + """ + + name = 'BlitzMax' + aliases = ['blitzmax', 'bmax'] + filenames = ['*.bmx'] + mimetypes = ['text/x-bmx'] + + bmax_vopwords = r'\b(Shl|Shr|Sar|Mod)\b' + bmax_sktypes = r'@{1,2}|[!#$%]' + bmax_lktypes = r'\b(Int|Byte|Short|Float|Double|Long)\b' + bmax_name = r'[a-z_]\w*' + bmax_var = (r'(%s)(?:(?:([ \t]*)(%s)|([ \t]*:[ \t]*\b(?:Shl|Shr|Sar|Mod)\b)' + r'|([ \t]*)(:)([ \t]*)(?:%s|(%s)))(?:([ \t]*)(Ptr))?)') % \ + (bmax_name, bmax_sktypes, bmax_lktypes, bmax_name) + bmax_func = bmax_var + r'?((?:[ \t]|\.\.\n)*)([(])' + + flags = re.MULTILINE | re.IGNORECASE + tokens = { + 'root': [ + # Text + (r'[ \t]+', Text), + (r'\.\.\n', Text), # Line continuation + # Comments + (r"'.*?\n", Comment.Single), + (r'([ \t]*)\bRem\n(\n|.)*?\s*\bEnd([ \t]*)Rem', Comment.Multiline), + # Data types + ('"', String.Double, 'string'), + # Numbers + (r'[0-9]+\.[0-9]*(?!\.)', Number.Float), + (r'\.[0-9]*(?!\.)', Number.Float), + (r'[0-9]+', Number.Integer), + (r'\$[0-9a-f]+', Number.Hex), + (r'\%[10]+', Number.Bin), + # Other + (r'(?:(?:(:)?([ \t]*)(:?%s|([+\-*/&|~]))|Or|And|Not|[=<>^]))' % + (bmax_vopwords), Operator), + (r'[(),.:\[\]]', Punctuation), + (r'(?:#[\w \t]*)', Name.Label), + (r'(?:\?[\w \t]*)', Comment.Preproc), + # Identifiers + (r'\b(New)\b([ \t]?)([(]?)(%s)' % (bmax_name), + bygroups(Keyword.Reserved, Text, Punctuation, Name.Class)), + (r'\b(Import|Framework|Module)([ \t]+)(%s\.%s)' % + (bmax_name, bmax_name), + bygroups(Keyword.Reserved, Text, Keyword.Namespace)), + (bmax_func, bygroups(Name.Function, Text, Keyword.Type, + Operator, Text, Punctuation, Text, + Keyword.Type, Name.Class, Text, + Keyword.Type, Text, Punctuation)), + (bmax_var, bygroups(Name.Variable, Text, Keyword.Type, Operator, + Text, Punctuation, Text, Keyword.Type, + Name.Class, Text, Keyword.Type)), + (r'\b(Type|Extends)([ \t]+)(%s)' % (bmax_name), + bygroups(Keyword.Reserved, Text, Name.Class)), + # Keywords + (r'\b(Ptr)\b', Keyword.Type), + (r'\b(Pi|True|False|Null|Self|Super)\b', Keyword.Constant), + (r'\b(Local|Global|Const|Field)\b', Keyword.Declaration), + (words(( + 'TNullMethodException', 'TNullFunctionException', + 'TNullObjectException', 'TArrayBoundsException', + 'TRuntimeException'), prefix=r'\b', suffix=r'\b'), Name.Exception), + (words(( + 'Strict', 'SuperStrict', 'Module', 'ModuleInfo', + 'End', 'Return', 'Continue', 'Exit', 'Public', 'Private', + 'Var', 'VarPtr', 'Chr', 'Len', 'Asc', 'SizeOf', 'Sgn', 'Abs', 'Min', 'Max', + 'New', 'Release', 'Delete', 'Incbin', 'IncbinPtr', 'IncbinLen', + 'Framework', 'Include', 'Import', 'Extern', 'EndExtern', + 'Function', 'EndFunction', 'Type', 'EndType', 'Extends', 'Method', 'EndMethod', + 'Abstract', 'Final', 'If', 'Then', 'Else', 'ElseIf', 'EndIf', + 'For', 'To', 'Next', 'Step', 'EachIn', 'While', 'Wend', 'EndWhile', + 'Repeat', 'Until', 'Forever', 'Select', 'Case', 'Default', 'EndSelect', + 'Try', 'Catch', 'EndTry', 'Throw', 'Assert', 'Goto', 'DefData', 'ReadData', + 'RestoreData'), prefix=r'\b', suffix=r'\b'), + Keyword.Reserved), + # Final resolve (for variable names and such) + (r'(%s)' % (bmax_name), Name.Variable), + ], + 'string': [ + (r'""', String.Double), + (r'"C?', String.Double, '#pop'), + (r'[^"]+', String.Double), + ], + } + + +class BlitzBasicLexer(RegexLexer): + """ + For `BlitzBasic `_ source code. + + .. 
versionadded:: 2.0 + """ + + name = 'BlitzBasic' + aliases = ['blitzbasic', 'b3d', 'bplus'] + filenames = ['*.bb', '*.decls'] + mimetypes = ['text/x-bb'] + + bb_sktypes = r'@{1,2}|[#$%]' + bb_name = r'[a-z]\w*' + bb_var = (r'(%s)(?:([ \t]*)(%s)|([ \t]*)([.])([ \t]*)(?:(%s)))?') % \ + (bb_name, bb_sktypes, bb_name) + + flags = re.MULTILINE | re.IGNORECASE + tokens = { + 'root': [ + # Text + (r'[ \t]+', Text), + # Comments + (r";.*?\n", Comment.Single), + # Data types + ('"', String.Double, 'string'), + # Numbers + (r'[0-9]+\.[0-9]*(?!\.)', Number.Float), + (r'\.[0-9]+(?!\.)', Number.Float), + (r'[0-9]+', Number.Integer), + (r'\$[0-9a-f]+', Number.Hex), + (r'\%[10]+', Number.Bin), + # Other + (words(('Shl', 'Shr', 'Sar', 'Mod', 'Or', 'And', 'Not', + 'Abs', 'Sgn', 'Handle', 'Int', 'Float', 'Str', + 'First', 'Last', 'Before', 'After'), + prefix=r'\b', suffix=r'\b'), + Operator), + (r'([+\-*/~=<>^])', Operator), + (r'[(),:\[\]\\]', Punctuation), + (r'\.([ \t]*)(%s)' % bb_name, Name.Label), + # Identifiers + (r'\b(New)\b([ \t]+)(%s)' % (bb_name), + bygroups(Keyword.Reserved, Text, Name.Class)), + (r'\b(Gosub|Goto)\b([ \t]+)(%s)' % (bb_name), + bygroups(Keyword.Reserved, Text, Name.Label)), + (r'\b(Object)\b([ \t]*)([.])([ \t]*)(%s)\b' % (bb_name), + bygroups(Operator, Text, Punctuation, Text, Name.Class)), + (r'\b%s\b([ \t]*)(\()' % bb_var, + bygroups(Name.Function, Text, Keyword.Type, Text, Punctuation, + Text, Name.Class, Text, Punctuation)), + (r'\b(Function)\b([ \t]+)%s' % bb_var, + bygroups(Keyword.Reserved, Text, Name.Function, Text, Keyword.Type, + Text, Punctuation, Text, Name.Class)), + (r'\b(Type)([ \t]+)(%s)' % (bb_name), + bygroups(Keyword.Reserved, Text, Name.Class)), + # Keywords + (r'\b(Pi|True|False|Null)\b', Keyword.Constant), + (r'\b(Local|Global|Const|Field|Dim)\b', Keyword.Declaration), + (words(( + 'End', 'Return', 'Exit', 'Chr', 'Len', 'Asc', 'New', 'Delete', 'Insert', + 'Include', 'Function', 'Type', 'If', 'Then', 'Else', 'ElseIf', 'EndIf', + 'For', 'To', 'Next', 'Step', 'Each', 'While', 'Wend', + 'Repeat', 'Until', 'Forever', 'Select', 'Case', 'Default', + 'Goto', 'Gosub', 'Data', 'Read', 'Restore'), prefix=r'\b', suffix=r'\b'), + Keyword.Reserved), + # Final resolve (for variable names and such) + # (r'(%s)' % (bb_name), Name.Variable), + (bb_var, bygroups(Name.Variable, Text, Keyword.Type, + Text, Punctuation, Text, Name.Class)), + ], + 'string': [ + (r'""', String.Double), + (r'"C?', String.Double, '#pop'), + (r'[^"]+', String.Double), + ], + } + + +class MonkeyLexer(RegexLexer): + """ + For + `Monkey `_ + source code. + + .. versionadded:: 1.6 + """ + + name = 'Monkey' + aliases = ['monkey'] + filenames = ['*.monkey'] + mimetypes = ['text/x-monkey'] + + name_variable = r'[a-z_]\w*' + name_function = r'[A-Z]\w*' + name_constant = r'[A-Z_][A-Z0-9_]*' + name_class = r'[A-Z]\w*' + name_module = r'[a-z0-9_]*' + + keyword_type = r'(?:Int|Float|String|Bool|Object|Array|Void)' + # ? 
== Bool // % == Int // # == Float // $ == String + keyword_type_special = r'[?%#$]' + + flags = re.MULTILINE + + tokens = { + 'root': [ + # Text + (r'\s+', Text), + # Comments + (r"'.*", Comment), + (r'(?i)^#rem\b', Comment.Multiline, 'comment'), + # preprocessor directives + (r'(?i)^(?:#If|#ElseIf|#Else|#EndIf|#End|#Print|#Error)\b', Comment.Preproc), + # preprocessor variable (any line starting with '#' that is not a directive) + (r'^#', Comment.Preproc, 'variables'), + # String + ('"', String.Double, 'string'), + # Numbers + (r'[0-9]+\.[0-9]*(?!\.)', Number.Float), + (r'\.[0-9]+(?!\.)', Number.Float), + (r'[0-9]+', Number.Integer), + (r'\$[0-9a-fA-Z]+', Number.Hex), + (r'\%[10]+', Number.Bin), + # Native data types + (r'\b%s\b' % keyword_type, Keyword.Type), + # Exception handling + (r'(?i)\b(?:Try|Catch|Throw)\b', Keyword.Reserved), + (r'Throwable', Name.Exception), + # Builtins + (r'(?i)\b(?:Null|True|False)\b', Name.Builtin), + (r'(?i)\b(?:Self|Super)\b', Name.Builtin.Pseudo), + (r'\b(?:HOST|LANG|TARGET|CONFIG)\b', Name.Constant), + # Keywords + (r'(?i)^(Import)(\s+)(.*)(\n)', + bygroups(Keyword.Namespace, Text, Name.Namespace, Text)), + (r'(?i)^Strict\b.*\n', Keyword.Reserved), + (r'(?i)(Const|Local|Global|Field)(\s+)', + bygroups(Keyword.Declaration, Text), 'variables'), + (r'(?i)(New|Class|Interface|Extends|Implements)(\s+)', + bygroups(Keyword.Reserved, Text), 'classname'), + (r'(?i)(Function|Method)(\s+)', + bygroups(Keyword.Reserved, Text), 'funcname'), + (r'(?i)(?:End|Return|Public|Private|Extern|Property|' + r'Final|Abstract)\b', Keyword.Reserved), + # Flow Control stuff + (r'(?i)(?:If|Then|Else|ElseIf|EndIf|' + r'Select|Case|Default|' + r'While|Wend|' + r'Repeat|Until|Forever|' + r'For|To|Until|Step|EachIn|Next|' + r'Exit|Continue)\s+', Keyword.Reserved), + # not used yet + (r'(?i)\b(?:Module|Inline)\b', Keyword.Reserved), + # Array + (r'[\[\]]', Punctuation), + # Other + (r'<=|>=|<>|\*=|/=|\+=|-=|&=|~=|\|=|[-&*/^+=<>|~]', Operator), + (r'(?i)(?:Not|Mod|Shl|Shr|And|Or)', Operator.Word), + (r'[\(\){}!#,.:]', Punctuation), + # catch the rest + (r'%s\b' % name_constant, Name.Constant), + (r'%s\b' % name_function, Name.Function), + (r'%s\b' % name_variable, Name.Variable), + ], + 'funcname': [ + (r'(?i)%s\b' % name_function, Name.Function), + (r':', Punctuation, 'classname'), + (r'\s+', Text), + (r'\(', Punctuation, 'variables'), + (r'\)', Punctuation, '#pop') + ], + 'classname': [ + (r'%s\.' % name_module, Name.Namespace), + (r'%s\b' % keyword_type, Keyword.Type), + (r'%s\b' % name_class, Name.Class), + # array (of given size) + (r'(\[)(\s*)(\d*)(\s*)(\])', + bygroups(Punctuation, Text, Number.Integer, Text, Punctuation)), + # generics + (r'\s+(?!<)', Text, '#pop'), + (r'<', Punctuation, '#push'), + (r'>', Punctuation, '#pop'), + (r'\n', Text, '#pop'), + default('#pop') + ], + 'variables': [ + (r'%s\b' % name_constant, Name.Constant), + (r'%s\b' % name_variable, Name.Variable), + (r'%s' % keyword_type_special, Keyword.Type), + (r'\s+', Text), + (r':', Punctuation, 'classname'), + (r',', Punctuation, '#push'), + default('#pop') + ], + 'string': [ + (r'[^"~]+', String.Double), + (r'~q|~n|~r|~t|~z|~~', String.Escape), + (r'"', String.Double, '#pop'), + ], + 'comment': [ + (r'(?i)^#rem.*?', Comment.Multiline, "#push"), + (r'(?i)^#end.*?', Comment.Multiline, "#pop"), + (r'\n', Comment.Multiline), + (r'.+', Comment.Multiline), + ], + } + + +class CbmBasicV2Lexer(RegexLexer): + """ + For CBM BASIC V2 sources. + + .. 
versionadded:: 1.6 + """ + name = 'CBM BASIC V2' + aliases = ['cbmbas'] + filenames = ['*.bas'] + + flags = re.IGNORECASE + + tokens = { + 'root': [ + (r'rem.*\n', Comment.Single), + (r'\s+', Text), + (r'new|run|end|for|to|next|step|go(to|sub)?|on|return|stop|cont' + r'|if|then|input#?|read|wait|load|save|verify|poke|sys|print#?' + r'|list|clr|cmd|open|close|get#?', Keyword.Reserved), + (r'data|restore|dim|let|def|fn', Keyword.Declaration), + (r'tab|spc|sgn|int|abs|usr|fre|pos|sqr|rnd|log|exp|cos|sin|tan|atn' + r'|peek|len|val|asc|(str|chr|left|right|mid)\$', Name.Builtin), + (r'[-+*/^<>=]', Operator), + (r'not|and|or', Operator.Word), + (r'"[^"\n]*.', String), + (r'\d+|[-+]?\d*\.\d*(e[-+]?\d+)?', Number.Float), + (r'[\(\),:;]', Punctuation), + (r'\w+[$%]?', Name), + ] + } + + def analyse_text(self, text): + # if it starts with a line number, it shouldn't be a "modern" Basic + # like VB.net + if re.match(r'\d+', text): + return True diff --git a/pygments/lexers/misc/blitz.py b/pygments/lexers/misc/blitz.py deleted file mode 100644 index 9e324adb..00000000 --- a/pygments/lexers/misc/blitz.py +++ /dev/null @@ -1,318 +0,0 @@ -# -*- coding: utf-8 -*- -""" - pygments.lexers.misc.blitz - ~~~~~~~~~~~~~~~~~~~~~~~~~~ - - Lexers for blitzbasic.com languages. - - :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. - :license: BSD, see LICENSE for details. -""" - -import re - -from pygments.lexer import RegexLexer, bygroups, default, words -from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ - Number, Punctuation - -__all__ = ['BlitzBasicLexer', 'BlitzMaxLexer', 'MonkeyLexer'] - - -class BlitzMaxLexer(RegexLexer): - """ - For `BlitzMax `_ source code. - - .. versionadded:: 1.4 - """ - - name = 'BlitzMax' - aliases = ['blitzmax', 'bmax'] - filenames = ['*.bmx'] - mimetypes = ['text/x-bmx'] - - bmax_vopwords = r'\b(Shl|Shr|Sar|Mod)\b' - bmax_sktypes = r'@{1,2}|[!#$%]' - bmax_lktypes = r'\b(Int|Byte|Short|Float|Double|Long)\b' - bmax_name = r'[a-z_]\w*' - bmax_var = (r'(%s)(?:(?:([ \t]*)(%s)|([ \t]*:[ \t]*\b(?:Shl|Shr|Sar|Mod)\b)' - r'|([ \t]*)(:)([ \t]*)(?:%s|(%s)))(?:([ \t]*)(Ptr))?)') % \ - (bmax_name, bmax_sktypes, bmax_lktypes, bmax_name) - bmax_func = bmax_var + r'?((?:[ \t]|\.\.\n)*)([(])' - - flags = re.MULTILINE | re.IGNORECASE - tokens = { - 'root': [ - # Text - (r'[ \t]+', Text), - (r'\.\.\n', Text), # Line continuation - # Comments - (r"'.*?\n", Comment.Single), - (r'([ \t]*)\bRem\n(\n|.)*?\s*\bEnd([ \t]*)Rem', Comment.Multiline), - # Data types - ('"', String.Double, 'string'), - # Numbers - (r'[0-9]+\.[0-9]*(?!\.)', Number.Float), - (r'\.[0-9]*(?!\.)', Number.Float), - (r'[0-9]+', Number.Integer), - (r'\$[0-9a-f]+', Number.Hex), - (r'\%[10]+', Number.Bin), - # Other - (r'(?:(?:(:)?([ \t]*)(:?%s|([+\-*/&|~]))|Or|And|Not|[=<>^]))' % - (bmax_vopwords), Operator), - (r'[(),.:\[\]]', Punctuation), - (r'(?:#[\w \t]*)', Name.Label), - (r'(?:\?[\w \t]*)', Comment.Preproc), - # Identifiers - (r'\b(New)\b([ \t]?)([(]?)(%s)' % (bmax_name), - bygroups(Keyword.Reserved, Text, Punctuation, Name.Class)), - (r'\b(Import|Framework|Module)([ \t]+)(%s\.%s)' % - (bmax_name, bmax_name), - bygroups(Keyword.Reserved, Text, Keyword.Namespace)), - (bmax_func, bygroups(Name.Function, Text, Keyword.Type, - Operator, Text, Punctuation, Text, - Keyword.Type, Name.Class, Text, - Keyword.Type, Text, Punctuation)), - (bmax_var, bygroups(Name.Variable, Text, Keyword.Type, Operator, - Text, Punctuation, Text, Keyword.Type, - Name.Class, Text, Keyword.Type)), - (r'\b(Type|Extends)([ 
\t]+)(%s)' % (bmax_name), - bygroups(Keyword.Reserved, Text, Name.Class)), - # Keywords - (r'\b(Ptr)\b', Keyword.Type), - (r'\b(Pi|True|False|Null|Self|Super)\b', Keyword.Constant), - (r'\b(Local|Global|Const|Field)\b', Keyword.Declaration), - (words(( - 'TNullMethodException', 'TNullFunctionException', - 'TNullObjectException', 'TArrayBoundsException', - 'TRuntimeException'), prefix=r'\b', suffix=r'\b'), Name.Exception), - (words(( - 'Strict', 'SuperStrict', 'Module', 'ModuleInfo', - 'End', 'Return', 'Continue', 'Exit', 'Public', 'Private', - 'Var', 'VarPtr', 'Chr', 'Len', 'Asc', 'SizeOf', 'Sgn', 'Abs', 'Min', 'Max', - 'New', 'Release', 'Delete', 'Incbin', 'IncbinPtr', 'IncbinLen', - 'Framework', 'Include', 'Import', 'Extern', 'EndExtern', - 'Function', 'EndFunction', 'Type', 'EndType', 'Extends', 'Method', 'EndMethod', - 'Abstract', 'Final', 'If', 'Then', 'Else', 'ElseIf', 'EndIf', - 'For', 'To', 'Next', 'Step', 'EachIn', 'While', 'Wend', 'EndWhile', - 'Repeat', 'Until', 'Forever', 'Select', 'Case', 'Default', 'EndSelect', - 'Try', 'Catch', 'EndTry', 'Throw', 'Assert', 'Goto', 'DefData', 'ReadData', - 'RestoreData'), prefix=r'\b', suffix=r'\b'), - Keyword.Reserved), - # Final resolve (for variable names and such) - (r'(%s)' % (bmax_name), Name.Variable), - ], - 'string': [ - (r'""', String.Double), - (r'"C?', String.Double, '#pop'), - (r'[^"]+', String.Double), - ], - } - - -class BlitzBasicLexer(RegexLexer): - """ - For `BlitzBasic `_ source code. - - .. versionadded:: 2.0 - """ - - name = 'BlitzBasic' - aliases = ['blitzbasic', 'b3d', 'bplus'] - filenames = ['*.bb', '*.decls'] - mimetypes = ['text/x-bb'] - - bb_sktypes = r'@{1,2}|[#$%]' - bb_name = r'[a-z]\w*' - bb_var = (r'(%s)(?:([ \t]*)(%s)|([ \t]*)([.])([ \t]*)(?:(%s)))?') % \ - (bb_name, bb_sktypes, bb_name) - - flags = re.MULTILINE | re.IGNORECASE - tokens = { - 'root': [ - # Text - (r'[ \t]+', Text), - # Comments - (r";.*?\n", Comment.Single), - # Data types - ('"', String.Double, 'string'), - # Numbers - (r'[0-9]+\.[0-9]*(?!\.)', Number.Float), - (r'\.[0-9]+(?!\.)', Number.Float), - (r'[0-9]+', Number.Integer), - (r'\$[0-9a-f]+', Number.Hex), - (r'\%[10]+', Number.Bin), - # Other - (words(('Shl', 'Shr', 'Sar', 'Mod', 'Or', 'And', 'Not', - 'Abs', 'Sgn', 'Handle', 'Int', 'Float', 'Str', - 'First', 'Last', 'Before', 'After'), - prefix=r'\b', suffix=r'\b'), - Operator), - (r'([+\-*/~=<>^])', Operator), - (r'[(),:\[\]\\]', Punctuation), - (r'\.([ \t]*)(%s)' % bb_name, Name.Label), - # Identifiers - (r'\b(New)\b([ \t]+)(%s)' % (bb_name), - bygroups(Keyword.Reserved, Text, Name.Class)), - (r'\b(Gosub|Goto)\b([ \t]+)(%s)' % (bb_name), - bygroups(Keyword.Reserved, Text, Name.Label)), - (r'\b(Object)\b([ \t]*)([.])([ \t]*)(%s)\b' % (bb_name), - bygroups(Operator, Text, Punctuation, Text, Name.Class)), - (r'\b%s\b([ \t]*)(\()' % bb_var, - bygroups(Name.Function, Text, Keyword.Type, Text, Punctuation, - Text, Name.Class, Text, Punctuation)), - (r'\b(Function)\b([ \t]+)%s' % bb_var, - bygroups(Keyword.Reserved, Text, Name.Function, Text, Keyword.Type, - Text, Punctuation, Text, Name.Class)), - (r'\b(Type)([ \t]+)(%s)' % (bb_name), - bygroups(Keyword.Reserved, Text, Name.Class)), - # Keywords - (r'\b(Pi|True|False|Null)\b', Keyword.Constant), - (r'\b(Local|Global|Const|Field|Dim)\b', Keyword.Declaration), - (words(( - 'End', 'Return', 'Exit', 'Chr', 'Len', 'Asc', 'New', 'Delete', 'Insert', - 'Include', 'Function', 'Type', 'If', 'Then', 'Else', 'ElseIf', 'EndIf', - 'For', 'To', 'Next', 'Step', 'Each', 'While', 'Wend', - 'Repeat', 'Until', 
'Forever', 'Select', 'Case', 'Default', - 'Goto', 'Gosub', 'Data', 'Read', 'Restore'), prefix=r'\b', suffix=r'\b'), - Keyword.Reserved), - # Final resolve (for variable names and such) - # (r'(%s)' % (bb_name), Name.Variable), - (bb_var, bygroups(Name.Variable, Text, Keyword.Type, - Text, Punctuation, Text, Name.Class)), - ], - 'string': [ - (r'""', String.Double), - (r'"C?', String.Double, '#pop'), - (r'[^"]+', String.Double), - ], - } - - -class MonkeyLexer(RegexLexer): - """ - For - `Monkey `_ - source code. - - .. versionadded:: 1.6 - """ - - name = 'Monkey' - aliases = ['monkey'] - filenames = ['*.monkey'] - mimetypes = ['text/x-monkey'] - - name_variable = r'[a-z_]\w*' - name_function = r'[A-Z]\w*' - name_constant = r'[A-Z_][A-Z0-9_]*' - name_class = r'[A-Z]\w*' - name_module = r'[a-z0-9_]*' - - keyword_type = r'(?:Int|Float|String|Bool|Object|Array|Void)' - # ? == Bool // % == Int // # == Float // $ == String - keyword_type_special = r'[?%#$]' - - flags = re.MULTILINE - - tokens = { - 'root': [ - # Text - (r'\s+', Text), - # Comments - (r"'.*", Comment), - (r'(?i)^#rem\b', Comment.Multiline, 'comment'), - # preprocessor directives - (r'(?i)^(?:#If|#ElseIf|#Else|#EndIf|#End|#Print|#Error)\b', Comment.Preproc), - # preprocessor variable (any line starting with '#' that is not a directive) - (r'^#', Comment.Preproc, 'variables'), - # String - ('"', String.Double, 'string'), - # Numbers - (r'[0-9]+\.[0-9]*(?!\.)', Number.Float), - (r'\.[0-9]+(?!\.)', Number.Float), - (r'[0-9]+', Number.Integer), - (r'\$[0-9a-fA-Z]+', Number.Hex), - (r'\%[10]+', Number.Bin), - # Native data types - (r'\b%s\b' % keyword_type, Keyword.Type), - # Exception handling - (r'(?i)\b(?:Try|Catch|Throw)\b', Keyword.Reserved), - (r'Throwable', Name.Exception), - # Builtins - (r'(?i)\b(?:Null|True|False)\b', Name.Builtin), - (r'(?i)\b(?:Self|Super)\b', Name.Builtin.Pseudo), - (r'\b(?:HOST|LANG|TARGET|CONFIG)\b', Name.Constant), - # Keywords - (r'(?i)^(Import)(\s+)(.*)(\n)', - bygroups(Keyword.Namespace, Text, Name.Namespace, Text)), - (r'(?i)^Strict\b.*\n', Keyword.Reserved), - (r'(?i)(Const|Local|Global|Field)(\s+)', - bygroups(Keyword.Declaration, Text), 'variables'), - (r'(?i)(New|Class|Interface|Extends|Implements)(\s+)', - bygroups(Keyword.Reserved, Text), 'classname'), - (r'(?i)(Function|Method)(\s+)', - bygroups(Keyword.Reserved, Text), 'funcname'), - (r'(?i)(?:End|Return|Public|Private|Extern|Property|' - r'Final|Abstract)\b', Keyword.Reserved), - # Flow Control stuff - (r'(?i)(?:If|Then|Else|ElseIf|EndIf|' - r'Select|Case|Default|' - r'While|Wend|' - r'Repeat|Until|Forever|' - r'For|To|Until|Step|EachIn|Next|' - r'Exit|Continue)\s+', Keyword.Reserved), - # not used yet - (r'(?i)\b(?:Module|Inline)\b', Keyword.Reserved), - # Array - (r'[\[\]]', Punctuation), - # Other - (r'<=|>=|<>|\*=|/=|\+=|-=|&=|~=|\|=|[-&*/^+=<>|~]', Operator), - (r'(?i)(?:Not|Mod|Shl|Shr|And|Or)', Operator.Word), - (r'[\(\){}!#,.:]', Punctuation), - # catch the rest - (r'%s\b' % name_constant, Name.Constant), - (r'%s\b' % name_function, Name.Function), - (r'%s\b' % name_variable, Name.Variable), - ], - 'funcname': [ - (r'(?i)%s\b' % name_function, Name.Function), - (r':', Punctuation, 'classname'), - (r'\s+', Text), - (r'\(', Punctuation, 'variables'), - (r'\)', Punctuation, '#pop') - ], - 'classname': [ - (r'%s\.' 
% name_module, Name.Namespace), - (r'%s\b' % keyword_type, Keyword.Type), - (r'%s\b' % name_class, Name.Class), - # array (of given size) - (r'(\[)(\s*)(\d*)(\s*)(\])', - bygroups(Punctuation, Text, Number.Integer, Text, Punctuation)), - # generics - (r'\s+(?!<)', Text, '#pop'), - (r'<', Punctuation, '#push'), - (r'>', Punctuation, '#pop'), - (r'\n', Text, '#pop'), - default('#pop') - ], - 'variables': [ - (r'%s\b' % name_constant, Name.Constant), - (r'%s\b' % name_variable, Name.Variable), - (r'%s' % keyword_type_special, Keyword.Type), - (r'\s+', Text), - (r':', Punctuation, 'classname'), - (r',', Punctuation, '#push'), - default('#pop') - ], - 'string': [ - (r'[^"~]+', String.Double), - (r'~q|~n|~r|~t|~z|~~', String.Escape), - (r'"', String.Double, '#pop'), - ], - 'comment': [ - (r'(?i)^#rem.*?', Comment.Multiline, "#push"), - (r'(?i)^#end.*?', Comment.Multiline, "#pop"), - (r'\n', Comment.Multiline), - (r'.+', Comment.Multiline), - ], - } diff --git a/pygments/lexers/misc/pawn.py b/pygments/lexers/misc/pawn.py new file mode 100644 index 00000000..79161caf --- /dev/null +++ b/pygments/lexers/misc/pawn.py @@ -0,0 +1,195 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.misc.pawn + ~~~~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for the Pawn languages. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +from pygments.lexer import RegexLexer +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation, Error +from pygments.util import get_bool_opt + +__all__ = ['SourcePawnLexer', 'PawnLexer'] + + +class SourcePawnLexer(RegexLexer): + """ + For SourcePawn source code with preprocessor directives. + + .. versionadded:: 1.6 + """ + name = 'SourcePawn' + aliases = ['sp'] + filenames = ['*.sp'] + mimetypes = ['text/x-sourcepawn'] + + #: optional Comment or Whitespace + _ws = r'(?:\s|//.*?\n|/\*.*?\*/)+' + + tokens = { + 'root': [ + # preprocessor directives: without whitespace + ('^#if\s+0', Comment.Preproc, 'if0'), + ('^#', Comment.Preproc, 'macro'), + # or with whitespace + ('^' + _ws + r'#if\s+0', Comment.Preproc, 'if0'), + ('^' + _ws + '#', Comment.Preproc, 'macro'), + (r'\n', Text), + (r'\s+', Text), + (r'\\\n', Text), # line continuation + (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single), + (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment.Multiline), + (r'[{}]', Punctuation), + (r'L?"', String, 'string'), + (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char), + (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float), + (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float), + (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex), + (r'0[0-7]+[LlUu]*', Number.Oct), + (r'\d+[LlUu]*', Number.Integer), + (r'\*/', Error), + (r'[~!%^&*+=|?:<>/-]', Operator), + (r'[()\[\],.;]', Punctuation), + (r'(case|const|continue|native|' + r'default|else|enum|for|if|new|operator|' + r'public|return|sizeof|static|decl|struct|switch)\b', Keyword), + (r'(bool|Float)\b', Keyword.Type), + (r'(true|false)\b', Keyword.Constant), + ('[a-zA-Z_]\w*', Name), + ], + 'string': [ + (r'"', String, '#pop'), + (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape), + (r'[^\\"\n]+', String), # all other characters + (r'\\\n', String), # line continuation + (r'\\', String), # stray backslash + ], + 'macro': [ + (r'[^/\n]+', Comment.Preproc), + (r'/\*(.|\n)*?\*/', Comment.Multiline), + (r'//.*?\n', Comment.Single, '#pop'), + (r'/', Comment.Preproc), + (r'(?<=\\)\n', Comment.Preproc), + (r'\n', Comment.Preproc, '#pop'), + 
], + 'if0': [ + (r'^\s*#if.*?(?/-]', Operator), + (r'[()\[\],.;]', Punctuation), + (r'(switch|case|default|const|new|static|char|continue|break|' + r'if|else|for|while|do|operator|enum|' + r'public|return|sizeof|tagof|state|goto)\b', Keyword), + (r'(bool|Float)\b', Keyword.Type), + (r'(true|false)\b', Keyword.Constant), + ('[a-zA-Z_]\w*', Name), + ], + 'string': [ + (r'"', String, '#pop'), + (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape), + (r'[^\\"\n]+', String), # all other characters + (r'\\\n', String), # line continuation + (r'\\', String), # stray backslash + ], + 'macro': [ + (r'[^/\n]+', Comment.Preproc), + (r'/\*(.|\n)*?\*/', Comment.Multiline), + (r'//.*?\n', Comment.Single, '#pop'), + (r'/', Comment.Preproc), + (r'(?<=\\)\n', Comment.Preproc), + (r'\n', Comment.Preproc, '#pop'), + ], + 'if0': [ + (r'^\s*#if.*?(?`_ lexer. + + .. versionadded:: 2.0 + """ + name = 'Red' + aliases = ['red', 'red/system'] + filenames = ['*.red', '*.reds'] + mimetypes = ['text/x-red', 'text/x-red-system'] + + flags = re.IGNORECASE | re.MULTILINE + + escape_re = r'(?:\^\([0-9a-f]{1,4}\)*)' + + def word_callback(lexer, match): + word = match.group() + + if re.match(".*:$", word): + yield match.start(), Generic.Subheading, word + elif re.match( + r'(if|unless|either|any|all|while|until|loop|repeat|' + r'foreach|forall|func|function|does|has|switch|' + r'case|reduce|compose|get|set|print|prin|equal\?|' + r'not-equal\?|strict-equal\?|lesser\?|greater\?|lesser-or-equal\?|' + r'greater-or-equal\?|same\?|not|type\?|stats|' + r'bind|union|replace|charset|routine)$', word): + yield match.start(), Name.Builtin, word + elif re.match( + r'(make|random|reflect|to|form|mold|absolute|add|divide|multiply|negate|' + r'power|remainder|round|subtract|even\?|odd\?|and~|complement|or~|xor~|' + r'append|at|back|change|clear|copy|find|head|head\?|index\?|insert|' + r'length\?|next|pick|poke|remove|reverse|select|sort|skip|swap|tail|tail\?|' + r'take|trim|create|close|delete|modify|open|open\?|query|read|rename|' + r'update|write)$', word): + yield match.start(), Name.Function, word + elif re.match( + r'(yes|on|no|off|true|false|tab|cr|lf|newline|escape|slash|sp|space|null|' + r'none|crlf|dot|null-byte)$', word): + yield match.start(), Name.Builtin.Pseudo, word + elif re.match( + r'(#system-global|#include|#enum|#define|#either|#if|#import|#export|' + r'#switch|#default|#get-definition)$', word): + yield match.start(), Keyword.Namespace, word + elif re.match( + r'(system|halt|quit|quit-return|do|load|q|recycle|call|run|ask|parse|' + r'raise-error|return|exit|break|alias|push|pop|probe|\?\?|spec-of|body-of|' + r'quote|forever)$', word): + yield match.start(), Name.Exception, word + elif re.match( + r'(action\?|block\?|char\?|datatype\?|file\?|function\?|get-path\?|zero\?|' + r'get-word\?|integer\?|issue\?|lit-path\?|lit-word\?|logic\?|native\?|' + r'op\?|paren\?|path\?|refinement\?|set-path\?|set-word\?|string\?|unset\?|' + r'any-struct\?|none\?|word\?|any-series\?)$', word): + yield match.start(), Keyword, word + elif re.match(r'(JNICALL|stdcall|cdecl|infix)$', word): + yield match.start(), Keyword.Namespace, word + elif re.match("to-.*", word): + yield match.start(), Keyword, word + elif re.match('(\+|-|\*|/|//|\*\*|and|or|xor|=\?|=|==|===|<>|<|>|<=|>=|<<|>>|<<<|>>>|%|-\*\*)$', word): + yield match.start(), Operator, word + elif re.match(".*\!$", word): + yield match.start(), Keyword.Type, word + elif re.match("'.*", word): + yield match.start(), Name.Variable.Instance, word # lit-word + elif 
re.match("#.*", word): + yield match.start(), Name.Label, word # issue + elif re.match("%.*", word): + yield match.start(), Name.Decorator, word # file + elif re.match(":.*", word): + yield match.start(), Generic.Subheading, word # get-word + else: + yield match.start(), Name.Variable, word + + tokens = { + 'root': [ + (r'[^R]+', Comment), + (r'Red/System\s+\[', Generic.Strong, 'script'), + (r'Red\s+\[', Generic.Strong, 'script'), + (r'R', Comment) + ], + 'script': [ + (r'\s+', Text), + (r'#"', String.Char, 'char'), + (r'#{[0-9a-f\s]*}', Number.Hex), + (r'2#{', Number.Hex, 'bin2'), + (r'64#{[0-9a-z+/=\s]*}', Number.Hex), + (r'([0-9a-f]+)(h)((\s)|(?=[\[\]{}""\(\)]))', + bygroups(Number.Hex, Name.Variable, Whitespace)), + (r'"', String, 'string'), + (r'{', String, 'string2'), + (r';#+.*\n', Comment.Special), + (r';\*+.*\n', Comment.Preproc), + (r';.*\n', Comment), + (r'%"', Name.Decorator, 'stringFile'), + (r'%[^(\^{^")\s\[\]]+', Name.Decorator), + (r'[+-]?([a-z]{1,3})?\$\d+(\.\d+)?', Number.Float), # money + (r'[+-]?\d+\:\d+(\:\d+)?(\.\d+)?', String.Other), # time + (r'\d+[\-\/][0-9a-z]+[\-\/]\d+(\/\d+\:\d+((\:\d+)?' + r'([\.\d+]?([+-]?\d+:\d+)?)?)?)?', String.Other), # date + (r'\d+(\.\d+)+\.\d+', Keyword.Constant), # tuple + (r'\d+[xX]\d+', Keyword.Constant), # pair + (r'[+-]?\d+(\'\d+)?([\.,]\d*)?[eE][+-]?\d+', Number.Float), + (r'[+-]?\d+(\'\d+)?[\.,]\d*', Number.Float), + (r'[+-]?\d+(\'\d+)?', Number), + (r'[\[\]\(\)]', Generic.Strong), + (r'[a-z]+[^(\^{"\s:)]*://[^(\^{"\s)]*', Name.Decorator), # url + (r'mailto:[^(\^{"@\s)]+@[^(\^{"@\s)]+', Name.Decorator), # url + (r'[^(\^{"@\s)]+@[^(\^{"@\s)]+', Name.Decorator), # email + (r'comment\s"', Comment, 'commentString1'), + (r'comment\s{', Comment, 'commentString2'), + (r'comment\s\[', Comment, 'commentBlock'), + (r'comment\s[^(\s{\"\[]+', Comment), + (r'/[^(\^{^")\s/[\]]*', Name.Attribute), + (r'([^(\^{^")\s/[\]]+)(?=[:({"\s/\[\]])', word_callback), + (r'<[\w:.-]*>', Name.Tag), + (r'<[^(<>\s")]+', Name.Tag, 'tag'), + (r'([^(\^{^")\s]+)', Text), + ], + 'string': [ + (r'[^(\^")]+', String), + (escape_re, String.Escape), + (r'[\(|\)]+', String), + (r'\^.', String.Escape), + (r'"', String, '#pop'), + ], + 'string2': [ + (r'[^(\^{^})]+', String), + (escape_re, String.Escape), + (r'[\(|\)]+', String), + (r'\^.', String.Escape), + (r'{', String, '#push'), + (r'}', String, '#pop'), + ], + 'stringFile': [ + (r'[^(\^")]+', Name.Decorator), + (escape_re, Name.Decorator), + (r'\^.', Name.Decorator), + (r'"', Name.Decorator, '#pop'), + ], + 'char': [ + (escape_re + '"', String.Char, '#pop'), + (r'\^."', String.Char, '#pop'), + (r'."', String.Char, '#pop'), + ], + 'tag': [ + (escape_re, Name.Tag), + (r'"', Name.Tag, 'tagString'), + (r'[^(<>\r\n")]+', Name.Tag), + (r'>', Name.Tag, '#pop'), + ], + 'tagString': [ + (r'[^(\^")]+', Name.Tag), + (escape_re, Name.Tag), + (r'[\(|\)]+', Name.Tag), + (r'\^.', Name.Tag), + (r'"', Name.Tag, '#pop'), + ], + 'tuple': [ + (r'(\d+\.)+', Keyword.Constant), + (r'\d+', Keyword.Constant, '#pop'), + ], + 'bin2': [ + (r'\s+', Number.Hex), + (r'([0-1]\s*){8}', Number.Hex), + (r'}', Number.Hex, '#pop'), + ], + 'commentString1': [ + (r'[^(\^")]+', Comment), + (escape_re, Comment), + (r'[\(|\)]+', Comment), + (r'\^.', Comment), + (r'"', Comment, '#pop'), + ], + 'commentString2': [ + (r'[^(\^{^})]+', Comment), + (escape_re, Comment), + (r'[\(|\)]+', Comment), + (r'\^.', Comment), + (r'{', Comment, '#push'), + (r'}', Comment, '#pop'), + ], + 'commentBlock': [ + (r'\[', Comment, '#push'), + (r'\]', Comment, '#pop'), + (r'"', 
Comment, "commentString1"), + (r'{', Comment, "commentString2"), + (r'[^(\[\]\"{)]+', Comment), + ], + } diff --git a/pygments/lexers/misc/smalltalk.py b/pygments/lexers/misc/smalltalk.py new file mode 100644 index 00000000..6467a16d --- /dev/null +++ b/pygments/lexers/misc/smalltalk.py @@ -0,0 +1,195 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.misc.smalltalk + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for Smalltalk and related languages. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +from pygments.lexer import RegexLexer, include, bygroups, default +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation + +__all__ = ['SmalltalkLexer', 'NewspeakLexer'] + + +class SmalltalkLexer(RegexLexer): + """ + For `Smalltalk `_ syntax. + Contributed by Stefan Matthias Aust. + Rewritten by Nils Winter. + + .. versionadded:: 0.10 + """ + name = 'Smalltalk' + filenames = ['*.st'] + aliases = ['smalltalk', 'squeak', 'st'] + mimetypes = ['text/x-smalltalk'] + + tokens = { + 'root': [ + (r'(<)(\w+:)(.*?)(>)', bygroups(Text, Keyword, Text, Text)), + include('squeak fileout'), + include('whitespaces'), + include('method definition'), + (r'(\|)([\w\s]*)(\|)', bygroups(Operator, Name.Variable, Operator)), + include('objects'), + (r'\^|\:=|\_', Operator), + # temporaries + (r'[\]({}.;!]', Text), + ], + 'method definition': [ + # Not perfect can't allow whitespaces at the beginning and the + # without breaking everything + (r'([a-zA-Z]+\w*:)(\s*)(\w+)', + bygroups(Name.Function, Text, Name.Variable)), + (r'^(\b[a-zA-Z]+\w*\b)(\s*)$', bygroups(Name.Function, Text)), + (r'^([-+*/\\~<>=|&!?,@%]+)(\s*)(\w+)(\s*)$', + bygroups(Name.Function, Text, Name.Variable, Text)), + ], + 'blockvariables': [ + include('whitespaces'), + (r'(:)(\s*)(\w+)', + bygroups(Operator, Text, Name.Variable)), + (r'\|', Operator, '#pop'), + default('#pop'), # else pop + ], + 'literals': [ + (r"'(''|[^'])*'", String, 'afterobject'), + (r'\$.', String.Char, 'afterobject'), + (r'#\(', String.Symbol, 'parenth'), + (r'\)', Text, 'afterobject'), + (r'(\d+r)?-?\d+(\.\d+)?(e-?\d+)?', Number, 'afterobject'), + ], + '_parenth_helper': [ + include('whitespaces'), + (r'(\d+r)?-?\d+(\.\d+)?(e-?\d+)?', Number), + (r'[-+*/\\~<>=|&#!?,@%\w:]+', String.Symbol), + # literals + (r"'(''|[^'])*'", String), + (r'\$.', String.Char), + (r'#*\(', String.Symbol, 'inner_parenth'), + ], + 'parenth': [ + # This state is a bit tricky since + # we can't just pop this state + (r'\)', String.Symbol, ('root', 'afterobject')), + include('_parenth_helper'), + ], + 'inner_parenth': [ + (r'\)', String.Symbol, '#pop'), + include('_parenth_helper'), + ], + 'whitespaces': [ + # skip whitespace and comments + (r'\s+', Text), + (r'"(""|[^"])*"', Comment), + ], + 'objects': [ + (r'\[', Text, 'blockvariables'), + (r'\]', Text, 'afterobject'), + (r'\b(self|super|true|false|nil|thisContext)\b', + Name.Builtin.Pseudo, 'afterobject'), + (r'\b[A-Z]\w*(?!:)\b', Name.Class, 'afterobject'), + (r'\b[a-z]\w*(?!:)\b', Name.Variable, 'afterobject'), + (r'#("(""|[^"])*"|[-+*/\\~<>=|&!?,@%]+|[\w:]+)', + String.Symbol, 'afterobject'), + include('literals'), + ], + 'afterobject': [ + (r'! 
!$', Keyword, '#pop'), # squeak chunk delimiter + include('whitespaces'), + (r'\b(ifTrue:|ifFalse:|whileTrue:|whileFalse:|timesRepeat:)', + Name.Builtin, '#pop'), + (r'\b(new\b(?!:))', Name.Builtin), + (r'\:=|\_', Operator, '#pop'), + (r'\b[a-zA-Z]+\w*:', Name.Function, '#pop'), + (r'\b[a-zA-Z]+\w*', Name.Function), + (r'\w+:?|[-+*/\\~<>=|&!?,@%]+', Name.Function, '#pop'), + (r'\.', Punctuation, '#pop'), + (r';', Punctuation), + (r'[\])}]', Text), + (r'[\[({]', Text, '#pop'), + ], + 'squeak fileout': [ + # Squeak fileout format (optional) + (r'^"(""|[^"])*"!', Keyword), + (r"^'(''|[^'])*'!", Keyword), + (r'^(!)(\w+)( commentStamp: )(.*?)( prior: .*?!\n)(.*?)(!)', + bygroups(Keyword, Name.Class, Keyword, String, Keyword, Text, Keyword)), + (r"^(!)(\w+(?: class)?)( methodsFor: )('(?:''|[^'])*')(.*?!)", + bygroups(Keyword, Name.Class, Keyword, String, Keyword)), + (r'^(\w+)( subclass: )(#\w+)' + r'(\s+instanceVariableNames: )(.*?)' + r'(\s+classVariableNames: )(.*?)' + r'(\s+poolDictionaries: )(.*?)' + r'(\s+category: )(.*?)(!)', + bygroups(Name.Class, Keyword, String.Symbol, Keyword, String, Keyword, + String, Keyword, String, Keyword, String, Keyword)), + (r'^(\w+(?: class)?)(\s+instanceVariableNames: )(.*?)(!)', + bygroups(Name.Class, Keyword, String, Keyword)), + (r'(!\n)(\].*)(! !)$', bygroups(Keyword, Text, Keyword)), + (r'! !$', Keyword), + ], + } + + +class NewspeakLexer(RegexLexer): + """ + For `Newspeak ` syntax. + + .. versionadded:: 1.1 + """ + name = 'Newspeak' + filenames = ['*.ns2'] + aliases = ['newspeak', ] + mimetypes = ['text/x-newspeak'] + + tokens = { + 'root': [ + (r'\b(Newsqueak2)\b', Keyword.Declaration), + (r"'[^']*'", String), + (r'\b(class)(\s+)(\w+)(\s*)', + bygroups(Keyword.Declaration, Text, Name.Class, Text)), + (r'\b(mixin|self|super|private|public|protected|nil|true|false)\b', + Keyword), + (r'(\w+\:)(\s*)([a-zA-Z_]\w+)', + bygroups(Name.Function, Text, Name.Variable)), + (r'(\w+)(\s*)(=)', + bygroups(Name.Attribute, Text, Operator)), + (r'<\w+>', Comment.Special), + include('expressionstat'), + include('whitespace') + ], + + 'expressionstat': [ + (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float), + (r'\d+', Number.Integer), + (r':\w+', Name.Variable), + (r'(\w+)(::)', bygroups(Name.Variable, Operator)), + (r'\w+:', Name.Function), + (r'\w+', Name.Variable), + (r'\(|\)', Punctuation), + (r'\[|\]', Punctuation), + (r'\{|\}', Punctuation), + + (r'(\^|\+|\/|~|\*|<|>|=|@|%|\||&|\?|!|,|-|:)', Operator), + (r'\.|;', Punctuation), + include('whitespace'), + include('literals'), + ], + 'literals': [ + (r'\$.', String), + (r"'[^']*'", String), + (r"#'[^']*'", String.Symbol), + (r"#\w+:?", String.Symbol), + (r"#(\+|\/|~|\*|<|>|=|@|%|\||&|\?|!|,|-)+", String.Symbol) + ], + 'whitespace': [ + (r'\s+', Text), + (r'"[^"]*"', Comment) + ], + } diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py index 834f0b27..daf204e5 100644 --- a/pygments/lexers/other.py +++ b/pygments/lexers/other.py @@ -11,7 +11,7 @@ import re from pygments.lexer import ExtendedRegexLexer, RegexLexer, include, bygroups, \ - default, using, this, combined + default, using, this, combined, words from pygments.token import Generic, Comment, String, Text, Number, Keyword, Name, \ Error, Operator, Punctuation, Literal, Whitespace @@ -21,26 +21,27 @@ from pygments.lexers.shell import BashLexer, BashSessionLexer, BatchLexer, \ TcshLexer from pygments.lexers.robotframework import RobotFrameworkLexer from pygments.lexers.testing import GherkinLexer -from pygments.lexers.esoteric import BrainfuckLexer, 
BefungeLexer +from pygments.lexers.esoteric import BrainfuckLexer, BefungeLexer, RedcodeLexer from pygments.lexers.prolog import LogtalkLexer from pygments.lexers.misc.snobol import SnobolLexer from pygments.lexers.misc.rebol import RebolLexer from pygments.lexers.configs import KconfigLexer from pygments.lexers.modeling import ModelicaLexer -from pygments.lexers.scripting import AppleScriptLexer +from pygments.lexers.scripting import AppleScriptLexer, MOOCodeLexer from pygments.lexers.graphics import PostScriptLexer, GnuplotLexer, \ AsymptoteLexer, PovrayLexer -from pygments.lexers.business import ABAPLexer, OpenEdgeLexer +from pygments.lexers.business import ABAPLexer, OpenEdgeLexer, \ + GoodDataCLLexer, MaqlLexer +from pygments.lexers.automation import AutoItLexer, AutohotkeyLexer +from pygments.lexers.dsls import ProtoBufLexer, BroLexer, PuppetLexer, \ + MscgenLexer, VGLLexer +from pygments.lexers.misc.basic import CbmBasicV2Lexer +from pygments.lexers.misc.pawn import SourcePawnLexer, PawnLexer +from pygments.lexers.installers import NSISLexer, RPMSpecLexer +from pygments.lexers.misc.smalltalk import SmalltalkLexer, NewspeakLexer - -__all__ = ['RedcodeLexer', 'MOOCodeLexer', 'SmalltalkLexer', 'NewspeakLexer', - 'AutohotkeyLexer', 'GoodDataCLLexer', 'MaqlLexer', 'ProtoBufLexer', - 'HybrisLexer', 'AwkLexer', 'Cfengine3Lexer', 'ECLLexer', - 'UrbiscriptLexer', 'BroLexer', 'MscgenLexer', 'VGLLexer', - 'SourcePawnLexer', 'PuppetLexer', 'NSISLexer', 'RPMSpecLexer', - 'CbmBasicV2Lexer', 'AutoItLexer', 'RexxLexer', 'APLLexer', - 'AmbientTalkLexer', 'PawnLexer', 'RslLexer', 'PanLexer', 'RedLexer', - 'AlloyLexer'] +__all__ = ['HybrisLexer', 'AwkLexer', 'Cfengine3Lexer', 'ECLLexer', + 'UrbiscriptLexer', 'AmbientTalkLexer', 'PanLexer'] class ECLLexer(RegexLexer): @@ -97,43 +98,50 @@ class ECLLexer(RegexLexer): bygroups(Keyword.Type, Text)), ], 'keywords': [ - (r'(APPLY|ASSERT|BUILD|BUILDINDEX|EVALUATE|FAIL|KEYDIFF|KEYPATCH|' - r'LOADXML|NOTHOR|NOTIFY|OUTPUT|PARALLEL|SEQUENTIAL|SOAPCALL|WAIT' - r'CHECKPOINT|DEPRECATED|FAILCODE|FAILMESSAGE|FAILURE|GLOBAL|' - r'INDEPENDENT|ONWARNING|PERSIST|PRIORITY|RECOVERY|STORED|SUCCESS|' - r'WAIT|WHEN)\b', Keyword.Reserved), + (words(( + 'APPLY', 'ASSERT', 'BUILD', 'BUILDINDEX', 'EVALUATE', 'FAIL', + 'KEYDIFF', 'KEYPATCH', 'LOADXML', 'NOTHOR', 'NOTIFY', 'OUTPUT', + 'PARALLEL', 'SEQUENTIAL', 'SOAPCALL', 'CHECKPOINT', 'DEPRECATED', + 'FAILCODE', 'FAILMESSAGE', 'FAILURE', 'GLOBAL', 'INDEPENDENT', + 'ONWARNING', 'PERSIST', 'PRIORITY', 'RECOVERY', 'STORED', 'SUCCESS', + 'WAIT', 'WHEN'), suffix=r'\b'), + Keyword.Reserved), # These are classed differently, check later - (r'(ALL|AND|ANY|AS|ATMOST|BEFORE|BEGINC\+\+|BEST|BETWEEN|CASE|CONST|' - r'COUNTER|CSV|DESCEND|ENCRYPT|ENDC\+\+|ENDMACRO|EXCEPT|EXCLUSIVE|' - r'EXPIRE|EXPORT|EXTEND|FALSE|FEW|FIRST|FLAT|FULL|FUNCTION|GROUP|' - r'HEADER|HEADING|HOLE|IFBLOCK|IMPORT|IN|JOINED|KEEP|KEYED|LAST|' - r'LEFT|LIMIT|LOAD|LOCAL|LOCALE|LOOKUP|MACRO|MANY|MAXCOUNT|' - r'MAXLENGTH|MIN SKEW|MODULE|INTERFACE|NAMED|NOCASE|NOROOT|NOSCAN|' - r'NOSORT|NOT|OF|ONLY|OPT|OR|OUTER|OVERWRITE|PACKED|PARTITION|' - r'PENALTY|PHYSICALLENGTH|PIPE|QUOTE|RELATIONSHIP|REPEAT|RETURN|' - r'RIGHT|SCAN|SELF|SEPARATOR|SERVICE|SHARED|SKEW|SKIP|SQL|STORE|' - r'TERMINATOR|THOR|THRESHOLD|TOKEN|TRANSFORM|TRIM|TRUE|TYPE|' - r'UNICODEORDER|UNSORTED|VALIDATE|VIRTUAL|WHOLE|WILD|WITHIN|XML|' - r'XPATH|__COMPRESSED__)\b', Keyword.Reserved), + (words(( + 'ALL', 'AND', 'ANY', 'AS', 'ATMOST', 'BEFORE', 'BEGINC++', 'BEST', 'BETWEEN', 'CASE', 'CONST', + 'COUNTER', 'CSV', 
'DESCEND', 'ENCRYPT', 'ENDC++', 'ENDMACRO', 'EXCEPT', 'EXCLUSIVE', + 'EXPIRE', 'EXPORT', 'EXTEND', 'FALSE', 'FEW', 'FIRST', 'FLAT', 'FULL', 'FUNCTION', 'GROUP', + 'HEADER', 'HEADING', 'HOLE', 'IFBLOCK', 'IMPORT', 'IN', 'JOINED', 'KEEP', 'KEYED', 'LAST', + 'LEFT', 'LIMIT', 'LOAD', 'LOCAL', 'LOCALE', 'LOOKUP', 'MACRO', 'MANY', 'MAXCOUNT', + 'MAXLENGTH', 'MIN SKEW', 'MODULE', 'INTERFACE', 'NAMED', 'NOCASE', 'NOROOT', 'NOSCAN', + 'NOSORT', 'NOT', 'OF', 'ONLY', 'OPT', 'OR', 'OUTER', 'OVERWRITE', 'PACKED', 'PARTITION', + 'PENALTY', 'PHYSICALLENGTH', 'PIPE', 'QUOTE', 'RELATIONSHIP', 'REPEAT', 'RETURN', + 'RIGHT', 'SCAN', 'SELF', 'SEPARATOR', 'SERVICE', 'SHARED', 'SKEW', 'SKIP', 'SQL', 'STORE', + 'TERMINATOR', 'THOR', 'THRESHOLD', 'TOKEN', 'TRANSFORM', 'TRIM', 'TRUE', 'TYPE', + 'UNICODEORDER', 'UNSORTED', 'VALIDATE', 'VIRTUAL', 'WHOLE', 'WILD', 'WITHIN', 'XML', + 'XPATH', '__COMPRESSED__'), suffix=r'\b'), + Keyword.Reserved), ], 'functions': [ - (r'(ABS|ACOS|ALLNODES|ASCII|ASIN|ASSTRING|ATAN|ATAN2|AVE|CASE|' - r'CHOOSE|CHOOSEN|CHOOSESETS|CLUSTERSIZE|COMBINE|CORRELATION|COS|' - r'COSH|COUNT|COVARIANCE|CRON|DATASET|DEDUP|DEFINE|DENORMALIZE|' - r'DISTRIBUTE|DISTRIBUTED|DISTRIBUTION|EBCDIC|ENTH|ERROR|EVALUATE|' - r'EVENT|EVENTEXTRA|EVENTNAME|EXISTS|EXP|FAILCODE|FAILMESSAGE|' - r'FETCH|FROMUNICODE|GETISVALID|GLOBAL|GRAPH|GROUP|HASH|HASH32|' - r'HASH64|HASHCRC|HASHMD5|HAVING|IF|INDEX|INTFORMAT|ISVALID|' - r'ITERATE|JOIN|KEYUNICODE|LENGTH|LIBRARY|LIMIT|LN|LOCAL|LOG|LOOP|' - r'MAP|MATCHED|MATCHLENGTH|MATCHPOSITION|MATCHTEXT|MATCHUNICODE|' - r'MAX|MERGE|MERGEJOIN|MIN|NOLOCAL|NONEMPTY|NORMALIZE|PARSE|PIPE|' - r'POWER|PRELOAD|PROCESS|PROJECT|PULL|RANDOM|RANGE|RANK|RANKED|' - r'REALFORMAT|RECORDOF|REGEXFIND|REGEXREPLACE|REGROUP|REJECTED|' - r'ROLLUP|ROUND|ROUNDUP|ROW|ROWDIFF|SAMPLE|SET|SIN|SINH|SIZEOF|' - r'SOAPCALL|SORT|SORTED|SQRT|STEPPED|STORED|SUM|TABLE|TAN|TANH|' - r'THISNODE|TOPN|TOUNICODE|TRANSFER|TRIM|TRUNCATE|TYPEOF|UNGROUP|' - r'UNICODEORDER|VARIANCE|WHICH|WORKUNIT|XMLDECODE|XMLENCODE|' - r'XMLTEXT|XMLUNICODE)\b', Name.Function), + (words(( + 'ABS', 'ACOS', 'ALLNODES', 'ASCII', 'ASIN', 'ASSTRING', 'ATAN', 'ATAN2', 'AVE', 'CASE', + 'CHOOSE', 'CHOOSEN', 'CHOOSESETS', 'CLUSTERSIZE', 'COMBINE', 'CORRELATION', 'COS', + 'COSH', 'COUNT', 'COVARIANCE', 'CRON', 'DATASET', 'DEDUP', 'DEFINE', 'DENORMALIZE', + 'DISTRIBUTE', 'DISTRIBUTED', 'DISTRIBUTION', 'EBCDIC', 'ENTH', 'ERROR', 'EVALUATE', + 'EVENT', 'EVENTEXTRA', 'EVENTNAME', 'EXISTS', 'EXP', 'FAILCODE', 'FAILMESSAGE', + 'FETCH', 'FROMUNICODE', 'GETISVALID', 'GLOBAL', 'GRAPH', 'GROUP', 'HASH', 'HASH32', + 'HASH64', 'HASHCRC', 'HASHMD5', 'HAVING', 'IF', 'INDEX', 'INTFORMAT', 'ISVALID', + 'ITERATE', 'JOIN', 'KEYUNICODE', 'LENGTH', 'LIBRARY', 'LIMIT', 'LN', 'LOCAL', 'LOG', 'LOOP', + 'MAP', 'MATCHED', 'MATCHLENGTH', 'MATCHPOSITION', 'MATCHTEXT', 'MATCHUNICODE', + 'MAX', 'MERGE', 'MERGEJOIN', 'MIN', 'NOLOCAL', 'NONEMPTY', 'NORMALIZE', 'PARSE', 'PIPE', + 'POWER', 'PRELOAD', 'PROCESS', 'PROJECT', 'PULL', 'RANDOM', 'RANGE', 'RANK', 'RANKED', + 'REALFORMAT', 'RECORDOF', 'REGEXFIND', 'REGEXREPLACE', 'REGROUP', 'REJECTED', + 'ROLLUP', 'ROUND', 'ROUNDUP', 'ROW', 'ROWDIFF', 'SAMPLE', 'SET', 'SIN', 'SINH', 'SIZEOF', + 'SOAPCALL', 'SORT', 'SORTED', 'SQRT', 'STEPPED', 'STORED', 'SUM', 'TABLE', 'TAN', 'TANH', + 'THISNODE', 'TOPN', 'TOUNICODE', 'TRANSFER', 'TRIM', 'TRUNCATE', 'TYPEOF', 'UNGROUP', + 'UNICODEORDER', 'VARIANCE', 'WHICH', 'WORKUNIT', 'XMLDECODE', 'XMLENCODE', + 'XMLTEXT', 'XMLUNICODE'), suffix=r'\b'), + Name.Function), ], 'string': [ (r'"', String, '#pop'), @@ 
-143,599 +151,6 @@ class ECLLexer(RegexLexer): } -class RedcodeLexer(RegexLexer): - """ - A simple Redcode lexer based on ICWS'94. - Contributed by Adam Blinkinsop . - - .. versionadded:: 0.8 - """ - name = 'Redcode' - aliases = ['redcode'] - filenames = ['*.cw'] - - opcodes = ['DAT','MOV','ADD','SUB','MUL','DIV','MOD', - 'JMP','JMZ','JMN','DJN','CMP','SLT','SPL', - 'ORG','EQU','END'] - modifiers = ['A','B','AB','BA','F','X','I'] - - tokens = { - 'root': [ - # Whitespace: - (r'\s+', Text), - (r';.*$', Comment.Single), - # Lexemes: - # Identifiers - (r'\b(%s)\b' % '|'.join(opcodes), Name.Function), - (r'\b(%s)\b' % '|'.join(modifiers), Name.Decorator), - (r'[A-Za-z_][A-Za-z_0-9]+', Name), - # Operators - (r'[-+*/%]', Operator), - (r'[#$@<>]', Operator), # mode - (r'[.,]', Punctuation), # mode - # Numbers - (r'[-+]?\d+', Number.Integer), - ], - } - - -class MOOCodeLexer(RegexLexer): - """ - For `MOOCode `_ (the MOO scripting - language). - - .. versionadded:: 0.9 - """ - name = 'MOOCode' - filenames = ['*.moo'] - aliases = ['moocode', 'moo'] - mimetypes = ['text/x-moocode'] - - tokens = { - 'root' : [ - # Numbers - (r'(0|[1-9][0-9_]*)', Number.Integer), - # Strings - (r'"(\\\\|\\"|[^"])*"', String), - # exceptions - (r'(E_PERM|E_DIV)', Name.Exception), - # db-refs - (r'((#[-0-9]+)|(\$[a-z_A-Z0-9]+))', Name.Entity), - # Keywords - (r'\b(if|else|elseif|endif|for|endfor|fork|endfork|while' - r'|endwhile|break|continue|return|try' - r'|except|endtry|finally|in)\b', Keyword), - # builtins - (r'(random|length)', Name.Builtin), - # special variables - (r'(player|caller|this|args)', Name.Variable.Instance), - # skip whitespace - (r'\s+', Text), - (r'\n', Text), - # other operators - (r'([!;=,{}&\|:\.\[\]@\(\)\<\>\?]+)', Operator), - # function call - (r'([a-z_A-Z0-9]+)(\()', bygroups(Name.Function, Operator)), - # variables - (r'([a-zA-Z_0-9]+)', Text), - ] - } - - -class SmalltalkLexer(RegexLexer): - """ - For `Smalltalk `_ syntax. - Contributed by Stefan Matthias Aust. - Rewritten by Nils Winter. - - .. 
versionadded:: 0.10 - """ - name = 'Smalltalk' - filenames = ['*.st'] - aliases = ['smalltalk', 'squeak', 'st'] - mimetypes = ['text/x-smalltalk'] - - tokens = { - 'root' : [ - (r'(<)(\w+:)(.*?)(>)', bygroups(Text, Keyword, Text, Text)), - include('squeak fileout'), - include('whitespaces'), - include('method definition'), - (r'(\|)([\w\s]*)(\|)', bygroups(Operator, Name.Variable, Operator)), - include('objects'), - (r'\^|\:=|\_', Operator), - # temporaries - (r'[\]({}.;!]', Text), - ], - 'method definition' : [ - # Not perfect can't allow whitespaces at the beginning and the - # without breaking everything - (r'([a-zA-Z]+\w*:)(\s*)(\w+)', - bygroups(Name.Function, Text, Name.Variable)), - (r'^(\b[a-zA-Z]+\w*\b)(\s*)$', bygroups(Name.Function, Text)), - (r'^([-+*/\\~<>=|&!?,@%]+)(\s*)(\w+)(\s*)$', - bygroups(Name.Function, Text, Name.Variable, Text)), - ], - 'blockvariables' : [ - include('whitespaces'), - (r'(:)(\s*)(\w+)', - bygroups(Operator, Text, Name.Variable)), - (r'\|', Operator, '#pop'), - default('#pop'), # else pop - ], - 'literals' : [ - (r"'(''|[^'])*'", String, 'afterobject'), - (r'\$.', String.Char, 'afterobject'), - (r'#\(', String.Symbol, 'parenth'), - (r'\)', Text, 'afterobject'), - (r'(\d+r)?-?\d+(\.\d+)?(e-?\d+)?', Number, 'afterobject'), - ], - '_parenth_helper' : [ - include('whitespaces'), - (r'(\d+r)?-?\d+(\.\d+)?(e-?\d+)?', Number), - (r'[-+*/\\~<>=|&#!?,@%\w:]+', String.Symbol), - # literals - (r"'(''|[^'])*'", String), - (r'\$.', String.Char), - (r'#*\(', String.Symbol, 'inner_parenth'), - ], - 'parenth' : [ - # This state is a bit tricky since - # we can't just pop this state - (r'\)', String.Symbol, ('root', 'afterobject')), - include('_parenth_helper'), - ], - 'inner_parenth': [ - (r'\)', String.Symbol, '#pop'), - include('_parenth_helper'), - ], - 'whitespaces' : [ - # skip whitespace and comments - (r'\s+', Text), - (r'"(""|[^"])*"', Comment), - ], - 'objects' : [ - (r'\[', Text, 'blockvariables'), - (r'\]', Text, 'afterobject'), - (r'\b(self|super|true|false|nil|thisContext)\b', - Name.Builtin.Pseudo, 'afterobject'), - (r'\b[A-Z]\w*(?!:)\b', Name.Class, 'afterobject'), - (r'\b[a-z]\w*(?!:)\b', Name.Variable, 'afterobject'), - (r'#("(""|[^"])*"|[-+*/\\~<>=|&!?,@%]+|[\w:]+)', - String.Symbol, 'afterobject'), - include('literals'), - ], - 'afterobject' : [ - (r'! 
!$', Keyword , '#pop'), # squeak chunk delimiter - include('whitespaces'), - (r'\b(ifTrue:|ifFalse:|whileTrue:|whileFalse:|timesRepeat:)', - Name.Builtin, '#pop'), - (r'\b(new\b(?!:))', Name.Builtin), - (r'\:=|\_', Operator, '#pop'), - (r'\b[a-zA-Z]+\w*:', Name.Function, '#pop'), - (r'\b[a-zA-Z]+\w*', Name.Function), - (r'\w+:?|[-+*/\\~<>=|&!?,@%]+', Name.Function, '#pop'), - (r'\.', Punctuation, '#pop'), - (r';', Punctuation), - (r'[\])}]', Text), - (r'[\[({]', Text, '#pop'), - ], - 'squeak fileout' : [ - # Squeak fileout format (optional) - (r'^"(""|[^"])*"!', Keyword), - (r"^'(''|[^'])*'!", Keyword), - (r'^(!)(\w+)( commentStamp: )(.*?)( prior: .*?!\n)(.*?)(!)', - bygroups(Keyword, Name.Class, Keyword, String, Keyword, Text, Keyword)), - (r"^(!)(\w+(?: class)?)( methodsFor: )('(?:''|[^'])*')(.*?!)", - bygroups(Keyword, Name.Class, Keyword, String, Keyword)), - (r'^(\w+)( subclass: )(#\w+)' - r'(\s+instanceVariableNames: )(.*?)' - r'(\s+classVariableNames: )(.*?)' - r'(\s+poolDictionaries: )(.*?)' - r'(\s+category: )(.*?)(!)', - bygroups(Name.Class, Keyword, String.Symbol, Keyword, String, Keyword, - String, Keyword, String, Keyword, String, Keyword)), - (r'^(\w+(?: class)?)(\s+instanceVariableNames: )(.*?)(!)', - bygroups(Name.Class, Keyword, String, Keyword)), - (r'(!\n)(\].*)(! !)$', bygroups(Keyword, Text, Keyword)), - (r'! !$', Keyword), - ], - } - - -class NewspeakLexer(RegexLexer): - """ - For `Newspeak ` syntax. - """ - name = 'Newspeak' - filenames = ['*.ns2'] - aliases = ['newspeak', ] - mimetypes = ['text/x-newspeak'] - - tokens = { - 'root' : [ - (r'\b(Newsqueak2)\b',Keyword.Declaration), - (r"'[^']*'",String), - (r'\b(class)(\s+)(\w+)(\s*)', - bygroups(Keyword.Declaration,Text,Name.Class,Text)), - (r'\b(mixin|self|super|private|public|protected|nil|true|false)\b', - Keyword), - (r'(\w+\:)(\s*)([a-zA-Z_]\w+)', - bygroups(Name.Function,Text,Name.Variable)), - (r'(\w+)(\s*)(=)', - bygroups(Name.Attribute,Text,Operator)), - (r'<\w+>', Comment.Special), - include('expressionstat'), - include('whitespace') - ], - - 'expressionstat': [ - (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float), - (r'\d+', Number.Integer), - (r':\w+',Name.Variable), - (r'(\w+)(::)', bygroups(Name.Variable, Operator)), - (r'\w+:', Name.Function), - (r'\w+', Name.Variable), - (r'\(|\)', Punctuation), - (r'\[|\]', Punctuation), - (r'\{|\}', Punctuation), - - (r'(\^|\+|\/|~|\*|<|>|=|@|%|\||&|\?|!|,|-|:)', Operator), - (r'\.|;', Punctuation), - include('whitespace'), - include('literals'), - ], - 'literals': [ - (r'\$.', String), - (r"'[^']*'", String), - (r"#'[^']*'", String.Symbol), - (r"#\w+:?", String.Symbol), - (r"#(\+|\/|~|\*|<|>|=|@|%|\||&|\?|!|,|-)+", String.Symbol) - - ], - 'whitespace' : [ - (r'\s+', Text), - (r'"[^"]*"', Comment) - ] - } - - -class AutohotkeyLexer(RegexLexer): - """ - For `autohotkey `_ source code. - - .. 
versionadded:: 1.4 - """ - name = 'autohotkey' - aliases = ['ahk', 'autohotkey'] - filenames = ['*.ahk', '*.ahkl'] - mimetypes = ['text/x-autohotkey'] - - tokens = { - 'root': [ - (r'^(\s*)(/\*)', bygroups(Text, Comment.Multiline), - 'incomment'), - (r'^(\s*)(\()', bygroups(Text, Generic), 'incontinuation'), - (r'\s+;.*?$', Comment.Singleline), - (r'^;.*?$', Comment.Singleline), - (r'[]{}(),;[]', Punctuation), - (r'(in|is|and|or|not)\b', Operator.Word), - (r'\%[a-zA-Z_#@$][\w#@$]*\%', Name.Variable), - (r'!=|==|:=|\.=|<<|>>|[-~+/*%=<>&^|?:!.]', Operator), - include('commands'), - include('labels'), - include('builtInFunctions'), - include('builtInVariables'), - (r'"', String, combined('stringescape', 'dqs')), - include('numbers'), - (r'[a-zA-Z_#@$][\w#@$]*', Name), - (r'\\|\'', Text), - (r'\`([\,\%\`abfnrtv\-\+;])', String.Escape), - include('garbage'), - ], - 'incomment': [ - (r'^\s*\*/', Comment.Multiline, '#pop'), - (r'[^*/]', Comment.Multiline), - (r'[*/]', Comment.Multiline) - ], - 'incontinuation': [ - (r'^\s*\)', Generic, '#pop'), - (r'[^)]', Generic), - (r'[)]', Generic), - ], - 'commands': [ - (r'(?i)^(\s*)(global|local|static|' - r'#AllowSameLineComments|#ClipboardTimeout|#CommentFlag|' - r'#ErrorStdOut|#EscapeChar|#HotkeyInterval|#HotkeyModifierTimeout|' - r'#Hotstring|#IfWinActive|#IfWinExist|#IfWinNotActive|' - r'#IfWinNotExist|#IncludeAgain|#Include|#InstallKeybdHook|' - r'#InstallMouseHook|#KeyHistory|#LTrim|#MaxHotkeysPerInterval|' - r'#MaxMem|#MaxThreads|#MaxThreadsBuffer|#MaxThreadsPerHotkey|' - r'#NoEnv|#NoTrayIcon|#Persistent|#SingleInstance|#UseHook|' - r'#WinActivateForce|AutoTrim|BlockInput|Break|Click|ClipWait|' - r'Continue|Control|ControlClick|ControlFocus|ControlGetFocus|' - r'ControlGetPos|ControlGetText|ControlGet|ControlMove|ControlSend|' - r'ControlSendRaw|ControlSetText|CoordMode|Critical|' - r'DetectHiddenText|DetectHiddenWindows|Drive|DriveGet|' - r'DriveSpaceFree|Edit|Else|EnvAdd|EnvDiv|EnvGet|EnvMult|EnvSet|' - r'EnvSub|EnvUpdate|Exit|ExitApp|FileAppend|' - r'FileCopy|FileCopyDir|FileCreateDir|FileCreateShortcut|' - r'FileDelete|FileGetAttrib|FileGetShortcut|FileGetSize|' - r'FileGetTime|FileGetVersion|FileInstall|FileMove|FileMoveDir|' - r'FileRead|FileReadLine|FileRecycle|FileRecycleEmpty|' - r'FileRemoveDir|FileSelectFile|FileSelectFolder|FileSetAttrib|' - r'FileSetTime|FormatTime|GetKeyState|Gosub|Goto|GroupActivate|' - r'GroupAdd|GroupClose|GroupDeactivate|Gui|GuiControl|' - r'GuiControlGet|Hotkey|IfEqual|IfExist|IfGreaterOrEqual|IfGreater|' - r'IfInString|IfLess|IfLessOrEqual|IfMsgBox|IfNotEqual|IfNotExist|' - r'IfNotInString|IfWinActive|IfWinExist|IfWinNotActive|' - r'IfWinNotExist|If |ImageSearch|IniDelete|IniRead|IniWrite|' - r'InputBox|Input|KeyHistory|KeyWait|ListHotkeys|ListLines|' - r'ListVars|Loop|Menu|MouseClickDrag|MouseClick|MouseGetPos|' - r'MouseMove|MsgBox|OnExit|OutputDebug|Pause|PixelGetColor|' - r'PixelSearch|PostMessage|Process|Progress|Random|RegDelete|' - r'RegRead|RegWrite|Reload|Repeat|Return|RunAs|RunWait|Run|' - r'SendEvent|SendInput|SendMessage|SendMode|SendPlay|SendRaw|Send|' - r'SetBatchLines|SetCapslockState|SetControlDelay|' - r'SetDefaultMouseSpeed|SetEnv|SetFormat|SetKeyDelay|' - r'SetMouseDelay|SetNumlockState|SetScrollLockState|' - r'SetStoreCapslockMode|SetTimer|SetTitleMatchMode|' - r'SetWinDelay|SetWorkingDir|Shutdown|Sleep|Sort|SoundBeep|' - r'SoundGet|SoundGetWaveVolume|SoundPlay|SoundSet|' - r'SoundSetWaveVolume|SplashImage|SplashTextOff|SplashTextOn|' - 
r'SplitPath|StatusBarGetText|StatusBarWait|StringCaseSense|' - r'StringGetPos|StringLeft|StringLen|StringLower|StringMid|' - r'StringReplace|StringRight|StringSplit|StringTrimLeft|' - r'StringTrimRight|StringUpper|Suspend|SysGet|Thread|ToolTip|' - r'Transform|TrayTip|URLDownloadToFile|While|WinActivate|' - r'WinActivateBottom|WinClose|WinGetActiveStats|WinGetActiveTitle|' - r'WinGetClass|WinGetPos|WinGetText|WinGetTitle|WinGet|WinHide|' - r'WinKill|WinMaximize|WinMenuSelectItem|WinMinimizeAllUndo|' - r'WinMinimizeAll|WinMinimize|WinMove|WinRestore|WinSetTitle|' - r'WinSet|WinShow|WinWaitActive|WinWaitClose|WinWaitNotActive|' - r'WinWait)\b', bygroups(Text, Name.Builtin)), - ], - 'builtInFunctions': [ - (r'(?i)(Abs|ACos|Asc|ASin|ATan|Ceil|Chr|Cos|DllCall|Exp|FileExist|' - r'Floor|GetKeyState|IL_Add|IL_Create|IL_Destroy|InStr|IsFunc|' - r'IsLabel|Ln|Log|LV_Add|LV_Delete|LV_DeleteCol|LV_GetCount|' - r'LV_GetNext|LV_GetText|LV_Insert|LV_InsertCol|LV_Modify|' - r'LV_ModifyCol|LV_SetImageList|Mod|NumGet|NumPut|OnMessage|' - r'RegExMatch|RegExReplace|RegisterCallback|Round|SB_SetIcon|' - r'SB_SetParts|SB_SetText|Sin|Sqrt|StrLen|SubStr|Tan|TV_Add|' - r'TV_Delete|TV_GetChild|TV_GetCount|TV_GetNext|TV_Get|' - r'TV_GetParent|TV_GetPrev|TV_GetSelection|TV_GetText|TV_Modify|' - r'VarSetCapacity|WinActive|WinExist|Object|ComObjActive|' - r'ComObjArray|ComObjEnwrap|ComObjUnwrap|ComObjParameter|' - r'ComObjType|ComObjConnect|ComObjCreate|ComObjGet|ComObjError|' - r'ComObjValue|Insert|MinIndex|MaxIndex|Remove|SetCapacity|' - r'GetCapacity|GetAddress|_NewEnum|FileOpen|Read|Write|ReadLine|' - r'WriteLine|ReadNumType|WriteNumType|RawRead|RawWrite|Seek|Tell|' - r'Close|Next|IsObject|StrPut|StrGet|Trim|LTrim|RTrim)\b', - Name.Function), - ], - 'builtInVariables': [ - (r'(?i)(A_AhkPath|A_AhkVersion|A_AppData|A_AppDataCommon|' - r'A_AutoTrim|A_BatchLines|A_CaretX|A_CaretY|A_ComputerName|' - r'A_ControlDelay|A_Cursor|A_DDDD|A_DDD|A_DD|A_DefaultMouseSpeed|' - r'A_Desktop|A_DesktopCommon|A_DetectHiddenText|' - r'A_DetectHiddenWindows|A_EndChar|A_EventInfo|A_ExitReason|' - r'A_FormatFloat|A_FormatInteger|A_Gui|A_GuiEvent|A_GuiControl|' - r'A_GuiControlEvent|A_GuiHeight|A_GuiWidth|A_GuiX|A_GuiY|A_Hour|' - r'A_IconFile|A_IconHidden|A_IconNumber|A_IconTip|A_Index|' - r'A_IPAddress1|A_IPAddress2|A_IPAddress3|A_IPAddress4|A_ISAdmin|' - r'A_IsCompiled|A_IsCritical|A_IsPaused|A_IsSuspended|A_KeyDelay|' - r'A_Language|A_LastError|A_LineFile|A_LineNumber|A_LoopField|' - r'A_LoopFileAttrib|A_LoopFileDir|A_LoopFileExt|A_LoopFileFullPath|' - r'A_LoopFileLongPath|A_LoopFileName|A_LoopFileShortName|' - r'A_LoopFileShortPath|A_LoopFileSize|A_LoopFileSizeKB|' - r'A_LoopFileSizeMB|A_LoopFileTimeAccessed|A_LoopFileTimeCreated|' - r'A_LoopFileTimeModified|A_LoopReadLine|A_LoopRegKey|' - r'A_LoopRegName|A_LoopRegSubkey|A_LoopRegTimeModified|' - r'A_LoopRegType|A_MDAY|A_Min|A_MM|A_MMM|A_MMMM|A_Mon|A_MouseDelay|' - r'A_MSec|A_MyDocuments|A_Now|A_NowUTC|A_NumBatchLines|A_OSType|' - r'A_OSVersion|A_PriorHotkey|A_ProgramFiles|A_Programs|' - r'A_ProgramsCommon|A_ScreenHeight|A_ScreenWidth|A_ScriptDir|' - r'A_ScriptFullPath|A_ScriptName|A_Sec|A_Space|A_StartMenu|' - r'A_StartMenuCommon|A_Startup|A_StartupCommon|A_StringCaseSense|' - r'A_Tab|A_Temp|A_ThisFunc|A_ThisHotkey|A_ThisLabel|A_ThisMenu|' - r'A_ThisMenuItem|A_ThisMenuItemPos|A_TickCount|A_TimeIdle|' - r'A_TimeIdlePhysical|A_TimeSincePriorHotkey|A_TimeSinceThisHotkey|' - r'A_TitleMatchMode|A_TitleMatchModeSpeed|A_UserName|A_WDay|' - 
r'A_WinDelay|A_WinDir|A_WorkingDir|A_YDay|A_YEAR|A_YWeek|A_YYYY|' - r'Clipboard|ClipboardAll|ComSpec|ErrorLevel|ProgramFiles|True|' - r'False|A_IsUnicode|A_FileEncoding|A_OSVersion|A_PtrSize)\b', - Name.Variable), - ], - 'labels': [ - # hotkeys and labels - # technically, hotkey names are limited to named keys and buttons - (r'(^\s*)([^:\s\(\"]+?:{1,2})', bygroups(Text, Name.Label)), - (r'(^\s*)(::[^:\s]+?::)', bygroups(Text, Name.Label)), - ], - 'numbers': [ - (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float), - (r'\d+[eE][+-]?[0-9]+', Number.Float), - (r'0\d+', Number.Oct), - (r'0[xX][a-fA-F0-9]+', Number.Hex), - (r'\d+L', Number.Integer.Long), - (r'\d+', Number.Integer) - ], - 'stringescape': [ - (r'\"\"|\`([\,\%\`abfnrtv])', String.Escape), - ], - 'strings': [ - (r'[^"\n]+', String), - ], - 'dqs': [ - (r'"', String, '#pop'), - include('strings') - ], - 'garbage': [ - (r'[^\S\n]', Text), - # (r'.', Text), # no cheating - ], - } - - -class MaqlLexer(RegexLexer): - """ - Lexer for `GoodData MAQL - `_ - scripts. - - .. versionadded:: 1.4 - """ - - name = 'MAQL' - aliases = ['maql'] - filenames = ['*.maql'] - mimetypes = ['text/x-gooddata-maql','application/x-gooddata-maql'] - - flags = re.IGNORECASE - tokens = { - 'root': [ - # IDENTITY - (r'IDENTIFIER\b', Name.Builtin), - # IDENTIFIER - (r'\{[^}]+\}', Name.Variable), - # NUMBER - (r'[0-9]+(?:\.[0-9]+)?(?:[eE][+-]?[0-9]{1,3})?', Literal.Number), - # STRING - (r'"', Literal.String, 'string-literal'), - # RELATION - (r'\<\>|\!\=', Operator), - (r'\=|\>\=|\>|\<\=|\<', Operator), - # := - (r'\:\=', Operator), - # OBJECT - (r'\[[^]]+\]', Name.Variable.Class), - # keywords - (r'(DIMENSIONS?|BOTTOM|METRIC|COUNT|OTHER|FACT|WITH|TOP|OR|' - r'ATTRIBUTE|CREATE|PARENT|FALSE|ROWS?|FROM|ALL|AS|PF|' - r'COLUMNS?|DEFINE|REPORT|LIMIT|TABLE|LIKE|AND|BY|' - r'BETWEEN|EXCEPT|SELECT|MATCH|WHERE|TRUE|FOR|IN|' - r'WITHOUT|FILTER|ALIAS|ORDER|FACT|WHEN|NOT|ON|' - r'KEYS|KEY|FULLSET|PRIMARY|LABELS|LABEL|VISUAL|' - r'TITLE|DESCRIPTION|FOLDER|ALTER|DROP|ADD|DATASET|' - r'DATATYPE|INT|BIGINT|DOUBLE|DATE|VARCHAR|DECIMAL|' - r'SYNCHRONIZE|TYPE|DEFAULT|ORDER|ASC|DESC|HYPERLINK|' - r'INCLUDE|TEMPLATE|MODIFY)\b', Keyword), - # FUNCNAME - (r'[a-z]\w*\b', Name.Function), - # Comments - (r'#.*', Comment.Single), - # Punctuation - (r'[,;\(\)]', Punctuation), - # Space is not significant - (r'\s+', Text) - ], - 'string-literal': [ - (r'\\[tnrfbae"\\]', String.Escape), - (r'"', Literal.String, '#pop'), - (r'[^\\"]+', Literal.String) - ], - } - - -class GoodDataCLLexer(RegexLexer): - """ - Lexer for `GoodData-CL `_ - script files. - - .. versionadded:: 1.4 - """ - - name = 'GoodData-CL' - aliases = ['gooddata-cl'] - filenames = ['*.gdc'] - mimetypes = ['text/x-gooddata-cl'] - - flags = re.IGNORECASE - tokens = { - 'root': [ - # Comments - (r'#.*', Comment.Single), - # Function call - (r'[a-z]\w*', Name.Function), - # Argument list - (r'\(', Punctuation, 'args-list'), - # Punctuation - (r';', Punctuation), - # Space is not significant - (r'\s+', Text) - ], - 'args-list': [ - (r'\)', Punctuation, '#pop'), - (r',', Punctuation), - (r'[a-z]\w*', Name.Variable), - (r'=', Operator), - (r'"', Literal.String, 'string-literal'), - (r'[0-9]+(?:\.[0-9]+)?(?:[eE][+-]?[0-9]{1,3})?', Literal.Number), - # Space is not significant - (r'\s', Text) - ], - 'string-literal': [ - (r'\\[tnrfbae"\\]', String.Escape), - (r'"', Literal.String, '#pop'), - (r'[^\\"]+', Literal.String) - ] - } - - -class ProtoBufLexer(RegexLexer): - """ - Lexer for `Protocol Buffer `_ - definition files. - - .. 
versionadded:: 1.4 - """ - - name = 'Protocol Buffer' - aliases = ['protobuf', 'proto'] - filenames = ['*.proto'] - - tokens = { - 'root': [ - (r'[ \t]+', Text), - (r'[,;{}\[\]\(\)]', Punctuation), - (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single), - (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment.Multiline), - (r'\b(import|option|optional|required|repeated|default|packed|' - r'ctype|extensions|to|max|rpc|returns|oneof)\b', Keyword), - (r'(int32|int64|uint32|uint64|sint32|sint64|' - r'fixed32|fixed64|sfixed32|sfixed64|' - r'float|double|bool|string|bytes)\b', Keyword.Type), - (r'(true|false)\b', Keyword.Constant), - (r'(package)(\s+)', bygroups(Keyword.Namespace, Text), 'package'), - (r'(message|extend)(\s+)', - bygroups(Keyword.Declaration, Text), 'message'), - (r'(enum|group|service)(\s+)', - bygroups(Keyword.Declaration, Text), 'type'), - (r'\".*\"', String), - (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float), - (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float), - (r'(\-?(inf|nan))', Number.Float), - (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex), - (r'0[0-7]+[LlUu]*', Number.Oct), - (r'\d+[LlUu]*', Number.Integer), - (r'[+-=]', Operator), - (r'([a-zA-Z_][\w\.]*)([ \t]*)(=)', - bygroups(Name.Attribute, Text, Operator)), - ('[a-zA-Z_][\w\.]*', Name), - ], - 'package': [ - (r'[a-zA-Z_]\w*', Name.Namespace, '#pop') - ], - 'message': [ - (r'[a-zA-Z_]\w*', Name.Class, '#pop') - ], - 'type': [ - (r'[a-zA-Z_]\w*', Name, '#pop') - ], - } - - class HybrisLexer(RegexLexer): """ For `Hybris `_ source code. @@ -770,29 +185,33 @@ class HybrisLexer(RegexLexer): bygroups(Keyword.Declaration, Text), 'class'), (r'(import|include)(\s+)', bygroups(Keyword.Namespace, Text), 'import'), - (r'(gc_collect|gc_mm_items|gc_mm_usage|gc_collect_threshold|' - r'urlencode|urldecode|base64encode|base64decode|sha1|crc32|sha2|' - r'md5|md5_file|acos|asin|atan|atan2|ceil|cos|cosh|exp|fabs|floor|' - r'fmod|log|log10|pow|sin|sinh|sqrt|tan|tanh|isint|isfloat|ischar|' - r'isstring|isarray|ismap|isalias|typeof|sizeof|toint|tostring|' - r'fromxml|toxml|binary|pack|load|eval|var_names|var_values|' - r'user_functions|dyn_functions|methods|call|call_method|mknod|' - r'mkfifo|mount|umount2|umount|ticks|usleep|sleep|time|strtime|' - r'strdate|dllopen|dlllink|dllcall|dllcall_argv|dllclose|env|exec|' - r'fork|getpid|wait|popen|pclose|exit|kill|pthread_create|' - r'pthread_create_argv|pthread_exit|pthread_join|pthread_kill|' - r'smtp_send|http_get|http_post|http_download|socket|bind|listen|' - r'accept|getsockname|getpeername|settimeout|connect|server|recv|' - r'send|close|print|println|printf|input|readline|serial_open|' - r'serial_fcntl|serial_get_attr|serial_get_ispeed|serial_get_ospeed|' - r'serial_set_attr|serial_set_ispeed|serial_set_ospeed|serial_write|' - r'serial_read|serial_close|xml_load|xml_parse|fopen|fseek|ftell|' - r'fsize|fread|fwrite|fgets|fclose|file|readdir|pcre_replace|size|' - r'pop|unmap|has|keys|values|length|find|substr|replace|split|trim|' - r'remove|contains|join)\b', Name.Builtin), - (r'(MethodReference|Runner|Dll|Thread|Pipe|Process|Runnable|' - r'CGI|ClientSocket|Socket|ServerSocket|File|Console|Directory|' - r'Exception)\b', Keyword.Type), + (words(( + 'gc_collect', 'gc_mm_items', 'gc_mm_usage', 'gc_collect_threshold', + 'urlencode', 'urldecode', 'base64encode', 'base64decode', 'sha1', 'crc32', 'sha2', + 'md5', 'md5_file', 'acos', 'asin', 'atan', 'atan2', 'ceil', 'cos', 'cosh', 'exp', 'fabs', 'floor', + 'fmod', 'log', 'log10', 'pow', 'sin', 'sinh', 'sqrt', 'tan', 'tanh', 'isint', 'isfloat', 'ischar', + 
'isstring', 'isarray', 'ismap', 'isalias', 'typeof', 'sizeof', 'toint', 'tostring', + 'fromxml', 'toxml', 'binary', 'pack', 'load', 'eval', 'var_names', 'var_values', + 'user_functions', 'dyn_functions', 'methods', 'call', 'call_method', 'mknod', + 'mkfifo', 'mount', 'umount2', 'umount', 'ticks', 'usleep', 'sleep', 'time', 'strtime', + 'strdate', 'dllopen', 'dlllink', 'dllcall', 'dllcall_argv', 'dllclose', 'env', 'exec', + 'fork', 'getpid', 'wait', 'popen', 'pclose', 'exit', 'kill', 'pthread_create', + 'pthread_create_argv', 'pthread_exit', 'pthread_join', 'pthread_kill', + 'smtp_send', 'http_get', 'http_post', 'http_download', 'socket', 'bind', 'listen', + 'accept', 'getsockname', 'getpeername', 'settimeout', 'connect', 'server', 'recv', + 'send', 'close', 'print', 'println', 'printf', 'input', 'readline', 'serial_open', + 'serial_fcntl', 'serial_get_attr', 'serial_get_ispeed', 'serial_get_ospeed', + 'serial_set_attr', 'serial_set_ispeed', 'serial_set_ospeed', 'serial_write', + 'serial_read', 'serial_close', 'xml_load', 'xml_parse', 'fopen', 'fseek', 'ftell', + 'fsize', 'fread', 'fwrite', 'fgets', 'fclose', 'file', 'readdir', 'pcre_replace', 'size', + 'pop', 'unmap', 'has', 'keys', 'values', 'length', 'find', 'substr', 'replace', 'split', 'trim', + 'remove', 'contains', 'join'), suffix=r'\b'), + Name.Builtin), + (words(( + 'MethodReference', 'Runner', 'Dll', 'Thread', 'Pipe', 'Process', + 'Runnable', 'CGI', 'ClientSocket', 'Socket', 'ServerSocket', + 'File', 'Console', 'Directory', 'Exception'), suffix=r'\b'), + Keyword.Type), (r'"(\\\\|\\"|[^"])*"', String), (r"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char), (r'(\.)([a-zA-Z_]\w*)', @@ -1035,967 +454,6 @@ class UrbiscriptLexer(ExtendedRegexLexer): } -class BroLexer(RegexLexer): - """ - For `Bro `_ scripts. - - .. versionadded:: 1.5 - """ - name = 'Bro' - aliases = ['bro'] - filenames = ['*.bro'] - - _hex = r'[0-9a-fA-F_]+' - _float = r'((\d*\.?\d+)|(\d+\.?\d*))([eE][-+]?\d+)?' - _h = r'[A-Za-z0-9][-A-Za-z0-9]*' - - tokens = { - 'root': [ - # Whitespace - (r'^@.*?\n', Comment.Preproc), - (r'#.*?\n', Comment.Single), - (r'\n', Text), - (r'\s+', Text), - (r'\\\n', Text), - # Keywords - (r'(add|alarm|break|case|const|continue|delete|do|else|enum|event' - r'|export|for|function|if|global|hook|local|module|next' - r'|of|print|redef|return|schedule|switch|type|when|while)\b', Keyword), - (r'(addr|any|bool|count|counter|double|file|int|interval|net' - r'|pattern|port|record|set|string|subnet|table|time|timer' - r'|vector)\b', Keyword.Type), - (r'(T|F)\b', Keyword.Constant), - (r'(&)((?:add|delete|expire)_func|attr|(?:create|read|write)_expire' - r'|default|disable_print_hook|raw_output|encrypt|group|log' - r'|mergeable|optional|persistent|priority|redef' - r'|rotate_(?:interval|size)|synchronized)\b', bygroups(Punctuation, - Keyword)), - (r'\s+module\b', Keyword.Namespace), - # Addresses, ports and networks - (r'\d+/(tcp|udp|icmp|unknown)\b', Number), - (r'(\d+\.){3}\d+', Number), - (r'(' + _hex + r'){7}' + _hex, Number), - (r'0x' + _hex + r'(' + _hex + r'|:)*::(' + _hex + r'|:)*', Number), - (r'((\d+|:)(' + _hex + r'|:)*)?::(' + _hex + r'|:)*', Number), - (r'(\d+\.\d+\.|(\d+\.){2}\d+)', Number), - # Hostnames - (_h + r'(\.' 
+ _h + r')+', String), - # Numeric - (_float + r'\s+(day|hr|min|sec|msec|usec)s?\b', Literal.Date), - (r'0[xX]' + _hex, Number.Hex), - (_float, Number.Float), - (r'\d+', Number.Integer), - (r'/', String.Regex, 'regex'), - (r'"', String, 'string'), - # Operators - (r'[!%*/+:<=>?~|-]', Operator), - (r'([-+=&|]{2}|[+=!><-]=)', Operator), - (r'(in|match)\b', Operator.Word), - (r'[{}()\[\]$.,;]', Punctuation), - # Identfier - (r'([_a-zA-Z]\w*)(::)', bygroups(Name, Name.Namespace)), - (r'[a-zA-Z_][a-zA-Z_0-9]*', Name) - ], - 'string': [ - (r'"', String, '#pop'), - (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape), - (r'[^\\"\n]+', String), - (r'\\\n', String), - (r'\\', String) - ], - 'regex': [ - (r'/', String.Regex, '#pop'), - (r'\\[\\nt/]', String.Regex), # String.Escape is too intense here. - (r'[^\\/\n]+', String.Regex), - (r'\\\n', String.Regex), - (r'\\', String.Regex) - ] - } - - -class CbmBasicV2Lexer(RegexLexer): - """ - For CBM BASIC V2 sources. - - .. versionadded:: 1.6 - """ - name = 'CBM BASIC V2' - aliases = ['cbmbas'] - filenames = ['*.bas'] - - flags = re.IGNORECASE - - tokens = { - 'root': [ - (r'rem.*\n', Comment.Single), - (r'\s+', Text), - (r'new|run|end|for|to|next|step|go(to|sub)?|on|return|stop|cont' - r'|if|then|input#?|read|wait|load|save|verify|poke|sys|print#?' - r'|list|clr|cmd|open|close|get#?', Keyword.Reserved), - (r'data|restore|dim|let|def|fn', Keyword.Declaration), - (r'tab|spc|sgn|int|abs|usr|fre|pos|sqr|rnd|log|exp|cos|sin|tan|atn' - r'|peek|len|val|asc|(str|chr|left|right|mid)\$', Name.Builtin), - (r'[-+*/^<>=]', Operator), - (r'not|and|or', Operator.Word), - (r'"[^"\n]*.', String), - (r'\d+|[-+]?\d*\.\d*(e[-+]?\d+)?', Number.Float), - (r'[\(\),:;]', Punctuation), - (r'\w+[$%]?', Name), - ] - } - - def analyse_text(self, text): - # if it starts with a line number, it shouldn't be a "modern" Basic - # like VB.net - if re.match(r'\d+', text): - return True - - -class MscgenLexer(RegexLexer): - """ - For `Mscgen `_ files. - - .. versionadded:: 1.6 - """ - name = 'Mscgen' - aliases = ['mscgen', 'msc'] - filenames = ['*.msc'] - - _var = r'(\w+|"(?:\\"|[^"])*")' - - tokens = { - 'root': [ - (r'msc\b', Keyword.Type), - # Options - (r'(hscale|HSCALE|width|WIDTH|wordwraparcs|WORDWRAPARCS' - r'|arcgradient|ARCGRADIENT)\b', Name.Property), - # Operators - (r'(abox|ABOX|rbox|RBOX|box|BOX|note|NOTE)\b', Operator.Word), - (r'(\.|-|\|){3}', Keyword), - (r'(?:-|=|\.|:){2}' - r'|<<=>>|<->|<=>|<<>>|<:>' - r'|->|=>>|>>|=>|:>|-x|-X' - r'|<-|<<=|<<|<=|<:|x-|X-|=', Operator), - # Names - (r'\*', Name.Builtin), - (_var, Name.Variable), - # Other - (r'\[', Punctuation, 'attrs'), - (r'\{|\}|,|;', Punctuation), - include('comments') - ], - 'attrs': [ - (r'\]', Punctuation, '#pop'), - (_var + r'(\s*)(=)(\s*)' + _var, - bygroups(Name.Attribute, Text.Whitespace, Operator, Text.Whitespace, - String)), - (r',', Punctuation), - include('comments') - ], - 'comments': [ - (r'(?://|#).*?\n', Comment.Single), - (r'/\*(?:.|\n)*?\*/', Comment.Multiline), - (r'[ \t\r\n]+', Text.Whitespace) - ] - } - - -class VGLLexer(RegexLexer): - """ - For `SampleManager VGL `_ - source code. - - .. 
versionadded:: 1.6 - """ - name = 'VGL' - aliases = ['vgl'] - filenames = ['*.rpf'] - - flags = re.MULTILINE | re.DOTALL | re.IGNORECASE - - tokens = { - 'root': [ - (r'\{[^\}]*\}', Comment.Multiline), - (r'declare', Keyword.Constant), - (r'(if|then|else|endif|while|do|endwhile|and|or|prompt|object' - r'|create|on|line|with|global|routine|value|endroutine|constant' - r'|global|set|join|library|compile_option|file|exists|create|copy' - r'|delete|enable|windows|name|notprotected)(?! *[=<>.,()])', - Keyword), - (r'(true|false|null|empty|error|locked)', Keyword.Constant), - (r'[~\^\*\#!%&\[\]\(\)<>\|+=:;,./?-]', Operator), - (r'"[^"]*"', String), - (r'(\.)([a-z_\$][\w\$]*)', bygroups(Operator, Name.Attribute)), - (r'[0-9][0-9]*(\.[0-9]+(e[+\-]?[0-9]+)?)?', Number), - (r'[a-z_\$][\w\$]*', Name), - (r'[\r\n]+', Text), - (r'\s+', Text) - ] - } - - -class SourcePawnLexer(RegexLexer): - """ - For SourcePawn source code with preprocessor directives. - - .. versionadded:: 1.6 - """ - name = 'SourcePawn' - aliases = ['sp'] - filenames = ['*.sp'] - mimetypes = ['text/x-sourcepawn'] - - #: optional Comment or Whitespace - _ws = r'(?:\s|//.*?\n|/\*.*?\*/)+' - - tokens = { - 'root': [ - # preprocessor directives: without whitespace - ('^#if\s+0', Comment.Preproc, 'if0'), - ('^#', Comment.Preproc, 'macro'), - # or with whitespace - ('^' + _ws + r'#if\s+0', Comment.Preproc, 'if0'), - ('^' + _ws + '#', Comment.Preproc, 'macro'), - (r'\n', Text), - (r'\s+', Text), - (r'\\\n', Text), # line continuation - (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single), - (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment.Multiline), - (r'[{}]', Punctuation), - (r'L?"', String, 'string'), - (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char), - (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float), - (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float), - (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex), - (r'0[0-7]+[LlUu]*', Number.Oct), - (r'\d+[LlUu]*', Number.Integer), - (r'\*/', Error), - (r'[~!%^&*+=|?:<>/-]', Operator), - (r'[()\[\],.;]', Punctuation), - (r'(case|const|continue|native|' - r'default|else|enum|for|if|new|operator|' - r'public|return|sizeof|static|decl|struct|switch)\b', Keyword), - (r'(bool|Float)\b', Keyword.Type), - (r'(true|false)\b', Keyword.Constant), - ('[a-zA-Z_]\w*', Name), - ], - 'string': [ - (r'"', String, '#pop'), - (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape), - (r'[^\\"\n]+', String), # all other characters - (r'\\\n', String), # line continuation - (r'\\', String), # stray backslash - ], - 'macro': [ - (r'[^/\n]+', Comment.Preproc), - (r'/\*(.|\n)*?\*/', Comment.Multiline), - (r'//.*?\n', Comment.Single, '#pop'), - (r'/', Comment.Preproc), - (r'(?<=\\)\n', Comment.Preproc), - (r'\n', Comment.Preproc, '#pop'), - ], - 'if0': [ - (r'^\s*#if.*?(?`__ configuration DSL. - - .. 
versionadded:: 1.6 - """ - name = 'Puppet' - aliases = ['puppet'] - filenames = ['*.pp'] - - tokens = { - 'root': [ - include('comments'), - include('keywords'), - include('names'), - include('numbers'), - include('operators'), - include('strings'), - - (r'[]{}:(),;[]', Punctuation), - (r'[^\S\n]+', Text), - ], - - 'comments': [ - (r'\s*#.*$', Comment), - (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline), - ], - - 'operators': [ - (r'(=>|\?|<|>|=|\+|-|/|\*|~|!|\|)', Operator), - (r'(in|and|or|not)\b', Operator.Word), - ], - - 'names': [ - ('[a-zA-Z_]\w*', Name.Attribute), - (r'(\$\S+)(\[)(\S+)(\])', bygroups(Name.Variable, Punctuation, - String, Punctuation)), - (r'\$\S+', Name.Variable), - ], - - 'numbers': [ - # Copypasta from the Python lexer - (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?j?', Number.Float), - (r'\d+[eE][+-]?[0-9]+j?', Number.Float), - (r'0[0-7]+j?', Number.Oct), - (r'0[xX][a-fA-F0-9]+', Number.Hex), - (r'\d+L', Number.Integer.Long), - (r'\d+j?', Number.Integer) - ], - - 'keywords': [ - # Left out 'group' and 'require' - # Since they're often used as attributes - (r'(?i)(absent|alert|alias|audit|augeas|before|case|check|class|' - r'computer|configured|contained|create_resources|crit|cron|debug|' - r'default|define|defined|directory|else|elsif|emerg|err|exec|' - r'extlookup|fail|false|file|filebucket|fqdn_rand|generate|host|if|' - r'import|include|info|inherits|inline_template|installed|' - r'interface|k5login|latest|link|loglevel|macauthorization|' - r'mailalias|maillist|mcx|md5|mount|mounted|nagios_command|' - r'nagios_contact|nagios_contactgroup|nagios_host|' - r'nagios_hostdependency|nagios_hostescalation|nagios_hostextinfo|' - r'nagios_hostgroup|nagios_service|nagios_servicedependency|' - r'nagios_serviceescalation|nagios_serviceextinfo|' - r'nagios_servicegroup|nagios_timeperiod|node|noop|notice|notify|' - r'package|present|purged|realize|regsubst|resources|role|router|' - r'running|schedule|scheduled_task|search|selboolean|selmodule|' - r'service|sha1|shellquote|split|sprintf|ssh_authorized_key|sshkey|' - r'stage|stopped|subscribe|tag|tagged|template|tidy|true|undef|' - r'unmounted|user|versioncmp|vlan|warning|yumrepo|zfs|zone|' - r'zpool)\b', Keyword), - ], - - 'strings': [ - (r'"([^"])*"', String), - (r"'(\\'|[^'])*'", String), - ], - - } - - -class NSISLexer(RegexLexer): - """ - For `NSIS `_ scripts. - - .. 
versionadded:: 1.6 - """ - name = 'NSIS' - aliases = ['nsis', 'nsi', 'nsh'] - filenames = ['*.nsi', '*.nsh'] - mimetypes = ['text/x-nsis'] - - flags = re.IGNORECASE - - tokens = { - 'root': [ - (r'[;\#].*\n', Comment), - (r"'.*?'", String.Single), - (r'"', String.Double, 'str_double'), - (r'`', String.Backtick, 'str_backtick'), - include('macro'), - include('interpol'), - include('basic'), - (r'\$\{[a-z_|][\w|]*\}', Keyword.Pseudo), - (r'/[a-z_]\w*', Name.Attribute), - ('.', Text), - ], - 'basic': [ - (r'(\n)(Function)(\s+)([._a-z][.\w]*)\b', - bygroups(Text, Keyword, Text, Name.Function)), - (r'\b([_a-z]\w*)(::)([a-z][a-z0-9]*)\b', - bygroups(Keyword.Namespace, Punctuation, Name.Function)), - (r'\b([_a-z]\w*)(:)', bygroups(Name.Label, Punctuation)), - (r'(\b[ULS]|\B)([\!\<\>=]?=|\<\>?|\>)\B', Operator), - (r'[|+-]', Operator), - (r'\\', Punctuation), - (r'\b(Abort|Add(?:BrandingImage|Size)|' - r'Allow(?:RootDirInstall|SkipFiles)|AutoCloseWindow|' - r'BG(?:Font|Gradient)|BrandingText|BringToFront|Call(?:InstDLL)?|' - r'(?:Sub)?Caption|ChangeUI|CheckBitmap|ClearErrors|CompletedText|' - r'ComponentText|CopyFiles|CRCCheck|' - r'Create(?:Directory|Font|Shortcut)|Delete(?:INI(?:Sec|Str)|' - r'Reg(?:Key|Value))?|DetailPrint|DetailsButtonText|' - r'Dir(?:Show|Text|Var|Verify)|(?:Disabled|Enabled)Bitmap|' - r'EnableWindow|EnumReg(?:Key|Value)|Exch|Exec(?:Shell|Wait)?|' - r'ExpandEnvStrings|File(?:BufSize|Close|ErrorText|Open|' - r'Read(?:Byte)?|Seek|Write(?:Byte)?)?|' - r'Find(?:Close|First|Next|Window)|FlushINI|Function(?:End)?|' - r'Get(?:CurInstType|CurrentAddress|DlgItem|DLLVersion(?:Local)?|' - r'ErrorLevel|FileTime(?:Local)?|FullPathName|FunctionAddress|' - r'InstDirError|LabelAddress|TempFileName)|' - r'Goto|HideWindow|Icon|' - r'If(?:Abort|Errors|FileExists|RebootFlag|Silent)|' - r'InitPluginsDir|Install(?:ButtonText|Colors|Dir(?:RegKey)?)|' - r'Inst(?:ProgressFlags|Type(?:[GS]etText)?)|Int(?:CmpU?|Fmt|Op)|' - r'IsWindow|LangString(?:UP)?|' - r'License(?:BkColor|Data|ForceSelection|LangString|Text)|' - r'LoadLanguageFile|LockWindow|Log(?:Set|Text)|MessageBox|' - r'MiscButtonText|Name|Nop|OutFile|(?:Uninst)?Page(?:Ex(?:End)?)?|' - r'PluginDir|Pop|Push|Quit|Read(?:(?:Env|INI|Reg)Str|RegDWORD)|' - r'Reboot|(?:Un)?RegDLL|Rename|RequestExecutionLevel|ReserveFile|' - r'Return|RMDir|SearchPath|Section(?:Divider|End|' - r'(?:(?:Get|Set)(?:Flags|InstTypes|Size|Text))|Group(?:End)?|In)?|' - r'SendMessage|Set(?:AutoClose|BrandingImage|Compress(?:ionLevel|' - r'or(?:DictSize)?)?|CtlColors|CurInstType|DatablockOptimize|' - r'DateSave|Details(?:Print|View)|Error(?:s|Level)|FileAttributes|' - r'Font|OutPath|Overwrite|PluginUnload|RebootFlag|ShellVarContext|' - r'Silent|StaticBkColor)|' - r'Show(?:(?:I|Uni)nstDetails|Window)|Silent(?:Un)?Install|Sleep|' - r'SpaceTexts|Str(?:CmpS?|Cpy|Len)|SubSection(?:End)?|' - r'Uninstall(?:ButtonText|(?:Sub)?Caption|EXEName|Icon|Text)|' - r'UninstPage|Var|VI(?:AddVersionKey|ProductVersion)|WindowIcon|' - r'Write(?:INIStr|Reg(:?Bin|DWORD|(?:Expand)?Str)|Uninstaller)|' - r'XPStyle)\b', Keyword), - (r'\b(CUR|END|(?:FILE_ATTRIBUTE_)?' 
- r'(?:ARCHIVE|HIDDEN|NORMAL|OFFLINE|READONLY|SYSTEM|TEMPORARY)|' - r'HK(CC|CR|CU|DD|LM|PD|U)|' - r'HKEY_(?:CLASSES_ROOT|CURRENT_(?:CONFIG|USER)|DYN_DATA|' - r'LOCAL_MACHINE|PERFORMANCE_DATA|USERS)|' - r'ID(?:ABORT|CANCEL|IGNORE|NO|OK|RETRY|YES)|' - r'MB_(?:ABORTRETRYIGNORE|DEFBUTTON[1-4]|' - r'ICON(?:EXCLAMATION|INFORMATION|QUESTION|STOP)|' - r'OK(?:CANCEL)?|RETRYCANCEL|RIGHT|SETFOREGROUND|TOPMOST|USERICON|' - r'YESNO(?:CANCEL)?)|SET|SHCTX|' - r'SW_(?:HIDE|SHOW(?:MAXIMIZED|MINIMIZED|NORMAL))|' - r'admin|all|auto|both|bottom|bzip2|checkbox|colored|current|false|' - r'force|hide|highest|if(?:diff|newer)|lastused|leave|left|' - r'listonly|lzma|nevershow|none|normal|off|on|pop|push|' - r'radiobuttons|right|show|silent|silentlog|smooth|textonly|top|' - r'true|try|user|zlib)\b', Name.Constant), - ], - 'macro': [ - (r'\!(addincludedir(?:dir)?|addplugindir|appendfile|cd|define|' - r'delfilefile|echo(?:message)?|else|endif|error|execute|' - r'if(?:macro)?n?(?:def)?|include|insertmacro|macro(?:end)?|packhdr|' - r'search(?:parse|replace)|system|tempfilesymbol|undef|verbose|' - r'warning)\b', Comment.Preproc), - ], - 'interpol': [ - (r'\$(R?[0-9])', Name.Builtin.Pseudo), # registers - (r'\$(ADMINTOOLS|APPDATA|CDBURN_AREA|COOKIES|COMMONFILES(?:32|64)|' - r'DESKTOP|DOCUMENTS|EXE(?:DIR|FILE|PATH)|FAVORITES|FONTS|HISTORY|' - r'HWNDPARENT|INTERNET_CACHE|LOCALAPPDATA|MUSIC|NETHOOD|PICTURES|' - r'PLUGINSDIR|PRINTHOOD|PROFILE|PROGRAMFILES(?:32|64)|QUICKLAUNCH|' - r'RECENT|RESOURCES(?:_LOCALIZED)?|SENDTO|SM(?:PROGRAMS|STARTUP)|' - r'STARTMENU|SYSDIR|TEMP(?:LATES)?|VIDEOS|WINDIR|\{NSISDIR\})', - Name.Builtin), - (r'\$(CMDLINE|INSTDIR|OUTDIR|LANGUAGE)', Name.Variable.Global), - (r'\$[a-z_]\w*', Name.Variable), - ], - 'str_double': [ - (r'"', String, '#pop'), - (r'\$(\\[nrt"]|\$)', String.Escape), - include('interpol'), - (r'.', String.Double), - ], - 'str_backtick': [ - (r'`', String, '#pop'), - (r'\$(\\[nrt"]|\$)', String.Escape), - include('interpol'), - (r'.', String.Double), - ], - } - - -class RPMSpecLexer(RegexLexer): - """ - For RPM ``.spec`` files. - - .. 
versionadded:: 1.6 - """ - - name = 'RPMSpec' - aliases = ['spec'] - filenames = ['*.spec'] - mimetypes = ['text/x-rpm-spec'] - - _directives = ('(?:package|prep|build|install|clean|check|pre[a-z]*|' - 'post[a-z]*|trigger[a-z]*|files)') - - tokens = { - 'root': [ - (r'#.*\n', Comment), - include('basic'), - ], - 'description': [ - (r'^(%' + _directives + ')(.*)$', - bygroups(Name.Decorator, Text), '#pop'), - (r'\n', Text), - (r'.', Text), - ], - 'changelog': [ - (r'\*.*\n', Generic.Subheading), - (r'^(%' + _directives + ')(.*)$', - bygroups(Name.Decorator, Text), '#pop'), - (r'\n', Text), - (r'.', Text), - ], - 'string': [ - (r'"', String.Double, '#pop'), - (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape), - include('interpol'), - (r'.', String.Double), - ], - 'basic': [ - include('macro'), - (r'(?i)^(Name|Version|Release|Epoch|Summary|Group|License|Packager|' - r'Vendor|Icon|URL|Distribution|Prefix|Patch[0-9]*|Source[0-9]*|' - r'Requires\(?[a-z]*\)?|[a-z]+Req|Obsoletes|Suggests|Provides|Conflicts|' - r'Build[a-z]+|[a-z]+Arch|Auto[a-z]+)(:)(.*)$', - bygroups(Generic.Heading, Punctuation, using(this))), - (r'^%description', Name.Decorator, 'description'), - (r'^%changelog', Name.Decorator, 'changelog'), - (r'^(%' + _directives + ')(.*)$', bygroups(Name.Decorator, Text)), - (r'%(attr|defattr|dir|doc(?:dir)?|setup|config(?:ure)?|' - r'make(?:install)|ghost|patch[0-9]+|find_lang|exclude|verify)', - Keyword), - include('interpol'), - (r"'.*?'", String.Single), - (r'"', String.Double, 'string'), - (r'.', Text), - ], - 'macro': [ - (r'%define.*\n', Comment.Preproc), - (r'%\{\!\?.*%define.*\}', Comment.Preproc), - (r'(%(?:if(?:n?arch)?|else(?:if)?|endif))(.*)$', - bygroups(Comment.Preproc, Text)), - ], - 'interpol': [ - (r'%\{?__[a-z_]+\}?', Name.Function), - (r'%\{?_([a-z_]+dir|[a-z_]+path|prefix)\}?', Keyword.Pseudo), - (r'%\{\?\w+\}', Name.Variable), - (r'\$\{?RPM_[A-Z0-9_]+\}?', Name.Variable.Global), - (r'%\{[a-zA-Z]\w+\}', Keyword.Constant), - ] - } - - -class AutoItLexer(RegexLexer): - """ - For `AutoIt `_ files. - - AutoIt is a freeware BASIC-like scripting language - designed for automating the Windows GUI and general scripting - - .. versionadded:: 1.6 - """ - name = 'AutoIt' - aliases = ['autoit'] - filenames = ['*.au3'] - mimetypes = ['text/x-autoit'] - - # Keywords, functions, macros from au3.keywords.properties - # which can be found in AutoIt installed directory, e.g. 
- # c:\Program Files (x86)\AutoIt3\SciTE\au3.keywords.properties - - keywords = """\ - #include-once #include #endregion #forcedef #forceref #region - and byref case continueloop dim do else elseif endfunc endif - endselect exit exitloop for func global - if local next not or return select step - then to until wend while exit""".split() - - functions = """\ - abs acos adlibregister adlibunregister asc ascw asin assign atan - autoitsetoption autoitwingettitle autoitwinsettitle beep binary binarylen - binarymid binarytostring bitand bitnot bitor bitrotate bitshift bitxor - blockinput break call cdtray ceiling chr chrw clipget clipput consoleread - consolewrite consolewriteerror controlclick controlcommand controldisable - controlenable controlfocus controlgetfocus controlgethandle controlgetpos - controlgettext controlhide controllistview controlmove controlsend - controlsettext controlshow controltreeview cos dec dircopy dircreate - dirgetsize dirmove dirremove dllcall dllcalladdress dllcallbackfree - dllcallbackgetptr dllcallbackregister dllclose dllopen dllstructcreate - dllstructgetdata dllstructgetptr dllstructgetsize dllstructsetdata - drivegetdrive drivegetfilesystem drivegetlabel drivegetserial drivegettype - drivemapadd drivemapdel drivemapget drivesetlabel drivespacefree - drivespacetotal drivestatus envget envset envupdate eval execute exp - filechangedir fileclose filecopy filecreatentfslink filecreateshortcut - filedelete fileexists filefindfirstfile filefindnextfile fileflush - filegetattrib filegetencoding filegetlongname filegetpos filegetshortcut - filegetshortname filegetsize filegettime filegetversion fileinstall filemove - fileopen fileopendialog fileread filereadline filerecycle filerecycleempty - filesavedialog fileselectfolder filesetattrib filesetpos filesettime - filewrite filewriteline floor ftpsetproxy guicreate guictrlcreateavi - guictrlcreatebutton guictrlcreatecheckbox guictrlcreatecombo - guictrlcreatecontextmenu guictrlcreatedate guictrlcreatedummy - guictrlcreateedit guictrlcreategraphic guictrlcreategroup guictrlcreateicon - guictrlcreateinput guictrlcreatelabel guictrlcreatelist - guictrlcreatelistview guictrlcreatelistviewitem guictrlcreatemenu - guictrlcreatemenuitem guictrlcreatemonthcal guictrlcreateobj - guictrlcreatepic guictrlcreateprogress guictrlcreateradio - guictrlcreateslider guictrlcreatetab guictrlcreatetabitem - guictrlcreatetreeview guictrlcreatetreeviewitem guictrlcreateupdown - guictrldelete guictrlgethandle guictrlgetstate guictrlread guictrlrecvmsg - guictrlregisterlistviewsort guictrlsendmsg guictrlsendtodummy - guictrlsetbkcolor guictrlsetcolor guictrlsetcursor guictrlsetdata - guictrlsetdefbkcolor guictrlsetdefcolor guictrlsetfont guictrlsetgraphic - guictrlsetimage guictrlsetlimit guictrlsetonevent guictrlsetpos - guictrlsetresizing guictrlsetstate guictrlsetstyle guictrlsettip guidelete - guigetcursorinfo guigetmsg guigetstyle guiregistermsg guisetaccelerators - guisetbkcolor guisetcoord guisetcursor guisetfont guisethelp guiseticon - guisetonevent guisetstate guisetstyle guistartgroup guiswitch hex hotkeyset - httpsetproxy httpsetuseragent hwnd inetclose inetget inetgetinfo inetgetsize - inetread inidelete iniread inireadsection inireadsectionnames - inirenamesection iniwrite iniwritesection inputbox int isadmin isarray - isbinary isbool isdeclared isdllstruct isfloat ishwnd isint iskeyword - isnumber isobj isptr isstring log memgetstats mod mouseclick mouseclickdrag - mousedown mousegetcursor mousegetpos mousemove mouseup 
mousewheel msgbox - number objcreate objcreateinterface objevent objevent objget objname - onautoitexitregister onautoitexitunregister opt ping pixelchecksum - pixelgetcolor pixelsearch pluginclose pluginopen processclose processexists - processgetstats processlist processsetpriority processwait processwaitclose - progressoff progresson progressset ptr random regdelete regenumkey - regenumval regread regwrite round run runas runaswait runwait send - sendkeepactive seterror setextended shellexecute shellexecutewait shutdown - sin sleep soundplay soundsetwavevolume splashimageon splashoff splashtexton - sqrt srandom statusbargettext stderrread stdinwrite stdioclose stdoutread - string stringaddcr stringcompare stringformat stringfromasciiarray - stringinstr stringisalnum stringisalpha stringisascii stringisdigit - stringisfloat stringisint stringislower stringisspace stringisupper - stringisxdigit stringleft stringlen stringlower stringmid stringregexp - stringregexpreplace stringreplace stringright stringsplit stringstripcr - stringstripws stringtoasciiarray stringtobinary stringtrimleft - stringtrimright stringupper tan tcpaccept tcpclosesocket tcpconnect - tcplisten tcpnametoip tcprecv tcpsend tcpshutdown tcpstartup timerdiff - timerinit tooltip traycreateitem traycreatemenu traygetmsg trayitemdelete - trayitemgethandle trayitemgetstate trayitemgettext trayitemsetonevent - trayitemsetstate trayitemsettext traysetclick trayseticon traysetonevent - traysetpauseicon traysetstate traysettooltip traytip ubound udpbind - udpclosesocket udpopen udprecv udpsend udpshutdown udpstartup vargettype - winactivate winactive winclose winexists winflash wingetcaretpos - wingetclasslist wingetclientsize wingethandle wingetpos wingetprocess - wingetstate wingettext wingettitle winkill winlist winmenuselectitem - winminimizeall winminimizeallundo winmove winsetontop winsetstate - winsettitle winsettrans winwait winwaitactive winwaitclose - winwaitnotactive""".split() - - macros = """\ - @appdatacommondir @appdatadir @autoitexe @autoitpid @autoitversion - @autoitx64 @com_eventobj @commonfilesdir @compiled @computername @comspec - @cpuarch @cr @crlf @desktopcommondir @desktopdepth @desktopdir - @desktopheight @desktoprefresh @desktopwidth @documentscommondir @error - @exitcode @exitmethod @extended @favoritescommondir @favoritesdir - @gui_ctrlhandle @gui_ctrlid @gui_dragfile @gui_dragid @gui_dropid - @gui_winhandle @homedrive @homepath @homeshare @hotkeypressed @hour - @ipaddress1 @ipaddress2 @ipaddress3 @ipaddress4 @kblayout @lf - @logondnsdomain @logondomain @logonserver @mday @min @mon @msec @muilang - @mydocumentsdir @numparams @osarch @osbuild @oslang @osservicepack @ostype - @osversion @programfilesdir @programscommondir @programsdir @scriptdir - @scriptfullpath @scriptlinenumber @scriptname @sec @startmenucommondir - @startmenudir @startupcommondir @startupdir @sw_disable @sw_enable @sw_hide - @sw_lock @sw_maximize @sw_minimize @sw_restore @sw_show @sw_showdefault - @sw_showmaximized @sw_showminimized @sw_showminnoactive @sw_showna - @sw_shownoactivate @sw_shownormal @sw_unlock @systemdir @tab @tempdir - @tray_id @trayiconflashing @trayiconvisible @username @userprofiledir @wday - @windowsdir @workingdir @yday @year""".split() - - tokens = { - 'root': [ - (r';.*\n', Comment.Single), - (r'(#comments-start|#cs).*?(#comments-end|#ce)', Comment.Multiline), - (r'[\[\]{}(),;]', Punctuation), - (r'(and|or|not)\b', Operator.Word), - (r'[\$|@][a-zA-Z_]\w*', Name.Variable), - 
(r'!=|==|:=|\.=|<<|>>|[-~+/*%=<>&^|?:!.]', Operator), - include('commands'), - include('labels'), - include('builtInFunctions'), - include('builtInMarcros'), - (r'"', String, combined('stringescape', 'dqs')), - include('numbers'), - (r'[a-zA-Z_#@$][\w#@$]*', Name), - (r'\\|\'', Text), - (r'\`([\,\%\`abfnrtv\-\+;])', String.Escape), - (r'_\n', Text), # Line continuation - include('garbage'), - ], - 'commands': [ - (r'(?i)(\s*)(%s)\b' % '|'.join(keywords), - bygroups(Text, Name.Builtin)), - ], - 'builtInFunctions': [ - (r'(?i)(%s)\b' % '|'.join(functions), - Name.Function), - ], - 'builtInMarcros': [ - (r'(?i)(%s)\b' % '|'.join(macros), - Name.Variable.Global), - ], - 'labels': [ - # sendkeys - (r'(^\s*)({\S+?})', bygroups(Text, Name.Label)), - ], - 'numbers': [ - (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float), - (r'\d+[eE][+-]?[0-9]+', Number.Float), - (r'0\d+', Number.Oct), - (r'0[xX][a-fA-F0-9]+', Number.Hex), - (r'\d+L', Number.Integer.Long), - (r'\d+', Number.Integer) - ], - 'stringescape': [ - (r'\"\"|\`([\,\%\`abfnrtv])', String.Escape), - ], - 'strings': [ - (r'[^"\n]+', String), - ], - 'dqs': [ - (r'"', String, '#pop'), - include('strings') - ], - 'garbage': [ - (r'[^\S\n]', Text), - ], - } - - -class RexxLexer(RegexLexer): - """ - `Rexx `_ is a scripting language available for - a wide range of different platforms with its roots found on mainframe - systems. It is popular for I/O- and data based tasks and can act as glue - language to bind different applications together. - - .. versionadded:: 2.0 - """ - name = 'Rexx' - aliases = ['rexx', 'arexx'] - filenames = ['*.rexx', '*.rex', '*.rx', '*.arexx'] - mimetypes = ['text/x-rexx'] - flags = re.IGNORECASE - - tokens = { - 'root': [ - (r'\s', Whitespace), - (r'/\*', Comment.Multiline, 'comment'), - (r'"', String, 'string_double'), - (r"'", String, 'string_single'), - (r'[0-9]+(\.[0-9]+)?(e[+-]?[0-9])?', Number), - (r'([a-z_]\w*)(\s*)(:)(\s*)(procedure)\b', - bygroups(Name.Function, Whitespace, Operator, Whitespace, - Keyword.Declaration)), - (r'([a-z_]\w*)(\s*)(:)', - bygroups(Name.Label, Whitespace, Operator)), - include('function'), - include('keyword'), - include('operator'), - (r'[a-z_]\w*', Text), - ], - 'function': [ - (r'(abbrev|abs|address|arg|b2x|bitand|bitor|bitxor|c2d|c2x|' - r'center|charin|charout|chars|compare|condition|copies|d2c|' - r'd2x|datatype|date|delstr|delword|digits|errortext|form|' - r'format|fuzz|insert|lastpos|left|length|linein|lineout|lines|' - r'max|min|overlay|pos|queued|random|reverse|right|sign|' - r'sourceline|space|stream|strip|substr|subword|symbol|time|' - r'trace|translate|trunc|value|verify|word|wordindex|' - r'wordlength|wordpos|words|x2b|x2c|x2d|xrange)(\s*)(\()', - bygroups(Name.Builtin, Whitespace, Operator)), - ], - 'keyword': [ - (r'(address|arg|by|call|do|drop|else|end|exit|for|forever|if|' - r'interpret|iterate|leave|nop|numeric|off|on|options|parse|' - r'pull|push|queue|return|say|select|signal|to|then|trace|until|' - r'while)\b', Keyword.Reserved), - ], - 'operator': [ - (r'(-|//|/|\(|\)|\*\*|\*|\\<<|\\<|\\==|\\=|\\>>|\\>|\\|\|\||\||' - r'&&|&|%|\+|<<=|<<|<=|<>|<|==|=|><|>=|>>=|>>|>|¬<<|¬<|¬==|¬=|' - r'¬>>|¬>|¬|\.|,)', Operator), - ], - 'string_double': [ - (r'[^"\n]+', String), - (r'""', String), - (r'"', String, '#pop'), - (r'\n', Text, '#pop'), # Stray linefeed also terminates strings. - ], - 'string_single': [ - (r'[^\'\n]', String), - (r'\'\'', String), - (r'\'', String, '#pop'), - (r'\n', Text, '#pop'), # Stray linefeed also terminates strings. 
- ], - 'comment': [ - (r'[^*]+', Comment.Multiline), - (r'\*/', Comment.Multiline, '#pop'), - (r'\*', Comment.Multiline), - ] - } - - _c = lambda s: re.compile(s, re.MULTILINE) - _ADDRESS_COMMAND_PATTERN = _c(r'^\s*address\s+command\b') - _ADDRESS_PATTERN = _c(r'^\s*address\s+') - _DO_WHILE_PATTERN = _c(r'^\s*do\s+while\b') - _IF_THEN_DO_PATTERN = _c(r'^\s*if\b.+\bthen\s+do\s*$') - _PROCEDURE_PATTERN = _c(r'^\s*([a-z_]\w*)(\s*)(:)(\s*)(procedure)\b') - _ELSE_DO_PATTERN = _c(r'\belse\s+do\s*$') - _PARSE_ARG_PATTERN = _c(r'^\s*parse\s+(upper\s+)?(arg|value)\b') - PATTERNS_AND_WEIGHTS = ( - (_ADDRESS_COMMAND_PATTERN, 0.2), - (_ADDRESS_PATTERN, 0.05), - (_DO_WHILE_PATTERN, 0.1), - (_ELSE_DO_PATTERN, 0.1), - (_IF_THEN_DO_PATTERN, 0.1), - (_PROCEDURE_PATTERN, 0.5), - (_PARSE_ARG_PATTERN, 0.2), - ) - - def analyse_text(text): - """ - Check for inital comment and patterns that distinguish Rexx from other - C-like languages. - """ - if re.search(r'/\*\**\s*rexx', text, re.IGNORECASE): - # Header matches MVS Rexx requirements, this is certainly a Rexx - # script. - return 1.0 - elif text.startswith('/*'): - # Header matches general Rexx requirements; the source code might - # still be any language using C comments such as C++, C# or Java. - lowerText = text.lower() - result = sum(weight - for (pattern, weight) in RexxLexer.PATTERNS_AND_WEIGHTS - if pattern.search(lowerText)) + 0.01 - return min(result, 1.0) - - -class APLLexer(RegexLexer): - """ - A simple APL lexer. - - .. versionadded:: 2.0 - """ - name = 'APL' - aliases = ['apl'] - filenames = ['*.apl'] - - tokens = { - 'root': [ - # Whitespace - # ========== - (r'\s+', Text), - # - # Comment - # ======= - # '⍝' is traditional; '#' is supported by GNU APL and NGN (but not Dyalog) - (u'[⍝#].*$', Comment.Single), - # - # Strings - # ======= - (r'\'((\'\')|[^\'])*\'', String.Single), - (r'"(("")|[^"])*"', String.Double), # supported by NGN APL - # - # Punctuation - # =========== - # This token type is used for diamond and parenthesis - # but not for bracket and ; (see below) - (u'[⋄◇()]', Punctuation), - # - # Array indexing - # ============== - # Since this token type is very important in APL, it is not included in - # the punctuation token type but rather in the following one - (r'[\[\];]', String.Regex), - # - # Distinguished names - # =================== - # following IBM APL2 standard - (u'⎕[A-Za-zΔ∆⍙][A-Za-zΔ∆⍙_¯0-9]*', Name.Function), - # - # Labels - # ====== - # following IBM APL2 standard - # (u'[A-Za-zΔ∆⍙][A-Za-zΔ∆⍙_¯0-9]*:', Name.Label), - # - # Variables - # ========= - # following IBM APL2 standard - (u'[A-Za-zΔ∆⍙][A-Za-zΔ∆⍙_¯0-9]*', Name.Variable), - # - # Numbers - # ======= - (u'¯?(0[Xx][0-9A-Fa-f]+|[0-9]*\.?[0-9]+([Ee][+¯]?[0-9]+)?|¯|∞)' - u'([Jj]¯?(0[Xx][0-9A-Fa-f]+|[0-9]*\.?[0-9]+([Ee][+¯]?[0-9]+)?|¯|∞))?', - Number), - # - # Operators - # ========== - (u'[\.\\\/⌿⍀¨⍣⍨⍠⍤∘]', Name.Attribute), # closest token type - (u'[+\-×÷⌈⌊∣|⍳?*⍟○!⌹<≤=>≥≠≡≢∊⍷∪∩~∨∧⍱⍲⍴,⍪⌽⊖⍉↑↓⊂⊃⌷⍋⍒⊤⊥⍕⍎⊣⊢⍁⍂≈⌸⍯↗]', - Operator), - # - # Constant - # ======== - (u'⍬', Name.Constant), - # - # Quad symbol - # =========== - (u'[⎕⍞]', Name.Variable.Global), - # - # Arrows left/right - # ================= - (u'[←→]', Keyword.Declaration), - # - # D-Fn - # ==== - (u'[⍺⍵⍶⍹∇:]', Name.Builtin.Pseudo), - (r'[{}]', Keyword.Type), - ], - } - - class AmbientTalkLexer(RegexLexer): """ Lexer for `AmbientTalk `_ source code. 
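The hunk below, like several later ones in this series, swaps a hand-joined "|".join(...) alternation for the words() helper from pygments.lexer. A minimal sketch of the equivalence, assuming only the words() call shape visible in these diffs; the sample word tuple is illustrative, not taken from any lexer here:

from pygments.lexer import RegexLexer, words
from pygments.token import Name, Text

# Old style: a manual, escape-prone alternation built from a Python list.
builtin = ['if:', 'then:', 'else:']
old_rule = (r"(%s)" % "|".join(builtin), Name.Builtin)

# New style: words() takes a tuple of literal strings (plus optional prefix/
# suffix regex fragments) and is expanded into a single optimized alternation
# when the lexer's token table is compiled.
class SketchLexer(RegexLexer):
    tokens = {
        'root': [
            (words(('if:', 'then:', 'else:')), Name.Builtin),
            (r'\s+', Text),
        ],
    }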
@@ -2009,17 +467,17 @@ class AmbientTalkLexer(RegexLexer): flags = re.MULTILINE | re.DOTALL - builtin = ['if:', 'then:', 'else:', 'when:', 'whenever:', 'discovered:', - 'disconnected:', 'reconnected:', 'takenOffline:', 'becomes:', - 'export:', 'as:', 'object:', 'actor:', 'mirror:', 'taggedAs:', - 'mirroredBy:', 'is:'] + builtin = words(('if:', 'then:', 'else:', 'when:', 'whenever:', 'discovered:', + 'disconnected:', 'reconnected:', 'takenOffline:', 'becomes:', + 'export:', 'as:', 'object:', 'actor:', 'mirror:', 'taggedAs:', + 'mirroredBy:', 'is:')) tokens = { 'root' : [ (r'\s+', Text), (r'//.*?\n', Comment.Single), (r'/\*.*?\*/', Comment.Multiline), (r'(def|deftype|import|alias|exclude)\b', Keyword), - (r"(%s)" % "|".join(builtin), Name.Builtin), + (builtin, Name.Builtin), (r'(true|false|nil)\b', Keyword.Constant), (r'(~|lobby|jlobby|/)\.', Keyword.Constant, 'namespace'), (r'"(\\\\|\\"|[^"])*"', String), @@ -2054,130 +512,6 @@ class AmbientTalkLexer(RegexLexer): } -class PawnLexer(RegexLexer): - """ - For Pawn source code - """ - - name = 'Pawn' - aliases = ['pawn'] - filenames = ['*.p', '*.pwn', '*.inc'] - mimetypes = ['text/x-pawn'] - - #: optional Comment or Whitespace - _ws = r'(?:\s|//.*?\n|/[*][\w\W]*?[*]/)+' - - tokens = { - 'root': [ - # preprocessor directives: without whitespace - ('^#if\s+0', Comment.Preproc, 'if0'), - ('^#', Comment.Preproc, 'macro'), - # or with whitespace - ('^' + _ws + r'#if\s+0', Comment.Preproc, 'if0'), - ('^' + _ws + '#', Comment.Preproc, 'macro'), - (r'\n', Text), - (r'\s+', Text), - (r'\\\n', Text), # line continuation - (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single), - (r'/(\\\n)?\*[\w\W]*?\*(\\\n)?/', Comment.Multiline), - (r'[{}]', Punctuation), - (r'L?"', String, 'string'), - (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char), - (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float), - (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float), - (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex), - (r'0[0-7]+[LlUu]*', Number.Oct), - (r'\d+[LlUu]*', Number.Integer), - (r'\*/', Error), - (r'[~!%^&*+=|?:<>/-]', Operator), - (r'[()\[\],.;]', Punctuation), - (r'(switch|case|default|const|new|static|char|continue|break|' - r'if|else|for|while|do|operator|enum|' - r'public|return|sizeof|tagof|state|goto)\b', Keyword), - (r'(bool|Float)\b', Keyword.Type), - (r'(true|false)\b', Keyword.Constant), - ('[a-zA-Z_]\w*', Name), - ], - 'string': [ - (r'"', String, '#pop'), - (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape), - (r'[^\\"\n]+', String), # all other characters - (r'\\\n', String), # line continuation - (r'\\', String), # stray backslash - ], - 'macro': [ - (r'[^/\n]+', Comment.Preproc), - (r'/\*(.|\n)*?\*/', Comment.Multiline), - (r'//.*?\n', Comment.Single, '#pop'), - (r'/', Comment.Preproc), - (r'(?<=\\)\n', Comment.Preproc), - (r'\n', Comment.Preproc, '#pop'), - ], - 'if0': [ - (r'^\s*#if.*?(?`_ is the formal specification - language used in RAISE (Rigorous Approach to Industrial Software Engineering) - method. - - .. 
versionadded:: 2.0 - """ - name = 'RSL' - aliases = ['rsl'] - filenames = ['*.rsl'] - mimetypes = ['text/rsl'] - - flags = re.MULTILINE | re.DOTALL - - tokens = { - 'root':[ - (r'\b(Bool|Char|Int|Nat|Real|Text|Unit|abs|all|always|any|as|' - r'axiom|card|case|channel|chaos|class|devt_relation|dom|elems|' - r'else|elif|end|exists|extend|false|for|hd|hide|if|in|is|inds|' - r'initialise|int|inter|isin|len|let|local|ltl_assertion|object|' - r'of|out|post|pre|read|real|rng|scheme|skip|stop|swap|then|' - r'thoery|test_case|tl|transition_system|true|type|union|until|' - r'use|value|variable|while|with|write|~isin|-inflist|-infset|' - r'-list|-set)\b', Keyword), - (r'(variable|value)\b', Keyword.Declaration), - (r'--.*?\n', Comment), - (r'<:.*?:>', Comment), - (r'\{!.*?!\}', Comment), - (r'/\*.*?\*/', Comment), - (r'^[ \t]*([\w]+)[ \t]*:[^:]', Name.Function), - (r'(^[ \t]*)([\w]+)([ \t]*\([\w\s,]*\)[ \t]*)(is|as)', - bygroups(Text, Name.Function, Text, Keyword)), - (r'\b[A-Z]\w*\b',Keyword.Type), - (r'(true|false)\b', Keyword.Constant), - (r'".*"',String), - (r'\'.\'',String.Char), - (r'(><|->|-m->|/\\|<=|<<=|<\.|\|\||\|\^\||-~->|-~m->|\\/|>=|>>|' - r'\.>|\+\+|-\\|<->|=>|:-|~=|\*\*|<<|>>=|\+>|!!|\|=\||#)', - Operator), - (r'[0-9]+\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float), - (r'0x[0-9a-f]+', Number.Hex), - (r'[0-9]+', Number.Integer), - (r'.', Text), - ], - } - - def analyse_text(text): - """ - Check for the most common text in the beginning of a RSL file. - """ - if re.search(r'scheme\s*.*?=\s*class\s*type', text, re.I) is not None: - return 1.0 - else: - return 0.01 - - class PanLexer(RegexLexer): """ Lexer for `pan `_ source files. @@ -2239,253 +573,3 @@ class PanLexer(RegexLexer): include('root'), ], } - - -class RedLexer(RegexLexer): - """ - A `Red-language `_ lexer. - - .. 
versionadded:: 2.0 - """ - name = 'Red' - aliases = ['red', 'red/system'] - filenames = ['*.red', '*.reds'] - mimetypes = ['text/x-red', 'text/x-red-system'] - - flags = re.IGNORECASE | re.MULTILINE - - escape_re = r'(?:\^\([0-9a-f]{1,4}\)*)' - - def word_callback(lexer, match): - word = match.group() - - if re.match(".*:$", word): - yield match.start(), Generic.Subheading, word - elif re.match( - r'(if|unless|either|any|all|while|until|loop|repeat|' - r'foreach|forall|func|function|does|has|switch|' - r'case|reduce|compose|get|set|print|prin|equal\?|' - r'not-equal\?|strict-equal\?|lesser\?|greater\?|lesser-or-equal\?|' - r'greater-or-equal\?|same\?|not|type\?|stats|' - r'bind|union|replace|charset|routine)$', word): - yield match.start(), Name.Builtin, word - elif re.match( - r'(make|random|reflect|to|form|mold|absolute|add|divide|multiply|negate|' - r'power|remainder|round|subtract|even\?|odd\?|and~|complement|or~|xor~|' - r'append|at|back|change|clear|copy|find|head|head\?|index\?|insert|' - r'length\?|next|pick|poke|remove|reverse|select|sort|skip|swap|tail|tail\?|' - r'take|trim|create|close|delete|modify|open|open\?|query|read|rename|' - r'update|write)$', word): - yield match.start(), Name.Function, word - elif re.match( - r'(yes|on|no|off|true|false|tab|cr|lf|newline|escape|slash|sp|space|null|' - r'none|crlf|dot|null-byte)$', word): - yield match.start(), Name.Builtin.Pseudo, word - elif re.match( - r'(#system-global|#include|#enum|#define|#either|#if|#import|#export|' - r'#switch|#default|#get-definition)$', word): - yield match.start(), Keyword.Namespace, word - elif re.match( - r'(system|halt|quit|quit-return|do|load|q|recycle|call|run|ask|parse|' - r'raise-error|return|exit|break|alias|push|pop|probe|\?\?|spec-of|body-of|' - r'quote|forever)$', word): - yield match.start(), Name.Exception, word - elif re.match( - r'(action\?|block\?|char\?|datatype\?|file\?|function\?|get-path\?|zero\?|' - r'get-word\?|integer\?|issue\?|lit-path\?|lit-word\?|logic\?|native\?|' - r'op\?|paren\?|path\?|refinement\?|set-path\?|set-word\?|string\?|unset\?|' - r'any-struct\?|none\?|word\?|any-series\?)$', word): - yield match.start(), Keyword, word - elif re.match(r'(JNICALL|stdcall|cdecl|infix)$', word): - yield match.start(), Keyword.Namespace, word - elif re.match("to-.*", word): - yield match.start(), Keyword, word - elif re.match('(\+|-|\*|/|//|\*\*|and|or|xor|=\?|=|==|===|<>|<|>|<=|>=|<<|>>|<<<|>>>|%|-\*\*)$', word): - yield match.start(), Operator, word - elif re.match(".*\!$", word): - yield match.start(), Keyword.Type, word - elif re.match("'.*", word): - yield match.start(), Name.Variable.Instance, word # lit-word - elif re.match("#.*", word): - yield match.start(), Name.Label, word # issue - elif re.match("%.*", word): - yield match.start(), Name.Decorator, word # file - elif re.match(":.*", word): - yield match.start(), Generic.Subheading, word # get-word - else: - yield match.start(), Name.Variable, word - - tokens = { - 'root': [ - (r'[^R]+', Comment), - (r'Red/System\s+\[', Generic.Strong, 'script'), - (r'Red\s+\[', Generic.Strong, 'script'), - (r'R', Comment) - ], - 'script': [ - (r'\s+', Text), - (r'#"', String.Char, 'char'), - (r'#{[0-9a-f\s]*}', Number.Hex), - (r'2#{', Number.Hex, 'bin2'), - (r'64#{[0-9a-z+/=\s]*}', Number.Hex), - (r'([0-9a-f]+)(h)((\s)|(?=[\[\]{}""\(\)]))', - bygroups(Number.Hex, Name.Variable, Whitespace)), - (r'"', String, 'string'), - (r'{', String, 'string2'), - (r';#+.*\n', Comment.Special), - (r';\*+.*\n', Comment.Preproc), - (r';.*\n', Comment), - 
(r'%"', Name.Decorator, 'stringFile'), - (r'%[^(\^{^")\s\[\]]+', Name.Decorator), - (r'[+-]?([a-z]{1,3})?\$\d+(\.\d+)?', Number.Float), # money - (r'[+-]?\d+\:\d+(\:\d+)?(\.\d+)?', String.Other), # time - (r'\d+[\-\/][0-9a-z]+[\-\/]\d+(\/\d+\:\d+((\:\d+)?' - r'([\.\d+]?([+-]?\d+:\d+)?)?)?)?', String.Other), # date - (r'\d+(\.\d+)+\.\d+', Keyword.Constant), # tuple - (r'\d+[xX]\d+', Keyword.Constant), # pair - (r'[+-]?\d+(\'\d+)?([\.,]\d*)?[eE][+-]?\d+', Number.Float), - (r'[+-]?\d+(\'\d+)?[\.,]\d*', Number.Float), - (r'[+-]?\d+(\'\d+)?', Number), - (r'[\[\]\(\)]', Generic.Strong), - (r'[a-z]+[^(\^{"\s:)]*://[^(\^{"\s)]*', Name.Decorator), # url - (r'mailto:[^(\^{"@\s)]+@[^(\^{"@\s)]+', Name.Decorator), # url - (r'[^(\^{"@\s)]+@[^(\^{"@\s)]+', Name.Decorator), # email - (r'comment\s"', Comment, 'commentString1'), - (r'comment\s{', Comment, 'commentString2'), - (r'comment\s\[', Comment, 'commentBlock'), - (r'comment\s[^(\s{\"\[]+', Comment), - (r'/[^(\^{^")\s/[\]]*', Name.Attribute), - (r'([^(\^{^")\s/[\]]+)(?=[:({"\s/\[\]])', word_callback), - (r'<[\w:.-]*>', Name.Tag), - (r'<[^(<>\s")]+', Name.Tag, 'tag'), - (r'([^(\^{^")\s]+)', Text), - ], - 'string': [ - (r'[^(\^")]+', String), - (escape_re, String.Escape), - (r'[\(|\)]+', String), - (r'\^.', String.Escape), - (r'"', String, '#pop'), - ], - 'string2': [ - (r'[^(\^{^})]+', String), - (escape_re, String.Escape), - (r'[\(|\)]+', String), - (r'\^.', String.Escape), - (r'{', String, '#push'), - (r'}', String, '#pop'), - ], - 'stringFile': [ - (r'[^(\^")]+', Name.Decorator), - (escape_re, Name.Decorator), - (r'\^.', Name.Decorator), - (r'"', Name.Decorator, '#pop'), - ], - 'char': [ - (escape_re + '"', String.Char, '#pop'), - (r'\^."', String.Char, '#pop'), - (r'."', String.Char, '#pop'), - ], - 'tag': [ - (escape_re, Name.Tag), - (r'"', Name.Tag, 'tagString'), - (r'[^(<>\r\n")]+', Name.Tag), - (r'>', Name.Tag, '#pop'), - ], - 'tagString': [ - (r'[^(\^")]+', Name.Tag), - (escape_re, Name.Tag), - (r'[\(|\)]+', Name.Tag), - (r'\^.', Name.Tag), - (r'"', Name.Tag, '#pop'), - ], - 'tuple': [ - (r'(\d+\.)+', Keyword.Constant), - (r'\d+', Keyword.Constant, '#pop'), - ], - 'bin2': [ - (r'\s+', Number.Hex), - (r'([0-1]\s*){8}', Number.Hex), - (r'}', Number.Hex, '#pop'), - ], - 'commentString1': [ - (r'[^(\^")]+', Comment), - (escape_re, Comment), - (r'[\(|\)]+', Comment), - (r'\^.', Comment), - (r'"', Comment, '#pop'), - ], - 'commentString2': [ - (r'[^(\^{^})]+', Comment), - (escape_re, Comment), - (r'[\(|\)]+', Comment), - (r'\^.', Comment), - (r'{', Comment, '#push'), - (r'}', Comment, '#pop'), - ], - 'commentBlock': [ - (r'\[', Comment, '#push'), - (r'\]', Comment, '#pop'), - (r'"', Comment, "commentString1"), - (r'{', Comment, "commentString2"), - (r'[^(\[\]\"{)]+', Comment), - ], - } - - -class AlloyLexer(RegexLexer): - """ - For `Alloy `_ source code. 
- """ - - name = 'Alloy' - aliases = ['alloy'] - filenames = ['*.als'] - mimetypes = ['text/x-alloy'] - - flags = re.MULTILINE | re.DOTALL - - iden_rex = r'[a-zA-Z_][a-zA-Z0-9_\']*' - text_tuple = (r'[^\S\n]+', Text) - - tokens = { - 'sig': [ - (r'(extends)\b', Keyword, '#pop'), - (iden_rex, Name), - text_tuple, - (r',', Punctuation), - (r'\{', Operator, '#pop'), - ], - 'module': [ - text_tuple, - (iden_rex, Name, '#pop'), - ], - 'fun': [ - text_tuple, - (r'\{', Operator, '#pop'), - (iden_rex, Name, '#pop'), - ], - 'root': [ - (r'--.*?$', Comment.Single), - (r'//.*?$', Comment.Single), - (r'/\*.*?\*/', Comment.Multiline), - text_tuple, - (r'(module|open)(\s+)', bygroups(Keyword.Namespace, Text), - 'module'), - (r'(sig|enum)(\s+)', bygroups(Keyword.Declaration, Text), 'sig'), - (r'(iden|univ|none)\b', Keyword.Constant), - (r'(int|Int)\b', Keyword.Type), - (r'(this|abstract|extends|set|seq|one|lone|let)\b', Keyword), - (r'(all|some|no|sum|disj|when|else)\b', Keyword), - (r'(run|check|for|but|exactly|expect|as)\b', Keyword), - (r'(and|or|implies|iff|in)\b', Operator.Word), - (r'(fun|pred|fact|assert)(\s+)', bygroups(Keyword, Text), 'fun'), - (r'!|#|&&|\+\+|<<|>>|>=|<=>|<=|\.|->', Operator), - (r'[-+/*%=<>&!^|~\{\}\[\]\(\)\.]', Operator), - (iden_rex, Name), - (r'[:,]', Punctuation), - (r'[0-9]+', Number.Integer), - (r'"(\\\\|\\"|[^"])*"', String), - (r'\n', Text), - ] - } diff --git a/pygments/lexers/scripting.py b/pygments/lexers/scripting.py index 0646891c..f6a32bb3 100644 --- a/pygments/lexers/scripting.py +++ b/pygments/lexers/scripting.py @@ -18,7 +18,7 @@ from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ from pygments.util import get_bool_opt, get_list_opt, iteritems __all__ = ['LuaLexer', 'MoonScriptLexer', 'ChaiscriptLexer', 'LSLLexer', - 'AppleScriptLexer'] + 'AppleScriptLexer', 'RexxLexer', 'MOOCodeLexer'] class LuaLexer(RegexLexer): @@ -682,3 +682,159 @@ class AppleScriptLexer(RegexLexer): ('[*(]', Comment.Multiline), ], } + + +class RexxLexer(RegexLexer): + """ + `Rexx `_ is a scripting language available for + a wide range of different platforms with its roots found on mainframe + systems. It is popular for I/O- and data based tasks and can act as glue + language to bind different applications together. + + .. 
versionadded:: 2.0 + """ + name = 'Rexx' + aliases = ['rexx', 'arexx'] + filenames = ['*.rexx', '*.rex', '*.rx', '*.arexx'] + mimetypes = ['text/x-rexx'] + flags = re.IGNORECASE + + tokens = { + 'root': [ + (r'\s', Whitespace), + (r'/\*', Comment.Multiline, 'comment'), + (r'"', String, 'string_double'), + (r"'", String, 'string_single'), + (r'[0-9]+(\.[0-9]+)?(e[+-]?[0-9])?', Number), + (r'([a-z_]\w*)(\s*)(:)(\s*)(procedure)\b', + bygroups(Name.Function, Whitespace, Operator, Whitespace, + Keyword.Declaration)), + (r'([a-z_]\w*)(\s*)(:)', + bygroups(Name.Label, Whitespace, Operator)), + include('function'), + include('keyword'), + include('operator'), + (r'[a-z_]\w*', Text), + ], + 'function': [ + (words(( + 'abbrev', 'abs', 'address', 'arg', 'b2x', 'bitand', 'bitor', 'bitxor', + 'c2d', 'c2x', 'center', 'charin', 'charout', 'chars', 'compare', + 'condition', 'copies', 'd2c', 'd2x', 'datatype', 'date', 'delstr', + 'delword', 'digits', 'errortext', 'form', 'format', 'fuzz', 'insert', + 'lastpos', 'left', 'length', 'linein', 'lineout', 'lines', 'max', + 'min', 'overlay', 'pos', 'queued', 'random', 'reverse', 'right', 'sign', + 'sourceline', 'space', 'stream', 'strip', 'substr', 'subword', 'symbol', + 'time', 'trace', 'translate', 'trunc', 'value', 'verify', 'word', + 'wordindex', 'wordlength', 'wordpos', 'words', 'x2b', 'x2c', 'x2d', + 'xrange'), suffix=r'(\s*)(\()'), + bygroups(Name.Builtin, Whitespace, Operator)), + ], + 'keyword': [ + (r'(address|arg|by|call|do|drop|else|end|exit|for|forever|if|' + r'interpret|iterate|leave|nop|numeric|off|on|options|parse|' + r'pull|push|queue|return|say|select|signal|to|then|trace|until|' + r'while)\b', Keyword.Reserved), + ], + 'operator': [ + (r'(-|//|/|\(|\)|\*\*|\*|\\<<|\\<|\\==|\\=|\\>>|\\>|\\|\|\||\||' + r'&&|&|%|\+|<<=|<<|<=|<>|<|==|=|><|>=|>>=|>>|>|¬<<|¬<|¬==|¬=|' + r'¬>>|¬>|¬|\.|,)', Operator), + ], + 'string_double': [ + (r'[^"\n]+', String), + (r'""', String), + (r'"', String, '#pop'), + (r'\n', Text, '#pop'), # Stray linefeed also terminates strings. + ], + 'string_single': [ + (r'[^\'\n]', String), + (r'\'\'', String), + (r'\'', String, '#pop'), + (r'\n', Text, '#pop'), # Stray linefeed also terminates strings. + ], + 'comment': [ + (r'[^*]+', Comment.Multiline), + (r'\*/', Comment.Multiline, '#pop'), + (r'\*', Comment.Multiline), + ] + } + + _c = lambda s: re.compile(s, re.MULTILINE) + _ADDRESS_COMMAND_PATTERN = _c(r'^\s*address\s+command\b') + _ADDRESS_PATTERN = _c(r'^\s*address\s+') + _DO_WHILE_PATTERN = _c(r'^\s*do\s+while\b') + _IF_THEN_DO_PATTERN = _c(r'^\s*if\b.+\bthen\s+do\s*$') + _PROCEDURE_PATTERN = _c(r'^\s*([a-z_]\w*)(\s*)(:)(\s*)(procedure)\b') + _ELSE_DO_PATTERN = _c(r'\belse\s+do\s*$') + _PARSE_ARG_PATTERN = _c(r'^\s*parse\s+(upper\s+)?(arg|value)\b') + PATTERNS_AND_WEIGHTS = ( + (_ADDRESS_COMMAND_PATTERN, 0.2), + (_ADDRESS_PATTERN, 0.05), + (_DO_WHILE_PATTERN, 0.1), + (_ELSE_DO_PATTERN, 0.1), + (_IF_THEN_DO_PATTERN, 0.1), + (_PROCEDURE_PATTERN, 0.5), + (_PARSE_ARG_PATTERN, 0.2), + ) + + def analyse_text(text): + """ + Check for inital comment and patterns that distinguish Rexx from other + C-like languages. + """ + if re.search(r'/\*\**\s*rexx', text, re.IGNORECASE): + # Header matches MVS Rexx requirements, this is certainly a Rexx + # script. + return 1.0 + elif text.startswith('/*'): + # Header matches general Rexx requirements; the source code might + # still be any language using C comments such as C++, C# or Java. 
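+            # Each heuristic that matches adds its weight from
+            # PATTERNS_AND_WEIGHTS; the 0.01 base keeps the result above the
+            # default 0.0 when only the '/*' header matched, and min() caps
+            # the total at 1.0. For example, a 'name: procedure' label (0.5)
+            # plus a 'parse arg' line (0.2) scores 0.71.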
+ lowerText = text.lower() + result = sum(weight + for (pattern, weight) in RexxLexer.PATTERNS_AND_WEIGHTS + if pattern.search(lowerText)) + 0.01 + return min(result, 1.0) + + +class MOOCodeLexer(RegexLexer): + """ + For `MOOCode `_ (the MOO scripting + language). + + .. versionadded:: 0.9 + """ + name = 'MOOCode' + filenames = ['*.moo'] + aliases = ['moocode', 'moo'] + mimetypes = ['text/x-moocode'] + + tokens = { + 'root': [ + # Numbers + (r'(0|[1-9][0-9_]*)', Number.Integer), + # Strings + (r'"(\\\\|\\"|[^"])*"', String), + # exceptions + (r'(E_PERM|E_DIV)', Name.Exception), + # db-refs + (r'((#[-0-9]+)|(\$[a-z_A-Z0-9]+))', Name.Entity), + # Keywords + (r'\b(if|else|elseif|endif|for|endfor|fork|endfork|while' + r'|endwhile|break|continue|return|try' + r'|except|endtry|finally|in)\b', Keyword), + # builtins + (r'(random|length)', Name.Builtin), + # special variables + (r'(player|caller|this|args)', Name.Variable.Instance), + # skip whitespace + (r'\s+', Text), + (r'\n', Text), + # other operators + (r'([!;=,{}&\|:\.\[\]@\(\)\<\>\?]+)', Operator), + # function call + (r'([a-z_A-Z0-9]+)(\()', bygroups(Name.Function, Operator)), + # variables + (r'([a-zA-Z_0-9]+)', Text), + ] + } -- cgit v1.2.1 From d2d62586a9f4abd910675ff6e16686b6a722528c Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Fri, 19 Sep 2014 21:26:56 +0200 Subject: reorganization of other.py, part 3/3 --- pygments/lexers/_mapping.py | 15 +- pygments/lexers/compiled.py | 2 +- pygments/lexers/configs.py | 65 ++++- pygments/lexers/dsls.py | 69 ++++- pygments/lexers/misc/ambient.py | 76 ++++++ pygments/lexers/misc/ecl.py | 125 +++++++++ pygments/lexers/misc/urbi.py | 133 ++++++++++ pygments/lexers/other.py | 553 +--------------------------------------- pygments/lexers/scripting.py | 84 +++++- pygments/lexers/textedit.py | 70 +++++ 10 files changed, 634 insertions(+), 558 deletions(-) create mode 100644 pygments/lexers/misc/ambient.py create mode 100644 pygments/lexers/misc/ecl.py create mode 100644 pygments/lexers/misc/urbi.py create mode 100644 pygments/lexers/textedit.py diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index 66bc6b74..0979b68a 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -23,7 +23,7 @@ LEXERS = { 'AdaLexer': ('pygments.lexers.pascal', 'Ada', ('ada', 'ada95', 'ada2005'), ('*.adb', '*.ads', '*.ada'), ('text/x-ada',)), 'AgdaLexer': ('pygments.lexers.functional', 'Agda', ('agda',), ('*.agda',), ('text/x-agda',)), 'AlloyLexer': ('pygments.lexers.dsls', 'Alloy', ('alloy',), ('*.als',), ('text/x-alloy',)), - 'AmbientTalkLexer': ('pygments.lexers.other', 'AmbientTalk', ('at', 'ambienttalk', 'ambienttalk/2'), ('*.at',), ('text/x-ambienttalk',)), + 'AmbientTalkLexer': ('pygments.lexers.misc.ambient', 'AmbientTalk', ('at', 'ambienttalk', 'ambienttalk/2'), ('*.at',), ('text/x-ambienttalk',)), 'AntlrActionScriptLexer': ('pygments.lexers.parsers', 'ANTLR With ActionScript Target', ('antlr-as', 'antlr-actionscript'), ('*.G', '*.g'), ()), 'AntlrCSharpLexer': ('pygments.lexers.parsers', 'ANTLR With C# Target', ('antlr-csharp', 'antlr-c#'), ('*.G', '*.g'), ()), 'AntlrCppLexer': ('pygments.lexers.parsers', 'ANTLR With CPP Target', ('antlr-cpp',), ('*.G', '*.g'), ()), @@ -39,7 +39,7 @@ LEXERS = { 'AsymptoteLexer': ('pygments.lexers.graphics', 'Asymptote', ('asy', 'asymptote'), ('*.asy',), ('text/x-asymptote',)), 'AutoItLexer': ('pygments.lexers.automation', 'AutoIt', ('autoit',), ('*.au3',), ('text/x-autoit',)), 'AutohotkeyLexer': ('pygments.lexers.automation', 'autohotkey', 
('ahk', 'autohotkey'), ('*.ahk', '*.ahkl'), ('text/x-autohotkey',)), - 'AwkLexer': ('pygments.lexers.other', 'Awk', ('awk', 'gawk', 'mawk', 'nawk'), ('*.awk',), ('application/x-awk',)), + 'AwkLexer': ('pygments.lexers.textedit', 'Awk', ('awk', 'gawk', 'mawk', 'nawk'), ('*.awk',), ('application/x-awk',)), 'BBCodeLexer': ('pygments.lexers.text', 'BBCode', ('bbcode',), (), ('text/x-bbcode',)), 'BaseMakefileLexer': ('pygments.lexers.text', 'Base Makefile', ('basemake',), (), ()), 'BashLexer': ('pygments.lexers.shell', 'Bash', ('bash', 'sh', 'ksh'), ('*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass', '.bashrc', 'bashrc', '.bash_*', 'bash_*', 'PKGBUILD'), ('application/x-sh', 'application/x-shellscript')), @@ -60,7 +60,7 @@ LEXERS = { 'Ca65Lexer': ('pygments.lexers.asm', 'ca65', ('ca65',), ('*.s',), ()), 'CbmBasicV2Lexer': ('pygments.lexers.misc.basic', 'CBM BASIC V2', ('cbmbas',), ('*.bas',), ()), 'CeylonLexer': ('pygments.lexers.jvm', 'Ceylon', ('ceylon',), ('*.ceylon',), ('text/x-ceylon',)), - 'Cfengine3Lexer': ('pygments.lexers.other', 'CFEngine3', ('cfengine3', 'cf3'), ('*.cf',), ()), + 'Cfengine3Lexer': ('pygments.lexers.configs', 'CFEngine3', ('cfengine3', 'cf3'), ('*.cf',), ()), 'ChaiscriptLexer': ('pygments.lexers.scripting', 'ChaiScript', ('chai', 'chaiscript'), ('*.chai',), ('text/x-chaiscript', 'application/x-chaiscript')), 'ChapelLexer': ('pygments.lexers.misc.chapel', 'Chapel', ('chapel', 'chpl'), ('*.chpl',), ()), 'CheetahHtmlLexer': ('pygments.lexers.templates', 'HTML+Cheetah', ('html+cheetah', 'html+spitfire', 'htmlcheetah'), (), ('text/html+cheetah', 'text/html+spitfire')), @@ -107,7 +107,7 @@ LEXERS = { 'DylanConsoleLexer': ('pygments.lexers.misc.dylan', 'Dylan session', ('dylan-console', 'dylan-repl'), ('*.dylan-console',), ('text/x-dylan-console',)), 'DylanLexer': ('pygments.lexers.misc.dylan', 'Dylan', ('dylan',), ('*.dylan', '*.dyl', '*.intr'), ('text/x-dylan',)), 'DylanLidLexer': ('pygments.lexers.misc.dylan', 'DylanLID', ('dylan-lid', 'lid'), ('*.lid', '*.hdp'), ('text/x-dylan-lid',)), - 'ECLLexer': ('pygments.lexers.other', 'ECL', ('ecl',), ('*.ecl',), ('application/x-ecl',)), + 'ECLLexer': ('pygments.lexers.misc.ecl', 'ECL', ('ecl',), ('*.ecl',), ('application/x-ecl',)), 'ECLexer': ('pygments.lexers.c_like.other', 'eC', ('ec',), ('*.ec', '*.eh'), ('text/x-echdr', 'text/x-ecsrc')), 'EbnfLexer': ('pygments.lexers.text', 'EBNF', ('ebnf',), ('*.ebnf',), ('text/x-ebnf',)), 'EiffelLexer': ('pygments.lexers.eiffel', 'Eiffel', ('eiffel',), ('*.e',), ('text/x-eiffel',)), @@ -154,7 +154,7 @@ LEXERS = { 'HttpLexer': ('pygments.lexers.text', 'HTTP', ('http',), (), ()), 'HxmlLexer': ('pygments.lexers.text', 'Hxml', ('haxeml', 'hxml'), ('*.hxml',), ()), 'HyLexer': ('pygments.lexers.lisp', 'Hy', ('hylang',), ('*.hy',), ('text/x-hy', 'application/x-hy')), - 'HybrisLexer': ('pygments.lexers.other', 'Hybris', ('hybris', 'hy'), ('*.hy', '*.hyb'), ('text/x-hybris', 'application/x-hybris')), + 'HybrisLexer': ('pygments.lexers.scripting', 'Hybris', ('hybris', 'hy'), ('*.hy', '*.hyb'), ('text/x-hybris', 'application/x-hybris')), 'IDLLexer': ('pygments.lexers.math', 'IDL', ('idl',), ('*.pro',), ('text/idl',)), 'IdrisLexer': ('pygments.lexers.functional', 'Idris', ('idris', 'idr'), ('*.idr',), ('text/x-idris',)), 'IgorLexer': ('pygments.lexers.math', 'Igor', ('igor', 'igorpro'), ('*.ipf',), ('text/ipf',)), @@ -251,9 +251,8 @@ LEXERS = { 'OocLexer': ('pygments.lexers.misc.ooc', 'Ooc', ('ooc',), ('*.ooc',), ('text/x-ooc',)), 'OpaLexer': ('pygments.lexers.functional', 'Opa', ('opa',), ('*.opa',), 
('text/x-opa',)), 'OpenEdgeLexer': ('pygments.lexers.business', 'OpenEdge ABL', ('openedge', 'abl', 'progress'), ('*.p', '*.cls'), ('text/x-openedge', 'application/x-openedge')), - 'PanLexer': ('pygments.lexers.other', 'Pan', ('pan',), ('*.pan',), ()), + 'PanLexer': ('pygments.lexers.dsls', 'Pan', ('pan',), ('*.pan',), ()), 'PawnLexer': ('pygments.lexers.misc.pawn', 'Pawn', ('pawn',), ('*.p', '*.pwn', '*.inc'), ('text/x-pawn',)), - 'PawnLexer': ('pygments.lexers.other', 'Pawn', ('pawn',), ('*.p', '*.pwn', '*.inc'), ('text/x-pawn',)), 'Perl6Lexer': ('pygments.lexers.perl', 'Perl6', ('perl6', 'pl6'), ('*.pl', '*.pm', '*.nqp', '*.p6', '*.6pl', '*.p6l', '*.pl6', '*.6pm', '*.p6m', '*.pm6', '*.t'), ('text/x-perl6', 'application/x-perl6')), 'PerlLexer': ('pygments.lexers.perl', 'Perl', ('perl', 'pl'), ('*.pl', '*.pm', '*.t'), ('text/x-perl', 'application/x-perl')), 'PhpLexer': ('pygments.lexers.web', 'PHP', ('php', 'php3', 'php4', 'php5'), ('*.php', '*.php[345]', '*.inc'), ('text/x-php',)), @@ -336,7 +335,7 @@ LEXERS = { 'TodotxtLexer': ('pygments.lexers.text', 'Todotxt', ('todotxt',), ('todo.txt', '*.todotxt'), ('text/x-todo',)), 'TreetopLexer': ('pygments.lexers.parsers', 'Treetop', ('treetop',), ('*.treetop', '*.tt'), ()), 'TypeScriptLexer': ('pygments.lexers.web', 'TypeScript', ('ts',), ('*.ts',), ('text/x-typescript',)), - 'UrbiscriptLexer': ('pygments.lexers.other', 'UrbiScript', ('urbiscript',), ('*.u',), ('application/x-urbiscript',)), + 'UrbiscriptLexer': ('pygments.lexers.misc.urbi', 'UrbiScript', ('urbiscript',), ('*.u',), ('application/x-urbiscript',)), 'VCTreeStatusLexer': ('pygments.lexers.console', 'VCTreeStatus', ('vctreestatus',), (), ()), 'VGLLexer': ('pygments.lexers.dsls', 'VGL', ('vgl',), ('*.rpf',), ()), 'ValaLexer': ('pygments.lexers.c_like.other', 'Vala', ('vala', 'vapi'), ('*.vala', '*.vapi'), ('text/x-vala',)), diff --git a/pygments/lexers/compiled.py b/pygments/lexers/compiled.py index e72bd9c7..8edd6cb5 100644 --- a/pygments/lexers/compiled.py +++ b/pygments/lexers/compiled.py @@ -24,7 +24,7 @@ from pygments.lexers.fortran import FortranLexer from pygments.lexers.prolog import PrologLexer from pygments.lexers.python import CythonLexer from pygments.lexers.graphics import GLShaderLexer -from pygments.lexers.misc.blitz import BlitzBasicLexer, BlitzMaxLexer, \ +from pygments.lexers.misc.basic import BlitzBasicLexer, BlitzMaxLexer, \ MonkeyLexer from pygments.lexers.misc.dylan import DylanLexer, DylanLidLexer, \ DylanConsoleLexer diff --git a/pygments/lexers/configs.py b/pygments/lexers/configs.py index e4f18803..44408199 100644 --- a/pygments/lexers/configs.py +++ b/pygments/lexers/configs.py @@ -9,11 +9,11 @@ :license: BSD, see LICENSE for details. """ -from pygments.lexer import RegexLexer, default, words +from pygments.lexer import RegexLexer, default, words, bygroups from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ Number, Punctuation -__all__ = ['KconfigLexer'] +__all__ = ['KconfigLexer', 'Cfengine3Lexer'] def _rx_indent(level): @@ -103,3 +103,64 @@ class KconfigLexer(RegexLexer): 'indent2': do_indent(2), 'indent1': do_indent(1), } + + +class Cfengine3Lexer(RegexLexer): + """ + Lexer for `CFEngine3 `_ policy files. + + .. 
versionadded:: 1.5 + """ + + name = 'CFEngine3' + aliases = ['cfengine3', 'cf3'] + filenames = ['*.cf'] + mimetypes = [] + + tokens = { + 'root': [ + (r'#.*?\n', Comment), + (r'(body)(\s+)(\S+)(\s+)(control)', + bygroups(Keyword, Text, Keyword, Text, Keyword)), + (r'(body|bundle)(\s+)(\S+)(\s+)(\w+)(\()', + bygroups(Keyword, Text, Keyword, Text, Name.Function, Punctuation), + 'arglist'), + (r'(body|bundle)(\s+)(\S+)(\s+)(\w+)', + bygroups(Keyword, Text, Keyword, Text, Name.Function)), + (r'(")([^"]+)(")(\s+)(string|slist|int|real)(\s*)(=>)(\s*)', + bygroups(Punctuation, Name.Variable, Punctuation, + Text, Keyword.Type, Text, Operator, Text)), + (r'(\S+)(\s*)(=>)(\s*)', + bygroups(Keyword.Reserved, Text, Operator, Text)), + (r'"', String, 'string'), + (r'(\w+)(\()', bygroups(Name.Function, Punctuation)), + (r'([\w.!&|\(\)]+)(::)', bygroups(Name.Class, Punctuation)), + (r'(\w+)(:)', bygroups(Keyword.Declaration, Punctuation)), + (r'@[\{\(][^\)\}]+[\}\)]', Name.Variable), + (r'[(){},;]', Punctuation), + (r'=>', Operator), + (r'->', Operator), + (r'\d+\.\d+', Number.Float), + (r'\d+', Number.Integer), + (r'\w+', Name.Function), + (r'\s+', Text), + ], + 'string': [ + (r'\$[\{\(]', String.Interpol, 'interpol'), + (r'\\.', String.Escape), + (r'"', String, '#pop'), + (r'\n', String), + (r'.', String), + ], + 'interpol': [ + (r'\$[\{\(]', String.Interpol, '#push'), + (r'[\}\)]', String.Interpol, '#pop'), + (r'[^\$\{\(\)\}]+', String.Interpol), + ], + 'arglist': [ + (r'\)', Punctuation, '#pop'), + (r',', Punctuation), + (r'\w+', Name.Variable), + (r'\s+', Text), + ], + } diff --git a/pygments/lexers/dsls.py b/pygments/lexers/dsls.py index dc7a5ae2..bfbc860e 100644 --- a/pygments/lexers/dsls.py +++ b/pygments/lexers/dsls.py @@ -15,8 +15,8 @@ from pygments.lexer import RegexLexer, bygroups, words, include from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ Number, Punctuation, Literal -__all__ = ['ProtoBufLexer', 'BroLexer', 'PuppetLexer', 'RslLexer', 'MscgenLexer', - 'VGLLexer', 'AlloyLexer'] +__all__ = ['ProtoBufLexer', 'BroLexer', 'PuppetLexer', 'RslLexer', + 'MscgenLexer', 'VGLLexer', 'AlloyLexer', 'PanLexer'] class ProtoBufLexer(RegexLexer): @@ -439,3 +439,68 @@ class AlloyLexer(RegexLexer): (r'\n', Text), ] } + + +class PanLexer(RegexLexer): + """ + Lexer for `pan `_ source files. + + Based on tcsh lexer. + + .. 
versionadded:: 2.0 + """ + + name = 'Pan' + aliases = ['pan'] + filenames = ['*.pan'] + + tokens = { + 'root': [ + include('basic'), + (r'\(', Keyword, 'paren'), + (r'{', Keyword, 'curly'), + include('data'), + ], + 'basic': [ + (words(( + 'if', 'for', 'with', 'else', 'type', 'bind', 'while', 'valid', 'final', 'prefix', + 'unique', 'object', 'foreach', 'include', 'template', 'function', 'variable', + 'structure', 'extensible', 'declaration'), prefix=r'\b', suffix=r'\s*\b'), + Keyword), + (words(( + 'file_contents', 'format', 'index', 'length', 'match', 'matches', 'replace', + 'splice', 'split', 'substr', 'to_lowercase', 'to_uppercase', 'debug', 'error', + 'traceback', 'deprecated', 'base64_decode', 'base64_encode', 'digest', 'escape', + 'unescape', 'append', 'create', 'first', 'nlist', 'key', 'length', 'list', 'merge', 'next', + 'prepend', 'splice', 'is_boolean', 'is_defined', 'is_double', 'is_list', 'is_long', + 'is_nlist', 'is_null', 'is_number', 'is_property', 'is_resource', 'is_string', + 'to_boolean', 'to_double', 'to_long', 'to_string', 'clone', 'delete', 'exists', + 'path_exists', 'if_exists', 'return', 'value'), prefix=r'\b', suffix=r'\s*\b'), + Name.Builtin), + (r'#.*', Comment), + (r'\\[\w\W]', String.Escape), + (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)), + (r'[\[\]{}()=]+', Operator), + (r'<<\s*(\'?)\\?(\w+)[\w\W]+?\2', String), + (r';', Punctuation), + ], + 'data': [ + (r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double), + (r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single), + (r'\s+', Text), + (r'[^=\s\[\]{}()$"\'`\\;#]+', Text), + (r'\d+(?= |\Z)', Number), + ], + 'curly': [ + (r'}', Keyword, '#pop'), + (r':-', Keyword), + (r'\w+', Name.Variable), + (r'[^}:"\'`$]+', Punctuation), + (r':', Punctuation), + include('root'), + ], + 'paren': [ + (r'\)', Keyword, '#pop'), + include('root'), + ], + } diff --git a/pygments/lexers/misc/ambient.py b/pygments/lexers/misc/ambient.py new file mode 100644 index 00000000..3dcd4da2 --- /dev/null +++ b/pygments/lexers/misc/ambient.py @@ -0,0 +1,76 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.misc.ambient + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for AmbientTalk language. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import re + +from pygments.lexer import RegexLexer, include, words +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation + +__all__ = ['AmbientTalkLexer'] + + +class AmbientTalkLexer(RegexLexer): + """ + Lexer for `AmbientTalk `_ source code. + + .. 
versionadded:: 2.0 + """ + name = 'AmbientTalk' + filenames = ['*.at'] + aliases = ['at', 'ambienttalk', 'ambienttalk/2'] + mimetypes = ['text/x-ambienttalk'] + + flags = re.MULTILINE | re.DOTALL + + builtin = words(('if:', 'then:', 'else:', 'when:', 'whenever:', 'discovered:', + 'disconnected:', 'reconnected:', 'takenOffline:', 'becomes:', + 'export:', 'as:', 'object:', 'actor:', 'mirror:', 'taggedAs:', + 'mirroredBy:', 'is:')) + tokens = { + 'root': [ + (r'\s+', Text), + (r'//.*?\n', Comment.Single), + (r'/\*.*?\*/', Comment.Multiline), + (r'(def|deftype|import|alias|exclude)\b', Keyword), + (builtin, Name.Builtin), + (r'(true|false|nil)\b', Keyword.Constant), + (r'(~|lobby|jlobby|/)\.', Keyword.Constant, 'namespace'), + (r'"(\\\\|\\"|[^"])*"', String), + (r'\|', Punctuation, 'arglist'), + (r'<:|[\^\*!%&<>+=,./?-]|:=', Operator), + (r"`[a-zA-Z_]\w*", String.Symbol), + (r"[a-zA-Z_]\w*:", Name.Function), + (r"[\{\}()\[\];`]", Punctuation), + (r'(self|super)\b', Name.Variable.Instance), + (r"[a-zA-Z_]\w*", Name.Variable), + (r"@[a-zA-Z_]\w*", Name.Class), + (r"@\[", Name.Class, 'annotations'), + include('numbers'), + ], + 'numbers': [ + (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float), + (r'\d+', Number.Integer) + ], + 'namespace': [ + (r'[a-zA-Z_]\w*\.', Name.Namespace), + (r'[a-zA-Z_]\w*:', Name.Function, '#pop'), + (r'[a-zA-Z_]\w*(?!\.)', Name.Function, '#pop') + ], + 'annotations': [ + (r"(.*?)\]", Name.Class, '#pop') + ], + 'arglist': [ + (r'\|', Punctuation, '#pop'), + (r'\s*(,)\s*', Punctuation), + (r'[a-zA-Z_]\w*', Name.Variable), + ], + } diff --git a/pygments/lexers/misc/ecl.py b/pygments/lexers/misc/ecl.py new file mode 100644 index 00000000..1dd8999b --- /dev/null +++ b/pygments/lexers/misc/ecl.py @@ -0,0 +1,125 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.misc.ecl + ~~~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for the ECL language. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import re + +from pygments.lexer import RegexLexer, include, bygroups, words +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation, Error + +__all__ = ['ECLLexer'] + + +class ECLLexer(RegexLexer): + """ + Lexer for the declarative big-data `ECL + `_ + language. + + .. 
versionadded:: 1.5 + """ + + name = 'ECL' + aliases = ['ecl'] + filenames = ['*.ecl'] + mimetypes = ['application/x-ecl'] + + flags = re.IGNORECASE | re.MULTILINE + + tokens = { + 'root': [ + include('whitespace'), + include('statements'), + ], + 'whitespace': [ + (r'\s+', Text), + (r'\/\/.*', Comment.Single), + (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment.Multiline), + ], + 'statements': [ + include('types'), + include('keywords'), + include('functions'), + include('hash'), + (r'"', String, 'string'), + (r'\'', String, 'string'), + (r'(\d+\.\d*|\.\d+|\d+)e[+-]?\d+[lu]*', Number.Float), + (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float), + (r'0x[0-9a-f]+[lu]*', Number.Hex), + (r'0[0-7]+[lu]*', Number.Oct), + (r'\d+[LlUu]*', Number.Integer), + (r'\*/', Error), + (r'[~!%^&*+=|?:<>/-]+', Operator), + (r'[{}()\[\],.;]', Punctuation), + (r'[a-z_]\w*', Name), + ], + 'hash': [ + (r'^#.*$', Comment.Preproc), + ], + 'types': [ + (r'(RECORD|END)\D', Keyword.Declaration), + (r'((?:ASCII|BIG_ENDIAN|BOOLEAN|DATA|DECIMAL|EBCDIC|INTEGER|PATTERN|' + r'QSTRING|REAL|RECORD|RULE|SET OF|STRING|TOKEN|UDECIMAL|UNICODE|' + r'UNSIGNED|VARSTRING|VARUNICODE)\d*)(\s+)', + bygroups(Keyword.Type, Text)), + ], + 'keywords': [ + (words(( + 'APPLY', 'ASSERT', 'BUILD', 'BUILDINDEX', 'EVALUATE', 'FAIL', + 'KEYDIFF', 'KEYPATCH', 'LOADXML', 'NOTHOR', 'NOTIFY', 'OUTPUT', + 'PARALLEL', 'SEQUENTIAL', 'SOAPCALL', 'CHECKPOINT', 'DEPRECATED', + 'FAILCODE', 'FAILMESSAGE', 'FAILURE', 'GLOBAL', 'INDEPENDENT', + 'ONWARNING', 'PERSIST', 'PRIORITY', 'RECOVERY', 'STORED', 'SUCCESS', + 'WAIT', 'WHEN'), suffix=r'\b'), + Keyword.Reserved), + # These are classed differently, check later + (words(( + 'ALL', 'AND', 'ANY', 'AS', 'ATMOST', 'BEFORE', 'BEGINC++', 'BEST', 'BETWEEN', 'CASE', 'CONST', + 'COUNTER', 'CSV', 'DESCEND', 'ENCRYPT', 'ENDC++', 'ENDMACRO', 'EXCEPT', 'EXCLUSIVE', + 'EXPIRE', 'EXPORT', 'EXTEND', 'FALSE', 'FEW', 'FIRST', 'FLAT', 'FULL', 'FUNCTION', 'GROUP', + 'HEADER', 'HEADING', 'HOLE', 'IFBLOCK', 'IMPORT', 'IN', 'JOINED', 'KEEP', 'KEYED', 'LAST', + 'LEFT', 'LIMIT', 'LOAD', 'LOCAL', 'LOCALE', 'LOOKUP', 'MACRO', 'MANY', 'MAXCOUNT', + 'MAXLENGTH', 'MIN SKEW', 'MODULE', 'INTERFACE', 'NAMED', 'NOCASE', 'NOROOT', 'NOSCAN', + 'NOSORT', 'NOT', 'OF', 'ONLY', 'OPT', 'OR', 'OUTER', 'OVERWRITE', 'PACKED', 'PARTITION', + 'PENALTY', 'PHYSICALLENGTH', 'PIPE', 'QUOTE', 'RELATIONSHIP', 'REPEAT', 'RETURN', + 'RIGHT', 'SCAN', 'SELF', 'SEPARATOR', 'SERVICE', 'SHARED', 'SKEW', 'SKIP', 'SQL', 'STORE', + 'TERMINATOR', 'THOR', 'THRESHOLD', 'TOKEN', 'TRANSFORM', 'TRIM', 'TRUE', 'TYPE', + 'UNICODEORDER', 'UNSORTED', 'VALIDATE', 'VIRTUAL', 'WHOLE', 'WILD', 'WITHIN', 'XML', + 'XPATH', '__COMPRESSED__'), suffix=r'\b'), + Keyword.Reserved), + ], + 'functions': [ + (words(( + 'ABS', 'ACOS', 'ALLNODES', 'ASCII', 'ASIN', 'ASSTRING', 'ATAN', 'ATAN2', 'AVE', 'CASE', + 'CHOOSE', 'CHOOSEN', 'CHOOSESETS', 'CLUSTERSIZE', 'COMBINE', 'CORRELATION', 'COS', + 'COSH', 'COUNT', 'COVARIANCE', 'CRON', 'DATASET', 'DEDUP', 'DEFINE', 'DENORMALIZE', + 'DISTRIBUTE', 'DISTRIBUTED', 'DISTRIBUTION', 'EBCDIC', 'ENTH', 'ERROR', 'EVALUATE', + 'EVENT', 'EVENTEXTRA', 'EVENTNAME', 'EXISTS', 'EXP', 'FAILCODE', 'FAILMESSAGE', + 'FETCH', 'FROMUNICODE', 'GETISVALID', 'GLOBAL', 'GRAPH', 'GROUP', 'HASH', 'HASH32', + 'HASH64', 'HASHCRC', 'HASHMD5', 'HAVING', 'IF', 'INDEX', 'INTFORMAT', 'ISVALID', + 'ITERATE', 'JOIN', 'KEYUNICODE', 'LENGTH', 'LIBRARY', 'LIMIT', 'LN', 'LOCAL', 'LOG', 'LOOP', + 'MAP', 'MATCHED', 'MATCHLENGTH', 'MATCHPOSITION', 'MATCHTEXT', 'MATCHUNICODE', + 'MAX', 'MERGE', 
'MERGEJOIN', 'MIN', 'NOLOCAL', 'NONEMPTY', 'NORMALIZE', 'PARSE', 'PIPE', + 'POWER', 'PRELOAD', 'PROCESS', 'PROJECT', 'PULL', 'RANDOM', 'RANGE', 'RANK', 'RANKED', + 'REALFORMAT', 'RECORDOF', 'REGEXFIND', 'REGEXREPLACE', 'REGROUP', 'REJECTED', + 'ROLLUP', 'ROUND', 'ROUNDUP', 'ROW', 'ROWDIFF', 'SAMPLE', 'SET', 'SIN', 'SINH', 'SIZEOF', + 'SOAPCALL', 'SORT', 'SORTED', 'SQRT', 'STEPPED', 'STORED', 'SUM', 'TABLE', 'TAN', 'TANH', + 'THISNODE', 'TOPN', 'TOUNICODE', 'TRANSFER', 'TRIM', 'TRUNCATE', 'TYPEOF', 'UNGROUP', + 'UNICODEORDER', 'VARIANCE', 'WHICH', 'WORKUNIT', 'XMLDECODE', 'XMLENCODE', + 'XMLTEXT', 'XMLUNICODE'), suffix=r'\b'), + Name.Function), + ], + 'string': [ + (r'"', String, '#pop'), + (r'\'', String, '#pop'), + (r'[^"\']+', String), + ], + } diff --git a/pygments/lexers/misc/urbi.py b/pygments/lexers/misc/urbi.py new file mode 100644 index 00000000..ad84d1db --- /dev/null +++ b/pygments/lexers/misc/urbi.py @@ -0,0 +1,133 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.misc.urbi + ~~~~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for UrbiScript language. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import re + +from pygments.lexer import ExtendedRegexLexer, words +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation + +__all__ = ['UrbiscriptLexer'] + + +class UrbiscriptLexer(ExtendedRegexLexer): + """ + For UrbiScript source code. + + .. versionadded:: 1.5 + """ + + name = 'UrbiScript' + aliases = ['urbiscript'] + filenames = ['*.u'] + mimetypes = ['application/x-urbiscript'] + + flags = re.DOTALL + + # TODO + # - handle Experimental and deprecated tags with specific tokens + # - handle Angles and Durations with specific tokens + + def blob_callback(lexer, match, ctx): + text_before_blob = match.group(1) + blob_start = match.group(2) + blob_size_str = match.group(3) + blob_size = int(blob_size_str) + yield match.start(), String, text_before_blob + ctx.pos += len(text_before_blob) + + # if blob size doesn't match blob format (example : "\B(2)(aaa)") + # yield blob as a string + if ctx.text[match.end() + blob_size] != ")": + result = "\\B(" + blob_size_str + ")(" + yield match.start(), String, result + ctx.pos += len(result) + return + + # if blob is well formated, yield as Escape + blob_text = blob_start + ctx.text[match.end():match.end()+blob_size] + ")" + yield match.start(), String.Escape, blob_text + ctx.pos = match.end() + blob_size + 1 # +1 is the ending ")" + + tokens = { + 'root': [ + (r'\s+', Text), + # comments + (r'//.*?\n', Comment), + (r'/\*', Comment.Multiline, 'comment'), + (r'(every|for|loop|while)(?:;|&|\||,)', Keyword), + (words(( + 'assert', 'at', 'break', 'case', 'catch', 'closure', 'compl', + 'continue', 'default', 'else', 'enum', 'every', 'external', + 'finally', 'for', 'freezeif', 'if', 'new', 'onleave', 'return', + 'stopif', 'switch', 'this', 'throw', 'timeout', 'try', + 'waituntil', 'whenever', 'while'), suffix=r'\b'), + Keyword), + (words(( + 'asm', 'auto', 'bool', 'char', 'const_cast', 'delete', 'double', + 'dynamic_cast', 'explicit', 'export', 'extern', 'float', 'friend', + 'goto', 'inline', 'int', 'long', 'mutable', 'namespace', 'register', + 'reinterpret_cast', 'short', 'signed', 'sizeof', 'static_cast', + 'struct', 'template', 'typedef', 'typeid', 'typename', 'union', + 'unsigned', 'using', 'virtual', 'volatile', 'wchar_t'), suffix=r'\b'), + Keyword.Reserved), + # deprecated keywords, use a meaningfull token when available + 
(r'(emit|foreach|internal|loopn|static)\b', Keyword), + # ignored keywords, use a meaningfull token when available + (r'(private|protected|public)\b', Keyword), + (r'(var|do|const|function|class)\b', Keyword.Declaration), + (r'(true|false|nil|void)\b', Keyword.Constant), + (words(( + 'Barrier', 'Binary', 'Boolean', 'CallMessage', 'Channel', 'Code', + 'Comparable', 'Container', 'Control', 'Date', 'Dictionary', 'Directory', + 'Duration', 'Enumeration', 'Event', 'Exception', 'Executable', 'File', + 'Finalizable', 'Float', 'FormatInfo', 'Formatter', 'Global', 'Group', + 'Hash', 'InputStream', 'IoService', 'Job', 'Kernel', 'Lazy', 'List', + 'Loadable', 'Lobby', 'Location', 'Logger', 'Math', 'Mutex', 'nil', + 'Object', 'Orderable', 'OutputStream', 'Pair', 'Path', 'Pattern', + 'Position', 'Primitive', 'Process', 'Profile', 'PseudoLazy', 'PubSub', + 'RangeIterable', 'Regexp', 'Semaphore', 'Server', 'Singleton', 'Socket', + 'StackFrame', 'Stream', 'String', 'System', 'Tag', 'Timeout', + 'Traceable', 'TrajectoryGenerator', 'Triplet', 'Tuple', 'UObject', + 'UValue', 'UVar'), suffix=r'\b'), + Name.Builtin), + (r'(?:this)\b', Name.Builtin.Pseudo), + # don't match single | and & + (r'(?:[-=+*%/<>~^:]+|\.&?|\|\||&&)', Operator), + (r'(?:and_eq|and|bitand|bitor|in|not|not_eq|or_eq|or|xor_eq|xor)\b', + Operator.Word), + (r'[{}\[\]()]+', Punctuation), + (r'(?:;|\||,|&|\?|!)+', Punctuation), + (r'[$a-zA-Z_]\w*', Name.Other), + (r'0x[0-9a-fA-F]+', Number.Hex), + # Float, Integer, Angle and Duration + (r'(?:[0-9]+(?:(?:\.[0-9]+)?(?:[eE][+-]?[0-9]+)?)?' + r'((?:rad|deg|grad)|(?:ms|s|min|h|d))?)\b', Number.Float), + # handle binary blob in strings + (r'"', String.Double, "string.double"), + (r"'", String.Single, "string.single"), + ], + 'string.double': [ + (r'((?:\\\\|\\"|[^"])*?)(\\B\((\d+)\)\()', blob_callback), + (r'(\\\\|\\"|[^"])*?"', String.Double, '#pop'), + ], + 'string.single': [ + (r"((?:\\\\|\\'|[^'])*?)(\\B\((\d+)\)\()", blob_callback), + (r"(\\\\|\\'|[^'])*?'", String.Single, '#pop'), + ], + # from http://pygments.org/docs/lexerdevelopment/#changing-states + 'comment': [ + (r'[^*/]', Comment.Multiline), + (r'/\*', Comment.Multiline, '#push'), + (r'\*/', Comment.Multiline, '#pop'), + (r'[*/]', Comment.Multiline), + ] + } diff --git a/pygments/lexers/other.py b/pygments/lexers/other.py index daf204e5..24b1a847 100644 --- a/pygments/lexers/other.py +++ b/pygments/lexers/other.py @@ -3,19 +3,12 @@ pygments.lexers.other ~~~~~~~~~~~~~~~~~~~~~ - Lexers for other languages. + Just export lexer classes previously contained in this module. :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. 
""" -import re -from pygments.lexer import ExtendedRegexLexer, RegexLexer, include, bygroups, \ - default, using, this, combined, words -from pygments.token import Generic, Comment, String, Text, Number, Keyword, Name, \ - Error, Operator, Punctuation, Literal, Whitespace - -# backwards compatibility from pygments.lexers.sql import SqlLexer, MySqlLexer, SqliteConsoleLexer from pygments.lexers.shell import BashLexer, BashSessionLexer, BatchLexer, \ TcshLexer @@ -25,9 +18,10 @@ from pygments.lexers.esoteric import BrainfuckLexer, BefungeLexer, RedcodeLexer from pygments.lexers.prolog import LogtalkLexer from pygments.lexers.misc.snobol import SnobolLexer from pygments.lexers.misc.rebol import RebolLexer -from pygments.lexers.configs import KconfigLexer +from pygments.lexers.configs import KconfigLexer, Cfengine3Lexer from pygments.lexers.modeling import ModelicaLexer -from pygments.lexers.scripting import AppleScriptLexer, MOOCodeLexer +from pygments.lexers.scripting import AppleScriptLexer, MOOCodeLexer, \ + HybrisLexer from pygments.lexers.graphics import PostScriptLexer, GnuplotLexer, \ AsymptoteLexer, PovrayLexer from pygments.lexers.business import ABAPLexer, OpenEdgeLexer, \ @@ -37,539 +31,10 @@ from pygments.lexers.dsls import ProtoBufLexer, BroLexer, PuppetLexer, \ MscgenLexer, VGLLexer from pygments.lexers.misc.basic import CbmBasicV2Lexer from pygments.lexers.misc.pawn import SourcePawnLexer, PawnLexer -from pygments.lexers.installers import NSISLexer, RPMSpecLexer +from pygments.lexers.misc.ecl import ECLLexer +from pygments.lexers.misc.urbi import UrbiscriptLexer from pygments.lexers.misc.smalltalk import SmalltalkLexer, NewspeakLexer +from pygments.lexers.installers import NSISLexer, RPMSpecLexer +from pygments.lexers.textedit import AwkLexer -__all__ = ['HybrisLexer', 'AwkLexer', 'Cfengine3Lexer', 'ECLLexer', - 'UrbiscriptLexer', 'AmbientTalkLexer', 'PanLexer'] - - -class ECLLexer(RegexLexer): - """ - Lexer for the declarative big-data `ECL - `_ - language. - - .. 
versionadded:: 1.5 - """ - - name = 'ECL' - aliases = ['ecl'] - filenames = ['*.ecl'] - mimetypes = ['application/x-ecl'] - - flags = re.IGNORECASE | re.MULTILINE - - tokens = { - 'root': [ - include('whitespace'), - include('statements'), - ], - 'whitespace': [ - (r'\s+', Text), - (r'\/\/.*', Comment.Single), - (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment.Multiline), - ], - 'statements': [ - include('types'), - include('keywords'), - include('functions'), - include('hash'), - (r'"', String, 'string'), - (r'\'', String, 'string'), - (r'(\d+\.\d*|\.\d+|\d+)e[+-]?\d+[lu]*', Number.Float), - (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float), - (r'0x[0-9a-f]+[lu]*', Number.Hex), - (r'0[0-7]+[lu]*', Number.Oct), - (r'\d+[LlUu]*', Number.Integer), - (r'\*/', Error), - (r'[~!%^&*+=|?:<>/-]+', Operator), - (r'[{}()\[\],.;]', Punctuation), - (r'[a-z_]\w*', Name), - ], - 'hash': [ - (r'^#.*$', Comment.Preproc), - ], - 'types': [ - (r'(RECORD|END)\D', Keyword.Declaration), - (r'((?:ASCII|BIG_ENDIAN|BOOLEAN|DATA|DECIMAL|EBCDIC|INTEGER|PATTERN|' - r'QSTRING|REAL|RECORD|RULE|SET OF|STRING|TOKEN|UDECIMAL|UNICODE|' - r'UNSIGNED|VARSTRING|VARUNICODE)\d*)(\s+)', - bygroups(Keyword.Type, Text)), - ], - 'keywords': [ - (words(( - 'APPLY', 'ASSERT', 'BUILD', 'BUILDINDEX', 'EVALUATE', 'FAIL', - 'KEYDIFF', 'KEYPATCH', 'LOADXML', 'NOTHOR', 'NOTIFY', 'OUTPUT', - 'PARALLEL', 'SEQUENTIAL', 'SOAPCALL', 'CHECKPOINT', 'DEPRECATED', - 'FAILCODE', 'FAILMESSAGE', 'FAILURE', 'GLOBAL', 'INDEPENDENT', - 'ONWARNING', 'PERSIST', 'PRIORITY', 'RECOVERY', 'STORED', 'SUCCESS', - 'WAIT', 'WHEN'), suffix=r'\b'), - Keyword.Reserved), - # These are classed differently, check later - (words(( - 'ALL', 'AND', 'ANY', 'AS', 'ATMOST', 'BEFORE', 'BEGINC++', 'BEST', 'BETWEEN', 'CASE', 'CONST', - 'COUNTER', 'CSV', 'DESCEND', 'ENCRYPT', 'ENDC++', 'ENDMACRO', 'EXCEPT', 'EXCLUSIVE', - 'EXPIRE', 'EXPORT', 'EXTEND', 'FALSE', 'FEW', 'FIRST', 'FLAT', 'FULL', 'FUNCTION', 'GROUP', - 'HEADER', 'HEADING', 'HOLE', 'IFBLOCK', 'IMPORT', 'IN', 'JOINED', 'KEEP', 'KEYED', 'LAST', - 'LEFT', 'LIMIT', 'LOAD', 'LOCAL', 'LOCALE', 'LOOKUP', 'MACRO', 'MANY', 'MAXCOUNT', - 'MAXLENGTH', 'MIN SKEW', 'MODULE', 'INTERFACE', 'NAMED', 'NOCASE', 'NOROOT', 'NOSCAN', - 'NOSORT', 'NOT', 'OF', 'ONLY', 'OPT', 'OR', 'OUTER', 'OVERWRITE', 'PACKED', 'PARTITION', - 'PENALTY', 'PHYSICALLENGTH', 'PIPE', 'QUOTE', 'RELATIONSHIP', 'REPEAT', 'RETURN', - 'RIGHT', 'SCAN', 'SELF', 'SEPARATOR', 'SERVICE', 'SHARED', 'SKEW', 'SKIP', 'SQL', 'STORE', - 'TERMINATOR', 'THOR', 'THRESHOLD', 'TOKEN', 'TRANSFORM', 'TRIM', 'TRUE', 'TYPE', - 'UNICODEORDER', 'UNSORTED', 'VALIDATE', 'VIRTUAL', 'WHOLE', 'WILD', 'WITHIN', 'XML', - 'XPATH', '__COMPRESSED__'), suffix=r'\b'), - Keyword.Reserved), - ], - 'functions': [ - (words(( - 'ABS', 'ACOS', 'ALLNODES', 'ASCII', 'ASIN', 'ASSTRING', 'ATAN', 'ATAN2', 'AVE', 'CASE', - 'CHOOSE', 'CHOOSEN', 'CHOOSESETS', 'CLUSTERSIZE', 'COMBINE', 'CORRELATION', 'COS', - 'COSH', 'COUNT', 'COVARIANCE', 'CRON', 'DATASET', 'DEDUP', 'DEFINE', 'DENORMALIZE', - 'DISTRIBUTE', 'DISTRIBUTED', 'DISTRIBUTION', 'EBCDIC', 'ENTH', 'ERROR', 'EVALUATE', - 'EVENT', 'EVENTEXTRA', 'EVENTNAME', 'EXISTS', 'EXP', 'FAILCODE', 'FAILMESSAGE', - 'FETCH', 'FROMUNICODE', 'GETISVALID', 'GLOBAL', 'GRAPH', 'GROUP', 'HASH', 'HASH32', - 'HASH64', 'HASHCRC', 'HASHMD5', 'HAVING', 'IF', 'INDEX', 'INTFORMAT', 'ISVALID', - 'ITERATE', 'JOIN', 'KEYUNICODE', 'LENGTH', 'LIBRARY', 'LIMIT', 'LN', 'LOCAL', 'LOG', 'LOOP', - 'MAP', 'MATCHED', 'MATCHLENGTH', 'MATCHPOSITION', 'MATCHTEXT', 'MATCHUNICODE', - 'MAX', 'MERGE', 
'MERGEJOIN', 'MIN', 'NOLOCAL', 'NONEMPTY', 'NORMALIZE', 'PARSE', 'PIPE', - 'POWER', 'PRELOAD', 'PROCESS', 'PROJECT', 'PULL', 'RANDOM', 'RANGE', 'RANK', 'RANKED', - 'REALFORMAT', 'RECORDOF', 'REGEXFIND', 'REGEXREPLACE', 'REGROUP', 'REJECTED', - 'ROLLUP', 'ROUND', 'ROUNDUP', 'ROW', 'ROWDIFF', 'SAMPLE', 'SET', 'SIN', 'SINH', 'SIZEOF', - 'SOAPCALL', 'SORT', 'SORTED', 'SQRT', 'STEPPED', 'STORED', 'SUM', 'TABLE', 'TAN', 'TANH', - 'THISNODE', 'TOPN', 'TOUNICODE', 'TRANSFER', 'TRIM', 'TRUNCATE', 'TYPEOF', 'UNGROUP', - 'UNICODEORDER', 'VARIANCE', 'WHICH', 'WORKUNIT', 'XMLDECODE', 'XMLENCODE', - 'XMLTEXT', 'XMLUNICODE'), suffix=r'\b'), - Name.Function), - ], - 'string': [ - (r'"', String, '#pop'), - (r'\'', String, '#pop'), - (r'[^"\']+', String), - ], - } - - -class HybrisLexer(RegexLexer): - """ - For `Hybris `_ source code. - - .. versionadded:: 1.4 - """ - - name = 'Hybris' - aliases = ['hybris', 'hy'] - filenames = ['*.hy', '*.hyb'] - mimetypes = ['text/x-hybris', 'application/x-hybris'] - - flags = re.MULTILINE | re.DOTALL - - tokens = { - 'root': [ - # method names - (r'^(\s*(?:function|method|operator\s+)+?)' - r'([a-zA-Z_]\w*)' - r'(\s*)(\()', bygroups(Keyword, Name.Function, Text, Operator)), - (r'[^\S\n]+', Text), - (r'//.*?\n', Comment.Single), - (r'/\*.*?\*/', Comment.Multiline), - (r'@[a-zA-Z_][\w\.]*', Name.Decorator), - (r'(break|case|catch|next|default|do|else|finally|for|foreach|of|' - r'unless|if|new|return|switch|me|throw|try|while)\b', Keyword), - (r'(extends|private|protected|public|static|throws|function|method|' - r'operator)\b', Keyword.Declaration), - (r'(true|false|null|__FILE__|__LINE__|__VERSION__|__LIB_PATH__|' - r'__INC_PATH__)\b', Keyword.Constant), - (r'(class|struct)(\s+)', - bygroups(Keyword.Declaration, Text), 'class'), - (r'(import|include)(\s+)', - bygroups(Keyword.Namespace, Text), 'import'), - (words(( - 'gc_collect', 'gc_mm_items', 'gc_mm_usage', 'gc_collect_threshold', - 'urlencode', 'urldecode', 'base64encode', 'base64decode', 'sha1', 'crc32', 'sha2', - 'md5', 'md5_file', 'acos', 'asin', 'atan', 'atan2', 'ceil', 'cos', 'cosh', 'exp', 'fabs', 'floor', - 'fmod', 'log', 'log10', 'pow', 'sin', 'sinh', 'sqrt', 'tan', 'tanh', 'isint', 'isfloat', 'ischar', - 'isstring', 'isarray', 'ismap', 'isalias', 'typeof', 'sizeof', 'toint', 'tostring', - 'fromxml', 'toxml', 'binary', 'pack', 'load', 'eval', 'var_names', 'var_values', - 'user_functions', 'dyn_functions', 'methods', 'call', 'call_method', 'mknod', - 'mkfifo', 'mount', 'umount2', 'umount', 'ticks', 'usleep', 'sleep', 'time', 'strtime', - 'strdate', 'dllopen', 'dlllink', 'dllcall', 'dllcall_argv', 'dllclose', 'env', 'exec', - 'fork', 'getpid', 'wait', 'popen', 'pclose', 'exit', 'kill', 'pthread_create', - 'pthread_create_argv', 'pthread_exit', 'pthread_join', 'pthread_kill', - 'smtp_send', 'http_get', 'http_post', 'http_download', 'socket', 'bind', 'listen', - 'accept', 'getsockname', 'getpeername', 'settimeout', 'connect', 'server', 'recv', - 'send', 'close', 'print', 'println', 'printf', 'input', 'readline', 'serial_open', - 'serial_fcntl', 'serial_get_attr', 'serial_get_ispeed', 'serial_get_ospeed', - 'serial_set_attr', 'serial_set_ispeed', 'serial_set_ospeed', 'serial_write', - 'serial_read', 'serial_close', 'xml_load', 'xml_parse', 'fopen', 'fseek', 'ftell', - 'fsize', 'fread', 'fwrite', 'fgets', 'fclose', 'file', 'readdir', 'pcre_replace', 'size', - 'pop', 'unmap', 'has', 'keys', 'values', 'length', 'find', 'substr', 'replace', 'split', 'trim', - 'remove', 'contains', 'join'), suffix=r'\b'), - 
Name.Builtin), - (words(( - 'MethodReference', 'Runner', 'Dll', 'Thread', 'Pipe', 'Process', - 'Runnable', 'CGI', 'ClientSocket', 'Socket', 'ServerSocket', - 'File', 'Console', 'Directory', 'Exception'), suffix=r'\b'), - Keyword.Type), - (r'"(\\\\|\\"|[^"])*"', String), - (r"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char), - (r'(\.)([a-zA-Z_]\w*)', - bygroups(Operator, Name.Attribute)), - (r'[a-zA-Z_]\w*:', Name.Label), - (r'[a-zA-Z_\$]\w*', Name), - (r'[~\^\*!%&\[\]\(\)\{\}<>\|+=:;,./?\-@]+', Operator), - (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float), - (r'0x[0-9a-f]+', Number.Hex), - (r'[0-9]+L?', Number.Integer), - (r'\n', Text), - ], - 'class': [ - (r'[a-zA-Z_]\w*', Name.Class, '#pop') - ], - 'import': [ - (r'[\w.]+\*?', Name.Namespace, '#pop') - ], - } - - -class AwkLexer(RegexLexer): - """ - For Awk scripts. - - .. versionadded:: 1.5 - """ - - name = 'Awk' - aliases = ['awk', 'gawk', 'mawk', 'nawk'] - filenames = ['*.awk'] - mimetypes = ['application/x-awk'] - - tokens = { - 'commentsandwhitespace': [ - (r'\s+', Text), - (r'#.*$', Comment.Single) - ], - 'slashstartsregex': [ - include('commentsandwhitespace'), - (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/' - r'\B', String.Regex, '#pop'), - (r'(?=/)', Text, ('#pop', 'badregex')), - default('#pop') - ], - 'badregex': [ - (r'\n', Text, '#pop') - ], - 'root': [ - (r'^(?=\s|/)', Text, 'slashstartsregex'), - include('commentsandwhitespace'), - (r'\+\+|--|\|\||&&|in\b|\$|!?~|' - r'(\*\*|[-<>+*%\^/!=])=?', Operator, 'slashstartsregex'), - (r'[{(\[;,]', Punctuation, 'slashstartsregex'), - (r'[})\].]', Punctuation), - (r'(break|continue|do|while|exit|for|if|else|' - r'return)\b', Keyword, 'slashstartsregex'), - (r'function\b', Keyword.Declaration, 'slashstartsregex'), - (r'(atan2|cos|exp|int|log|rand|sin|sqrt|srand|gensub|gsub|index|' - r'length|match|split|sprintf|sub|substr|tolower|toupper|close|' - r'fflush|getline|next|nextfile|print|printf|strftime|systime|' - r'delete|system)\b', Keyword.Reserved), - (r'(ARGC|ARGIND|ARGV|CONVFMT|ENVIRON|ERRNO|FIELDWIDTHS|FILENAME|FNR|FS|' - r'IGNORECASE|NF|NR|OFMT|OFS|ORFS|RLENGTH|RS|RSTART|RT|' - r'SUBSEP)\b', Name.Builtin), - (r'[$a-zA-Z_]\w*', Name.Other), - (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float), - (r'0x[0-9a-fA-F]+', Number.Hex), - (r'[0-9]+', Number.Integer), - (r'"(\\\\|\\"|[^"])*"', String.Double), - (r"'(\\\\|\\'|[^'])*'", String.Single), - ] - } - - -class Cfengine3Lexer(RegexLexer): - """ - Lexer for `CFEngine3 `_ policy files. - - .. 
versionadded:: 1.5 - """ - - name = 'CFEngine3' - aliases = ['cfengine3', 'cf3'] - filenames = ['*.cf'] - mimetypes = [] - - tokens = { - 'root': [ - (r'#.*?\n', Comment), - (r'(body)(\s+)(\S+)(\s+)(control)', - bygroups(Keyword, Text, Keyword, Text, Keyword)), - (r'(body|bundle)(\s+)(\S+)(\s+)(\w+)(\()', - bygroups(Keyword, Text, Keyword, Text, Name.Function, Punctuation), - 'arglist'), - (r'(body|bundle)(\s+)(\S+)(\s+)(\w+)', - bygroups(Keyword, Text, Keyword, Text, Name.Function)), - (r'(")([^"]+)(")(\s+)(string|slist|int|real)(\s*)(=>)(\s*)', - bygroups(Punctuation,Name.Variable,Punctuation, - Text,Keyword.Type,Text,Operator,Text)), - (r'(\S+)(\s*)(=>)(\s*)', - bygroups(Keyword.Reserved,Text,Operator,Text)), - (r'"', String, 'string'), - (r'(\w+)(\()', bygroups(Name.Function, Punctuation)), - (r'([\w.!&|\(\)]+)(::)', bygroups(Name.Class, Punctuation)), - (r'(\w+)(:)', bygroups(Keyword.Declaration,Punctuation)), - (r'@[\{\(][^\)\}]+[\}\)]', Name.Variable), - (r'[(){},;]', Punctuation), - (r'=>', Operator), - (r'->', Operator), - (r'\d+\.\d+', Number.Float), - (r'\d+', Number.Integer), - (r'\w+', Name.Function), - (r'\s+', Text), - ], - 'string': [ - (r'\$[\{\(]', String.Interpol, 'interpol'), - (r'\\.', String.Escape), - (r'"', String, '#pop'), - (r'\n', String), - (r'.', String), - ], - 'interpol': [ - (r'\$[\{\(]', String.Interpol, '#push'), - (r'[\}\)]', String.Interpol, '#pop'), - (r'[^\$\{\(\)\}]+', String.Interpol), - ], - 'arglist': [ - (r'\)', Punctuation, '#pop'), - (r',', Punctuation), - (r'\w+', Name.Variable), - (r'\s+', Text), - ], - } - - -class UrbiscriptLexer(ExtendedRegexLexer): - """ - For UrbiScript source code. - - .. versionadded:: 1.5 - """ - - name = 'UrbiScript' - aliases = ['urbiscript'] - filenames = ['*.u'] - mimetypes = ['application/x-urbiscript'] - - flags = re.DOTALL - - ## TODO - # - handle Experimental and deprecated tags with specific tokens - # - handle Angles and Durations with specific tokens - - def blob_callback(lexer, match, ctx): - text_before_blob = match.group(1) - blob_start = match.group(2) - blob_size_str = match.group(3) - blob_size = int(blob_size_str) - yield match.start(), String, text_before_blob - ctx.pos += len(text_before_blob) - - # if blob size doesn't match blob format (example : "\B(2)(aaa)") - # yield blob as a string - if ctx.text[match.end() + blob_size] != ")": - result = "\\B(" + blob_size_str + ")(" - yield match.start(), String, result - ctx.pos += len(result) - return - - # if blob is well formated, yield as Escape - blob_text = blob_start + ctx.text[match.end():match.end()+blob_size] + ")" - yield match.start(), String.Escape, blob_text - ctx.pos = match.end() + blob_size + 1 # +1 is the ending ")" - - tokens = { - 'root': [ - (r'\s+', Text), - # comments - (r'//.*?\n', Comment), - (r'/\*', Comment.Multiline, 'comment'), - (r'(?:every|for|loop|while)(?:;|&|\||,)',Keyword), - (r'(?:assert|at|break|case|catch|closure|compl|continue|' - r'default|else|enum|every|external|finally|for|freezeif|if|new|' - r'onleave|return|stopif|switch|this|throw|timeout|try|' - r'waituntil|whenever|while)\b', Keyword), - (r'(?:asm|auto|bool|char|const_cast|delete|double|dynamic_cast|' - r'explicit|export|extern|float|friend|goto|inline|int|' - r'long|mutable|namespace|register|reinterpret_cast|short|' - r'signed|sizeof|static_cast|struct|template|typedef|typeid|' - r'typename|union|unsigned|using|virtual|volatile|' - r'wchar_t)\b', Keyword.Reserved), - # deprecated keywords, use a meaningfull token when available - 
(r'(?:emit|foreach|internal|loopn|static)\b', Keyword), - # ignored keywords, use a meaningfull token when available - (r'(?:private|protected|public)\b', Keyword), - (r'(?:var|do|const|function|class)\b', Keyword.Declaration), - (r'(?:true|false|nil|void)\b', Keyword.Constant), - (r'(?:Barrier|Binary|Boolean|CallMessage|Channel|Code|' - r'Comparable|Container|Control|Date|Dictionary|Directory|' - r'Duration|Enumeration|Event|Exception|Executable|File|Finalizable|' - r'Float|FormatInfo|Formatter|Global|Group|Hash|InputStream|' - r'IoService|Job|Kernel|Lazy|List|Loadable|Lobby|Location|Logger|Math|' - r'Mutex|nil|Object|Orderable|OutputStream|Pair|Path|Pattern|Position|' - r'Primitive|Process|Profile|PseudoLazy|PubSub|RangeIterable|Regexp|' - r'Semaphore|Server|Singleton|Socket|StackFrame|Stream|String|System|' - r'Tag|Timeout|Traceable|TrajectoryGenerator|Triplet|Tuple' - r'|UObject|UValue|UVar)\b', Name.Builtin), - (r'(?:this)\b', Name.Builtin.Pseudo), - # don't match single | and & - (r'(?:[-=+*%/<>~^:]+|\.&?|\|\||&&)', Operator), - (r'(?:and_eq|and|bitand|bitor|in|not|not_eq|or_eq|or|xor_eq|xor)\b', - Operator.Word), - (r'[{}\[\]()]+', Punctuation), - (r'(?:;|\||,|&|\?|!)+', Punctuation), - (r'[$a-zA-Z_]\w*', Name.Other), - (r'0x[0-9a-fA-F]+', Number.Hex), - # Float, Integer, Angle and Duration - (r'(?:[0-9]+(?:(?:\.[0-9]+)?(?:[eE][+-]?[0-9]+)?)?' - r'((?:rad|deg|grad)|(?:ms|s|min|h|d))?)\b', Number.Float), - # handle binary blob in strings - (r'"', String.Double, "string.double"), - (r"'", String.Single, "string.single"), - ], - 'string.double': [ - (r'((?:\\\\|\\"|[^"])*?)(\\B\((\d+)\)\()', blob_callback), - (r'(\\\\|\\"|[^"])*?"', String.Double, '#pop'), - ], - 'string.single': [ - (r"((?:\\\\|\\'|[^'])*?)(\\B\((\d+)\)\()", blob_callback), - (r"(\\\\|\\'|[^'])*?'", String.Single, '#pop'), - ], - # from http://pygments.org/docs/lexerdevelopment/#changing-states - 'comment': [ - (r'[^*/]', Comment.Multiline), - (r'/\*', Comment.Multiline, '#push'), - (r'\*/', Comment.Multiline, '#pop'), - (r'[*/]', Comment.Multiline), - ] - } - - -class AmbientTalkLexer(RegexLexer): - """ - Lexer for `AmbientTalk `_ source code. - - .. 
versionadded:: 2.0 - """ - name = 'AmbientTalk' - filenames = ['*.at'] - aliases = ['at', 'ambienttalk', 'ambienttalk/2'] - mimetypes = ['text/x-ambienttalk'] - - flags = re.MULTILINE | re.DOTALL - - builtin = words(('if:', 'then:', 'else:', 'when:', 'whenever:', 'discovered:', - 'disconnected:', 'reconnected:', 'takenOffline:', 'becomes:', - 'export:', 'as:', 'object:', 'actor:', 'mirror:', 'taggedAs:', - 'mirroredBy:', 'is:')) - tokens = { - 'root' : [ - (r'\s+', Text), - (r'//.*?\n', Comment.Single), - (r'/\*.*?\*/', Comment.Multiline), - (r'(def|deftype|import|alias|exclude)\b', Keyword), - (builtin, Name.Builtin), - (r'(true|false|nil)\b', Keyword.Constant), - (r'(~|lobby|jlobby|/)\.', Keyword.Constant, 'namespace'), - (r'"(\\\\|\\"|[^"])*"', String), - (r'\|', Punctuation, 'arglist'), - (r'<:|[\^\*!%&<>+=,./?-]|:=', Operator), - (r"`[a-zA-Z_]\w*", String.Symbol), - (r"[a-zA-Z_]\w*:", Name.Function), - (r"[\{\}()\[\];`]", Punctuation), - (r'(self|super)\b', Name.Variable.Instance), - (r"[a-zA-Z_]\w*", Name.Variable), - (r"@[a-zA-Z_]\w*", Name.Class), - (r"@\[", Name.Class, 'annotations'), - include('numbers'), - ], - 'numbers' : [ - (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float), - (r'\d+', Number.Integer) - ], - 'namespace': [ - (r'[a-zA-Z_]\w*\.', Name.Namespace), - (r'[a-zA-Z_]\w*:', Name.Function , '#pop'), - (r'[a-zA-Z_]\w*(?!\.)', Name.Function , '#pop') - ], - 'annotations' : [ - (r"(.*?)\]", Name.Class, '#pop') - ], - 'arglist' : [ - (r'\|', Punctuation, '#pop'), - (r'\s*(,)\s*', Punctuation), - (r'[a-zA-Z_]\w*', Name.Variable), - ], - } - - -class PanLexer(RegexLexer): - """ - Lexer for `pan `_ source files. - - Based on tcsh lexer. - - .. versionadded:: 2.0 - """ - - name = 'Pan' - aliases = ['pan'] - filenames = ['*.pan'] - - tokens = { - 'root': [ - include('basic'), - (r'\(', Keyword, 'paren'), - (r'{', Keyword, 'curly'), - include('data'), - ], - 'basic': [ - (r'\b(if|for|with|else|type|bind|while|valid|final|prefix|unique|' - r'object|foreach|include|template|function|variable|structure|' - r'extensible|declaration)\s*\b', - Keyword), - (r'\b(file_contents|format|index|length|match|matches|replace|' - r'splice|split|substr|to_lowercase|to_uppercase|debug|error|' - r'traceback|deprecated|base64_decode|base64_encode|digest|escape|' - r'unescape|append|create|first|nlist|key|length|list|merge|next|' - r'prepend|splice|is_boolean|is_defined|is_double|is_list|is_long|' - r'is_nlist|is_null|is_number|is_property|is_resource|is_string|' - r'to_boolean|to_double|to_long|to_string|clone|delete|exists|' - r'path_exists|if_exists|return|value)\s*\b', - Name.Builtin), - (r'#.*', Comment), - (r'\\[\w\W]', String.Escape), - (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)), - (r'[\[\]{}()=]+', Operator), - (r'<<\s*(\'?)\\?(\w+)[\w\W]+?\2', String), - (r';', Punctuation), - ], - 'data': [ - (r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double), - (r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single), - (r'\s+', Text), - (r'[^=\s\[\]{}()$"\'`\\;#]+', Text), - (r'\d+(?= |\Z)', Number), - ], - 'curly': [ - (r'}', Keyword, '#pop'), - (r':-', Keyword), - (r'\w+', Name.Variable), - (r'[^}:"\'`$]+', Punctuation), - (r':', Punctuation), - include('root'), - ], - 'paren': [ - (r'\)', Keyword, '#pop'), - include('root'), - ], - } +__all__ = [] diff --git a/pygments/lexers/scripting.py b/pygments/lexers/scripting.py index f6a32bb3..a82f1343 100644 --- a/pygments/lexers/scripting.py +++ b/pygments/lexers/scripting.py @@ -18,7 +18,7 @@ from pygments.token import Text, 
Comment, Operator, Keyword, Name, String, \ from pygments.util import get_bool_opt, get_list_opt, iteritems __all__ = ['LuaLexer', 'MoonScriptLexer', 'ChaiscriptLexer', 'LSLLexer', - 'AppleScriptLexer', 'RexxLexer', 'MOOCodeLexer'] + 'AppleScriptLexer', 'RexxLexer', 'MOOCodeLexer', 'HybrisLexer'] class LuaLexer(RegexLexer): @@ -838,3 +838,85 @@ class MOOCodeLexer(RegexLexer): (r'([a-zA-Z_0-9]+)', Text), ] } + + +class HybrisLexer(RegexLexer): + """ + For `Hybris `_ source code. + + .. versionadded:: 1.4 + """ + + name = 'Hybris' + aliases = ['hybris', 'hy'] + filenames = ['*.hy', '*.hyb'] + mimetypes = ['text/x-hybris', 'application/x-hybris'] + + flags = re.MULTILINE | re.DOTALL + + tokens = { + 'root': [ + # method names + (r'^(\s*(?:function|method|operator\s+)+?)' + r'([a-zA-Z_]\w*)' + r'(\s*)(\()', bygroups(Keyword, Name.Function, Text, Operator)), + (r'[^\S\n]+', Text), + (r'//.*?\n', Comment.Single), + (r'/\*.*?\*/', Comment.Multiline), + (r'@[a-zA-Z_][\w\.]*', Name.Decorator), + (r'(break|case|catch|next|default|do|else|finally|for|foreach|of|' + r'unless|if|new|return|switch|me|throw|try|while)\b', Keyword), + (r'(extends|private|protected|public|static|throws|function|method|' + r'operator)\b', Keyword.Declaration), + (r'(true|false|null|__FILE__|__LINE__|__VERSION__|__LIB_PATH__|' + r'__INC_PATH__)\b', Keyword.Constant), + (r'(class|struct)(\s+)', + bygroups(Keyword.Declaration, Text), 'class'), + (r'(import|include)(\s+)', + bygroups(Keyword.Namespace, Text), 'import'), + (words(( + 'gc_collect', 'gc_mm_items', 'gc_mm_usage', 'gc_collect_threshold', + 'urlencode', 'urldecode', 'base64encode', 'base64decode', 'sha1', 'crc32', 'sha2', + 'md5', 'md5_file', 'acos', 'asin', 'atan', 'atan2', 'ceil', 'cos', 'cosh', 'exp', 'fabs', 'floor', + 'fmod', 'log', 'log10', 'pow', 'sin', 'sinh', 'sqrt', 'tan', 'tanh', 'isint', 'isfloat', 'ischar', + 'isstring', 'isarray', 'ismap', 'isalias', 'typeof', 'sizeof', 'toint', 'tostring', + 'fromxml', 'toxml', 'binary', 'pack', 'load', 'eval', 'var_names', 'var_values', + 'user_functions', 'dyn_functions', 'methods', 'call', 'call_method', 'mknod', + 'mkfifo', 'mount', 'umount2', 'umount', 'ticks', 'usleep', 'sleep', 'time', 'strtime', + 'strdate', 'dllopen', 'dlllink', 'dllcall', 'dllcall_argv', 'dllclose', 'env', 'exec', + 'fork', 'getpid', 'wait', 'popen', 'pclose', 'exit', 'kill', 'pthread_create', + 'pthread_create_argv', 'pthread_exit', 'pthread_join', 'pthread_kill', + 'smtp_send', 'http_get', 'http_post', 'http_download', 'socket', 'bind', 'listen', + 'accept', 'getsockname', 'getpeername', 'settimeout', 'connect', 'server', 'recv', + 'send', 'close', 'print', 'println', 'printf', 'input', 'readline', 'serial_open', + 'serial_fcntl', 'serial_get_attr', 'serial_get_ispeed', 'serial_get_ospeed', + 'serial_set_attr', 'serial_set_ispeed', 'serial_set_ospeed', 'serial_write', + 'serial_read', 'serial_close', 'xml_load', 'xml_parse', 'fopen', 'fseek', 'ftell', + 'fsize', 'fread', 'fwrite', 'fgets', 'fclose', 'file', 'readdir', 'pcre_replace', 'size', + 'pop', 'unmap', 'has', 'keys', 'values', 'length', 'find', 'substr', 'replace', 'split', 'trim', + 'remove', 'contains', 'join'), suffix=r'\b'), + Name.Builtin), + (words(( + 'MethodReference', 'Runner', 'Dll', 'Thread', 'Pipe', 'Process', + 'Runnable', 'CGI', 'ClientSocket', 'Socket', 'ServerSocket', + 'File', 'Console', 'Directory', 'Exception'), suffix=r'\b'), + Keyword.Type), + (r'"(\\\\|\\"|[^"])*"', String), + (r"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char), + (r'(\.)([a-zA-Z_]\w*)', + 
bygroups(Operator, Name.Attribute)), + (r'[a-zA-Z_]\w*:', Name.Label), + (r'[a-zA-Z_\$]\w*', Name), + (r'[~\^\*!%&\[\]\(\)\{\}<>\|+=:;,./?\-@]+', Operator), + (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float), + (r'0x[0-9a-f]+', Number.Hex), + (r'[0-9]+L?', Number.Integer), + (r'\n', Text), + ], + 'class': [ + (r'[a-zA-Z_]\w*', Name.Class, '#pop') + ], + 'import': [ + (r'[\w.]+\*?', Name.Namespace, '#pop') + ], + } diff --git a/pygments/lexers/textedit.py b/pygments/lexers/textedit.py new file mode 100644 index 00000000..66255fae --- /dev/null +++ b/pygments/lexers/textedit.py @@ -0,0 +1,70 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.textedit + ~~~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for languages related to text processing. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +from pygments.lexer import RegexLexer, include, default +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation + +__all__ = ['AwkLexer'] + + +class AwkLexer(RegexLexer): + """ + For Awk scripts. + + .. versionadded:: 1.5 + """ + + name = 'Awk' + aliases = ['awk', 'gawk', 'mawk', 'nawk'] + filenames = ['*.awk'] + mimetypes = ['application/x-awk'] + + tokens = { + 'commentsandwhitespace': [ + (r'\s+', Text), + (r'#.*$', Comment.Single) + ], + 'slashstartsregex': [ + include('commentsandwhitespace'), + (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/' + r'\B', String.Regex, '#pop'), + (r'(?=/)', Text, ('#pop', 'badregex')), + default('#pop') + ], + 'badregex': [ + (r'\n', Text, '#pop') + ], + 'root': [ + (r'^(?=\s|/)', Text, 'slashstartsregex'), + include('commentsandwhitespace'), + (r'\+\+|--|\|\||&&|in\b|\$|!?~|' + r'(\*\*|[-<>+*%\^/!=])=?', Operator, 'slashstartsregex'), + (r'[{(\[;,]', Punctuation, 'slashstartsregex'), + (r'[})\].]', Punctuation), + (r'(break|continue|do|while|exit|for|if|else|' + r'return)\b', Keyword, 'slashstartsregex'), + (r'function\b', Keyword.Declaration, 'slashstartsregex'), + (r'(atan2|cos|exp|int|log|rand|sin|sqrt|srand|gensub|gsub|index|' + r'length|match|split|sprintf|sub|substr|tolower|toupper|close|' + r'fflush|getline|next|nextfile|print|printf|strftime|systime|' + r'delete|system)\b', Keyword.Reserved), + (r'(ARGC|ARGIND|ARGV|CONVFMT|ENVIRON|ERRNO|FIELDWIDTHS|FILENAME|FNR|FS|' + r'IGNORECASE|NF|NR|OFMT|OFS|ORFS|RLENGTH|RS|RSTART|RT|' + r'SUBSEP)\b', Name.Builtin), + (r'[$a-zA-Z_]\w*', Name.Other), + (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float), + (r'0x[0-9a-fA-F]+', Number.Hex), + (r'[0-9]+', Number.Integer), + (r'"(\\\\|\\"|[^"])*"', String.Double), + (r"'(\\\\|\\'|[^'])*'", String.Single), + ] + } -- cgit v1.2.1 From 03a434d757af1e952d8281da3d1f08139b1781ae Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Fri, 19 Sep 2014 21:28:13 +0200 Subject: Fix import. 
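The one-line fix below points the test suite at RexxLexer's new home, pygments.lexers.scripting. Ordinary callers should not need to track these module moves: lexers are still resolved by name through the LEXERS table in pygments/lexers/_mapping.py. A minimal usage sketch, assuming only the standard public Pygments API (get_lexer_by_name, highlight, TerminalFormatter) rather than anything added by this patch series:

    # Hypothetical example, not part of these patches: look a lexer up by
    # alias instead of importing its (relocated) defining module directly.
    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import get_lexer_by_name

    lexer = get_lexer_by_name('awk')   # resolved via the LEXERS mapping
    code = 'BEGIN { print "hello, world" }'
    print(highlight(code, lexer, TerminalFormatter()))
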
--- tests/test_lexers_other.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_lexers_other.py b/tests/test_lexers_other.py index 91b0dc70..5e1ee098 100644 --- a/tests/test_lexers_other.py +++ b/tests/test_lexers_other.py @@ -12,7 +12,7 @@ import os import unittest from pygments.lexers import guess_lexer -from pygments.lexers.other import RexxLexer +from pygments.lexers.scripting import RexxLexer def _exampleFilePath(filename): -- cgit v1.2.1 From 6688456dd0d2e4e877eaf17fa604df3a9aca689a Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Fri, 19 Sep 2014 22:04:29 +0200 Subject: split up functional lexers --- pygments/lexers/_mapping.py | 46 +- pygments/lexers/functional.py | 3679 +--------------------------------------- pygments/lexers/haskell.py | 837 +++++++++ pygments/lexers/lisp.py | 1355 ++++++++++++++- pygments/lexers/misc/basic.py | 145 +- pygments/lexers/misc/erlang.py | 508 ++++++ pygments/lexers/misc/ml.py | 768 +++++++++ pygments/lexers/misc/nix.py | 140 ++ pygments/lexers/qbasic.py | 157 -- pygments/lexers/theorem.py | 156 ++ 10 files changed, 3937 insertions(+), 3854 deletions(-) create mode 100644 pygments/lexers/haskell.py create mode 100644 pygments/lexers/misc/erlang.py create mode 100644 pygments/lexers/misc/ml.py create mode 100644 pygments/lexers/misc/nix.py delete mode 100644 pygments/lexers/qbasic.py create mode 100644 pygments/lexers/theorem.py diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index 0979b68a..928f8a3f 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -21,7 +21,7 @@ LEXERS = { 'ActionScript3Lexer': ('pygments.lexers.web', 'ActionScript 3', ('as3', 'actionscript3'), ('*.as',), ('application/x-actionscript3', 'text/x-actionscript3', 'text/actionscript3')), 'ActionScriptLexer': ('pygments.lexers.web', 'ActionScript', ('as', 'actionscript'), ('*.as',), ('application/x-actionscript', 'text/x-actionscript', 'text/actionscript')), 'AdaLexer': ('pygments.lexers.pascal', 'Ada', ('ada', 'ada95', 'ada2005'), ('*.adb', '*.ads', '*.ada'), ('text/x-ada',)), - 'AgdaLexer': ('pygments.lexers.functional', 'Agda', ('agda',), ('*.agda',), ('text/x-agda',)), + 'AgdaLexer': ('pygments.lexers.haskell', 'Agda', ('agda',), ('*.agda',), ('text/x-agda',)), 'AlloyLexer': ('pygments.lexers.dsls', 'Alloy', ('alloy',), ('*.als',), ('text/x-alloy',)), 'AmbientTalkLexer': ('pygments.lexers.misc.ambient', 'AmbientTalk', ('at', 'ambienttalk', 'ambienttalk/2'), ('*.at',), ('text/x-ambienttalk',)), 'AntlrActionScriptLexer': ('pygments.lexers.parsers', 'ANTLR With ActionScript Target', ('antlr-as', 'antlr-actionscript'), ('*.G', '*.g'), ()), @@ -77,12 +77,12 @@ LEXERS = { 'ColdfusionCFCLexer': ('pygments.lexers.templates', 'Coldfusion CFC', ('cfc',), ('*.cfc',), ()), 'ColdfusionHtmlLexer': ('pygments.lexers.templates', 'Coldfusion HTML', ('cfm',), ('*.cfm', '*.cfml'), ('application/x-coldfusion',)), 'ColdfusionLexer': ('pygments.lexers.templates', 'cfstatement', ('cfs',), (), ()), - 'CommonLispLexer': ('pygments.lexers.functional', 'Common Lisp', ('common-lisp', 'cl', 'lisp', 'elisp', 'emacs', 'emacs-lisp'), ('*.cl', '*.lisp', '*.el'), ('text/x-common-lisp',)), - 'CoqLexer': ('pygments.lexers.functional', 'Coq', ('coq',), ('*.v',), ('text/x-coq',)), + 'CommonLispLexer': ('pygments.lexers.lisp', 'Common Lisp', ('common-lisp', 'cl', 'lisp', 'elisp', 'emacs', 'emacs-lisp'), ('*.cl', '*.lisp', '*.el'), ('text/x-common-lisp',)), + 'CoqLexer': ('pygments.lexers.theorem', 'Coq', ('coq',), ('*.v',), ('text/x-coq',)), 
'CppLexer': ('pygments.lexers.c_like.c_cpp', 'C++', ('cpp', 'c++'), ('*.cpp', '*.hpp', '*.c++', '*.h++', '*.cc', '*.hh', '*.cxx', '*.hxx', '*.C', '*.H', '*.cp', '*.CPP'), ('text/x-c++hdr', 'text/x-c++src')), 'CppObjdumpLexer': ('pygments.lexers.asm', 'cpp-objdump', ('cpp-objdump', 'c++-objdumb', 'cxx-objdump'), ('*.cpp-objdump', '*.c++-objdump', '*.cxx-objdump'), ('text/x-cpp-objdump',)), 'CrocLexer': ('pygments.lexers.c_like.d', 'Croc', ('croc',), ('*.croc',), ('text/x-crocsrc',)), - 'CryptolLexer': ('pygments.lexers.functional', 'Cryptol', ('cryptol', 'cry'), ('*.cry',), ('text/x-cryptol',)), + 'CryptolLexer': ('pygments.lexers.haskell', 'Cryptol', ('cryptol', 'cry'), ('*.cry',), ('text/x-cryptol',)), 'CssDjangoLexer': ('pygments.lexers.templates', 'CSS+Django/Jinja', ('css+django', 'css+jinja'), (), ('text/css+django', 'text/css+jinja')), 'CssErbLexer': ('pygments.lexers.templates', 'CSS+Ruby', ('css+erb', 'css+ruby'), (), ('text/css+ruby',)), 'CssGenshiLexer': ('pygments.lexers.templates', 'CSS+Genshi Text', ('css+genshitext', 'css+genshi'), (), ('text/css+genshi',)), @@ -111,11 +111,11 @@ LEXERS = { 'ECLexer': ('pygments.lexers.c_like.other', 'eC', ('ec',), ('*.ec', '*.eh'), ('text/x-echdr', 'text/x-ecsrc')), 'EbnfLexer': ('pygments.lexers.text', 'EBNF', ('ebnf',), ('*.ebnf',), ('text/x-ebnf',)), 'EiffelLexer': ('pygments.lexers.eiffel', 'Eiffel', ('eiffel',), ('*.e',), ('text/x-eiffel',)), - 'ElixirConsoleLexer': ('pygments.lexers.functional', 'Elixir iex session', ('iex',), (), ('text/x-elixir-shellsession',)), - 'ElixirLexer': ('pygments.lexers.functional', 'Elixir', ('elixir', 'ex', 'exs'), ('*.ex', '*.exs'), ('text/x-elixir',)), + 'ElixirConsoleLexer': ('pygments.lexers.misc.erlang', 'Elixir iex session', ('iex',), (), ('text/x-elixir-shellsession',)), + 'ElixirLexer': ('pygments.lexers.misc.erlang', 'Elixir', ('elixir', 'ex', 'exs'), ('*.ex', '*.exs'), ('text/x-elixir',)), 'ErbLexer': ('pygments.lexers.templates', 'ERB', ('erb',), (), ('application/x-ruby-templating',)), - 'ErlangLexer': ('pygments.lexers.functional', 'Erlang', ('erlang',), ('*.erl', '*.hrl', '*.es', '*.escript'), ('text/x-erlang',)), - 'ErlangShellLexer': ('pygments.lexers.functional', 'Erlang erl session', ('erl',), ('*.erl-sh',), ('text/x-erl-shellsession',)), + 'ErlangLexer': ('pygments.lexers.misc.erlang', 'Erlang', ('erlang',), ('*.erl', '*.hrl', '*.es', '*.escript'), ('text/x-erlang',)), + 'ErlangShellLexer': ('pygments.lexers.misc.erlang', 'Erlang erl session', ('erl',), ('*.erl-sh',), ('text/x-erl-shellsession',)), 'EvoqueHtmlLexer': ('pygments.lexers.templates', 'HTML+Evoque', ('html+evoque',), ('*.html',), ('text/html+evoque',)), 'EvoqueLexer': ('pygments.lexers.templates', 'Evoque', ('evoque',), ('*.evoque',), ('application/x-evoque',)), 'EvoqueXmlLexer': ('pygments.lexers.templates', 'XML+Evoque', ('xml+evoque',), ('*.xml',), ('application/xml+evoque',)), @@ -144,7 +144,7 @@ LEXERS = { 'HamlLexer': ('pygments.lexers.web', 'Haml', ('haml',), ('*.haml',), ('text/x-haml',)), 'HandlebarsHtmlLexer': ('pygments.lexers.templates', 'HTML+Handlebars', ('html+handlebars',), ('*.handlebars', '*.hbs'), ('text/html+handlebars', 'text/x-handlebars-template')), 'HandlebarsLexer': ('pygments.lexers.templates', 'Handlebars', ('handlebars',), (), ()), - 'HaskellLexer': ('pygments.lexers.functional', 'Haskell', ('haskell', 'hs'), ('*.hs',), ('text/x-haskell',)), + 'HaskellLexer': ('pygments.lexers.haskell', 'Haskell', ('haskell', 'hs'), ('*.hs',), ('text/x-haskell',)), 'HaxeLexer': ('pygments.lexers.web', 'Haxe', 
('hx', 'haxe', 'hxsl'), ('*.hx', '*.hxsl'), ('text/haxe', 'text/x-haxe', 'text/x-hx')), 'HtmlDjangoLexer': ('pygments.lexers.templates', 'HTML+Django/Jinja', ('html+django', 'html+jinja', 'htmldjango'), (), ('text/html+django', 'text/html+jinja')), 'HtmlGenshiLexer': ('pygments.lexers.templates', 'HTML+Genshi', ('html+genshi', 'html+kid'), (), ('text/html+genshi',)), @@ -156,7 +156,7 @@ LEXERS = { 'HyLexer': ('pygments.lexers.lisp', 'Hy', ('hylang',), ('*.hy',), ('text/x-hy', 'application/x-hy')), 'HybrisLexer': ('pygments.lexers.scripting', 'Hybris', ('hybris', 'hy'), ('*.hy', '*.hyb'), ('text/x-hybris', 'application/x-hybris')), 'IDLLexer': ('pygments.lexers.math', 'IDL', ('idl',), ('*.pro',), ('text/idl',)), - 'IdrisLexer': ('pygments.lexers.functional', 'Idris', ('idris', 'idr'), ('*.idr',), ('text/x-idris',)), + 'IdrisLexer': ('pygments.lexers.haskell', 'Idris', ('idris', 'idr'), ('*.idr',), ('text/x-idris',)), 'IgorLexer': ('pygments.lexers.math', 'Igor', ('igor', 'igorpro'), ('*.ipf',), ('text/ipf',)), 'Inform6Lexer': ('pygments.lexers.inform', 'Inform 6', ('inform6', 'i6'), ('*.inf',), ()), 'Inform6TemplateLexer': ('pygments.lexers.inform', 'Inform 6 template', ('i6t',), ('*.i6t',), ()), @@ -181,7 +181,7 @@ LEXERS = { 'JuliaLexer': ('pygments.lexers.math', 'Julia', ('julia', 'jl'), ('*.jl',), ('text/x-julia', 'application/x-julia')), 'KalLexer': ('pygments.lexers.web', 'Kal', ('kal',), ('*.kal',), ('text/kal', 'application/kal')), 'KconfigLexer': ('pygments.lexers.configs', 'Kconfig', ('kconfig', 'menuconfig', 'linux-config', 'kernel-config'), ('Kconfig', '*Config.in*', 'external.in*', 'standard-modules.in'), ('text/x-kconfig',)), - 'KokaLexer': ('pygments.lexers.functional', 'Koka', ('koka',), ('*.kk', '*.kki'), ('text/x-koka',)), + 'KokaLexer': ('pygments.lexers.haskell', 'Koka', ('koka',), ('*.kk', '*.kki'), ('text/x-koka',)), 'KotlinLexer': ('pygments.lexers.jvm', 'Kotlin', ('kotlin',), ('*.kt',), ('text/x-kotlin',)), 'LSLLexer': ('pygments.lexers.scripting', 'LSL', ('lsl',), ('*.lsl',), ('text/x-lsl',)), 'LassoCssLexer': ('pygments.lexers.templates', 'CSS+Lasso', ('css+lasso',), (), ('text/css+lasso',)), @@ -192,10 +192,10 @@ LEXERS = { 'LighttpdConfLexer': ('pygments.lexers.text', 'Lighttpd configuration file', ('lighty', 'lighttpd'), (), ('text/x-lighttpd-conf',)), 'LimboLexer': ('pygments.lexers.inferno', 'Limbo', ('limbo',), ('*.b',), ('text/limbo',)), 'LiquidLexer': ('pygments.lexers.templates', 'liquid', ('liquid',), ('*.liquid',), ()), - 'LiterateAgdaLexer': ('pygments.lexers.functional', 'Literate Agda', ('lagda', 'literate-agda'), ('*.lagda',), ('text/x-literate-agda',)), - 'LiterateCryptolLexer': ('pygments.lexers.functional', 'Literate Cryptol', ('lcry', 'literate-cryptol', 'lcryptol'), ('*.lcry',), ('text/x-literate-cryptol',)), - 'LiterateHaskellLexer': ('pygments.lexers.functional', 'Literate Haskell', ('lhs', 'literate-haskell', 'lhaskell'), ('*.lhs',), ('text/x-literate-haskell',)), - 'LiterateIdrisLexer': ('pygments.lexers.functional', 'Literate Idris', ('lidr', 'literate-idris', 'lidris'), ('*.lidr',), ('text/x-literate-idris',)), + 'LiterateAgdaLexer': ('pygments.lexers.haskell', 'Literate Agda', ('lagda', 'literate-agda'), ('*.lagda',), ('text/x-literate-agda',)), + 'LiterateCryptolLexer': ('pygments.lexers.haskell', 'Literate Cryptol', ('lcry', 'literate-cryptol', 'lcryptol'), ('*.lcry',), ('text/x-literate-cryptol',)), + 'LiterateHaskellLexer': ('pygments.lexers.haskell', 'Literate Haskell', ('lhs', 'literate-haskell', 'lhaskell'), ('*.lhs',), 
('text/x-literate-haskell',)), + 'LiterateIdrisLexer': ('pygments.lexers.haskell', 'Literate Idris', ('lidr', 'literate-idris', 'lidris'), ('*.lidr',), ('text/x-literate-idris',)), 'LiveScriptLexer': ('pygments.lexers.web', 'LiveScript', ('live-script', 'livescript'), ('*.ls',), ('text/livescript',)), 'LlvmLexer': ('pygments.lexers.asm', 'LLVM', ('llvm',), ('*.ll',), ('text/x-llvm',)), 'LogosLexer': ('pygments.lexers.c_like.objective', 'Logos', ('logos',), ('*.x', '*.xi', '*.xm', '*.xmi'), ('text/x-logos',)), @@ -235,21 +235,21 @@ LEXERS = { 'NasmObjdumpLexer': ('pygments.lexers.asm', 'objdump-nasm', ('objdump-nasm',), ('*.objdump-intel',), ('text/x-nasm-objdump',)), 'NemerleLexer': ('pygments.lexers.dotnet', 'Nemerle', ('nemerle',), ('*.n',), ('text/x-nemerle',)), 'NesCLexer': ('pygments.lexers.c_like.other', 'nesC', ('nesc',), ('*.nc',), ('text/x-nescsrc',)), - 'NewLispLexer': ('pygments.lexers.functional', 'NewLisp', ('newlisp',), ('*.lsp', '*.nl'), ('text/x-newlisp', 'application/x-newlisp')), + 'NewLispLexer': ('pygments.lexers.lisp', 'NewLisp', ('newlisp',), ('*.lsp', '*.nl'), ('text/x-newlisp', 'application/x-newlisp')), 'NewspeakLexer': ('pygments.lexers.misc.smalltalk', 'Newspeak', ('newspeak',), ('*.ns2',), ('text/x-newspeak',)), 'NginxConfLexer': ('pygments.lexers.text', 'Nginx configuration file', ('nginx',), (), ('text/x-nginx-conf',)), 'NimrodLexer': ('pygments.lexers.misc.nimrod', 'Nimrod', ('nimrod', 'nim'), ('*.nim', '*.nimrod'), ('text/x-nimrod',)), 'NitLexer': ('pygments.lexers.misc.nit', 'Nit', ('nit',), ('*.nit',), ()), - 'NixLexer': ('pygments.lexers.functional', 'Nix', ('nixos', 'nix'), ('*.nix',), ('text/x-nix',)), + 'NixLexer': ('pygments.lexers.misc.nix', 'Nix', ('nixos', 'nix'), ('*.nix',), ('text/x-nix',)), 'NumPyLexer': ('pygments.lexers.math', 'NumPy', ('numpy',), (), ()), 'ObjdumpLexer': ('pygments.lexers.asm', 'objdump', ('objdump',), ('*.objdump',), ('text/x-objdump',)), 'ObjectiveCLexer': ('pygments.lexers.c_like.objective', 'Objective-C', ('objective-c', 'objectivec', 'obj-c', 'objc'), ('*.m', '*.h'), ('text/x-objective-c',)), 'ObjectiveCppLexer': ('pygments.lexers.c_like.objective', 'Objective-C++', ('objective-c++', 'objectivec++', 'obj-c++', 'objc++'), ('*.mm', '*.hh'), ('text/x-objective-c++',)), 'ObjectiveJLexer': ('pygments.lexers.web', 'Objective-J', ('objective-j', 'objectivej', 'obj-j', 'objj'), ('*.j',), ('text/x-objective-j',)), - 'OcamlLexer': ('pygments.lexers.functional', 'OCaml', ('ocaml',), ('*.ml', '*.mli', '*.mll', '*.mly'), ('text/x-ocaml',)), + 'OcamlLexer': ('pygments.lexers.misc.ml', 'OCaml', ('ocaml',), ('*.ml', '*.mli', '*.mll', '*.mly'), ('text/x-ocaml',)), 'OctaveLexer': ('pygments.lexers.math', 'Octave', ('octave',), ('*.m',), ('text/octave',)), 'OocLexer': ('pygments.lexers.misc.ooc', 'Ooc', ('ooc',), ('*.ooc',), ('text/x-ooc',)), - 'OpaLexer': ('pygments.lexers.functional', 'Opa', ('opa',), ('*.opa',), ('text/x-opa',)), + 'OpaLexer': ('pygments.lexers.misc.ml', 'Opa', ('opa',), ('*.opa',), ('text/x-opa',)), 'OpenEdgeLexer': ('pygments.lexers.business', 'OpenEdge ABL', ('openedge', 'abl', 'progress'), ('*.p', '*.cls'), ('text/x-openedge', 'application/x-openedge')), 'PanLexer': ('pygments.lexers.dsls', 'Pan', ('pan',), ('*.pan',), ()), 'PawnLexer': ('pygments.lexers.misc.pawn', 'Pawn', ('pawn',), ('*.p', '*.pwn', '*.inc'), ('text/x-pawn',)), @@ -274,11 +274,11 @@ LEXERS = { 'PythonConsoleLexer': ('pygments.lexers.python', 'Python console session', ('pycon',), (), ('text/x-python-doctest',)), 'PythonLexer': 
('pygments.lexers.python', 'Python', ('python', 'py', 'sage'), ('*.py', '*.pyw', '*.sc', 'SConstruct', 'SConscript', '*.tac', '*.sage'), ('text/x-python', 'application/x-python')), 'PythonTracebackLexer': ('pygments.lexers.python', 'Python Traceback', ('pytb',), ('*.pytb',), ('text/x-python-traceback',)), - 'QBasicLexer': ('pygments.lexers.qbasic', 'QBasic', ('qbasic', 'basic'), ('*.BAS', '*.bas'), ('text/basic',)), + 'QBasicLexer': ('pygments.lexers.misc.basic', 'QBasic', ('qbasic', 'basic'), ('*.BAS', '*.bas'), ('text/basic',)), 'QmlLexer': ('pygments.lexers.web', 'QML', ('qml',), ('*.qml',), ('application/x-qml',)), 'RConsoleLexer': ('pygments.lexers.math', 'RConsole', ('rconsole', 'rout'), ('*.Rout',), ()), 'RPMSpecLexer': ('pygments.lexers.installers', 'RPMSpec', ('spec',), ('*.spec',), ('text/x-rpm-spec',)), - 'RacketLexer': ('pygments.lexers.functional', 'Racket', ('racket', 'rkt'), ('*.rkt', '*.rktd', '*.rktl'), ('text/x-racket', 'application/x-racket')), + 'RacketLexer': ('pygments.lexers.lisp', 'Racket', ('racket', 'rkt'), ('*.rkt', '*.rktd', '*.rktl'), ('text/x-racket', 'application/x-racket')), 'RagelCLexer': ('pygments.lexers.parsers', 'Ragel in C Host', ('ragel-c',), ('*.rl',), ()), 'RagelCppLexer': ('pygments.lexers.parsers', 'Ragel in CPP Host', ('ragel-cpp',), ('*.rl',), ()), 'RagelDLexer': ('pygments.lexers.parsers', 'Ragel in D Host', ('ragel-d',), ('*.rl',), ()), @@ -303,11 +303,11 @@ LEXERS = { 'RubyLexer': ('pygments.lexers.ruby', 'Ruby', ('rb', 'ruby', 'duby'), ('*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec', '*.rbx', '*.duby'), ('text/x-ruby', 'application/x-ruby')), 'RustLexer': ('pygments.lexers.c_like.rust', 'Rust', ('rust',), ('*.rs',), ('text/x-rustsrc',)), 'SLexer': ('pygments.lexers.math', 'S', ('splus', 's', 'r'), ('*.S', '*.R', '.Rhistory', '.Rprofile', '.Renviron'), ('text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r', 'text/x-R', 'text/x-r-history', 'text/x-r-profile')), - 'SMLLexer': ('pygments.lexers.functional', 'Standard ML', ('sml',), ('*.sml', '*.sig', '*.fun'), ('text/x-standardml', 'application/x-standardml')), + 'SMLLexer': ('pygments.lexers.misc.ml', 'Standard ML', ('sml',), ('*.sml', '*.sig', '*.fun'), ('text/x-standardml', 'application/x-standardml')), 'SassLexer': ('pygments.lexers.web', 'Sass', ('sass',), ('*.sass',), ('text/x-sass',)), 'ScalaLexer': ('pygments.lexers.jvm', 'Scala', ('scala',), ('*.scala',), ('text/x-scala',)), 'ScamlLexer': ('pygments.lexers.web', 'Scaml', ('scaml',), ('*.scaml',), ('text/x-scaml',)), - 'SchemeLexer': ('pygments.lexers.functional', 'Scheme', ('scheme', 'scm'), ('*.scm', '*.ss'), ('text/x-scheme', 'application/x-scheme')), + 'SchemeLexer': ('pygments.lexers.lisp', 'Scheme', ('scheme', 'scm'), ('*.scm', '*.ss'), ('text/x-scheme', 'application/x-scheme')), 'ScilabLexer': ('pygments.lexers.math', 'Scilab', ('scilab',), ('*.sci', '*.sce', '*.tst'), ('text/scilab',)), 'ScssLexer': ('pygments.lexers.web', 'SCSS', ('scss',), ('*.scss',), ('text/x-scss',)), 'ShellSessionLexer': ('pygments.lexers.shell', 'Shell Session', ('shell-session',), ('*.shell-session',), ('application/x-sh-session',)), diff --git a/pygments/lexers/functional.py b/pygments/lexers/functional.py index 01ba03c6..bb382d6c 100644 --- a/pygments/lexers/functional.py +++ b/pygments/lexers/functional.py @@ -3,3678 +3,19 @@ pygments.lexers.functional ~~~~~~~~~~~~~~~~~~~~~~~~~~ - Lexers for functional languages. + Just export lexer classes previously contained in this module. :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. 
:license: BSD, see LICENSE for details. """ -import re +from pygments.lexers.lisp import SchemeLexer, CommonLispLexer, RacketLexer, \ + NewLispLexer +from pygments.lexers.haskell import HaskellLexer, LiterateHaskellLexer, \ + KokaLexer +from pygments.lexers.theorem import CoqLexer +from pygments.lexers.misc.erlang import ErlangLexer, ErlangShellLexer, \ + ElixirConsoleLexer, ElixirLexer +from pygments.lexers.misc.ml import SMLLexer, OcamlLexer, OpaLexer -from pygments.lexer import Lexer, RegexLexer, bygroups, include, do_insertions, default -from pygments.token import Text, Comment, Operator, Keyword, Name, \ - String, Number, Punctuation, Literal, Generic, Error -from pygments import unistring as uni - -__all__ = ['RacketLexer', 'SchemeLexer', 'CommonLispLexer', 'CryptolLexer', - 'HaskellLexer', 'AgdaLexer', 'LiterateCryptolLexer', - 'LiterateHaskellLexer', 'LiterateAgdaLexer', 'SMLLexer', - 'OcamlLexer', 'ErlangLexer', 'ErlangShellLexer', 'OpaLexer', - 'CoqLexer', 'NewLispLexer', 'NixLexer', 'ElixirLexer', - 'ElixirConsoleLexer', 'KokaLexer', 'IdrisLexer', - 'LiterateIdrisLexer'] - - -line_re = re.compile('.*?\n') - - -class RacketLexer(RegexLexer): - """ - Lexer for `Racket `_ source code (formerly - known as PLT Scheme). - - .. versionadded:: 1.6 - """ - - name = 'Racket' - aliases = ['racket', 'rkt'] - filenames = ['*.rkt', '*.rktd', '*.rktl'] - mimetypes = ['text/x-racket', 'application/x-racket'] - - # Generated by example.rkt - _keywords = [ - '#%app', '#%datum', '#%declare', '#%expression', '#%module-begin', - '#%plain-app', '#%plain-lambda', '#%plain-module-begin', - '#%printing-module-begin', '#%provide', '#%require', - '#%stratified-body', '#%top', '#%top-interaction', - '#%variable-reference', '->', '->*', '->*m', '->d', '->dm', '->i', - '->m', '...', ':do-in', '==', '=>', '_', 'absent', 'abstract', - 'all-defined-out', 'all-from-out', 'and', 'any', 'augment', 'augment*', - 'augment-final', 'augment-final*', 'augride', 'augride*', 'begin', - 'begin-for-syntax', 'begin0', 'case', 'case->', 'case->m', - 'case-lambda', 'class', 'class*', 'class-field-accessor', - 'class-field-mutator', 'class/c', 'class/derived', 'combine-in', - 'combine-out', 'command-line', 'compound-unit', 'compound-unit/infer', - 'cond', 'contract', 'contract-out', 'contract-struct', 'contracted', - 'define', 'define-compound-unit', 'define-compound-unit/infer', - 'define-contract-struct', 'define-custom-hash-types', - 'define-custom-set-types', 'define-for-syntax', - 'define-local-member-name', 'define-logger', 'define-match-expander', - 'define-member-name', 'define-module-boundary-contract', - 'define-namespace-anchor', 'define-opt/c', 'define-sequence-syntax', - 'define-serializable-class', 'define-serializable-class*', - 'define-signature', 'define-signature-form', 'define-struct', - 'define-struct/contract', 'define-struct/derived', 'define-syntax', - 'define-syntax-rule', 'define-syntaxes', 'define-unit', - 'define-unit-binding', 'define-unit-from-context', - 'define-unit/contract', 'define-unit/new-import-export', - 'define-unit/s', 'define-values', 'define-values-for-export', - 'define-values-for-syntax', 'define-values/invoke-unit', - 'define-values/invoke-unit/infer', 'define/augment', - 'define/augment-final', 'define/augride', 'define/contract', - 'define/final-prop', 'define/match', 'define/overment', - 'define/override', 'define/override-final', 'define/private', - 'define/public', 'define/public-final', 'define/pubment', - 'define/subexpression-pos-prop', 'delay', 'delay/idle', 'delay/name', - 
'delay/strict', 'delay/sync', 'delay/thread', 'do', 'else', 'except', - 'except-in', 'except-out', 'export', 'extends', 'failure-cont', - 'false', 'false/c', 'field', 'field-bound?', 'file', - 'flat-murec-contract', 'flat-rec-contract', 'for', 'for*', 'for*/and', - 'for*/first', 'for*/fold', 'for*/fold/derived', 'for*/hash', - 'for*/hasheq', 'for*/hasheqv', 'for*/last', 'for*/list', 'for*/lists', - 'for*/mutable-set', 'for*/mutable-seteq', 'for*/mutable-seteqv', - 'for*/or', 'for*/product', 'for*/set', 'for*/seteq', 'for*/seteqv', - 'for*/sum', 'for*/vector', 'for*/weak-set', 'for*/weak-seteq', - 'for*/weak-seteqv', 'for-label', 'for-meta', 'for-syntax', - 'for-template', 'for/and', 'for/first', 'for/fold', 'for/fold/derived', - 'for/hash', 'for/hasheq', 'for/hasheqv', 'for/last', 'for/list', - 'for/lists', 'for/mutable-set', 'for/mutable-seteq', - 'for/mutable-seteqv', 'for/or', 'for/product', 'for/set', 'for/seteq', - 'for/seteqv', 'for/sum', 'for/vector', 'for/weak-set', - 'for/weak-seteq', 'for/weak-seteqv', 'gen:custom-write', 'gen:dict', - 'gen:equal+hash', 'gen:set', 'gen:stream', 'generic', 'get-field', - 'if', 'implies', 'import', 'include', 'include-at/relative-to', - 'include-at/relative-to/reader', 'include/reader', 'inherit', - 'inherit-field', 'inherit/inner', 'inherit/super', 'init', - 'init-depend', 'init-field', 'init-rest', 'inner', 'inspect', - 'instantiate', 'interface', 'interface*', 'invoke-unit', - 'invoke-unit/infer', 'lambda', 'lazy', 'let', 'let*', 'let*-values', - 'let-syntax', 'let-syntaxes', 'let-values', 'let/cc', 'let/ec', - 'letrec', 'letrec-syntax', 'letrec-syntaxes', 'letrec-syntaxes+values', - 'letrec-values', 'lib', 'link', 'local', 'local-require', 'log-debug', - 'log-error', 'log-fatal', 'log-info', 'log-warning', 'match', 'match*', - 'match*/derived', 'match-define', 'match-define-values', - 'match-lambda', 'match-lambda*', 'match-lambda**', 'match-let', - 'match-let*', 'match-let*-values', 'match-let-values', 'match-letrec', - 'match/derived', 'match/values', 'member-name-key', 'method-contract?', - 'mixin', 'module', 'module*', 'module+', 'nand', 'new', 'nor', - 'object-contract', 'object/c', 'only', 'only-in', 'only-meta-in', - 'open', 'opt/c', 'or', 'overment', 'overment*', 'override', - 'override*', 'override-final', 'override-final*', 'parameterize', - 'parameterize*', 'parameterize-break', 'parametric->/c', 'place', - 'place*', 'planet', 'prefix', 'prefix-in', 'prefix-out', 'private', - 'private*', 'prompt-tag/c', 'protect-out', 'provide', - 'provide-signature-elements', 'provide/contract', 'public', 'public*', - 'public-final', 'public-final*', 'pubment', 'pubment*', 'quasiquote', - 'quasisyntax', 'quasisyntax/loc', 'quote', 'quote-syntax', - 'quote-syntax/prune', 'recontract-out', 'recursive-contract', - 'relative-in', 'rename', 'rename-in', 'rename-inner', 'rename-out', - 'rename-super', 'require', 'send', 'send*', 'send+', 'send-generic', - 'send/apply', 'send/keyword-apply', 'set!', 'set!-values', - 'set-field!', 'shared', 'stream', 'stream-cons', 'struct', 'struct*', - 'struct-copy', 'struct-field-index', 'struct-out', 'struct/c', - 'struct/ctc', 'struct/dc', 'submod', 'super', 'super-instantiate', - 'super-make-object', 'super-new', 'syntax', 'syntax-case', - 'syntax-case*', 'syntax-id-rules', 'syntax-rules', 'syntax/loc', 'tag', - 'this', 'this%', 'thunk', 'thunk*', 'time', 'unconstrained-domain->', - 'unit', 'unit-from-context', 'unit/c', 'unit/new-import-export', - 'unit/s', 'unless', 'unquote', 'unquote-splicing', 'unsyntax', - 
'unsyntax-splicing', 'values/drop', 'when', 'with-continuation-mark', - 'with-contract', 'with-handlers', 'with-handlers*', 'with-method', - 'with-syntax', u'λ' - ] - - # Generated by example.rkt - _builtins = [ - '*', '+', '-', '/', '<', '', '>/c', - '>=', '>=/c', 'abort-current-continuation', 'abs', 'absolute-path?', - 'acos', 'add-between', 'add1', 'alarm-evt', 'always-evt', 'and/c', - 'andmap', 'angle', 'any/c', 'append', 'append*', 'append-map', 'apply', - 'argmax', 'argmin', 'arithmetic-shift', 'arity-at-least', - 'arity-at-least-value', 'arity-at-least?', 'arity-checking-wrapper', - 'arity-includes?', 'arity=?', 'asin', 'assf', 'assoc', 'assq', 'assv', - 'atan', 'bad-number-of-results', 'banner', 'base->-doms/c', - 'base->-rngs/c', 'base->?', 'between/c', 'bitwise-and', - 'bitwise-bit-field', 'bitwise-bit-set?', 'bitwise-ior', 'bitwise-not', - 'bitwise-xor', 'blame-add-car-context', 'blame-add-cdr-context', - 'blame-add-context', 'blame-add-missing-party', - 'blame-add-nth-arg-context', 'blame-add-or-context', - 'blame-add-range-context', 'blame-add-unknown-context', - 'blame-context', 'blame-contract', 'blame-fmt->-string', - 'blame-negative', 'blame-original?', 'blame-positive', - 'blame-replace-negative', 'blame-source', 'blame-swap', - 'blame-swapped?', 'blame-update', 'blame-value', 'blame?', 'boolean=?', - 'boolean?', 'bound-identifier=?', 'box', 'box-cas!', 'box-immutable', - 'box-immutable/c', 'box/c', 'box?', 'break-enabled', 'break-thread', - 'build-chaperone-contract-property', 'build-compound-type-name', - 'build-contract-property', 'build-flat-contract-property', - 'build-list', 'build-path', 'build-path/convention-type', - 'build-string', 'build-vector', 'byte-pregexp', 'byte-pregexp?', - 'byte-ready?', 'byte-regexp', 'byte-regexp?', 'byte?', 'bytes', - 'bytes->immutable-bytes', 'bytes->list', 'bytes->path', - 'bytes->path-element', 'bytes->string/latin-1', 'bytes->string/locale', - 'bytes->string/utf-8', 'bytes-append', 'bytes-append*', - 'bytes-close-converter', 'bytes-convert', 'bytes-convert-end', - 'bytes-converter?', 'bytes-copy', 'bytes-copy!', - 'bytes-environment-variable-name?', 'bytes-fill!', 'bytes-join', - 'bytes-length', 'bytes-no-nuls?', 'bytes-open-converter', 'bytes-ref', - 'bytes-set!', 'bytes-utf-8-index', 'bytes-utf-8-length', - 'bytes-utf-8-ref', 'bytes?', 'bytes?', 'caaaar', - 'caaadr', 'caaar', 'caadar', 'caaddr', 'caadr', 'caar', 'cadaar', - 'cadadr', 'cadar', 'caddar', 'cadddr', 'caddr', 'cadr', - 'call-in-nested-thread', 'call-with-atomic-output-file', - 'call-with-break-parameterization', - 'call-with-composable-continuation', 'call-with-continuation-barrier', - 'call-with-continuation-prompt', 'call-with-current-continuation', - 'call-with-default-reading-parameterization', - 'call-with-escape-continuation', 'call-with-exception-handler', - 'call-with-file-lock/timeout', 'call-with-immediate-continuation-mark', - 'call-with-input-bytes', 'call-with-input-file', - 'call-with-input-file*', 'call-with-input-string', - 'call-with-output-bytes', 'call-with-output-file', - 'call-with-output-file*', 'call-with-output-string', - 'call-with-parameterization', 'call-with-semaphore', - 'call-with-semaphore/enable-break', 'call-with-values', 'call/cc', - 'call/ec', 'car', 'cdaaar', 'cdaadr', 'cdaar', 'cdadar', 'cdaddr', - 'cdadr', 'cdar', 'cddaar', 'cddadr', 'cddar', 'cdddar', 'cddddr', - 'cdddr', 'cddr', 'cdr', 'ceiling', 'channel-get', 'channel-put', - 'channel-put-evt', 'channel-put-evt?', 'channel-try-get', 'channel/c', - 'channel?', 
'chaperone-box', 'chaperone-channel', - 'chaperone-continuation-mark-key', 'chaperone-contract-property?', - 'chaperone-contract?', 'chaperone-evt', 'chaperone-hash', - 'chaperone-of?', 'chaperone-procedure', 'chaperone-prompt-tag', - 'chaperone-struct', 'chaperone-struct-type', 'chaperone-vector', - 'chaperone?', 'char->integer', 'char-alphabetic?', 'char-blank?', - 'char-ci<=?', 'char-ci=?', 'char-ci>?', - 'char-downcase', 'char-foldcase', 'char-general-category', - 'char-graphic?', 'char-iso-control?', 'char-lower-case?', - 'char-numeric?', 'char-punctuation?', 'char-ready?', 'char-symbolic?', - 'char-title-case?', 'char-titlecase', 'char-upcase', - 'char-upper-case?', 'char-utf-8-length', 'char-whitespace?', 'char<=?', - 'char=?', 'char>?', 'char?', - 'check-duplicate-identifier', 'checked-procedure-check-and-extract', - 'choice-evt', 'class->interface', 'class-info', 'class?', - 'cleanse-path', 'close-input-port', 'close-output-port', - 'coerce-chaperone-contract', 'coerce-chaperone-contracts', - 'coerce-contract', 'coerce-contract/f', 'coerce-contracts', - 'coerce-flat-contract', 'coerce-flat-contracts', 'collect-garbage', - 'collection-file-path', 'collection-path', 'compile', - 'compile-allow-set!-undefined', 'compile-context-preservation-enabled', - 'compile-enforce-module-constants', 'compile-syntax', - 'compiled-expression?', 'compiled-module-expression?', - 'complete-path?', 'complex?', 'compose', 'compose1', 'conjugate', - 'cons', 'cons/c', 'cons?', 'const', 'continuation-mark-key/c', - 'continuation-mark-key?', 'continuation-mark-set->context', - 'continuation-mark-set->list', 'continuation-mark-set->list*', - 'continuation-mark-set-first', 'continuation-mark-set?', - 'continuation-marks', 'continuation-prompt-available?', - 'continuation-prompt-tag?', 'continuation?', - 'contract-continuation-mark-key', 'contract-first-order', - 'contract-first-order-passes?', 'contract-name', 'contract-proc', - 'contract-projection', 'contract-property?', - 'contract-random-generate', 'contract-stronger?', - 'contract-struct-exercise', 'contract-struct-generate', - 'contract-val-first-projection', 'contract?', 'convert-stream', - 'copy-directory/files', 'copy-file', 'copy-port', 'cos', 'cosh', - 'count', 'current-blame-format', 'current-break-parameterization', - 'current-code-inspector', 'current-command-line-arguments', - 'current-compile', 'current-compiled-file-roots', - 'current-continuation-marks', 'current-contract-region', - 'current-custodian', 'current-directory', 'current-directory-for-user', - 'current-drive', 'current-environment-variables', 'current-error-port', - 'current-eval', 'current-evt-pseudo-random-generator', - 'current-future', 'current-gc-milliseconds', - 'current-get-interaction-input-port', 'current-inexact-milliseconds', - 'current-input-port', 'current-inspector', - 'current-library-collection-links', 'current-library-collection-paths', - 'current-load', 'current-load-extension', - 'current-load-relative-directory', 'current-load/use-compiled', - 'current-locale', 'current-logger', 'current-memory-use', - 'current-milliseconds', 'current-module-declare-name', - 'current-module-declare-source', 'current-module-name-resolver', - 'current-module-path-for-load', 'current-namespace', - 'current-output-port', 'current-parameterization', - 'current-preserved-thread-cell-values', 'current-print', - 'current-process-milliseconds', 'current-prompt-read', - 'current-pseudo-random-generator', 'current-read-interaction', - 'current-reader-guard', 'current-readtable', 
'current-seconds', - 'current-security-guard', 'current-subprocess-custodian-mode', - 'current-thread', 'current-thread-group', - 'current-thread-initial-stack-size', - 'current-write-relative-directory', 'curry', 'curryr', - 'custodian-box-value', 'custodian-box?', 'custodian-limit-memory', - 'custodian-managed-list', 'custodian-memory-accounting-available?', - 'custodian-require-memory', 'custodian-shutdown-all', 'custodian?', - 'custom-print-quotable-accessor', 'custom-print-quotable?', - 'custom-write-accessor', 'custom-write-property-proc', 'custom-write?', - 'date', 'date*', 'date*-nanosecond', 'date*-time-zone-name', 'date*?', - 'date-day', 'date-dst?', 'date-hour', 'date-minute', 'date-month', - 'date-second', 'date-time-zone-offset', 'date-week-day', 'date-year', - 'date-year-day', 'date?', 'datum->syntax', 'datum-intern-literal', - 'default-continuation-prompt-tag', 'degrees->radians', - 'delete-directory', 'delete-directory/files', 'delete-file', - 'denominator', 'dict->list', 'dict-can-functional-set?', - 'dict-can-remove-keys?', 'dict-clear', 'dict-clear!', 'dict-copy', - 'dict-count', 'dict-empty?', 'dict-for-each', 'dict-has-key?', - 'dict-implements/c', 'dict-implements?', 'dict-iter-contract', - 'dict-iterate-first', 'dict-iterate-key', 'dict-iterate-next', - 'dict-iterate-value', 'dict-key-contract', 'dict-keys', 'dict-map', - 'dict-mutable?', 'dict-ref', 'dict-ref!', 'dict-remove', - 'dict-remove!', 'dict-set', 'dict-set!', 'dict-set*', 'dict-set*!', - 'dict-update', 'dict-update!', 'dict-value-contract', 'dict-values', - 'dict?', 'directory-exists?', 'directory-list', 'display', - 'display-lines', 'display-lines-to-file', 'display-to-file', - 'displayln', 'double-flonum?', 'drop', 'drop-right', 'dropf', - 'dropf-right', 'dump-memory-stats', 'dup-input-port', - 'dup-output-port', 'dynamic-get-field', 'dynamic-place', - 'dynamic-place*', 'dynamic-require', 'dynamic-require-for-syntax', - 'dynamic-send', 'dynamic-set-field!', 'dynamic-wind', 'eighth', - 'empty', 'empty-sequence', 'empty-stream', 'empty?', - 'environment-variables-copy', 'environment-variables-names', - 'environment-variables-ref', 'environment-variables-set!', - 'environment-variables?', 'eof', 'eof-evt', 'eof-object?', - 'ephemeron-value', 'ephemeron?', 'eprintf', 'eq-contract-val', - 'eq-contract?', 'eq-hash-code', 'eq?', 'equal-contract-val', - 'equal-contract?', 'equal-hash-code', 'equal-secondary-hash-code', - 'equal<%>', 'equal?', 'equal?/recur', 'eqv-hash-code', 'eqv?', 'error', - 'error-display-handler', 'error-escape-handler', - 'error-print-context-length', 'error-print-source-location', - 'error-print-width', 'error-value->string-handler', 'eval', - 'eval-jit-enabled', 'eval-syntax', 'even?', 'evt/c', 'evt?', - 'exact->inexact', 'exact-ceiling', 'exact-floor', 'exact-integer?', - 'exact-nonnegative-integer?', 'exact-positive-integer?', 'exact-round', - 'exact-truncate', 'exact?', 'executable-yield-handler', 'exit', - 'exit-handler', 'exn', 'exn-continuation-marks', 'exn-message', - 'exn:break', 'exn:break-continuation', 'exn:break:hang-up', - 'exn:break:hang-up?', 'exn:break:terminate', 'exn:break:terminate?', - 'exn:break?', 'exn:fail', 'exn:fail:contract', - 'exn:fail:contract:arity', 'exn:fail:contract:arity?', - 'exn:fail:contract:blame', 'exn:fail:contract:blame-object', - 'exn:fail:contract:blame?', 'exn:fail:contract:continuation', - 'exn:fail:contract:continuation?', 'exn:fail:contract:divide-by-zero', - 'exn:fail:contract:divide-by-zero?', - 'exn:fail:contract:non-fixnum-result', - 
'exn:fail:contract:non-fixnum-result?', 'exn:fail:contract:variable', - 'exn:fail:contract:variable-id', 'exn:fail:contract:variable?', - 'exn:fail:contract?', 'exn:fail:filesystem', - 'exn:fail:filesystem:errno', 'exn:fail:filesystem:errno-errno', - 'exn:fail:filesystem:errno?', 'exn:fail:filesystem:exists', - 'exn:fail:filesystem:exists?', 'exn:fail:filesystem:missing-module', - 'exn:fail:filesystem:missing-module-path', - 'exn:fail:filesystem:missing-module?', 'exn:fail:filesystem:version', - 'exn:fail:filesystem:version?', 'exn:fail:filesystem?', - 'exn:fail:network', 'exn:fail:network:errno', - 'exn:fail:network:errno-errno', 'exn:fail:network:errno?', - 'exn:fail:network?', 'exn:fail:object', 'exn:fail:object?', - 'exn:fail:out-of-memory', 'exn:fail:out-of-memory?', 'exn:fail:read', - 'exn:fail:read-srclocs', 'exn:fail:read:eof', 'exn:fail:read:eof?', - 'exn:fail:read:non-char', 'exn:fail:read:non-char?', 'exn:fail:read?', - 'exn:fail:syntax', 'exn:fail:syntax-exprs', - 'exn:fail:syntax:missing-module', - 'exn:fail:syntax:missing-module-path', - 'exn:fail:syntax:missing-module?', 'exn:fail:syntax:unbound', - 'exn:fail:syntax:unbound?', 'exn:fail:syntax?', 'exn:fail:unsupported', - 'exn:fail:unsupported?', 'exn:fail:user', 'exn:fail:user?', - 'exn:fail?', 'exn:misc:match?', 'exn:missing-module-accessor', - 'exn:missing-module?', 'exn:srclocs-accessor', 'exn:srclocs?', 'exn?', - 'exp', 'expand', 'expand-once', 'expand-syntax', 'expand-syntax-once', - 'expand-syntax-to-top-form', 'expand-to-top-form', 'expand-user-path', - 'explode-path', 'expt', 'externalizable<%>', 'false?', 'field-names', - 'fifth', 'file->bytes', 'file->bytes-lines', 'file->lines', - 'file->list', 'file->string', 'file->value', 'file-exists?', - 'file-name-from-path', 'file-or-directory-identity', - 'file-or-directory-modify-seconds', 'file-or-directory-permissions', - 'file-position', 'file-position*', 'file-size', - 'file-stream-buffer-mode', 'file-stream-port?', 'file-truncate', - 'filename-extension', 'filesystem-change-evt', - 'filesystem-change-evt-cancel', 'filesystem-change-evt?', - 'filesystem-root-list', 'filter', 'filter-map', 'filter-not', - 'filter-read-input-port', 'find-executable-path', 'find-files', - 'find-library-collection-links', 'find-library-collection-paths', - 'find-relative-path', 'find-system-path', 'findf', 'first', 'fixnum?', - 'flat-contract', 'flat-contract-predicate', 'flat-contract-property?', - 'flat-contract?', 'flat-named-contract', 'flatten', - 'floating-point-bytes->real', 'flonum?', 'floor', 'flush-output', - 'fold-files', 'foldl', 'foldr', 'for-each', 'force', 'format', - 'fourth', 'fprintf', 'free-identifier=?', 'free-label-identifier=?', - 'free-template-identifier=?', 'free-transformer-identifier=?', - 'fsemaphore-count', 'fsemaphore-post', 'fsemaphore-try-wait?', - 'fsemaphore-wait', 'fsemaphore?', 'future', 'future?', - 'futures-enabled?', 'gcd', 'generate-member-key', - 'generate-temporaries', 'generic-set?', 'generic?', 'gensym', - 'get-output-bytes', 'get-output-string', 'get-preference', - 'get/build-val-first-projection', 'getenv', - 'global-port-print-handler', 'group-execute-bit', 'group-read-bit', - 'group-write-bit', 'guard-evt', 'handle-evt', 'handle-evt?', - 'has-contract?', 'hash', 'hash->list', 'hash-clear', 'hash-clear!', - 'hash-copy', 'hash-copy-clear', 'hash-count', 'hash-empty?', - 'hash-eq?', 'hash-equal?', 'hash-eqv?', 'hash-for-each', - 'hash-has-key?', 'hash-iterate-first', 'hash-iterate-key', - 'hash-iterate-next', 'hash-iterate-value', 'hash-keys', 
'hash-map', - 'hash-placeholder?', 'hash-ref', 'hash-ref!', 'hash-remove', - 'hash-remove!', 'hash-set', 'hash-set!', 'hash-set*', 'hash-set*!', - 'hash-update', 'hash-update!', 'hash-values', 'hash-weak?', 'hash/c', - 'hash?', 'hasheq', 'hasheqv', 'identifier-binding', - 'identifier-binding-symbol', 'identifier-label-binding', - 'identifier-prune-lexical-context', - 'identifier-prune-to-source-module', - 'identifier-remove-from-definition-context', - 'identifier-template-binding', 'identifier-transformer-binding', - 'identifier?', 'identity', 'imag-part', 'immutable?', - 'impersonate-box', 'impersonate-channel', - 'impersonate-continuation-mark-key', 'impersonate-hash', - 'impersonate-procedure', 'impersonate-prompt-tag', - 'impersonate-struct', 'impersonate-vector', 'impersonator-contract?', - 'impersonator-ephemeron', 'impersonator-of?', - 'impersonator-prop:application-mark', 'impersonator-prop:contracted', - 'impersonator-property-accessor-procedure?', 'impersonator-property?', - 'impersonator?', 'implementation?', 'implementation?/c', 'in-bytes', - 'in-bytes-lines', 'in-cycle', 'in-dict', 'in-dict-keys', - 'in-dict-pairs', 'in-dict-values', 'in-directory', 'in-hash', - 'in-hash-keys', 'in-hash-pairs', 'in-hash-values', 'in-indexed', - 'in-input-port-bytes', 'in-input-port-chars', 'in-lines', 'in-list', - 'in-mlist', 'in-naturals', 'in-parallel', 'in-permutations', 'in-port', - 'in-producer', 'in-range', 'in-sequences', 'in-set', 'in-stream', - 'in-string', 'in-value', 'in-values*-sequence', 'in-values-sequence', - 'in-vector', 'inexact->exact', 'inexact-real?', 'inexact?', - 'infinite?', 'input-port-append', 'input-port?', 'inspector?', - 'instanceof/c', 'integer->char', 'integer->integer-bytes', - 'integer-bytes->integer', 'integer-in', 'integer-length', - 'integer-sqrt', 'integer-sqrt/remainder', 'integer?', - 'interface->method-names', 'interface-extension?', 'interface?', - 'internal-definition-context-seal', 'internal-definition-context?', - 'is-a?', 'is-a?/c', 'keyword->string', 'keyword-apply', 'keywordbytes', 'list->mutable-set', 'list->mutable-seteq', - 'list->mutable-seteqv', 'list->set', 'list->seteq', 'list->seteqv', - 'list->string', 'list->vector', 'list->weak-set', 'list->weak-seteq', - 'list->weak-seteqv', 'list-ref', 'list-tail', 'list/c', 'list?', - 'listof', 'load', 'load-extension', 'load-on-demand-enabled', - 'load-relative', 'load-relative-extension', 'load/cd', - 'load/use-compiled', 'local-expand', 'local-expand/capture-lifts', - 'local-transformer-expand', 'local-transformer-expand/capture-lifts', - 'locale-string-encoding', 'log', 'log-level?', 'log-max-level', - 'log-message', 'log-receiver?', 'logger-name', 'logger?', 'magnitude', - 'make-arity-at-least', 'make-base-empty-namespace', - 'make-base-namespace', 'make-bytes', 'make-channel', - 'make-chaperone-contract', 'make-continuation-mark-key', - 'make-continuation-prompt-tag', 'make-contract', 'make-custodian', - 'make-custodian-box', 'make-custom-hash', 'make-custom-hash-types', - 'make-custom-set', 'make-custom-set-types', 'make-date', 'make-date*', - 'make-derived-parameter', 'make-directory', 'make-directory*', - 'make-do-sequence', 'make-empty-namespace', - 'make-environment-variables', 'make-ephemeron', 'make-exn', - 'make-exn:break', 'make-exn:break:hang-up', 'make-exn:break:terminate', - 'make-exn:fail', 'make-exn:fail:contract', - 'make-exn:fail:contract:arity', 'make-exn:fail:contract:blame', - 'make-exn:fail:contract:continuation', - 'make-exn:fail:contract:divide-by-zero', - 
'make-exn:fail:contract:non-fixnum-result', - 'make-exn:fail:contract:variable', 'make-exn:fail:filesystem', - 'make-exn:fail:filesystem:errno', 'make-exn:fail:filesystem:exists', - 'make-exn:fail:filesystem:missing-module', - 'make-exn:fail:filesystem:version', 'make-exn:fail:network', - 'make-exn:fail:network:errno', 'make-exn:fail:object', - 'make-exn:fail:out-of-memory', 'make-exn:fail:read', - 'make-exn:fail:read:eof', 'make-exn:fail:read:non-char', - 'make-exn:fail:syntax', 'make-exn:fail:syntax:missing-module', - 'make-exn:fail:syntax:unbound', 'make-exn:fail:unsupported', - 'make-exn:fail:user', 'make-file-or-directory-link', - 'make-flat-contract', 'make-fsemaphore', 'make-generic', - 'make-handle-get-preference-locked', 'make-hash', - 'make-hash-placeholder', 'make-hasheq', 'make-hasheq-placeholder', - 'make-hasheqv', 'make-hasheqv-placeholder', - 'make-immutable-custom-hash', 'make-immutable-hash', - 'make-immutable-hasheq', 'make-immutable-hasheqv', - 'make-impersonator-property', 'make-input-port', - 'make-input-port/read-to-peek', 'make-inspector', - 'make-keyword-procedure', 'make-known-char-range-list', - 'make-limited-input-port', 'make-list', 'make-lock-file-name', - 'make-log-receiver', 'make-logger', 'make-mixin-contract', - 'make-mutable-custom-set', 'make-none/c', 'make-object', - 'make-output-port', 'make-parameter', 'make-phantom-bytes', - 'make-pipe', 'make-pipe-with-specials', 'make-placeholder', - 'make-polar', 'make-prefab-struct', 'make-primitive-class', - 'make-proj-contract', 'make-pseudo-random-generator', - 'make-reader-graph', 'make-readtable', 'make-rectangular', - 'make-rename-transformer', 'make-resolved-module-path', - 'make-security-guard', 'make-semaphore', 'make-set!-transformer', - 'make-shared-bytes', 'make-sibling-inspector', 'make-special-comment', - 'make-srcloc', 'make-string', 'make-struct-field-accessor', - 'make-struct-field-mutator', 'make-struct-type', - 'make-struct-type-property', 'make-syntax-delta-introducer', - 'make-syntax-introducer', 'make-temporary-file', - 'make-tentative-pretty-print-output-port', 'make-thread-cell', - 'make-thread-group', 'make-vector', 'make-weak-box', - 'make-weak-custom-hash', 'make-weak-custom-set', 'make-weak-hash', - 'make-weak-hasheq', 'make-weak-hasheqv', 'make-will-executor', 'map', - 'match-equality-test', 'matches-arity-exactly?', 'max', 'mcar', 'mcdr', - 'mcons', 'member', 'member-name-key-hash-code', 'member-name-key=?', - 'member-name-key?', 'memf', 'memq', 'memv', 'merge-input', - 'method-in-interface?', 'min', 'mixin-contract', 'module->exports', - 'module->imports', 'module->language-info', 'module->namespace', - 'module-compiled-cross-phase-persistent?', 'module-compiled-exports', - 'module-compiled-imports', 'module-compiled-language-info', - 'module-compiled-name', 'module-compiled-submodules', - 'module-declared?', 'module-path-index-join', - 'module-path-index-resolve', 'module-path-index-split', - 'module-path-index-submodule', 'module-path-index?', 'module-path?', - 'module-predefined?', 'module-provide-protected?', 'modulo', 'mpair?', - 'mutable-set', 'mutable-seteq', 'mutable-seteqv', 'n->th', - 'nack-guard-evt', 'namespace-anchor->empty-namespace', - 'namespace-anchor->namespace', 'namespace-anchor?', - 'namespace-attach-module', 'namespace-attach-module-declaration', - 'namespace-base-phase', 'namespace-mapped-symbols', - 'namespace-module-identifier', 'namespace-module-registry', - 'namespace-require', 'namespace-require/constant', - 'namespace-require/copy', 
'namespace-require/expansion-time', - 'namespace-set-variable-value!', 'namespace-symbol->identifier', - 'namespace-syntax-introduce', 'namespace-undefine-variable!', - 'namespace-unprotect-module', 'namespace-variable-value', 'namespace?', - 'nan?', 'natural-number/c', 'negate', 'negative?', 'never-evt', - u'new-∀/c', u'new-∃/c', 'newline', 'ninth', 'non-empty-listof', - 'none/c', 'normal-case-path', 'normalize-arity', 'normalize-path', - 'normalized-arity?', 'not', 'not/c', 'null', 'null?', 'number->string', - 'number?', 'numerator', 'object%', 'object->vector', 'object-info', - 'object-interface', 'object-method-arity-includes?', 'object-name', - 'object=?', 'object?', 'odd?', 'one-of/c', 'open-input-bytes', - 'open-input-file', 'open-input-output-file', 'open-input-string', - 'open-output-bytes', 'open-output-file', 'open-output-nowhere', - 'open-output-string', 'or/c', 'order-of-magnitude', 'ormap', - 'other-execute-bit', 'other-read-bit', 'other-write-bit', - 'output-port?', 'pair?', 'parameter-procedure=?', 'parameter/c', - 'parameter?', 'parameterization?', 'parse-command-line', 'partition', - 'path->bytes', 'path->complete-path', 'path->directory-path', - 'path->string', 'path-add-suffix', 'path-convention-type', - 'path-element->bytes', 'path-element->string', 'path-element?', - 'path-for-some-system?', 'path-list-string->path-list', 'path-only', - 'path-replace-suffix', 'path-string?', 'pathbytes', 'port->bytes-lines', 'port->lines', - 'port->list', 'port->string', 'port-closed-evt', 'port-closed?', - 'port-commit-peeked', 'port-count-lines!', 'port-count-lines-enabled', - 'port-counts-lines?', 'port-display-handler', 'port-file-identity', - 'port-file-unlock', 'port-next-location', 'port-print-handler', - 'port-progress-evt', 'port-provides-progress-evts?', - 'port-read-handler', 'port-try-file-lock?', 'port-write-handler', - 'port-writes-atomic?', 'port-writes-special?', 'port?', 'positive?', - 'predicate/c', 'prefab-key->struct-type', 'prefab-key?', - 'prefab-struct-key', 'preferences-lock-file-mode', 'pregexp', - 'pregexp?', 'pretty-display', 'pretty-format', 'pretty-print', - 'pretty-print-.-symbol-without-bars', - 'pretty-print-abbreviate-read-macros', 'pretty-print-columns', - 'pretty-print-current-style-table', 'pretty-print-depth', - 'pretty-print-exact-as-decimal', 'pretty-print-extend-style-table', - 'pretty-print-handler', 'pretty-print-newline', - 'pretty-print-post-print-hook', 'pretty-print-pre-print-hook', - 'pretty-print-print-hook', 'pretty-print-print-line', - 'pretty-print-remap-stylable', 'pretty-print-show-inexactness', - 'pretty-print-size-hook', 'pretty-print-style-table?', - 'pretty-printing', 'pretty-write', 'primitive-closure?', - 'primitive-result-arity', 'primitive?', 'print', 'print-as-expression', - 'print-boolean-long-form', 'print-box', 'print-graph', - 'print-hash-table', 'print-mpair-curly-braces', - 'print-pair-curly-braces', 'print-reader-abbreviations', - 'print-struct', 'print-syntax-width', 'print-unreadable', - 'print-vector-length', 'printable/c', 'printable<%>', 'printf', - 'procedure->method', 'procedure-arity', 'procedure-arity-includes/c', - 'procedure-arity-includes?', 'procedure-arity?', - 'procedure-closure-contents-eq?', 'procedure-extract-target', - 'procedure-keywords', 'procedure-reduce-arity', - 'procedure-reduce-keyword-arity', 'procedure-rename', - 'procedure-struct-type?', 'procedure?', 'process', 'process*', - 'process*/ports', 'process/ports', 'processor-count', 'progress-evt?', - 'promise-forced?', 'promise-running?', 
'promise/c', 'promise?', - 'prop:arity-string', 'prop:chaperone-contract', - 'prop:checked-procedure', 'prop:contract', 'prop:contracted', - 'prop:custom-print-quotable', 'prop:custom-write', 'prop:dict', - 'prop:dict/contract', 'prop:equal+hash', 'prop:evt', - 'prop:exn:missing-module', 'prop:exn:srclocs', 'prop:flat-contract', - 'prop:impersonator-of', 'prop:input-port', - 'prop:liberal-define-context', 'prop:opt-chaperone-contract', - 'prop:opt-chaperone-contract-get-test', 'prop:opt-chaperone-contract?', - 'prop:output-port', 'prop:place-location', 'prop:procedure', - 'prop:rename-transformer', 'prop:sequence', 'prop:set!-transformer', - 'prop:stream', 'proper-subset?', 'pseudo-random-generator->vector', - 'pseudo-random-generator-vector?', 'pseudo-random-generator?', - 'put-preferences', 'putenv', 'quotient', 'quotient/remainder', - 'radians->degrees', 'raise', 'raise-argument-error', - 'raise-arguments-error', 'raise-arity-error', 'raise-blame-error', - 'raise-contract-error', 'raise-mismatch-error', - 'raise-not-cons-blame-error', 'raise-range-error', - 'raise-result-error', 'raise-syntax-error', 'raise-type-error', - 'raise-user-error', 'random', 'random-seed', 'range', 'rational?', - 'rationalize', 'read', 'read-accept-bar-quote', 'read-accept-box', - 'read-accept-compiled', 'read-accept-dot', 'read-accept-graph', - 'read-accept-infix-dot', 'read-accept-lang', 'read-accept-quasiquote', - 'read-accept-reader', 'read-byte', 'read-byte-or-special', - 'read-bytes', 'read-bytes!', 'read-bytes!-evt', 'read-bytes-avail!', - 'read-bytes-avail!*', 'read-bytes-avail!-evt', - 'read-bytes-avail!/enable-break', 'read-bytes-evt', 'read-bytes-line', - 'read-bytes-line-evt', 'read-case-sensitive', 'read-char', - 'read-char-or-special', 'read-curly-brace-as-paren', - 'read-decimal-as-inexact', 'read-eval-print-loop', 'read-language', - 'read-line', 'read-line-evt', 'read-on-demand-source', - 'read-square-bracket-as-paren', 'read-string', 'read-string!', - 'read-string!-evt', 'read-string-evt', 'read-syntax', - 'read-syntax/recursive', 'read/recursive', 'readtable-mapping', - 'readtable?', 'real->decimal-string', 'real->double-flonum', - 'real->floating-point-bytes', 'real->single-flonum', 'real-in', - 'real-part', 'real?', 'reencode-input-port', 'reencode-output-port', - 'regexp', 'regexp-match', 'regexp-match*', 'regexp-match-evt', - 'regexp-match-exact?', 'regexp-match-peek', - 'regexp-match-peek-immediate', 'regexp-match-peek-positions', - 'regexp-match-peek-positions*', - 'regexp-match-peek-positions-immediate', - 'regexp-match-peek-positions-immediate/end', - 'regexp-match-peek-positions/end', 'regexp-match-positions', - 'regexp-match-positions*', 'regexp-match-positions/end', - 'regexp-match/end', 'regexp-match?', 'regexp-max-lookbehind', - 'regexp-quote', 'regexp-replace', 'regexp-replace*', - 'regexp-replace-quote', 'regexp-replaces', 'regexp-split', - 'regexp-try-match', 'regexp?', 'relative-path?', 'relocate-input-port', - 'relocate-output-port', 'remainder', 'remove', 'remove*', - 'remove-duplicates', 'remq', 'remq*', 'remv', 'remv*', - 'rename-file-or-directory', 'rename-transformer-target', - 'rename-transformer?', 'reroot-path', 'resolve-path', - 'resolved-module-path-name', 'resolved-module-path?', 'rest', - 'reverse', 'round', 'second', 'seconds->date', 'security-guard?', - 'semaphore-peek-evt', 'semaphore-peek-evt?', 'semaphore-post', - 'semaphore-try-wait?', 'semaphore-wait', 'semaphore-wait/enable-break', - 'semaphore?', 'sequence->list', 'sequence->stream', - 
'sequence-add-between', 'sequence-andmap', 'sequence-append', - 'sequence-count', 'sequence-filter', 'sequence-fold', - 'sequence-for-each', 'sequence-generate', 'sequence-generate*', - 'sequence-length', 'sequence-map', 'sequence-ormap', 'sequence-ref', - 'sequence-tail', 'sequence?', 'set', 'set!-transformer-procedure', - 'set!-transformer?', 'set->list', 'set->stream', 'set-add', 'set-add!', - 'set-box!', 'set-clear', 'set-clear!', 'set-copy', 'set-copy-clear', - 'set-count', 'set-empty?', 'set-eq?', 'set-equal?', 'set-eqv?', - 'set-first', 'set-for-each', 'set-implements/c', 'set-implements?', - 'set-intersect', 'set-intersect!', 'set-map', 'set-mcar!', 'set-mcdr!', - 'set-member?', 'set-mutable?', 'set-phantom-bytes!', - 'set-port-next-location!', 'set-remove', 'set-remove!', 'set-rest', - 'set-subtract', 'set-subtract!', 'set-symmetric-difference', - 'set-symmetric-difference!', 'set-union', 'set-union!', 'set-weak?', - 'set/c', 'set=?', 'set?', 'seteq', 'seteqv', 'seventh', 'sgn', - 'shared-bytes', 'shell-execute', 'shrink-path-wrt', 'shuffle', - 'simple-form-path', 'simplify-path', 'sin', 'single-flonum?', 'sinh', - 'sixth', 'skip-projection-wrapper?', 'sleep', - 'some-system-path->string', 'sort', 'special-comment-value', - 'special-comment?', 'special-filter-input-port', 'split-at', - 'split-at-right', 'split-path', 'splitf-at', 'splitf-at-right', 'sqr', - 'sqrt', 'srcloc', 'srcloc->string', 'srcloc-column', 'srcloc-line', - 'srcloc-position', 'srcloc-source', 'srcloc-span', 'srcloc?', - 'stop-after', 'stop-before', 'stream->list', 'stream-add-between', - 'stream-andmap', 'stream-append', 'stream-count', 'stream-empty?', - 'stream-filter', 'stream-first', 'stream-fold', 'stream-for-each', - 'stream-length', 'stream-map', 'stream-ormap', 'stream-ref', - 'stream-rest', 'stream-tail', 'stream?', 'string', - 'string->bytes/latin-1', 'string->bytes/locale', 'string->bytes/utf-8', - 'string->immutable-string', 'string->keyword', 'string->list', - 'string->number', 'string->path', 'string->path-element', - 'string->some-system-path', 'string->symbol', - 'string->uninterned-symbol', 'string->unreadable-symbol', - 'string-append', 'string-append*', 'string-ci<=?', 'string-ci=?', 'string-ci>?', 'string-copy', - 'string-copy!', 'string-downcase', 'string-environment-variable-name?', - 'string-fill!', 'string-foldcase', 'string-join', 'string-len/c', - 'string-length', 'string-locale-ci?', 'string-locale-downcase', 'string-locale-upcase', - 'string-locale?', - 'string-no-nuls?', 'string-normalize-nfc', 'string-normalize-nfd', - 'string-normalize-nfkc', 'string-normalize-nfkd', - 'string-normalize-spaces', 'string-ref', 'string-replace', - 'string-set!', 'string-split', 'string-titlecase', 'string-trim', - 'string-upcase', 'string-utf-8-length', 'string<=?', 'string=?', 'string>?', 'string?', 'struct->vector', - 'struct-accessor-procedure?', 'struct-constructor-procedure?', - 'struct-info', 'struct-mutator-procedure?', - 'struct-predicate-procedure?', 'struct-type-info', - 'struct-type-make-constructor', 'struct-type-make-predicate', - 'struct-type-property-accessor-procedure?', 'struct-type-property/c', - 'struct-type-property?', 'struct-type?', 'struct:arity-at-least', - 'struct:date', 'struct:date*', 'struct:exn', 'struct:exn:break', - 'struct:exn:break:hang-up', 'struct:exn:break:terminate', - 'struct:exn:fail', 'struct:exn:fail:contract', - 'struct:exn:fail:contract:arity', 'struct:exn:fail:contract:blame', - 'struct:exn:fail:contract:continuation', - 
'struct:exn:fail:contract:divide-by-zero', - 'struct:exn:fail:contract:non-fixnum-result', - 'struct:exn:fail:contract:variable', 'struct:exn:fail:filesystem', - 'struct:exn:fail:filesystem:errno', - 'struct:exn:fail:filesystem:exists', - 'struct:exn:fail:filesystem:missing-module', - 'struct:exn:fail:filesystem:version', 'struct:exn:fail:network', - 'struct:exn:fail:network:errno', 'struct:exn:fail:object', - 'struct:exn:fail:out-of-memory', 'struct:exn:fail:read', - 'struct:exn:fail:read:eof', 'struct:exn:fail:read:non-char', - 'struct:exn:fail:syntax', 'struct:exn:fail:syntax:missing-module', - 'struct:exn:fail:syntax:unbound', 'struct:exn:fail:unsupported', - 'struct:exn:fail:user', 'struct:srcloc', - 'struct:wrapped-extra-arg-arrow', 'struct?', 'sub1', 'subbytes', - 'subclass?', 'subclass?/c', 'subprocess', 'subprocess-group-enabled', - 'subprocess-kill', 'subprocess-pid', 'subprocess-status', - 'subprocess-wait', 'subprocess?', 'subset?', 'substring', - 'symbol->string', 'symbol-interned?', 'symbol-unreadable?', 'symboldatum', - 'syntax->list', 'syntax-arm', 'syntax-column', 'syntax-disarm', - 'syntax-e', 'syntax-line', 'syntax-local-bind-syntaxes', - 'syntax-local-certifier', 'syntax-local-context', - 'syntax-local-expand-expression', 'syntax-local-get-shadower', - 'syntax-local-introduce', 'syntax-local-lift-context', - 'syntax-local-lift-expression', - 'syntax-local-lift-module-end-declaration', - 'syntax-local-lift-provide', 'syntax-local-lift-require', - 'syntax-local-lift-values-expression', - 'syntax-local-make-definition-context', - 'syntax-local-make-delta-introducer', - 'syntax-local-module-defined-identifiers', - 'syntax-local-module-exports', - 'syntax-local-module-required-identifiers', 'syntax-local-name', - 'syntax-local-phase-level', 'syntax-local-submodules', - 'syntax-local-transforming-module-provides?', 'syntax-local-value', - 'syntax-local-value/immediate', 'syntax-original?', 'syntax-position', - 'syntax-property', 'syntax-property-symbol-keys', 'syntax-protect', - 'syntax-rearm', 'syntax-recertify', 'syntax-shift-phase-level', - 'syntax-source', 'syntax-source-module', 'syntax-span', 'syntax-taint', - 'syntax-tainted?', 'syntax-track-origin', - 'syntax-transforming-module-expression?', 'syntax-transforming?', - 'syntax/c', 'syntax?', 'system', 'system*', 'system*/exit-code', - 'system-big-endian?', 'system-idle-evt', 'system-language+country', - 'system-library-subpath', 'system-path-convention-type', 'system-type', - 'system/exit-code', 'tail-marks-match?', 'take', 'take-right', 'takef', - 'takef-right', 'tan', 'tanh', 'tcp-abandon-port', 'tcp-accept', - 'tcp-accept-evt', 'tcp-accept-ready?', 'tcp-accept/enable-break', - 'tcp-addresses', 'tcp-close', 'tcp-connect', - 'tcp-connect/enable-break', 'tcp-listen', 'tcp-listener?', 'tcp-port?', - 'tentative-pretty-print-port-cancel', - 'tentative-pretty-print-port-transfer', 'tenth', 'terminal-port?', - 'the-unsupplied-arg', 'third', 'thread', 'thread-cell-ref', - 'thread-cell-set!', 'thread-cell-values?', 'thread-cell?', - 'thread-dead-evt', 'thread-dead?', 'thread-group?', 'thread-receive', - 'thread-receive-evt', 'thread-resume', 'thread-resume-evt', - 'thread-rewind-receive', 'thread-running?', 'thread-send', - 'thread-suspend', 'thread-suspend-evt', 'thread-try-receive', - 'thread-wait', 'thread/suspend-to-kill', 'thread?', 'time-apply', - 'touch', 'transplant-input-port', 'transplant-output-port', 'true', - 'truncate', 'udp-addresses', 'udp-bind!', 'udp-bound?', 'udp-close', - 'udp-connect!', 'udp-connected?', 
'udp-multicast-interface', - 'udp-multicast-join-group!', 'udp-multicast-leave-group!', - 'udp-multicast-loopback?', 'udp-multicast-set-interface!', - 'udp-multicast-set-loopback!', 'udp-multicast-set-ttl!', - 'udp-multicast-ttl', 'udp-open-socket', 'udp-receive!', - 'udp-receive!*', 'udp-receive!-evt', 'udp-receive!/enable-break', - 'udp-receive-ready-evt', 'udp-send', 'udp-send*', 'udp-send-evt', - 'udp-send-ready-evt', 'udp-send-to', 'udp-send-to*', 'udp-send-to-evt', - 'udp-send-to/enable-break', 'udp-send/enable-break', 'udp?', 'unbox', - 'uncaught-exception-handler', 'unit?', 'unspecified-dom', - 'unsupplied-arg?', 'use-collection-link-paths', - 'use-compiled-file-paths', 'use-user-specific-search-paths', - 'user-execute-bit', 'user-read-bit', 'user-write-bit', - 'value-contract', 'values', 'variable-reference->empty-namespace', - 'variable-reference->module-base-phase', - 'variable-reference->module-declaration-inspector', - 'variable-reference->module-path-index', - 'variable-reference->module-source', 'variable-reference->namespace', - 'variable-reference->phase', - 'variable-reference->resolved-module-path', - 'variable-reference-constant?', 'variable-reference?', 'vector', - 'vector->immutable-vector', 'vector->list', - 'vector->pseudo-random-generator', 'vector->pseudo-random-generator!', - 'vector->values', 'vector-append', 'vector-argmax', 'vector-argmin', - 'vector-copy', 'vector-copy!', 'vector-count', 'vector-drop', - 'vector-drop-right', 'vector-fill!', 'vector-filter', - 'vector-filter-not', 'vector-immutable', 'vector-immutable/c', - 'vector-immutableof', 'vector-length', 'vector-map', 'vector-map!', - 'vector-member', 'vector-memq', 'vector-memv', 'vector-ref', - 'vector-set!', 'vector-set*!', 'vector-set-performance-stats!', - 'vector-split-at', 'vector-split-at-right', 'vector-take', - 'vector-take-right', 'vector/c', 'vector?', 'vectorof', 'version', - 'void', 'void?', 'weak-box-value', 'weak-box?', 'weak-set', - 'weak-seteq', 'weak-seteqv', 'will-execute', 'will-executor?', - 'will-register', 'will-try-execute', 'with-input-from-bytes', - 'with-input-from-file', 'with-input-from-string', - 'with-output-to-bytes', 'with-output-to-file', 'with-output-to-string', - 'would-be-future', 'wrap-evt', 'wrapped-extra-arg-arrow', - 'wrapped-extra-arg-arrow-extra-neg-party-argument', - 'wrapped-extra-arg-arrow-real-func', 'wrapped-extra-arg-arrow?', - 'writable<%>', 'write', 'write-byte', 'write-bytes', - 'write-bytes-avail', 'write-bytes-avail*', 'write-bytes-avail-evt', - 'write-bytes-avail/enable-break', 'write-char', 'write-special', - 'write-special-avail*', 'write-special-evt', 'write-string', - 'write-to-file', 'xor', 'zero?', '~.a', '~.s', '~.v', '~a', '~e', '~r', - '~s', '~v' - ] - - _opening_parenthesis = r'[([{]' - _closing_parenthesis = r'[)\]}]' - _delimiters = r'()[\]{}",\'`;\s' - _symbol = r'(?u)(?:\|[^|]*\||\\[\w\W]|[^|\\%s]+)+' % _delimiters - _exact_decimal_prefix = r'(?:#e)?(?:#d)?(?:#e)?' 
- _exponent = r'(?:[defls][-+]?\d+)' - _inexact_simple_no_hashes = r'(?:\d+(?:/\d+|\.\d*)?|\.\d+)' - _inexact_simple = (r'(?:%s|(?:\d+#+(?:\.#*|/\d+#*)?|\.\d+#+|' - r'\d+(?:\.\d*#+|/\d+#+)))' % _inexact_simple_no_hashes) - _inexact_normal_no_hashes = r'(?:%s%s?)' % (_inexact_simple_no_hashes, - _exponent) - _inexact_normal = r'(?:%s%s?)' % (_inexact_simple, _exponent) - _inexact_special = r'(?:(?:inf|nan)\.[0f])' - _inexact_real = r'(?:[-+]?%s|[-+]%s)' % (_inexact_normal, - _inexact_special) - _inexact_unsigned = r'(?:%s|%s)' % (_inexact_normal, _inexact_special) - - tokens = { - 'root': [ - (_closing_parenthesis, Error), - (r'(?!\Z)', Text, 'unquoted-datum') - ], - 'datum': [ - (r'(?s)#;|#![ /]([^\\\n]|\\.)*', Comment), - (u';[^\\n\\r\x85\u2028\u2029]*', Comment.Single), - (r'#\|', Comment.Multiline, 'block-comment'), - - # Whitespaces - (r'(?u)\s+', Text), - - # Numbers: Keep in mind Racket reader hash prefixes, which - # can denote the base or the type. These don't map neatly - # onto Pygments token types; some judgment calls here. - - # #d or no prefix - (r'(?i)%s[-+]?\d+(?=[%s])' % (_exact_decimal_prefix, _delimiters), - Number.Integer, '#pop'), - (r'(?i)%s[-+]?(\d+(\.\d*)?|\.\d+)([deflst][-+]?\d+)?(?=[%s])' % - (_exact_decimal_prefix, _delimiters), Number.Float, '#pop'), - (r'(?i)%s[-+]?(%s([-+]%s?i)?|[-+]%s?i)(?=[%s])' % - (_exact_decimal_prefix, _inexact_normal_no_hashes, - _inexact_normal_no_hashes, _inexact_normal_no_hashes, - _delimiters), Number, '#pop'), - - # Inexact without explicit #i - (r'(?i)(#d)?(%s([-+]%s?i)?|[-+]%s?i|%s@%s)(?=[%s])' % - (_inexact_real, _inexact_unsigned, _inexact_unsigned, - _inexact_real, _inexact_real, _delimiters), Number.Float, - '#pop'), - - # The remaining extflonums - (r'(?i)(([-+]?%st[-+]?\d+)|[-+](inf|nan)\.t)(?=[%s])' % - (_inexact_simple, _delimiters), Number.Float, '#pop'), - - # #b - (r'(?i)(#[ei])?#b%s' % _symbol, Number.Bin, '#pop'), - - # #o - (r'(?i)(#[ei])?#o%s' % _symbol, Number.Oct, '#pop'), - - # #x - (r'(?i)(#[ei])?#x%s' % _symbol, Number.Hex, '#pop'), - - # #i is always inexact, i.e. float - (r'(?i)(#d)?#i%s' % _symbol, Number.Float, '#pop'), - - # Strings and characters - (r'#?"', String.Double, ('#pop', 'string')), - (r'#<<(.+)\n(^(?!\1$).*$\n)*^\1$', String.Heredoc, '#pop'), - (r'#\\(u[\da-fA-F]{1,4}|U[\da-fA-F]{1,8})', String.Char, '#pop'), - (r'(?is)#\\([0-7]{3}|[a-z]+|.)', String.Char, '#pop'), - (r'(?s)#[pr]x#?"(\\?.)*?"', String.Regex, '#pop'), - - # Constants - (r'#(true|false|[tTfF])', Name.Constant, '#pop'), - - # Keyword argument names (e.g. #:keyword) - (r'#:%s' % _symbol, Keyword.Declaration, '#pop'), - - # Reader extensions - (r'(#lang |#!)(\S+)', - bygroups(Keyword.Namespace, Name.Namespace)), - (r'#reader', Keyword.Namespace, 'quoted-datum'), - - # Other syntax - (r"(?i)\.(?=[%s])|#c[is]|#['`]|#,@?" 
% _delimiters, Operator), - (r"'|#[s&]|#hash(eqv?)?|#\d*(?=%s)" % _opening_parenthesis, - Operator, ('#pop', 'quoted-datum')) - ], - 'datum*': [ - (r'`|,@?', Operator), - (_symbol, String.Symbol, '#pop'), - (r'[|\\]', Error), - default('#pop') - ], - 'list': [ - (_closing_parenthesis, Punctuation, '#pop') - ], - 'unquoted-datum': [ - include('datum'), - (r'quote(?=[%s])' % _delimiters, Keyword, - ('#pop', 'quoted-datum')), - (r'`', Operator, ('#pop', 'quasiquoted-datum')), - (r'quasiquote(?=[%s])' % _delimiters, Keyword, - ('#pop', 'quasiquoted-datum')), - (_opening_parenthesis, Punctuation, ('#pop', 'unquoted-list')), - (r'(?u)(%s)(?=[%s])' % ('|'.join( - [re.escape(entry) for entry in _keywords]), _delimiters), - Keyword, '#pop'), - (r'(?u)(%s)(?=[%s])' % ('|'.join( - [re.escape(entry) for entry in _builtins]), _delimiters), - Name.Builtin, '#pop'), - (_symbol, Name, '#pop'), - include('datum*') - ], - 'unquoted-list': [ - include('list'), - (r'(?!\Z)', Text, 'unquoted-datum') - ], - 'quasiquoted-datum': [ - include('datum'), - (r',@?', Operator, ('#pop', 'unquoted-datum')), - (r'unquote(-splicing)?(?=[%s])' % _delimiters, Keyword, - ('#pop', 'unquoted-datum')), - (_opening_parenthesis, Punctuation, ('#pop', 'quasiquoted-list')), - include('datum*') - ], - 'quasiquoted-list': [ - include('list'), - (r'(?!\Z)', Text, 'quasiquoted-datum') - ], - 'quoted-datum': [ - include('datum'), - (_opening_parenthesis, Punctuation, ('#pop', 'quoted-list')), - include('datum*') - ], - 'quoted-list': [ - include('list'), - (r'(?!\Z)', Text, 'quoted-datum') - ], - 'block-comment': [ - (r'#\|', Comment.Multiline, '#push'), - (r'\|#', Comment.Multiline, '#pop'), - (r'[^#|]+|.', Comment.Multiline) - ], - 'string': [ - (r'"', String.Double, '#pop'), - (r'(?s)\\([0-7]{1,3}|x[\da-fA-F]{1,2}|u[\da-fA-F]{1,4}|' - r'U[\da-fA-F]{1,8}|.)', String.Escape), - (r'[^\\"]+', String.Double) - ] - } - - -class SchemeLexer(RegexLexer): - """ - A Scheme lexer, parsing a stream and outputting the tokens - needed to highlight scheme code. - This lexer could be most probably easily subclassed to parse - other LISP-Dialects like Common Lisp, Emacs Lisp or AutoLisp. - - This parser is checked with pastes from the LISP pastebin - at http://paste.lisp.org/ to cover as much syntax as possible. - - It supports the full Scheme syntax as defined in R5RS. - - .. versionadded:: 0.6 - """ - name = 'Scheme' - aliases = ['scheme', 'scm'] - filenames = ['*.scm', '*.ss'] - mimetypes = ['text/x-scheme', 'application/x-scheme'] - - # list of known keywords and builtins taken form vim 6.4 scheme.vim - # syntax file. 
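As context for the lexer definitions in this hunk: a minimal sketch of how a Pygments lexer such as the SchemeLexer is driven end to end. This assumes only a stock Pygments install; the sample source string is arbitrary and illustrative only.

    from pygments import highlight
    from pygments.formatters import TerminalFormatter
    from pygments.lexers import SchemeLexer

    code = "(define (square x) (* x x))"
    # highlight() runs the lexer over the source text and feeds the
    # resulting token stream to the formatter.
    print(highlight(code, SchemeLexer(), TerminalFormatter()))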
- keywords = [ - 'lambda', 'define', 'if', 'else', 'cond', 'and', 'or', 'case', 'let', - 'let*', 'letrec', 'begin', 'do', 'delay', 'set!', '=>', 'quote', - 'quasiquote', 'unquote', 'unquote-splicing', 'define-syntax', - 'let-syntax', 'letrec-syntax', 'syntax-rules' - ] - builtins = [ - '*', '+', '-', '/', '<', '<=', '=', '>', '>=', 'abs', 'acos', 'angle', - 'append', 'apply', 'asin', 'assoc', 'assq', 'assv', 'atan', - 'boolean?', 'caaaar', 'caaadr', 'caaar', 'caadar', 'caaddr', 'caadr', - 'caar', 'cadaar', 'cadadr', 'cadar', 'caddar', 'cadddr', 'caddr', - 'cadr', 'call-with-current-continuation', 'call-with-input-file', - 'call-with-output-file', 'call-with-values', 'call/cc', 'car', - 'cdaaar', 'cdaadr', 'cdaar', 'cdadar', 'cdaddr', 'cdadr', 'cdar', - 'cddaar', 'cddadr', 'cddar', 'cdddar', 'cddddr', 'cdddr', 'cddr', - 'cdr', 'ceiling', 'char->integer', 'char-alphabetic?', 'char-ci<=?', - 'char-ci<?', 'char-ci=?', 'char-ci>=?', 'char-ci>?', 'char-downcase', - 'char-lower-case?', 'char-numeric?', 'char-ready?', 'char-upcase', - 'char-upper-case?', 'char-whitespace?', 'char<=?', 'char<?', 'char=?', 'char>=?', 'char>?', 'char?', 'close-input-port', 'close-output-port', - 'complex?', 'cons', 'cos', 'current-input-port', 'current-output-port', - 'denominator', 'display', 'dynamic-wind', 'eof-object?', 'eq?', - 'equal?', 'eqv?', 'eval', 'even?', 'exact->inexact', 'exact?', 'exp', - 'expt', 'floor', 'for-each', 'force', 'gcd', 'imag-part', - 'inexact->exact', 'inexact?', 'input-port?', 'integer->char', - 'integer?', 'interaction-environment', 'lcm', 'length', 'list', - 'list->string', 'list->vector', 'list-ref', 'list-tail', 'list?', - 'load', 'log', 'magnitude', 'make-polar', 'make-rectangular', - 'make-string', 'make-vector', 'map', 'max', 'member', 'memq', 'memv', - 'min', 'modulo', 'negative?', 'newline', 'not', 'null-environment', - 'null?', 'number->string', 'number?', 'numerator', 'odd?', - 'open-input-file', 'open-output-file', 'output-port?', 'pair?', - 'peek-char', 'port?', 'positive?', 'procedure?', 'quotient', - 'rational?', 'rationalize', 'read', 'read-char', 'real-part', 'real?', - 'remainder', 'reverse', 'round', 'scheme-report-environment', - 'set-car!', 'set-cdr!', 'sin', 'sqrt', 'string', 'string->list', - 'string->number', 'string->symbol', 'string-append', 'string-ci<=?', - 'string-ci<?', 'string-ci=?', 'string-ci>=?', 'string-ci>?', - 'string-copy', 'string-fill!', 'string-length', 'string-ref', - 'string-set!', 'string<=?', 'string<?', 'string=?', - 'string>=?', 'string>?', 'string?', 'substring', 'symbol->string', 'symbol?', - 'tan', 'transcript-off', 'transcript-on', 'truncate', 'values', - 'vector', 'vector->list', 'vector-fill!', 'vector-length', - 'vector-ref', 'vector-set!', 'vector?', 'with-input-from-file', - 'with-output-to-file', 'write', 'write-char', 'zero?'
- ] - - # valid names for identifiers - # well, names can only not consist fully of numbers - # but this should be good enough for now - valid_name = r'[a-zA-Z0-9!$%&*+,/:<=>?@^_~|-]+' - - tokens = { - 'root' : [ - # the comments - # and going to the end of the line - (r';.*$', Comment.Single), - # multi-line comment - (r'#\|', Comment.Multiline, 'multiline-comment'), - # commented form (entire sexpr folliwng) - (r'#;\s*\(', Comment, 'commented-form'), - # signifies that the program text that follows is written with the - # lexical and datum syntax described in r6rs - (r'#!r6rs', Comment), - - # whitespaces - usually not relevant - (r'\s+', Text), - - # numbers - (r'-?\d+\.\d+', Number.Float), - (r'-?\d+', Number.Integer), - # support for uncommon kinds of numbers - - # have to figure out what the characters mean - #(r'(#e|#i|#b|#o|#d|#x)[\d.]+', Number), - - # strings, symbols and characters - (r'"(\\\\|\\"|[^"])*"', String), - (r"'" + valid_name, String.Symbol), - (r"#\\([()/'\"._!§$%& ?=+-]{1}|[a-zA-Z0-9]+)", String.Char), - - # constants - (r'(#t|#f)', Name.Constant), - - # special operators - (r"('|#|`|,@|,|\.)", Operator), - - # highlight the keywords - ('(%s)' % '|'.join([ - re.escape(entry) + ' ' for entry in keywords]), - Keyword - ), - - # first variable in a quoted string like - # '(this is syntactic sugar) - (r"(?<='\()" + valid_name, Name.Variable), - (r"(?<=#\()" + valid_name, Name.Variable), - - # highlight the builtins - ("(?<=\()(%s)" % '|'.join([ - re.escape(entry) + ' ' for entry in builtins]), - Name.Builtin - ), - - # the remaining functions - (r'(?<=\()' + valid_name, Name.Function), - # find the remaining variables - (valid_name, Name.Variable), - - # the famous parentheses! - (r'(\(|\))', Punctuation), - (r'(\[|\])', Punctuation), - ], - 'multiline-comment' : [ - (r'#\|', Comment.Multiline, '#push'), - (r'\|#', Comment.Multiline, '#pop'), - (r'[^|#]+', Comment.Multiline), - (r'[|#]', Comment.Multiline), - ], - 'commented-form' : [ - (r'\(', Comment, '#push'), - (r'\)', Comment, '#pop'), - (r'[^()]+', Comment), - ], - } - - -class CommonLispLexer(RegexLexer): - """ - A Common Lisp lexer. - - .. versionadded:: 0.9 - """ - name = 'Common Lisp' - aliases = ['common-lisp', 'cl', 'lisp', 'elisp', 'emacs', 'emacs-lisp'] - filenames = ['*.cl', '*.lisp', '*.el'] # use for Elisp too - mimetypes = ['text/x-common-lisp'] - - flags = re.IGNORECASE | re.MULTILINE - - ### couple of useful regexes - - # characters that are not macro-characters and can be used to begin a symbol - nonmacro = r'\\.|[\w!$%&*+-/<=>?@\[\]^{}~]' - constituent = nonmacro + '|[#.:]' - terminated = r'(?=[ "()\'\n,;`])' # whitespace or terminating macro characters - - ### symbol token, reverse-engineered from hyperspec - # Take a deep breath... 
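As a rough check of the Common Lisp symbol pattern pieced together from `nonmacro` and `constituent` above (the full `symbol` regex is defined immediately below), a standalone sketch; the regex strings are copied from this hunk and the test inputs are arbitrary.

    import re

    nonmacro = r'\\.|[\w!$%&*+-/<=>?@\[\]^{}~]'
    constituent = nonmacro + '|[#.:]'
    # Same construction as the lexer's `symbol`: either a |...| escaped
    # symbol, or a run of constituents starting with a non-macro character.
    symbol = r'(\|[^|]+\||(?:%s)(?:%s)*)' % (nonmacro, constituent)

    for s in ('*print-length*', 'with-open-file', '|odd symbol|'):
        m = re.match(symbol, s)
        print(s, '->', m.group(0) if m else None)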
- symbol = r'(\|[^|]+\||(?:%s)(?:%s)*)' % (nonmacro, constituent) - - def __init__(self, **options): - from pygments.lexers._clbuiltins import BUILTIN_FUNCTIONS, \ - SPECIAL_FORMS, MACROS, LAMBDA_LIST_KEYWORDS, DECLARATIONS, \ - BUILTIN_TYPES, BUILTIN_CLASSES - self.builtin_function = BUILTIN_FUNCTIONS - self.special_forms = SPECIAL_FORMS - self.macros = MACROS - self.lambda_list_keywords = LAMBDA_LIST_KEYWORDS - self.declarations = DECLARATIONS - self.builtin_types = BUILTIN_TYPES - self.builtin_classes = BUILTIN_CLASSES - RegexLexer.__init__(self, **options) - - def get_tokens_unprocessed(self, text): - stack = ['root'] - for index, token, value in RegexLexer.get_tokens_unprocessed(self, text, stack): - if token is Name.Variable: - if value in self.builtin_function: - yield index, Name.Builtin, value - continue - if value in self.special_forms: - yield index, Keyword, value - continue - if value in self.macros: - yield index, Name.Builtin, value - continue - if value in self.lambda_list_keywords: - yield index, Keyword, value - continue - if value in self.declarations: - yield index, Keyword, value - continue - if value in self.builtin_types: - yield index, Keyword.Type, value - continue - if value in self.builtin_classes: - yield index, Name.Class, value - continue - yield index, token, value - - tokens = { - 'root' : [ - ('', Text, 'body'), - ], - 'multiline-comment' : [ - (r'#\|', Comment.Multiline, '#push'), # (cf. Hyperspec 2.4.8.19) - (r'\|#', Comment.Multiline, '#pop'), - (r'[^|#]+', Comment.Multiline), - (r'[|#]', Comment.Multiline), - ], - 'commented-form' : [ - (r'\(', Comment.Preproc, '#push'), - (r'\)', Comment.Preproc, '#pop'), - (r'[^()]+', Comment.Preproc), - ], - 'body' : [ - # whitespace - (r'\s+', Text), - - # single-line comment - (r';.*$', Comment.Single), - - # multi-line comment - (r'#\|', Comment.Multiline, 'multiline-comment'), - - # encoding comment (?) - (r'#\d*Y.*$', Comment.Special), - - # strings and characters - (r'"(\\.|\\\n|[^"\\])*"', String), - # quoting - (r":" + symbol, String.Symbol), - (r"::" + symbol, String.Symbol), - (r":#" + symbol, String.Symbol), - (r"'" + symbol, String.Symbol), - (r"'", Operator), - (r"`", Operator), - - # decimal numbers - (r'[-+]?\d+\.?' + terminated, Number.Integer), - (r'[-+]?\d+/\d+' + terminated, Number), - (r'[-+]?(\d*\.\d+([defls][-+]?\d+)?|\d+(\.\d*)?[defls][-+]?\d+)' \ - + terminated, Number.Float), - - # sharpsign strings and characters - (r"#\\." 
+ terminated, String.Char), - (r"#\\" + symbol, String.Char), - - # vector - (r'#\(', Operator, 'body'), - - # bitstring - (r'#\d*\*[01]*', Literal.Other), - - # uninterned symbol - (r'#:' + symbol, String.Symbol), - - # read-time and load-time evaluation - (r'#[.,]', Operator), - - # function shorthand - (r'#\'', Name.Function), - - # binary rational - (r'#[bB][+-]?[01]+(/[01]+)?', Number.Bin), - - # octal rational - (r'#[oO][+-]?[0-7]+(/[0-7]+)?', Number.Oct), - - # hex rational - (r'#[xX][+-]?[0-9a-f]+(/[0-9a-f]+)?', Number.Hex), - - # radix rational - (r'#\d+[rR][+-]?[0-9a-z]+(/[0-9a-z]+)?', Number), - - # complex - (r'(#[cC])(\()', bygroups(Number, Punctuation), 'body'), - - # array - (r'(#\d+[aA])(\()', bygroups(Literal.Other, Punctuation), 'body'), - - # structure - (r'(#[sS])(\()', bygroups(Literal.Other, Punctuation), 'body'), - - # path - (r'#[pP]?"(\\.|[^"])*"', Literal.Other), - - # reference - (r'#\d+=', Operator), - (r'#\d+#', Operator), - - # read-time comment - (r'#+nil' + terminated + '\s*\(', Comment.Preproc, 'commented-form'), - - # read-time conditional - (r'#[+-]', Operator), - - # special operators that should have been parsed already - (r'(,@|,|\.)', Operator), - - # special constants - (r'(t|nil)' + terminated, Name.Constant), - - # functions and variables - (r'\*' + symbol + '\*', Name.Variable.Global), - (symbol, Name.Variable), - - # parentheses - (r'\(', Punctuation, 'body'), - (r'\)', Punctuation, '#pop'), - ], - } - - -class CryptolLexer(RegexLexer): - """ - FIXME: A Cryptol2 lexer based on the lexemes defined in the Haskell 98 Report. - - .. versionadded:: 2.0 - """ - name = 'Cryptol' - aliases = ['cryptol', 'cry'] - filenames = ['*.cry'] - mimetypes = ['text/x-cryptol'] - - reserved = ['Arith','Bit','Cmp','False','Inf','True','else', - 'export','extern','fin','if','import','inf','lg2', - 'max','min','module','newtype','pragma','property', - 'then','type','where','width'] - ascii = ['NUL','SOH','[SE]TX','EOT','ENQ','ACK', - 'BEL','BS','HT','LF','VT','FF','CR','S[OI]','DLE', - 'DC[1-4]','NAK','SYN','ETB','CAN', - 'EM','SUB','ESC','[FGRU]S','SP','DEL'] - - tokens = { - 'root': [ - # Whitespace: - (r'\s+', Text), - #(r'--\s*|.*$', Comment.Doc), - (r'//.*$', Comment.Single), - (r'/\*', Comment.Multiline, 'comment'), - # Lexemes: - # Identifiers - (r'\bimport\b', Keyword.Reserved, 'import'), - (r'\bmodule\b', Keyword.Reserved, 'module'), - (r'\berror\b', Name.Exception), - (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved), - (r'^[_a-z][\w\']*', Name.Function), - (r"'?[_a-z][\w']*", Name), - (r"('')?[A-Z][\w\']*", Keyword.Type), - # Operators - (r'\\(?![:!#$%&*+.\\/<=>?@^|~-]+)', Name.Function), # lambda operator - (r'(<-|::|->|=>|=)(?![:!#$%&*+.\\/<=>?@^|~-]+)', Operator.Word), # specials - (r':[:!#$%&*+.\\/<=>?@^|~-]*', Keyword.Type), # Constructor operators - (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator), # Other operators - # Numbers - (r'\d+[eE][+-]?\d+', Number.Float), - (r'\d+\.\d+([eE][+-]?\d+)?', Number.Float), - (r'0[oO][0-7]+', Number.Oct), - (r'0[xX][\da-fA-F]+', Number.Hex), - (r'\d+', Number.Integer), - # Character/String Literals - (r"'", String.Char, 'character'), - (r'"', String, 'string'), - # Special - (r'\[\]', Keyword.Type), - (r'\(\)', Name.Builtin), - (r'[][(),;`{}]', Punctuation), - ], - 'import': [ - # Import statements - (r'\s+', Text), - (r'"', String, 'string'), - # after "funclist" state - (r'\)', Punctuation, '#pop'), - (r'qualified\b', Keyword), - # import X as Y - (r'([A-Z][a-zA-Z0-9_.]*)(\s+)(as)(\s+)([A-Z][a-zA-Z0-9_.]*)', - 
bygroups(Name.Namespace, Text, Keyword, Text, Name), '#pop'), - # import X hiding (functions) - (r'([A-Z][a-zA-Z0-9_.]*)(\s+)(hiding)(\s+)(\()', - bygroups(Name.Namespace, Text, Keyword, Text, Punctuation), 'funclist'), - # import X (functions) - (r'([A-Z][a-zA-Z0-9_.]*)(\s+)(\()', - bygroups(Name.Namespace, Text, Punctuation), 'funclist'), - # import X - (r'[a-zA-Z0-9_.]+', Name.Namespace, '#pop'), - ], - 'module': [ - (r'\s+', Text), - (r'([A-Z][a-zA-Z0-9_.]*)(\s+)(\()', - bygroups(Name.Namespace, Text, Punctuation), 'funclist'), - (r'[A-Z][a-zA-Z0-9_.]*', Name.Namespace, '#pop'), - ], - 'funclist': [ - (r'\s+', Text), - (r'[A-Z][a-zA-Z0-9_]*', Keyword.Type), - (r'(_[\w\']+|[a-z][\w\']*)', Name.Function), - (r'--(?![!#$%&*+./<=>?@\^|_~:\\]).*?$', Comment.Single), - (r'{-', Comment.Multiline, 'comment'), - (r',', Punctuation), - (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator), - # (HACK, but it makes sense to push two instances, believe me) - (r'\(', Punctuation, ('funclist', 'funclist')), - (r'\)', Punctuation, '#pop:2'), - ], - 'comment': [ - # Multiline Comments - (r'[^/\*]+', Comment.Multiline), - (r'/\*', Comment.Multiline, '#push'), - (r'\*/', Comment.Multiline, '#pop'), - (r'[\*/]', Comment.Multiline), - ], - 'character': [ - # Allows multi-chars, incorrectly. - (r"[^\\']'", String.Char, '#pop'), - (r"\\", String.Escape, 'escape'), - ("'", String.Char, '#pop'), - ], - 'string': [ - (r'[^\\"]+', String), - (r"\\", String.Escape, 'escape'), - ('"', String, '#pop'), - ], - 'escape': [ - (r'[abfnrtv"\'&\\]', String.Escape, '#pop'), - (r'\^[][A-Z@\^_]', String.Escape, '#pop'), - ('|'.join(ascii), String.Escape, '#pop'), - (r'o[0-7]+', String.Escape, '#pop'), - (r'x[\da-fA-F]+', String.Escape, '#pop'), - (r'\d+', String.Escape, '#pop'), - (r'\s+\\', String.Escape, '#pop'), - ], - } - - EXTRA_KEYWORDS = ['join', 'split', 'reverse', 'transpose', 'width', - 'length', 'tail', '<<', '>>', '<<<', '>>>', 'const', - 'reg', 'par', 'seq', 'ASSERT', 'undefined', 'error', - 'trace'] - - def get_tokens_unprocessed(self, text): - stack = ['root'] - for index, token, value in \ - RegexLexer.get_tokens_unprocessed(self, text, stack): - if token is Name and value in self.EXTRA_KEYWORDS: - yield index, Name.Builtin, value - else: - yield index, token, value - - -class HaskellLexer(RegexLexer): - """ - A Haskell lexer based on the lexemes defined in the Haskell 98 Report. - - .. 
versionadded:: 0.8 - """ - name = 'Haskell' - aliases = ['haskell', 'hs'] - filenames = ['*.hs'] - mimetypes = ['text/x-haskell'] - - flags = re.MULTILINE | re.UNICODE - - reserved = ['case','class','data','default','deriving','do','else', - 'if','in','infix[lr]?','instance', - 'let','newtype','of','then','type','where','_'] - ascii = ['NUL','SOH','[SE]TX','EOT','ENQ','ACK', - 'BEL','BS','HT','LF','VT','FF','CR','S[OI]','DLE', - 'DC[1-4]','NAK','SYN','ETB','CAN', - 'EM','SUB','ESC','[FGRU]S','SP','DEL'] - - tokens = { - 'root': [ - # Whitespace: - (r'\s+', Text), - #(r'--\s*|.*$', Comment.Doc), - (r'--(?![!#$%&*+./<=>?@\^|_~:\\]).*?$', Comment.Single), - (r'{-', Comment.Multiline, 'comment'), - # Lexemes: - # Identifiers - (r'\bimport\b', Keyword.Reserved, 'import'), - (r'\bmodule\b', Keyword.Reserved, 'module'), - (r'\berror\b', Name.Exception), - (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved), - (r'^[_' + uni.Ll + r'][\w\']*', Name.Function), - (r"'?[_" + uni.Ll + r"'][\w']*", Name), - (r"('')?[" + uni.Lu + r"][\w\']*", Keyword.Type), - # Operators - (r'\\(?![:!#$%&*+.\\/<=>?@^|~-]+)', Name.Function), # lambda operator - (r'(<-|::|->|=>|=)(?![:!#$%&*+.\\/<=>?@^|~-]+)', Operator.Word), # specials - (r':[:!#$%&*+.\\/<=>?@^|~-]*', Keyword.Type), # Constructor operators - (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator), # Other operators - # Numbers - (r'\d+[eE][+-]?\d+', Number.Float), - (r'\d+\.\d+([eE][+-]?\d+)?', Number.Float), - (r'0[oO][0-7]+', Number.Oct), - (r'0[xX][\da-fA-F]+', Number.Hex), - (r'\d+', Number.Integer), - # Character/String Literals - (r"'", String.Char, 'character'), - (r'"', String, 'string'), - # Special - (r'\[\]', Keyword.Type), - (r'\(\)', Name.Builtin), - (r'[][(),;`{}]', Punctuation), - ], - 'import': [ - # Import statements - (r'\s+', Text), - (r'"', String, 'string'), - # after "funclist" state - (r'\)', Punctuation, '#pop'), - (r'qualified\b', Keyword), - # import X as Y - (r'([' + uni.Lu + r'][\w.]*)(\s+)(as)(\s+)([' + uni.Lu + r'][\w.]*)', - bygroups(Name.Namespace, Text, Keyword, Text, Name), '#pop'), - # import X hiding (functions) - (r'([' + uni.Lu + r'][\w.]*)(\s+)(hiding)(\s+)(\()', - bygroups(Name.Namespace, Text, Keyword, Text, Punctuation), 'funclist'), - # import X (functions) - (r'([' + uni.Lu + r'][\w.]*)(\s+)(\()', - bygroups(Name.Namespace, Text, Punctuation), 'funclist'), - # import X - (r'[\w.]+', Name.Namespace, '#pop'), - ], - 'module': [ - (r'\s+', Text), - (r'([' + uni.Lu + r'][\w.]*)(\s+)(\()', - bygroups(Name.Namespace, Text, Punctuation), 'funclist'), - (r'[' + uni.Lu + r'][\w.]*', Name.Namespace, '#pop'), - ], - 'funclist': [ - (r'\s+', Text), - (r'[' + uni.Lu + r']\w*', Keyword.Type), - (r'(_[\w\']+|[' + uni.Ll + r'][\w\']*)', Name.Function), - (r'--(?![!#$%&*+./<=>?@\^|_~:\\]).*?$', Comment.Single), - (r'{-', Comment.Multiline, 'comment'), - (r',', Punctuation), - (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator), - # (HACK, but it makes sense to push two instances, believe me) - (r'\(', Punctuation, ('funclist', 'funclist')), - (r'\)', Punctuation, '#pop:2'), - ], - # NOTE: the next four states are shared in the AgdaLexer; make sure - # any change is compatible with Agda as well or copy over and change - 'comment': [ - # Multiline Comments - (r'[^-{}]+', Comment.Multiline), - (r'{-', Comment.Multiline, '#push'), - (r'-}', Comment.Multiline, '#pop'), - (r'[-{}]', Comment.Multiline), - ], - 'character': [ - # Allows multi-chars, incorrectly. 
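The NOTE above points out that the comment, character, string, and escape states are shared with the AgdaLexer (which, further down in this hunk, simply references HaskellLexer.tokens for them). A minimal sketch of that reuse pattern; the class name here is hypothetical and only illustrates the idea.

    from pygments.lexer import RegexLexer
    from pygments.lexers import HaskellLexer
    from pygments.token import Comment, Text

    class MiniCommentLexer(RegexLexer):
        """Hypothetical lexer borrowing HaskellLexer's comment machinery."""
        name = 'MiniComment'
        tokens = {
            'root': [
                (r'{-', Comment.Multiline, 'comment'),
                (r'[^{]+|.', Text),
            ],
            # Reuse the nested-comment state verbatim; any fix to
            # HaskellLexer's 'comment' rules is picked up here as well.
            'comment': HaskellLexer.tokens['comment'],
        }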
- (r"[^\\']'", String.Char, '#pop'), - (r"\\", String.Escape, 'escape'), - ("'", String.Char, '#pop'), - ], - 'string': [ - (r'[^\\"]+', String), - (r"\\", String.Escape, 'escape'), - ('"', String, '#pop'), - ], - 'escape': [ - (r'[abfnrtv"\'&\\]', String.Escape, '#pop'), - (r'\^[][' + uni.Lu + r'@\^_]', String.Escape, '#pop'), - ('|'.join(ascii), String.Escape, '#pop'), - (r'o[0-7]+', String.Escape, '#pop'), - (r'x[\da-fA-F]+', String.Escape, '#pop'), - (r'\d+', String.Escape, '#pop'), - (r'\s+\\', String.Escape, '#pop'), - ], - } - - -class IdrisLexer(RegexLexer): - """ - A lexer for the dependently typed programming language Idris. - - Based on the Haskell and Agda Lexer. - - .. versionadded:: 2.0 - """ - name = 'Idris' - aliases = ['idris', 'idr'] - filenames = ['*.idr'] - mimetypes = ['text/x-idris'] - - reserved = ['case','class','data','default','using','do','else', - 'if','in','infix[lr]?','instance','rewrite','auto', - 'namespace','codata','mutual','private','public','abstract', - 'total','partial', - 'let','proof','of','then','static','where','_','with', - 'pattern', 'term', 'syntax','prefix', - 'postulate','parameters','record','dsl','impossible','implicit', - 'tactics','intros','intro','compute','refine','exact','trivial'] - - ascii = ['NUL','SOH','[SE]TX','EOT','ENQ','ACK', - 'BEL','BS','HT','LF','VT','FF','CR','S[OI]','DLE', - 'DC[1-4]','NAK','SYN','ETB','CAN', - 'EM','SUB','ESC','[FGRU]S','SP','DEL'] - - directives = ['lib','link','flag','include','hide','freeze','access', - 'default','logging','dynamic','name','error_handlers','language'] - - tokens = { - 'root': [ - # Comments - (r'^(\s*)(%%%s)' % '|'.join(directives), - bygroups(Text, Keyword.Reserved)), - (r'(\s*)(--(?![!#$%&*+./<=>?@\^|_~:\\]).*?)$', bygroups(Text, Comment.Single)), - (r'(\s*)(\|{3}.*?)$', bygroups(Text, Comment.Single)), - (r'(\s*)({-)', bygroups(Text, Comment.Multiline), 'comment'), - # Declaration - (r'^(\s*)([^\s\(\)\{\}]+)(\s*)(:)(\s*)', - bygroups(Text, Name.Function, Text, Operator.Word, Text)), - # Identifiers - (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved), - (r'(import|module)(\s+)', bygroups(Keyword.Reserved, Text), 'module'), - (r"('')?[A-Z][\w\']*", Keyword.Type), - (r'[a-z][\w\']*', Text), - # Special Symbols - (r'(<-|::|->|=>|=)', Operator.Word), # specials - (r'([\(\)\{\}\[\]:!#$%&*+.\\/<=>?@^|~-]+)', Operator.Word), # specials - # Numbers - (r'\d+[eE][+-]?\d+', Number.Float), - (r'\d+\.\d+([eE][+-]?\d+)?', Number.Float), - (r'0[xX][\da-fA-F]+', Number.Hex), - (r'\d+', Number.Integer), - # Strings - (r"'", String.Char, 'character'), - (r'"', String, 'string'), - (r'[^\s\(\)\{\}]+', Text), - (r'\s+?', Text), # Whitespace - ], - 'module': [ - (r'\s+', Text), - (r'([A-Z][\w.]*)(\s+)(\()', - bygroups(Name.Namespace, Text, Punctuation), 'funclist'), - (r'[A-Z][\w.]*', Name.Namespace, '#pop'), - ], - 'funclist': [ - (r'\s+', Text), - (r'[A-Z]\w*', Keyword.Type), - (r'(_[\w\']+|[a-z][\w\']*)', Name.Function), - (r'--.*$', Comment.Single), - (r'{-', Comment.Multiline, 'comment'), - (r',', Punctuation), - (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator), - # (HACK, but it makes sense to push two instances, believe me) - (r'\(', Punctuation, ('funclist', 'funclist')), - (r'\)', Punctuation, '#pop:2'), - ], - # NOTE: the next four states are shared in the AgdaLexer; make sure - # any change is compatible with Agda as well or copy over and change - 'comment': [ - # Multiline Comments - (r'[^-{}]+', Comment.Multiline), - (r'{-', Comment.Multiline, '#push'), - (r'-}', Comment.Multiline, '#pop'), - 
(r'[-{}]', Comment.Multiline), - ], - 'character': [ - # Allows multi-chars, incorrectly. - (r"[^\\']", String.Char), - (r"\\", String.Escape, 'escape'), - ("'", String.Char, '#pop'), - ], - 'string': [ - (r'[^\\"]+', String), - (r"\\", String.Escape, 'escape'), - ('"', String, '#pop'), - ], - 'escape': [ - (r'[abfnrtv"\'&\\]', String.Escape, '#pop'), - (r'\^[][A-Z@\^_]', String.Escape, '#pop'), - ('|'.join(ascii), String.Escape, '#pop'), - (r'o[0-7]+', String.Escape, '#pop'), - (r'x[\da-fA-F]+', String.Escape, '#pop'), - (r'\d+', String.Escape, '#pop'), - (r'\s+\\', String.Escape, '#pop') - ], - } - - -class AgdaLexer(RegexLexer): - """ - For the `Agda `_ - dependently typed functional programming language and proof assistant. - - .. versionadded:: 2.0 - """ - - name = 'Agda' - aliases = ['agda'] - filenames = ['*.agda'] - mimetypes = ['text/x-agda'] - - reserved = ['abstract', 'codata', 'coinductive', 'constructor', 'data', - 'field', 'forall', 'hiding', 'in', 'inductive', 'infix', - 'infixl', 'infixr', 'let', 'open', 'pattern', 'primitive', - 'private', 'mutual', 'quote', 'quoteGoal', 'quoteTerm', - 'record', 'syntax', 'rewrite', 'unquote', 'using', 'where', - 'with'] - - tokens = { - 'root': [ - # Declaration - (r'^(\s*)([^\s\(\)\{\}]+)(\s*)(:)(\s*)', - bygroups(Text, Name.Function, Text, Operator.Word, Text)), - # Comments - (r'--(?![!#$%&*+./<=>?@\^|_~:\\]).*?$', Comment.Single), - (r'{-', Comment.Multiline, 'comment'), - # Holes - (r'{!', Comment.Directive, 'hole'), - # Lexemes: - # Identifiers - (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved), - (r'(import|module)(\s+)', bygroups(Keyword.Reserved, Text), 'module'), - (r'\b(Set|Prop)\b', Keyword.Type), - # Special Symbols - (r'(\(|\)|\{|\})', Operator), - (u'(\\.{1,3}|\\||\u039B|\u2200|\u2192|:|=|->)', Operator.Word), - # Numbers - (r'\d+[eE][+-]?\d+', Number.Float), - (r'\d+\.\d+([eE][+-]?\d+)?', Number.Float), - (r'0[xX][\da-fA-F]+', Number.Hex), - (r'\d+', Number.Integer), - # Strings - (r"'", String.Char, 'character'), - (r'"', String, 'string'), - (r'[^\s\(\)\{\}]+', Text), - (r'\s+?', Text), # Whitespace - ], - 'hole': [ - # Holes - (r'[^!{}]+', Comment.Directive), - (r'{!', Comment.Directive, '#push'), - (r'!}', Comment.Directive, '#pop'), - (r'[!{}]', Comment.Directive), - ], - 'module': [ - (r'{-', Comment.Multiline, 'comment'), - (r'[a-zA-Z][\w.]*', Name, '#pop'), - (r'[^a-zA-Z]*', Text) - ], - 'comment': HaskellLexer.tokens['comment'], - 'character': HaskellLexer.tokens['character'], - 'string': HaskellLexer.tokens['string'], - 'escape': HaskellLexer.tokens['escape'] - } - - -class LiterateLexer(Lexer): - """ - Base class for lexers of literate file formats based on LaTeX or Bird-style - (prefixing each code line with ">"). - - Additional options accepted: - - `litstyle` - If given, must be ``"bird"`` or ``"latex"``. If not given, the style - is autodetected: if the first non-whitespace character in the source - is a backslash or percent character, LaTeX is assumed, else Bird. 
- """ - - bird_re = re.compile(r'(>[ \t]*)(.*\n)') - - def __init__(self, baselexer, **options): - self.baselexer = baselexer - Lexer.__init__(self, **options) - - def get_tokens_unprocessed(self, text): - style = self.options.get('litstyle') - if style is None: - style = (text.lstrip()[0:1] in '%\\') and 'latex' or 'bird' - - code = '' - insertions = [] - if style == 'bird': - # bird-style - for match in line_re.finditer(text): - line = match.group() - m = self.bird_re.match(line) - if m: - insertions.append((len(code), - [(0, Comment.Special, m.group(1))])) - code += m.group(2) - else: - insertions.append((len(code), [(0, Text, line)])) - else: - # latex-style - from pygments.lexers.text import TexLexer - lxlexer = TexLexer(**self.options) - codelines = 0 - latex = '' - for match in line_re.finditer(text): - line = match.group() - if codelines: - if line.lstrip().startswith('\\end{code}'): - codelines = 0 - latex += line - else: - code += line - elif line.lstrip().startswith('\\begin{code}'): - codelines = 1 - latex += line - insertions.append((len(code), - list(lxlexer.get_tokens_unprocessed(latex)))) - latex = '' - else: - latex += line - insertions.append((len(code), - list(lxlexer.get_tokens_unprocessed(latex)))) - for item in do_insertions(insertions, self.baselexer.get_tokens_unprocessed(code)): - yield item - - -class LiterateCryptolLexer(LiterateLexer): - """ - For Literate Cryptol (Bird-style or LaTeX) source. - - Additional options accepted: - - `litstyle` - If given, must be ``"bird"`` or ``"latex"``. If not given, the style - is autodetected: if the first non-whitespace character in the source - is a backslash or percent character, LaTeX is assumed, else Bird. - - .. versionadded:: 0.9 - """ - name = 'Literate Cryptol' - aliases = ['lcry', 'literate-cryptol', 'lcryptol'] - filenames = ['*.lcry'] - mimetypes = ['text/x-literate-cryptol'] - - def __init__(self, **options): - crylexer = CryptolLexer(**options) - LiterateLexer.__init__(self, crylexer, **options) - - -class LiterateHaskellLexer(LiterateLexer): - """ - For Literate Haskell (Bird-style or LaTeX) source. - - Additional options accepted: - - `litstyle` - If given, must be ``"bird"`` or ``"latex"``. If not given, the style - is autodetected: if the first non-whitespace character in the source - is a backslash or percent character, LaTeX is assumed, else Bird. - - .. versionadded:: 0.9 - """ - name = 'Literate Haskell' - aliases = ['lhs', 'literate-haskell', 'lhaskell'] - filenames = ['*.lhs'] - mimetypes = ['text/x-literate-haskell'] - - def __init__(self, **options): - hslexer = HaskellLexer(**options) - LiterateLexer.__init__(self, hslexer, **options) - - -class LiterateIdrisLexer(LiterateLexer): - """ - For Literate Idris (Bird-style or LaTeX) source. - - Additional options accepted: - - `litstyle` - If given, must be ``"bird"`` or ``"latex"``. If not given, the style - is autodetected: if the first non-whitespace character in the source - is a backslash or percent character, LaTeX is assumed, else Bird. - - .. versionadded:: 2.0 - """ - name = 'Literate Idris' - aliases = ['lidr', 'literate-idris', 'lidris'] - filenames = ['*.lidr'] - mimetypes = ['text/x-literate-idris'] - - def __init__(self, **options): - hslexer = IdrisLexer(**options) - LiterateLexer.__init__(self, hslexer, **options) - - -class LiterateAgdaLexer(LiterateLexer): - """ - For Literate Agda source. - - Additional options accepted: - - `litstyle` - If given, must be ``"bird"`` or ``"latex"``. 
If not given, the style - is autodetected: if the first non-whitespace character in the source - is a backslash or percent character, LaTeX is assumed, else Bird. - - .. versionadded:: 2.0 - """ - name = 'Literate Agda' - aliases = ['lagda', 'literate-agda'] - filenames = ['*.lagda'] - mimetypes = ['text/x-literate-agda'] - - def __init__(self, **options): - agdalexer = AgdaLexer(**options) - LiterateLexer.__init__(self, agdalexer, litstyle='latex', **options) - - -class SMLLexer(RegexLexer): - """ - For the Standard ML language. - - .. versionadded:: 1.5 - """ - - name = 'Standard ML' - aliases = ['sml'] - filenames = ['*.sml', '*.sig', '*.fun',] - mimetypes = ['text/x-standardml', 'application/x-standardml'] - - alphanumid_reserved = [ - # Core - 'abstype', 'and', 'andalso', 'as', 'case', 'datatype', 'do', 'else', - 'end', 'exception', 'fn', 'fun', 'handle', 'if', 'in', 'infix', - 'infixr', 'let', 'local', 'nonfix', 'of', 'op', 'open', 'orelse', - 'raise', 'rec', 'then', 'type', 'val', 'with', 'withtype', 'while', - # Modules - 'eqtype', 'functor', 'include', 'sharing', 'sig', 'signature', - 'struct', 'structure', 'where', - ] - - symbolicid_reserved = [ - # Core - ':', '\|', '=', '=>', '->', '#', - # Modules - ':>', - ] - - nonid_reserved = [ '(', ')', '[', ']', '{', '}', ',', ';', '...', '_' ] - - alphanumid_re = r"[a-zA-Z][\w']*" - symbolicid_re = r"[!%&$#+\-/:<=>?@\\~`^|*]+" - - # A character constant is a sequence of the form #s, where s is a string - # constant denoting a string of size one character. This setup just parses - # the entire string as either a String.Double or a String.Char (depending - # on the argument), even if the String.Char is an erronous - # multiple-character string. - def stringy (whatkind): - return [ - (r'[^"\\]', whatkind), - (r'\\[\\\"abtnvfr]', String.Escape), - # Control-character notation is used for codes < 32, - # where \^@ == \000 - (r'\\\^[\x40-\x5e]', String.Escape), - # Docs say 'decimal digits' - (r'\\[0-9]{3}', String.Escape), - (r'\\u[0-9a-fA-F]{4}', String.Escape), - (r'\\\s+\\', String.Interpol), - (r'"', whatkind, '#pop'), - ] - - # Callbacks for distinguishing tokens and reserved words - def long_id_callback(self, match): - if match.group(1) in self.alphanumid_reserved: token = Error - else: token = Name.Namespace - yield match.start(1), token, match.group(1) - yield match.start(2), Punctuation, match.group(2) - - def end_id_callback(self, match): - if match.group(1) in self.alphanumid_reserved: token = Error - elif match.group(1) in self.symbolicid_reserved: token = Error - else: token = Name - yield match.start(1), token, match.group(1) - - def id_callback(self, match): - str = match.group(1) - if str in self.alphanumid_reserved: token = Keyword.Reserved - elif str in self.symbolicid_reserved: token = Punctuation - else: token = Name - yield match.start(1), token, str - - tokens = { - # Whitespace and comments are (almost) everywhere - 'whitespace': [ - (r'\s+', Text), - (r'\(\*', Comment.Multiline, 'comment'), - ], - - 'delimiters': [ - # This lexer treats these delimiters specially: - # Delimiters define scopes, and the scope is how the meaning of - # the `|' is resolved - is it a case/handle expression, or function - # definition by cases? 
(This is not how the Definition works, but - # it's how MLton behaves, see http://mlton.org/SMLNJDeviations) - (r'\(|\[|{', Punctuation, 'main'), - (r'\)|\]|}', Punctuation, '#pop'), - (r'\b(let|if|local)\b(?!\')', Keyword.Reserved, ('main', 'main')), - (r'\b(struct|sig|while)\b(?!\')', Keyword.Reserved, 'main'), - (r'\b(do|else|end|in|then)\b(?!\')', Keyword.Reserved, '#pop'), - ], - - 'core': [ - # Punctuation that doesn't overlap symbolic identifiers - (r'(%s)' % '|'.join([re.escape(z) for z in nonid_reserved]), - Punctuation), - - # Special constants: strings, floats, numbers in decimal and hex - (r'#"', String.Char, 'char'), - (r'"', String.Double, 'string'), - (r'~?0x[0-9a-fA-F]+', Number.Hex), - (r'0wx[0-9a-fA-F]+', Number.Hex), - (r'0w\d+', Number.Integer), - (r'~?\d+\.\d+[eE]~?\d+', Number.Float), - (r'~?\d+\.\d+', Number.Float), - (r'~?\d+[eE]~?\d+', Number.Float), - (r'~?\d+', Number.Integer), - - # Labels - (r'#\s*[1-9][0-9]*', Name.Label), - (r'#\s*(%s)' % alphanumid_re, Name.Label), - (r'#\s+(%s)' % symbolicid_re, Name.Label), - # Some reserved words trigger a special, local lexer state change - (r'\b(datatype|abstype)\b(?!\')', Keyword.Reserved, 'dname'), - (r'(?=\b(exception)\b(?!\'))', Text, ('ename')), - (r'\b(functor|include|open|signature|structure)\b(?!\')', - Keyword.Reserved, 'sname'), - (r'\b(type|eqtype)\b(?!\')', Keyword.Reserved, 'tname'), - - # Regular identifiers, long and otherwise - (r'\'[\w\']*', Name.Decorator), - (r'(%s)(\.)' % alphanumid_re, long_id_callback, "dotted"), - (r'(%s)' % alphanumid_re, id_callback), - (r'(%s)' % symbolicid_re, id_callback), - ], - 'dotted': [ - (r'(%s)(\.)' % alphanumid_re, long_id_callback), - (r'(%s)' % alphanumid_re, end_id_callback, "#pop"), - (r'(%s)' % symbolicid_re, end_id_callback, "#pop"), - (r'\s+', Error), - (r'\S+', Error), - ], - - - # Main parser (prevents errors in files that have scoping errors) - 'root': [ default('main') ], - - # In this scope, I expect '|' to not be followed by a function name, - # and I expect 'and' to be followed by a binding site - 'main': [ - include('whitespace'), - - # Special behavior of val/and/fun - (r'\b(val|and)\b(?!\')', Keyword.Reserved, 'vname'), - (r'\b(fun)\b(?!\')', Keyword.Reserved, - ('#pop', 'main-fun', 'fname')), - - include('delimiters'), - include('core'), - (r'\S+', Error), - ], - - # In this scope, I expect '|' and 'and' to be followed by a function - 'main-fun': [ - include('whitespace'), - - (r'\s', Text), - (r'\(\*', Comment.Multiline, 'comment'), - - # Special behavior of val/and/fun - (r'\b(fun|and)\b(?!\')', Keyword.Reserved, 'fname'), - (r'\b(val)\b(?!\')', Keyword.Reserved, - ('#pop', 'main', 'vname')), - - # Special behavior of '|' and '|'-manipulating keywords - (r'\|', Punctuation, 'fname'), - (r'\b(case|handle)\b(?!\')', Keyword.Reserved, - ('#pop', 'main')), - - include('delimiters'), - include('core'), - (r'\S+', Error), - ], - - # Character and string parsers - 'char': stringy(String.Char), - 'string': stringy(String.Double), - - 'breakout': [ - (r'(?=\b(%s)\b(?!\'))' % '|'.join(alphanumid_reserved), Text, '#pop'), - ], - - # Dealing with what comes after module system keywords - 'sname': [ - include('whitespace'), - include('breakout'), - - (r'(%s)' % alphanumid_re, Name.Namespace), - default('#pop'), - ], - - # Dealing with what comes after the 'fun' (or 'and' or '|') keyword - 'fname': [ - include('whitespace'), - (r'\'[0-9a-zA-Z_\']*', Name.Decorator), - (r'\(', Punctuation, 'tyvarseq'), - - (r'(%s)' % alphanumid_re, Name.Function, '#pop'), - 
(r'(%s)' % symbolicid_re, Name.Function, '#pop'), - - # Ignore interesting function declarations like "fun (x + y) = ..." - default('#pop'), - ], - - # Dealing with what comes after the 'val' (or 'and') keyword - 'vname': [ - include('whitespace'), - (r'\'[0-9a-zA-Z_\']*', Name.Decorator), - (r'\(', Punctuation, 'tyvarseq'), - - (r'(%s)(\s*)(=(?!%s))' % (alphanumid_re, symbolicid_re), - bygroups(Name.Variable, Text, Punctuation), '#pop'), - (r'(%s)(\s*)(=(?!%s))' % (symbolicid_re, symbolicid_re), - bygroups(Name.Variable, Text, Punctuation), '#pop'), - (r'(%s)' % alphanumid_re, Name.Variable, '#pop'), - (r'(%s)' % symbolicid_re, Name.Variable, '#pop'), - - # Ignore interesting patterns like 'val (x, y)' - default('#pop'), - ], - - # Dealing with what comes after the 'type' (or 'and') keyword - 'tname': [ - include('whitespace'), - include('breakout'), - - (r'\'[0-9a-zA-Z_\']*', Name.Decorator), - (r'\(', Punctuation, 'tyvarseq'), - (r'=(?!%s)' % symbolicid_re, Punctuation, ('#pop', 'typbind')), - - (r'(%s)' % alphanumid_re, Keyword.Type), - (r'(%s)' % symbolicid_re, Keyword.Type), - (r'\S+', Error, '#pop'), - ], - - # A type binding includes most identifiers - 'typbind': [ - include('whitespace'), - - (r'\b(and)\b(?!\')', Keyword.Reserved, ('#pop', 'tname')), - - include('breakout'), - include('core'), - (r'\S+', Error, '#pop'), - ], - - # Dealing with what comes after the 'datatype' (or 'and') keyword - 'dname': [ - include('whitespace'), - include('breakout'), - - (r'\'[0-9a-zA-Z_\']*', Name.Decorator), - (r'\(', Punctuation, 'tyvarseq'), - (r'(=)(\s*)(datatype)', - bygroups(Punctuation, Text, Keyword.Reserved), '#pop'), - (r'=(?!%s)' % symbolicid_re, Punctuation, - ('#pop', 'datbind', 'datcon')), - - (r'(%s)' % alphanumid_re, Keyword.Type), - (r'(%s)' % symbolicid_re, Keyword.Type), - (r'\S+', Error, '#pop'), - ], - - # common case - A | B | C of int - 'datbind': [ - include('whitespace'), - - (r'\b(and)\b(?!\')', Keyword.Reserved, ('#pop', 'dname')), - (r'\b(withtype)\b(?!\')', Keyword.Reserved, ('#pop', 'tname')), - (r'\b(of)\b(?!\')', Keyword.Reserved), - - (r'(\|)(\s*)(%s)' % alphanumid_re, - bygroups(Punctuation, Text, Name.Class)), - (r'(\|)(\s+)(%s)' % symbolicid_re, - bygroups(Punctuation, Text, Name.Class)), - - include('breakout'), - include('core'), - (r'\S+', Error), - ], - - # Dealing with what comes after an exception - 'ename': [ - include('whitespace'), - - (r'(exception|and)\b(\s+)(%s)' % alphanumid_re, - bygroups(Keyword.Reserved, Text, Name.Class)), - (r'(exception|and)\b(\s*)(%s)' % symbolicid_re, - bygroups(Keyword.Reserved, Text, Name.Class)), - (r'\b(of)\b(?!\')', Keyword.Reserved), - - include('breakout'), - include('core'), - (r'\S+', Error), - ], - - 'datcon': [ - include('whitespace'), - (r'(%s)' % alphanumid_re, Name.Class, '#pop'), - (r'(%s)' % symbolicid_re, Name.Class, '#pop'), - (r'\S+', Error, '#pop'), - ], - - # Series of type variables - 'tyvarseq': [ - (r'\s', Text), - (r'\(\*', Comment.Multiline, 'comment'), - - (r'\'[0-9a-zA-Z_\']*', Name.Decorator), - (alphanumid_re, Name), - (r',', Punctuation), - (r'\)', Punctuation, '#pop'), - (symbolicid_re, Name), - ], - - 'comment': [ - (r'[^(*)]', Comment.Multiline), - (r'\(\*', Comment.Multiline, '#push'), - (r'\*\)', Comment.Multiline, '#pop'), - (r'[(*)]', Comment.Multiline), - ], - } - - -class OcamlLexer(RegexLexer): - """ - For the OCaml language. - - .. 
versionadded:: 0.7 - """ - - name = 'OCaml' - aliases = ['ocaml'] - filenames = ['*.ml', '*.mli', '*.mll', '*.mly'] - mimetypes = ['text/x-ocaml'] - - keywords = [ - 'as', 'assert', 'begin', 'class', 'constraint', 'do', 'done', - 'downto', 'else', 'end', 'exception', 'external', 'false', - 'for', 'fun', 'function', 'functor', 'if', 'in', 'include', - 'inherit', 'initializer', 'lazy', 'let', 'match', 'method', - 'module', 'mutable', 'new', 'object', 'of', 'open', 'private', - 'raise', 'rec', 'sig', 'struct', 'then', 'to', 'true', 'try', - 'type', 'value', 'val', 'virtual', 'when', 'while', 'with', - ] - keyopts = [ - '!=','#','&','&&','\(','\)','\*','\+',',','-', - '-\.','->','\.','\.\.',':','::',':=',':>',';',';;','<', - '<-','=','>','>]','>}','\?','\?\?','\[','\[<','\[>','\[\|', - ']','_','`','{','{<','\|','\|]','}','~' - ] - - operators = r'[!$%&*+\./:<=>?@^|~-]' - word_operators = ['and', 'asr', 'land', 'lor', 'lsl', 'lxor', 'mod', 'or'] - prefix_syms = r'[!?~]' - infix_syms = r'[=<>@^|&+\*/$%-]' - primitives = ['unit', 'int', 'float', 'bool', 'string', 'char', 'list', 'array'] - - tokens = { - 'escape-sequence': [ - (r'\\[\\\"\'ntbr]', String.Escape), - (r'\\[0-9]{3}', String.Escape), - (r'\\x[0-9a-fA-F]{2}', String.Escape), - ], - 'root': [ - (r'\s+', Text), - (r'false|true|\(\)|\[\]', Name.Builtin.Pseudo), - (r'\b([A-Z][\w\']*)(?=\s*\.)', - Name.Namespace, 'dotted'), - (r'\b([A-Z][\w\']*)', Name.Class), - (r'\(\*(?![)])', Comment, 'comment'), - (r'\b(%s)\b' % '|'.join(keywords), Keyword), - (r'(%s)' % '|'.join(keyopts[::-1]), Operator), - (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator), - (r'\b(%s)\b' % '|'.join(word_operators), Operator.Word), - (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type), - - (r"[^\W\d][\w']*", Name), - - (r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float), - (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex), - (r'0[oO][0-7][0-7_]*', Number.Oct), - (r'0[bB][01][01_]*', Number.Bin), - (r'\d[\d_]*', Number.Integer), - - (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'", - String.Char), - (r"'.'", String.Char), - (r"'", Keyword), # a stray quote is another syntax element - - (r'"', String.Double, 'string'), - - (r'[~?][a-z][\w\']*:', Name.Variable), - ], - 'comment': [ - (r'[^(*)]+', Comment), - (r'\(\*', Comment, '#push'), - (r'\*\)', Comment, '#pop'), - (r'[(*)]', Comment), - ], - 'string': [ - (r'[^\\"]+', String.Double), - include('escape-sequence'), - (r'\\\n', String.Double), - (r'"', String.Double, '#pop'), - ], - 'dotted': [ - (r'\s+', Text), - (r'\.', Punctuation), - (r'[A-Z][\w\']*(?=\s*\.)', Name.Namespace), - (r'[A-Z][\w\']*', Name.Class, '#pop'), - (r'[a-z_][\w\']*', Name, '#pop'), - ], - } - - -class ErlangLexer(RegexLexer): - """ - For the Erlang functional programming language. - - Blame Jeremy Thurgood (http://jerith.za.net/). - - .. 
versionadded:: 0.9 - """ - - name = 'Erlang' - aliases = ['erlang'] - filenames = ['*.erl', '*.hrl', '*.es', '*.escript'] - mimetypes = ['text/x-erlang'] - - keywords = [ - 'after', 'begin', 'case', 'catch', 'cond', 'end', 'fun', 'if', - 'let', 'of', 'query', 'receive', 'try', 'when', - ] - - builtins = [ # See erlang(3) man page - 'abs', 'append_element', 'apply', 'atom_to_list', 'binary_to_list', - 'bitstring_to_list', 'binary_to_term', 'bit_size', 'bump_reductions', - 'byte_size', 'cancel_timer', 'check_process_code', 'delete_module', - 'demonitor', 'disconnect_node', 'display', 'element', 'erase', 'exit', - 'float', 'float_to_list', 'fun_info', 'fun_to_list', - 'function_exported', 'garbage_collect', 'get', 'get_keys', - 'group_leader', 'hash', 'hd', 'integer_to_list', 'iolist_to_binary', - 'iolist_size', 'is_atom', 'is_binary', 'is_bitstring', 'is_boolean', - 'is_builtin', 'is_float', 'is_function', 'is_integer', 'is_list', - 'is_number', 'is_pid', 'is_port', 'is_process_alive', 'is_record', - 'is_reference', 'is_tuple', 'length', 'link', 'list_to_atom', - 'list_to_binary', 'list_to_bitstring', 'list_to_existing_atom', - 'list_to_float', 'list_to_integer', 'list_to_pid', 'list_to_tuple', - 'load_module', 'localtime_to_universaltime', 'make_tuple', 'md5', - 'md5_final', 'md5_update', 'memory', 'module_loaded', 'monitor', - 'monitor_node', 'node', 'nodes', 'open_port', 'phash', 'phash2', - 'pid_to_list', 'port_close', 'port_command', 'port_connect', - 'port_control', 'port_call', 'port_info', 'port_to_list', - 'process_display', 'process_flag', 'process_info', 'purge_module', - 'put', 'read_timer', 'ref_to_list', 'register', 'resume_process', - 'round', 'send', 'send_after', 'send_nosuspend', 'set_cookie', - 'setelement', 'size', 'spawn', 'spawn_link', 'spawn_monitor', - 'spawn_opt', 'split_binary', 'start_timer', 'statistics', - 'suspend_process', 'system_flag', 'system_info', 'system_monitor', - 'system_profile', 'term_to_binary', 'tl', 'trace', 'trace_delivered', - 'trace_info', 'trace_pattern', 'trunc', 'tuple_size', 'tuple_to_list', - 'universaltime_to_localtime', 'unlink', 'unregister', 'whereis' - ] - - operators = r'(\+\+?|--?|\*|/|<|>|/=|=:=|=/=|=<|>=|==?|<-|!|\?)' - word_operators = [ - 'and', 'andalso', 'band', 'bnot', 'bor', 'bsl', 'bsr', 'bxor', - 'div', 'not', 'or', 'orelse', 'rem', 'xor' - ] - - atom_re = r"(?:[a-z]\w*|'[^\n']*[^\\]')" - - variable_re = r'(?:[A-Z_]\w*)' - - escape_re = r'(?:\\(?:[bdefnrstv\'"\\/]|[0-7][0-7]?[0-7]?|\^[a-zA-Z]))' - - macro_re = r'(?:'+variable_re+r'|'+atom_re+r')' - - base_re = r'(?:[2-9]|[12][0-9]|3[0-6])' - - tokens = { - 'root': [ - (r'\s+', Text), - (r'%.*\n', Comment), - ('(' + '|'.join(keywords) + r')\b', Keyword), - ('(' + '|'.join(builtins) + r')\b', Name.Builtin), - ('(' + '|'.join(word_operators) + r')\b', Operator.Word), - (r'^-', Punctuation, 'directive'), - (operators, Operator), - (r'"', String, 'string'), - (r'<<', Name.Label), - (r'>>', Name.Label), - ('(' + atom_re + ')(:)', bygroups(Name.Namespace, Punctuation)), - ('(?:^|(?<=:))(' + atom_re + r')(\s*)(\()', - bygroups(Name.Function, Text, Punctuation)), - (r'[+-]?'+base_re+r'#[0-9a-zA-Z]+', Number.Integer), - (r'[+-]?\d+', Number.Integer), - (r'[+-]?\d+.\d+', Number.Float), - (r'[]\[:_@\".{}()|;,]', Punctuation), - (variable_re, Name.Variable), - (atom_re, Name), - (r'\?'+macro_re, Name.Constant), - (r'\$(?:'+escape_re+r'|\\[ %]|[^\\])', String.Char), - (r'#'+atom_re+r'(:?\.'+atom_re+r')?', Name.Label), - ], - 'string': [ - (escape_re, String.Escape), - (r'"', String, 
'#pop'), - (r'~[0-9.*]*[~#+bBcdefginpPswWxX]', String.Interpol), - (r'[^"\\~]+', String), - (r'~', String), - ], - 'directive': [ - (r'(define)(\s*)(\()('+macro_re+r')', - bygroups(Name.Entity, Text, Punctuation, Name.Constant), '#pop'), - (r'(record)(\s*)(\()('+macro_re+r')', - bygroups(Name.Entity, Text, Punctuation, Name.Label), '#pop'), - (atom_re, Name.Entity, '#pop'), - ], - } - - -class ErlangShellLexer(Lexer): - """ - Shell sessions in erl (for Erlang code). - - .. versionadded:: 1.1 - """ - name = 'Erlang erl session' - aliases = ['erl'] - filenames = ['*.erl-sh'] - mimetypes = ['text/x-erl-shellsession'] - - _prompt_re = re.compile(r'\d+>(?=\s|\Z)') - - def get_tokens_unprocessed(self, text): - erlexer = ErlangLexer(**self.options) - - curcode = '' - insertions = [] - for match in line_re.finditer(text): - line = match.group() - m = self._prompt_re.match(line) - if m is not None: - end = m.end() - insertions.append((len(curcode), - [(0, Generic.Prompt, line[:end])])) - curcode += line[end:] - else: - if curcode: - for item in do_insertions(insertions, - erlexer.get_tokens_unprocessed(curcode)): - yield item - curcode = '' - insertions = [] - if line.startswith('*'): - yield match.start(), Generic.Traceback, line - else: - yield match.start(), Generic.Output, line - if curcode: - for item in do_insertions(insertions, - erlexer.get_tokens_unprocessed(curcode)): - yield item - - -class OpaLexer(RegexLexer): - """ - Lexer for the Opa language (http://opalang.org). - - .. versionadded:: 1.5 - """ - - name = 'Opa' - aliases = ['opa'] - filenames = ['*.opa'] - mimetypes = ['text/x-opa'] - - # most of these aren't strictly keywords - # but if you color only real keywords, you might just - # as well not color anything - keywords = [ - 'and', 'as', 'begin', 'case', 'client', 'css', 'database', 'db', 'do', - 'else', 'end', 'external', 'forall', 'function', 'if', 'import', - 'match', 'module', 'or', 'package', 'parser', 'rec', 'server', 'then', - 'type', 'val', 'with', 'xml_parser', - ] - - # matches both stuff and `stuff` - ident_re = r'(([a-zA-Z_]\w*)|(`[^`]*`))' - - op_re = r'[.=\-<>,@~%/+?*&^!]' - punc_re = r'[()\[\],;|]' # '{' and '}' are treated elsewhere - # because they are also used for inserts - - tokens = { - # copied from the caml lexer, should be adapted - 'escape-sequence': [ - (r'\\[\\\"\'ntr}]', String.Escape), - (r'\\[0-9]{3}', String.Escape), - (r'\\x[0-9a-fA-F]{2}', String.Escape), - ], - - # factorizing these rules, because they are inserted many times - 'comments': [ - (r'/\*', Comment, 'nested-comment'), - (r'//.*?$', Comment), - ], - 'comments-and-spaces': [ - include('comments'), - (r'\s+', Text), - ], - - 'root': [ - include('comments-and-spaces'), - # keywords - (r'\b(%s)\b' % '|'.join(keywords), Keyword), - # directives - # we could parse the actual set of directives instead of anything - # starting with @, but this is troublesome - # because it needs to be adjusted all the time - # and assuming we parse only sources that compile, it is useless - (r'@'+ident_re+r'\b', Name.Builtin.Pseudo), - - # number literals - (r'-?.[\d]+([eE][+\-]?\d+)', Number.Float), - (r'-?\d+.\d*([eE][+\-]?\d+)', Number.Float), - (r'-?\d+[eE][+\-]?\d+', Number.Float), - (r'0[xX][\da-fA-F]+', Number.Hex), - (r'0[oO][0-7]+', Number.Oct), - (r'0[bB][01]+', Number.Bin), - (r'\d+', Number.Integer), - # color literals - (r'#[\da-fA-F]{3,6}', Number.Integer), - - # string literals - (r'"', String.Double, 'string'), - # char literal, should be checked because this is the regexp from - # the caml 
lexer - (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2})|.)'", - String.Char), - - # this is meant to deal with embedded exprs in strings - # every time we find a '}' we pop a state so that if we were - # inside a string, we are back in the string state - # as a consequence, we must also push a state every time we find a - # '{' or else we will have errors when parsing {} for instance - (r'{', Operator, '#push'), - (r'}', Operator, '#pop'), - - # html literals - # this is a much more strict that the actual parser, - # since a])', String.Single, 'html-open-tag'), - - # db path - # matching the '[_]' in '/a[_]' because it is a part - # of the syntax of the db path definition - # unfortunately, i don't know how to match the ']' in - # /a[1], so this is somewhat inconsistent - (r'[@?!]?(/\w+)+(\[_\])?', Name.Variable), - # putting the same color on <- as on db path, since - # it can be used only to mean Db.write - (r'<-(?!'+op_re+r')', Name.Variable), - - # 'modules' - # although modules are not distinguished by their names as in caml - # the standard library seems to follow the convention that modules - # only area capitalized - (r'\b([A-Z]\w*)(?=\.)', Name.Namespace), - - # operators - # = has a special role because this is the only - # way to syntactic distinguish binding constructions - # unfortunately, this colors the equal in {x=2} too - (r'=(?!'+op_re+r')', Keyword), - (r'(%s)+' % op_re, Operator), - (r'(%s)+' % punc_re, Operator), - - # coercions - (r':', Operator, 'type'), - # type variables - # we need this rule because we don't parse specially type - # definitions so in "type t('a) = ...", "'a" is parsed by 'root' - ("'"+ident_re, Keyword.Type), - - # id literal, #something, or #{expr} - (r'#'+ident_re, String.Single), - (r'#(?={)', String.Single), - - # identifiers - # this avoids to color '2' in 'a2' as an integer - (ident_re, Text), - - # default, not sure if that is needed or not - # (r'.', Text), - ], - - # it is quite painful to have to parse types to know where they end - # this is the general rule for a type - # a type is either: - # * -> ty - # * type-with-slash - # * type-with-slash -> ty - # * type-with-slash (, type-with-slash)+ -> ty - # - # the code is pretty funky in here, but this code would roughly - # translate in caml to: - # let rec type stream = - # match stream with - # | [< "->"; stream >] -> type stream - # | [< ""; stream >] -> - # type_with_slash stream - # type_lhs_1 stream; - # and type_1 stream = ... 
- 'type': [ - include('comments-and-spaces'), - (r'->', Keyword.Type), - default(('#pop', 'type-lhs-1', 'type-with-slash')), - ], - - # parses all the atomic or closed constructions in the syntax of type - # expressions: record types, tuple types, type constructors, basic type - # and type variables - 'type-1': [ - include('comments-and-spaces'), - (r'\(', Keyword.Type, ('#pop', 'type-tuple')), - (r'~?{', Keyword.Type, ('#pop', 'type-record')), - (ident_re+r'\(', Keyword.Type, ('#pop', 'type-tuple')), - (ident_re, Keyword.Type, '#pop'), - ("'"+ident_re, Keyword.Type), - # this case is not in the syntax but sometimes - # we think we are parsing types when in fact we are parsing - # some css, so we just pop the states until we get back into - # the root state - default('#pop'), - ], - - # type-with-slash is either: - # * type-1 - # * type-1 (/ type-1)+ - 'type-with-slash': [ - include('comments-and-spaces'), - default(('#pop', 'slash-type-1', 'type-1')), - ], - 'slash-type-1': [ - include('comments-and-spaces'), - ('/', Keyword.Type, ('#pop', 'type-1')), - # same remark as above - default('#pop'), - ], - - # we go in this state after having parsed a type-with-slash - # while trying to parse a type - # and at this point we must determine if we are parsing an arrow - # type (in which case we must continue parsing) or not (in which - # case we stop) - 'type-lhs-1': [ - include('comments-and-spaces'), - (r'->', Keyword.Type, ('#pop', 'type')), - (r'(?=,)', Keyword.Type, ('#pop', 'type-arrow')), - default('#pop'), - ], - 'type-arrow': [ - include('comments-and-spaces'), - # the look ahead here allows to parse f(x : int, y : float -> truc) - # correctly - (r',(?=[^:]*?->)', Keyword.Type, 'type-with-slash'), - (r'->', Keyword.Type, ('#pop', 'type')), - # same remark as above - default('#pop'), - ], - - # no need to do precise parsing for tuples and records - # because they are closed constructions, so we can simply - # find the closing delimiter - # note that this function would be not work if the source - # contained identifiers like `{)` (although it could be patched - # to support it) - 'type-tuple': [ - include('comments-and-spaces'), - (r'[^\(\)/*]+', Keyword.Type), - (r'[/*]', Keyword.Type), - (r'\(', Keyword.Type, '#push'), - (r'\)', Keyword.Type, '#pop'), - ], - 'type-record': [ - include('comments-and-spaces'), - (r'[^{}/*]+', Keyword.Type), - (r'[/*]', Keyword.Type), - (r'{', Keyword.Type, '#push'), - (r'}', Keyword.Type, '#pop'), - ], - -# 'type-tuple': [ -# include('comments-and-spaces'), -# (r'\)', Keyword.Type, '#pop'), -# default(('#pop', 'type-tuple-1', 'type-1')), -# ], -# 'type-tuple-1': [ -# include('comments-and-spaces'), -# (r',?\s*\)', Keyword.Type, '#pop'), # ,) is a valid end of tuple, in (1,) -# (r',', Keyword.Type, 'type-1'), -# ], -# 'type-record':[ -# include('comments-and-spaces'), -# (r'}', Keyword.Type, '#pop'), -# (r'~?(?:\w+|`[^`]*`)', Keyword.Type, 'type-record-field-expr'), -# ], -# 'type-record-field-expr': [ -# -# ], - - 'nested-comment': [ - (r'[^/*]+', Comment), - (r'/\*', Comment, '#push'), - (r'\*/', Comment, '#pop'), - (r'[/*]', Comment), - ], - - # the copy pasting between string and single-string - # is kinda sad. Is there a way to avoid that?? 
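[Editor's aside, an illustrative sketch and not part of the patch] The comment above asks whether the near-identical 'string' and 'single-string' states that follow can avoid the copy and paste. They can, using the same factory-function pattern this file applies to Elixir further down (gen_elixir_string_rules): build the rule list once, parameterised on the terminator. The helper name and its exact shape here are hypothetical:

from pygments.lexer import include
from pygments.token import Operator, String

def gen_opa_string_rules(terminator):
    """Return the rules for an Opa string closed by `terminator`.

    Assumes the surrounding lexer defines the 'escape-sequence' and 'root'
    states, as OpaLexer does.
    """
    return [
        (r'[^\\%s{]+' % terminator, String.Double),  # ordinary characters
        (terminator, String.Double, '#pop'),         # closing quote
        (r'{', Operator, 'root'),                    # embedded expression
        include('escape-sequence'),
    ]

The two states would then read 'string': gen_opa_string_rules('"') and 'single-string': gen_opa_string_rules("'"), matching the hand-written rules kept below.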
- 'string': [ - (r'[^\\"{]+', String.Double), - (r'"', String.Double, '#pop'), - (r'{', Operator, 'root'), - include('escape-sequence'), - ], - 'single-string': [ - (r'[^\\\'{]+', String.Double), - (r'\'', String.Double, '#pop'), - (r'{', Operator, 'root'), - include('escape-sequence'), - ], - - # all the html stuff - # can't really reuse some existing html parser - # because we must be able to parse embedded expressions - - # we are in this state after someone parsed the '<' that - # started the html literal - 'html-open-tag': [ - (r'[\w\-:]+', String.Single, ('#pop', 'html-attr')), - (r'>', String.Single, ('#pop', 'html-content')), - ], - - # we are in this state after someone parsed the ' is allowed - (r'[\w\-:]*>', String.Single, '#pop'), - ], - - # we are in this state after having parsed '', String.Single, '#pop'), - (r'>', String.Single, ('#pop', 'html-content')), - ], - - 'html-attr-value': [ - (r"'", String.Single, ('#pop', 'single-string')), - (r'"', String.Single, ('#pop', 'string')), - (r'#'+ident_re, String.Single, '#pop'), - (r'#(?={)', String.Single, ('#pop', 'root')), - (r'[^"\'{`=<>]+', String.Single, '#pop'), - (r'{', Operator, ('#pop', 'root')), # this is a tail call! - ], - - # we should probably deal with '\' escapes here - 'html-content': [ - (r'', Comment, '#pop'), - (r'[^\-]+|-', Comment), - ], - } - - -class CoqLexer(RegexLexer): - """ - For the `Coq `_ theorem prover. - - .. versionadded:: 1.5 - """ - - name = 'Coq' - aliases = ['coq'] - filenames = ['*.v'] - mimetypes = ['text/x-coq'] - - keywords1 = [ - # Vernacular commands - 'Section', 'Module', 'End', 'Require', 'Import', 'Export', 'Variable', - 'Variables', 'Parameter', 'Parameters', 'Axiom', 'Hypothesis', - 'Hypotheses', 'Notation', 'Local', 'Tactic', 'Reserved', 'Scope', - 'Open', 'Close', 'Bind', 'Delimit', 'Definition', 'Let', 'Ltac', - 'Fixpoint', 'CoFixpoint', 'Morphism', 'Relation', 'Implicit', - 'Arguments', 'Set', 'Unset', 'Contextual', 'Strict', 'Prenex', - 'Implicits', 'Inductive', 'CoInductive', 'Record', 'Structure', - 'Canonical', 'Coercion', 'Theorem', 'Lemma', 'Corollary', - 'Proposition', 'Fact', 'Remark', 'Example', 'Proof', 'Goal', 'Save', - 'Qed', 'Defined', 'Hint', 'Resolve', 'Rewrite', 'View', 'Search', - 'Show', 'Print', 'Printing', 'All', 'Graph', 'Projections', 'inside', - 'outside', - ] - keywords2 = [ - # Gallina - 'forall', 'exists', 'exists2', 'fun', 'fix', 'cofix', 'struct', - 'match', 'end', 'in', 'return', 'let', 'if', 'is', 'then', 'else', - 'for', 'of', 'nosimpl', 'with', 'as', - ] - keywords3 = [ - # Sorts - 'Type', 'Prop', - ] - keywords4 = [ - # Tactics - 'pose', 'set', 'move', 'case', 'elim', 'apply', 'clear', 'hnf', 'intro', - 'intros', 'generalize', 'rename', 'pattern', 'after', 'destruct', - 'induction', 'using', 'refine', 'inversion', 'injection', 'rewrite', - 'congr', 'unlock', 'compute', 'ring', 'field', 'replace', 'fold', - 'unfold', 'change', 'cutrewrite', 'simpl', 'have', 'suff', 'wlog', - 'suffices', 'without', 'loss', 'nat_norm', 'assert', 'cut', 'trivial', - 'revert', 'bool_congr', 'nat_congr', 'symmetry', 'transitivity', 'auto', - 'split', 'left', 'right', 'autorewrite', - ] - keywords5 = [ - # Terminators - 'by', 'done', 'exact', 'reflexivity', 'tauto', 'romega', 'omega', - 'assumption', 'solve', 'contradiction', 'discriminate', - ] - keywords6 = [ - # Control - 'do', 'last', 'first', 'try', 'idtac', 'repeat', - ] - # 'as', 'assert', 'begin', 'class', 'constraint', 'do', 'done', - # 'downto', 'else', 'end', 'exception', 'external', 'false', - # 'for', 
'fun', 'function', 'functor', 'if', 'in', 'include', - # 'inherit', 'initializer', 'lazy', 'let', 'match', 'method', - # 'module', 'mutable', 'new', 'object', 'of', 'open', 'private', - # 'raise', 'rec', 'sig', 'struct', 'then', 'to', 'true', 'try', - # 'type', 'val', 'virtual', 'when', 'while', 'with' - keyopts = [ - '!=', '#', '&', '&&', r'\(', r'\)', r'\*', r'\+', ',', '-', - r'-\.', '->', r'\.', r'\.\.', ':', '::', ':=', ':>', ';', ';;', '<', - '<-', '=', '>', '>]', '>}', r'\?', r'\?\?', r'\[', r'\[<', r'\[>', - r'\[\|', ']', '_', '`', '{', '{<', r'\|', r'\|]', '}', '~', '=>', - r'/\\', r'\\/', - u'Π', u'λ', - ] - operators = r'[!$%&*+\./:<=>?@^|~-]' - word_operators = ['and', 'asr', 'land', 'lor', 'lsl', 'lxor', 'mod', 'or'] - prefix_syms = r'[!?~]' - infix_syms = r'[=<>@^|&+\*/$%-]' - primitives = ['unit', 'int', 'float', 'bool', 'string', 'char', 'list', - 'array'] - - tokens = { - 'root': [ - (r'\s+', Text), - (r'false|true|\(\)|\[\]', Name.Builtin.Pseudo), - (r'\(\*', Comment, 'comment'), - (r'\b(%s)\b' % '|'.join(keywords1), Keyword.Namespace), - (r'\b(%s)\b' % '|'.join(keywords2), Keyword), - (r'\b(%s)\b' % '|'.join(keywords3), Keyword.Type), - (r'\b(%s)\b' % '|'.join(keywords4), Keyword), - (r'\b(%s)\b' % '|'.join(keywords5), Keyword.Pseudo), - (r'\b(%s)\b' % '|'.join(keywords6), Keyword.Reserved), - (r'\b([A-Z][\w\']*)(?=\s*\.)', - Name.Namespace, 'dotted'), - (r'\b([A-Z][\w\']*)', Name.Class), - (r'(%s)' % '|'.join(keyopts[::-1]), Operator), - (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator), - (r'\b(%s)\b' % '|'.join(word_operators), Operator.Word), - (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type), - - (r"[^\W\d][\w']*", Name), - - (r'\d[\d_]*', Number.Integer), - (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex), - (r'0[oO][0-7][0-7_]*', Number.Oct), - (r'0[bB][01][01_]*', Number.Bin), - (r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float), - - (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'", - String.Char), - (r"'.'", String.Char), - (r"'", Keyword), # a stray quote is another syntax element - - (r'"', String.Double, 'string'), - - (r'[~?][a-z][\w\']*:', Name.Variable), - ], - 'comment': [ - (r'[^(*)]+', Comment), - (r'\(\*', Comment, '#push'), - (r'\*\)', Comment, '#pop'), - (r'[(*)]', Comment), - ], - 'string': [ - (r'[^"]+', String.Double), - (r'""', String.Double), - (r'"', String.Double, '#pop'), - ], - 'dotted': [ - (r'\s+', Text), - (r'\.', Punctuation), - (r'[A-Z][\w\']*(?=\s*\.)', Name.Namespace), - (r'[A-Z][\w\']*', Name.Class, '#pop'), - (r'[a-z][a-z0-9_\']*', Name, '#pop'), - default('#pop') - ], - } - - def analyse_text(text): - if text.startswith('(*'): - return True - - -class NewLispLexer(RegexLexer): - """ - For `newLISP. `_ source code (version 10.3.0). - - .. 
versionadded:: 1.5 - """ - - name = 'NewLisp' - aliases = ['newlisp'] - filenames = ['*.lsp', '*.nl'] - mimetypes = ['text/x-newlisp', 'application/x-newlisp'] - - flags = re.IGNORECASE | re.MULTILINE | re.UNICODE - - # list of built-in functions for newLISP version 10.3 - builtins = [ - '^', '--', '-', ':', '!', '!=', '?', '@', '*', '/', '&', '%', '+', '++', - '<', '<<', '<=', '=', '>', '>=', '>>', '|', '~', '$', '$0', '$1', '$10', - '$11', '$12', '$13', '$14', '$15', '$2', '$3', '$4', '$5', '$6', '$7', - '$8', '$9', '$args', '$idx', '$it', '$main-args', 'abort', 'abs', - 'acos', 'acosh', 'add', 'address', 'amb', 'and', 'and', 'append-file', - 'append', 'apply', 'args', 'array-list', 'array?', 'array', 'asin', - 'asinh', 'assoc', 'atan', 'atan2', 'atanh', 'atom?', 'base64-dec', - 'base64-enc', 'bayes-query', 'bayes-train', 'begin', 'begin', 'begin', - 'beta', 'betai', 'bind', 'binomial', 'bits', 'callback', 'case', 'case', - 'case', 'catch', 'ceil', 'change-dir', 'char', 'chop', 'Class', 'clean', - 'close', 'command-event', 'cond', 'cond', 'cond', 'cons', 'constant', - 'context?', 'context', 'copy-file', 'copy', 'cos', 'cosh', 'count', - 'cpymem', 'crc32', 'crit-chi2', 'crit-z', 'current-line', 'curry', - 'date-list', 'date-parse', 'date-value', 'date', 'debug', 'dec', - 'def-new', 'default', 'define-macro', 'define-macro', 'define', - 'delete-file', 'delete-url', 'delete', 'destroy', 'det', 'device', - 'difference', 'directory?', 'directory', 'div', 'do-until', 'do-while', - 'doargs', 'dolist', 'dostring', 'dotimes', 'dotree', 'dump', 'dup', - 'empty?', 'encrypt', 'ends-with', 'env', 'erf', 'error-event', - 'eval-string', 'eval', 'exec', 'exists', 'exit', 'exp', 'expand', - 'explode', 'extend', 'factor', 'fft', 'file-info', 'file?', 'filter', - 'find-all', 'find', 'first', 'flat', 'float?', 'float', 'floor', 'flt', - 'fn', 'for-all', 'for', 'fork', 'format', 'fv', 'gammai', 'gammaln', - 'gcd', 'get-char', 'get-float', 'get-int', 'get-long', 'get-string', - 'get-url', 'global?', 'global', 'if-not', 'if', 'ifft', 'import', 'inc', - 'index', 'inf?', 'int', 'integer?', 'integer', 'intersect', 'invert', - 'irr', 'join', 'lambda-macro', 'lambda?', 'lambda', 'last-error', - 'last', 'legal?', 'length', 'let', 'let', 'let', 'letex', 'letn', - 'letn', 'letn', 'list?', 'list', 'load', 'local', 'log', 'lookup', - 'lower-case', 'macro?', 'main-args', 'MAIN', 'make-dir', 'map', 'mat', - 'match', 'max', 'member', 'min', 'mod', 'module', 'mul', 'multiply', - 'NaN?', 'net-accept', 'net-close', 'net-connect', 'net-error', - 'net-eval', 'net-interface', 'net-ipv', 'net-listen', 'net-local', - 'net-lookup', 'net-packet', 'net-peek', 'net-peer', 'net-ping', - 'net-receive-from', 'net-receive-udp', 'net-receive', 'net-select', - 'net-send-to', 'net-send-udp', 'net-send', 'net-service', - 'net-sessions', 'new', 'nil?', 'nil', 'normal', 'not', 'now', 'nper', - 'npv', 'nth', 'null?', 'number?', 'open', 'or', 'ostype', 'pack', - 'parse-date', 'parse', 'peek', 'pipe', 'pmt', 'pop-assoc', 'pop', - 'post-url', 'pow', 'prefix', 'pretty-print', 'primitive?', 'print', - 'println', 'prob-chi2', 'prob-z', 'process', 'prompt-event', - 'protected?', 'push', 'put-url', 'pv', 'quote?', 'quote', 'rand', - 'random', 'randomize', 'read', 'read-char', 'read-expr', 'read-file', - 'read-key', 'read-line', 'read-utf8', 'read', 'reader-event', - 'real-path', 'receive', 'ref-all', 'ref', 'regex-comp', 'regex', - 'remove-dir', 'rename-file', 'replace', 'reset', 'rest', 'reverse', - 'rotate', 'round', 'save', 'search', 'seed', 'seek', 
'select', 'self', - 'semaphore', 'send', 'sequence', 'series', 'set-locale', 'set-ref-all', - 'set-ref', 'set', 'setf', 'setq', 'sgn', 'share', 'signal', 'silent', - 'sin', 'sinh', 'sleep', 'slice', 'sort', 'source', 'spawn', 'sqrt', - 'starts-with', 'string?', 'string', 'sub', 'swap', 'sym', 'symbol?', - 'symbols', 'sync', 'sys-error', 'sys-info', 'tan', 'tanh', 'term', - 'throw-error', 'throw', 'time-of-day', 'time', 'timer', 'title-case', - 'trace-highlight', 'trace', 'transpose', 'Tree', 'trim', 'true?', - 'true', 'unicode', 'unify', 'unique', 'unless', 'unpack', 'until', - 'upper-case', 'utf8', 'utf8len', 'uuid', 'wait-pid', 'when', 'while', - 'write', 'write-char', 'write-file', 'write-line', 'write', - 'xfer-event', 'xml-error', 'xml-parse', 'xml-type-tags', 'zero?', - ] - - # valid names - valid_name = r'([\w!$%&*+.,/<=>?@^~|-])+|(\[.*?\])+' - - tokens = { - 'root': [ - # shebang - (r'#!(.*?)$', Comment.Preproc), - # comments starting with semicolon - (r';.*$', Comment.Single), - # comments starting with # - (r'#.*$', Comment.Single), - - # whitespace - (r'\s+', Text), - - # strings, symbols and characters - (r'"(\\\\|\\"|[^"])*"', String), - - # braces - (r"{", String, "bracestring"), - - # [text] ... [/text] delimited strings - (r'\[text\]*', String, "tagstring"), - - # 'special' operators... - (r"('|:)", Operator), - - # highlight the builtins - ('(%s)' % '|'.join(re.escape(entry) + '\\b' for entry in builtins), - Keyword), - - # the remaining functions - (r'(?<=\()' + valid_name, Name.Variable), - - # the remaining variables - (valid_name, String.Symbol), - - # parentheses - (r'(\(|\))', Punctuation), - ], - - # braced strings... - 'bracestring': [ - ("{", String, "#push"), - ("}", String, "#pop"), - ("[^{}]+", String), - ], - - # tagged [text]...[/text] delimited strings... - 'tagstring': [ - (r'(?s)(.*?)(\[/text\])', String, '#pop'), - ], - } - - -class NixLexer(RegexLexer): - """ - For the `Nix language `_. - - .. 
versionadded:: 2.0 - """ - - name = 'Nix' - aliases = ['nixos', 'nix'] - filenames = ['*.nix'] - mimetypes = ['text/x-nix'] - - flags = re.MULTILINE | re.UNICODE - - keywords = ['rec', 'with', 'let', 'in', 'inherit', 'assert', 'if', - 'else', 'then', '...'] - builtins = ['import', 'abort', 'baseNameOf', 'dirOf', 'isNull', 'builtins', - 'map', 'removeAttrs', 'throw', 'toString', 'derivation'] - operators = ['++', '+', '?', '.', '!', '//', '==', - '!=', '&&', '||', '->', '='] - - punctuations = ["(", ")", "[", "]", ";", "{", "}", ":", ",", "@"] - - tokens = { - 'root': [ - # comments starting with # - (r'#.*$', Comment.Single), - - # multiline comments - (r'/\*', Comment.Multiline, 'comment'), - - # whitespace - (r'\s+', Text), - - # keywords - ('(%s)' % '|'.join(re.escape(entry) + '\\b' for entry in keywords), Keyword), - - # highlight the builtins - ('(%s)' % '|'.join(re.escape(entry) + '\\b' for entry in builtins), - Name.Builtin), - - (r'\b(true|false|null)\b', Name.Constant), - - # operators - ('(%s)' % '|'.join(re.escape(entry) for entry in operators), - Operator), - - # word operators - (r'\b(or|and)\b', Operator.Word), - - # punctuations - ('(%s)' % '|'.join(re.escape(entry) for entry in punctuations), Punctuation), - - # integers - (r'[0-9]+', Number.Integer), - - # strings - (r'"', String.Double, 'doublequote'), - (r"''", String.Single, 'singlequote'), - - # paths - (r'[\w.+-]*(\/[\w.+-]+)+', Literal), - (r'\<[\w.+-]+(\/[\w.+-]+)*\>', Literal), - - # urls - (r'[a-zA-Z][a-zA-Z0-9\+\-\.]*\:[\w%/?:@&=+$,\\.!~*\'-]+', Literal), - - # names of variables - (r'[\w-]+\s*=', String.Symbol), - (r'[a-zA-Z_][\w\'-]*', Text), - - ], - 'comment': [ - (r'[^/\*]+', Comment.Multiline), - (r'/\*', Comment.Multiline, '#push'), - (r'\*/', Comment.Multiline, '#pop'), - (r'[\*/]', Comment.Multiline), - ], - 'singlequote': [ - (r"'''", String.Escape), - (r"''\$\{", String.Escape), - (r"''\n", String.Escape), - (r"''\r", String.Escape), - (r"''\t", String.Escape), - (r"''", String.Single, '#pop'), - (r'\$\{', String.Interpol, 'antiquote'), - (r"[^']", String.Single), - ], - 'doublequote': [ - (r'\\', String.Escape), - (r'\\"', String.Escape), - (r'\\${', String.Escape), - (r'"', String.Double, '#pop'), - (r'\$\{', String.Interpol, 'antiquote'), - (r'[^"]', String.Double), - ], - 'antiquote': [ - (r"}", String.Interpol, '#pop'), - # TODO: we should probably escape also here ''${ \${ - (r"\$\{", String.Interpol, '#push'), - include('root'), - ], - } - - def analyse_text(text): - rv = 0.0 - # TODO: let/in - if re.search(r'import.+?<[^>]+>', text): - rv += 0.4 - if re.search(r'mkDerivation\s+(\(|\{|rec)', text): - rv += 0.4 - if re.search(r'with\s+[a-zA-Z\.]+;', text): - rv += 0.2 - if re.search(r'inherit\s+[a-zA-Z()\.];', text): - rv += 0.2 - if re.search(r'=\s+mkIf\s+', text): - rv += 0.4 - if re.search(r'\{[a-zA-Z,\s]+\}:', text): - rv += 0.1 - return rv - - -def gen_elixir_string_rules(name, symbol, token): - states = {} - states['string_' + name] = [ - (r'[^#%s\\]+' % (symbol,), token), - include('escapes'), - (r'\\.', token), - (r'(%s)' % (symbol,), bygroups(token), "#pop"), - include('interpol') - ] - return states - -def gen_elixir_sigstr_rules(term, token, interpol=True): - if interpol: - return [ - (r'[^#%s\\]+' % (term,), token), - include('escapes'), - (r'\\.', token), - (r'%s[a-zA-Z]*' % (term,), token, '#pop'), - include('interpol') - ] - else: - return [ - (r'[^%s\\]+' % (term,), token), - (r'\\.', token), - (r'%s[a-zA-Z]*' % (term,), token, '#pop'), - ] - -class ElixirLexer(RegexLexer): - """ 
- For the `Elixir language `_. - - .. versionadded:: 1.5 - """ - - name = 'Elixir' - aliases = ['elixir', 'ex', 'exs'] - filenames = ['*.ex', '*.exs'] - mimetypes = ['text/x-elixir'] - - KEYWORD = ['fn', 'do', 'end', 'after', 'else', 'rescue', 'catch'] - KEYWORD_OPERATOR = ['not', 'and', 'or', 'when', 'in'] - BUILTIN = [ - 'case', 'cond', 'for', 'if', 'unless', 'try', 'receive', 'raise', - 'quote', 'unquote', 'unquote_splicing', 'throw', 'super' - ] - BUILTIN_DECLARATION = [ - 'def', 'defp', 'defmodule', 'defprotocol', 'defmacro', 'defmacrop', - 'defdelegate', 'defexception', 'defstruct', 'defimpl', 'defcallback' - ] - - BUILTIN_NAMESPACE = ['import', 'require', 'use', 'alias'] - CONSTANT = ['nil', 'true', 'false'] - - PSEUDO_VAR = ['_', '__MODULE__', '__DIR__', '__ENV__', '__CALLER__'] - - - OPERATORS3 = [ - '<<<', '>>>', '|||', '&&&', '^^^', '~~~', '===', '!==', - '~>>', '<~>', '|~>', '<|>', - ] - OPERATORS2 = [ - '==', '!=', '<=', '>=', '&&', '||', '<>', '++', '--', '|>', '=~', - '->', '<-', '|', '.', '=', '~>', '<~', - ] - OPERATORS1 = ['<', '>', '+', '-', '*', '/', '!', '^', '&'] - - PUNCTUATION = [ - '\\\\', '<<', '>>', '=>', '(', ')', ':', ';', ',', '[', ']' - ] - - def get_tokens_unprocessed(self, text): - for index, token, value in RegexLexer.get_tokens_unprocessed(self, text): - if token is Name: - if value in self.KEYWORD: - yield index, Keyword, value - elif value in self.KEYWORD_OPERATOR: - yield index, Operator.Word, value - elif value in self.BUILTIN: - yield index, Keyword, value - elif value in self.BUILTIN_DECLARATION: - yield index, Keyword.Declaration, value - elif value in self.BUILTIN_NAMESPACE: - yield index, Keyword.Namespace, value - elif value in self.CONSTANT: - yield index, Name.Constant, value - elif value in self.PSEUDO_VAR: - yield index, Name.Builtin.Pseudo, value - else: - yield index, token, value - else: - yield index, token, value - - def gen_elixir_sigil_rules(): - # all valid sigil terminators (excluding heredocs) - terminators = [ - (r'\{', r'\}', 'cb'), - (r'\[', r'\]', 'sb'), - (r'\(', r'\)', 'pa'), - (r'\<', r'\>', 'ab'), - (r'/', r'/', 'slas'), - (r'\|', r'\|', 'pipe'), - ('"', '"', 'quot'), - ("'", "'", 'apos'), - ] - - # heredocs have slightly different rules - triquotes = [(r'"""', 'triquot'), (r"'''", 'triapos')] - - token = String.Other - states = {'sigils': []} - - for term, name in triquotes: - states['sigils'] += [ - (r'(~[a-z])(%s)' % (term,), bygroups(token, String.Heredoc), - (name + '-end', name + '-intp')), - (r'(~[A-Z])(%s)' % (term,), bygroups(token, String.Heredoc), - (name + '-end', name + '-no-intp')), - ] - - states[name +'-end'] = [(r'[a-zA-Z]*', token, '#pop')] - states[name +'-intp'] = [ - (r'^\s*' + term, String.Heredoc, '#pop'), - include('heredoc_interpol'), - ] - states[name +'-no-intp'] = [ - (r'^\s*' + term, String.Heredoc, '#pop'), - include('heredoc_no_interpol'), - ] - - for lterm, rterm, name in terminators: - states['sigils'] += [ - (r'~[a-z]' + lterm, token, name + '-intp'), - (r'~[A-Z]' + lterm, token, name + '-no-intp'), - ] - states[name +'-intp'] = gen_elixir_sigstr_rules(rterm, token) - states[name +'-no-intp'] = \ - gen_elixir_sigstr_rules(rterm, token, interpol=False) - - return states - - op3_re = "|".join(re.escape(s) for s in OPERATORS3) - op2_re = "|".join(re.escape(s) for s in OPERATORS2) - op1_re = "|".join(re.escape(s) for s in OPERATORS1) - ops_re = r'(?:%s|%s|%s)' % (op3_re, op2_re, op1_re) - punctuation_re = "|".join(re.escape(s) for s in PUNCTUATION) - alnum = '[A-Za-z_0-9]' - name_re = 
r'(?:\.\.\.|[a-z_]%s*[!\?]?)' % alnum - modname_re = r'[A-Z]%(alnum)s*(?:\.[A-Z]%(alnum)s*)*' % {'alnum': alnum} - complex_name_re = r'(?:%s|%s|%s)' % (name_re, modname_re, ops_re) - special_atom_re = r'(?:\.\.\.|<<>>|%{}|%|{})' - - long_hex_char_re = r'(\\x{)([\da-fA-F]+)(})' - hex_char_re = r'(\\x[\da-fA-F]{1,2})' - escape_char_re = r'(\\[abdefnrstv])' - - tokens = { - 'root': [ - (r'\s+', Text), - (r'#.*$', Comment.Single), - - # Various kinds of characters - (r'(\?)' + long_hex_char_re, - bygroups(String.Char, - String.Escape, Number.Hex, String.Escape)), - (r'(\?)' + hex_char_re, - bygroups(String.Char, String.Escape)), - (r'(\?)' + escape_char_re, - bygroups(String.Char, String.Escape)), - (r'\?\\?.', String.Char), - - # '::' has to go before atoms - (r':::', String.Symbol), - (r'::', Operator), - - # atoms - (r':' + special_atom_re, String.Symbol), - (r':' + complex_name_re, String.Symbol), - (r':"', String.Symbol, 'string_double_atom'), - (r":'", String.Symbol, 'string_single_atom'), - - # [keywords: ...] - (r'(%s|%s)(:)(?=\s|\n)' % (special_atom_re, complex_name_re), - bygroups(String.Symbol, Punctuation)), - - # @attributes - (r'@' + name_re, Name.Attribute), - - # identifiers - (name_re, Name), - (r'(%%?)(%s)' % (modname_re,), bygroups(Punctuation, Name.Class)), - - # operators and punctuation - (op3_re, Operator), - (op2_re, Operator), - (punctuation_re, Punctuation), - (r'&\d', Name.Entity), # anon func arguments - (op1_re, Operator), - - # numbers - (r'0b[01]+', Number.Bin), - (r'0o[0-7]+', Number.Oct), - (r'0x[\da-fA-F]+', Number.Hex), - (r'\d(_?\d)*\.\d(_?\d)*([eE][-+]?\d(_?\d)*)?', Number.Float), - (r'\d(_?\d)*', Number.Integer), - - # strings and heredocs - (r'"""\s*', String.Heredoc, 'heredoc_double'), - (r"'''\s*$", String.Heredoc, 'heredoc_single'), - (r'"', String.Double, 'string_double'), - (r"'", String.Single, 'string_single'), - - include('sigils'), - - (r'%{', Punctuation, 'map_key'), - (r'{', Punctuation, 'tuple'), - ], - 'heredoc_double': [ - (r'^\s*"""', String.Heredoc, '#pop'), - include('heredoc_interpol'), - ], - 'heredoc_single': [ - (r"^\s*'''", String.Heredoc, '#pop'), - include('heredoc_interpol'), - ], - 'heredoc_interpol': [ - (r'[^#\\\n]+', String.Heredoc), - include('escapes'), - (r'\\.', String.Heredoc), - (r'\n+', String.Heredoc), - include('interpol'), - ], - 'heredoc_no_interpol': [ - (r'[^\\\n]+', String.Heredoc), - (r'\\.', String.Heredoc), - (r'\n+', String.Heredoc), - ], - 'escapes': [ - (long_hex_char_re, - bygroups(String.Escape, Number.Hex, String.Escape)), - (hex_char_re, String.Escape), - (escape_char_re, String.Escape), - ], - 'interpol': [ - (r'#{', String.Interpol, 'interpol_string'), - ], - 'interpol_string' : [ - (r'}', String.Interpol, "#pop"), - include('root') - ], - 'map_key': [ - include('root'), - (r':', Punctuation, 'map_val'), - (r'=>', Punctuation, 'map_val'), - (r'}', Punctuation, '#pop'), - ], - 'map_val': [ - include('root'), - (r',', Punctuation, '#pop'), - (r'(?=})', Punctuation, '#pop'), - ], - 'tuple': [ - include('root'), - (r'}', Punctuation, '#pop'), - ], - } - tokens.update(gen_elixir_string_rules('double', '"', String.Double)) - tokens.update(gen_elixir_string_rules('single', "'", String.Single)) - tokens.update(gen_elixir_string_rules('double_atom', '"', String.Symbol)) - tokens.update(gen_elixir_string_rules('single_atom', "'", String.Symbol)) - tokens.update(gen_elixir_sigil_rules()) - - -class ElixirConsoleLexer(Lexer): - """ - For Elixir interactive console (iex) output like: - - .. 
sourcecode:: iex - - iex> [head | tail] = [1,2,3] - [1,2,3] - iex> head - 1 - iex> tail - [2,3] - iex> [head | tail] - [1,2,3] - iex> length [head | tail] - 3 - - .. versionadded:: 1.5 - """ - - name = 'Elixir iex session' - aliases = ['iex'] - mimetypes = ['text/x-elixir-shellsession'] - - _prompt_re = re.compile('(iex|\.{3})(\(\d+\))?> ') - - def get_tokens_unprocessed(self, text): - exlexer = ElixirLexer(**self.options) - - curcode = '' - in_error = False - insertions = [] - for match in line_re.finditer(text): - line = match.group() - if line.startswith(u'** '): - in_error = True - insertions.append((len(curcode), - [(0, Generic.Error, line[:-1])])) - curcode += line[-1:] - else: - m = self._prompt_re.match(line) - if m is not None: - in_error = False - end = m.end() - insertions.append((len(curcode), - [(0, Generic.Prompt, line[:end])])) - curcode += line[end:] - else: - if curcode: - for item in do_insertions(insertions, - exlexer.get_tokens_unprocessed(curcode)): - yield item - curcode = '' - insertions = [] - token = Generic.Error if in_error else Generic.Output - yield match.start(), token, line - if curcode: - for item in do_insertions(insertions, - exlexer.get_tokens_unprocessed(curcode)): - yield item - - -class KokaLexer(RegexLexer): - """ - Lexer for the `Koka `_ - language. - - .. versionadded:: 1.6 - """ - - name = 'Koka' - aliases = ['koka'] - filenames = ['*.kk', '*.kki'] - mimetypes = ['text/x-koka'] - - keywords = [ - 'infix', 'infixr', 'infixl', - 'type', 'cotype', 'rectype', 'alias', - 'struct', 'con', - 'fun', 'function', 'val', 'var', - 'external', - 'if', 'then', 'else', 'elif', 'return', 'match', - 'private', 'public', 'private', - 'module', 'import', 'as', - 'include', 'inline', - 'rec', - 'try', 'yield', 'enum', - 'interface', 'instance', - ] - - # keywords that are followed by a type - typeStartKeywords = [ - 'type', 'cotype', 'rectype', 'alias', 'struct', 'enum', - ] - - # keywords valid in a type - typekeywords = [ - 'forall', 'exists', 'some', 'with', - ] - - # builtin names and special names - builtin = [ - 'for', 'while', 'repeat', - 'foreach', 'foreach-indexed', - 'error', 'catch', 'finally', - 'cs', 'js', 'file', 'ref', 'assigned', - ] - - # symbols that can be in an operator - symbols = '[\$%&\*\+@!/\\\^~=\.:\-\?\|<>]+' - - # symbol boundary: an operator keyword should not be followed by any of these - sboundary = '(?!'+symbols+')' - - # name boundary: a keyword should not be followed by any of these - boundary = '(?![\w/])' - - # koka token abstractions - tokenType = Name.Attribute - tokenTypeDef = Name.Class - tokenConstructor = Generic.Emph - - # main lexer - tokens = { - 'root': [ - include('whitespace'), - - # go into type mode - (r'::?' 
+ sboundary, tokenType, 'type'), - (r'(alias)(\s+)([a-z]\w*)?', bygroups(Keyword, Text, tokenTypeDef), - 'alias-type'), - (r'(struct)(\s+)([a-z]\w*)?', bygroups(Keyword, Text, tokenTypeDef), - 'struct-type'), - ((r'(%s)' % '|'.join(typeStartKeywords)) + - r'(\s+)([a-z]\w*)?', bygroups(Keyword, Text, tokenTypeDef), - 'type'), - - # special sequences of tokens (we use ?: for non-capturing group as - # required by 'bygroups') - (r'(module)(\s+)(interface\s+)?((?:[a-z]\w*/)*[a-z]\w*)', - bygroups(Keyword, Text, Keyword, Name.Namespace)), - (r'(import)(\s+)((?:[a-z]\w*/)*[a-z]\w*)' - r'(?:(\s*)(=)(\s*)((?:qualified\s*)?)' - r'((?:[a-z]\w*/)*[a-z]\w*))?', - bygroups(Keyword, Text, Name.Namespace, Text, Keyword, Text, - Keyword, Name.Namespace)), - - (r'(^(?:(?:public|private)\s*)?(?:function|fun|val))' - r'(\s+)([a-z]\w*|\((?:' + symbols + r'|/)\))', - bygroups(Keyword, Text, Name.Function)), - (r'(^(?:(?:public|private)\s*)?external)(\s+)(inline\s+)?' - r'([a-z]\w*|\((?:' + symbols + r'|/)\))', - bygroups(Keyword, Text, Keyword, Name.Function)), - - # keywords - (r'(%s)' % '|'.join(typekeywords) + boundary, Keyword.Type), - (r'(%s)' % '|'.join(keywords) + boundary, Keyword), - (r'(%s)' % '|'.join(builtin) + boundary, Keyword.Pseudo), - (r'::?|:=|\->|[=\.]' + sboundary, Keyword), - - # names - (r'((?:[a-z]\w*/)*)([A-Z]\w*)', - bygroups(Name.Namespace, tokenConstructor)), - (r'((?:[a-z]\w*/)*)([a-z]\w*)', bygroups(Name.Namespace, Name)), - (r'((?:[a-z]\w*/)*)(\((?:' + symbols + r'|/)\))', - bygroups(Name.Namespace, Name)), - (r'_\w*', Name.Variable), - - # literal string - (r'@"', String.Double, 'litstring'), - - # operators - (symbols + "|/(?![\*/])", Operator), - (r'`', Operator), - (r'[\{\}\(\)\[\];,]', Punctuation), - - # literals. No check for literal characters with len > 1 - (r'[0-9]+\.[0-9]+([eE][\-\+]?[0-9]+)?', Number.Float), - (r'0[xX][0-9a-fA-F]+', Number.Hex), - (r'[0-9]+', Number.Integer), - - (r"'", String.Char, 'char'), - (r'"', String.Double, 'string'), - ], - - # type started by alias - 'alias-type': [ - (r'=',Keyword), - include('type') - ], - - # type started by struct - 'struct-type': [ - (r'(?=\((?!,*\)))',Punctuation, '#pop'), - include('type') - ], - - # type started by colon - 'type': [ - (r'[\(\[<]', tokenType, 'type-nested'), - include('type-content') - ], - - # type nested in brackets: can contain parameters, comma etc. - 'type-nested': [ - (r'[\)\]>]', tokenType, '#pop'), - (r'[\(\[<]', tokenType, 'type-nested'), - (r',', tokenType), - (r'([a-z]\w*)(\s*)(:)(?!:)', - bygroups(Name, Text, tokenType)), # parameter name - include('type-content') - ], - - # shared contents of a type - 'type-content': [ - include('whitespace'), - - # keywords - (r'(%s)' % '|'.join(typekeywords) + boundary, Keyword), - (r'(?=((%s)' % '|'.join(keywords) + boundary + '))', - Keyword, '#pop'), # need to match because names overlap... 
- - # kinds - (r'[EPHVX]' + boundary, tokenType), - - # type names - (r'[a-z][0-9]*(?![\w/])', tokenType ), - (r'_\w*', tokenType.Variable), # Generic.Emph - (r'((?:[a-z]\w*/)*)([A-Z]\w*)', - bygroups(Name.Namespace, tokenType)), - (r'((?:[a-z]\w*/)*)([a-z]\w+)', - bygroups(Name.Namespace, tokenType)), - - # type keyword operators - (r'::|\->|[\.:|]', tokenType), - - #catchall - default('#pop') - ], - - # comments and literals - 'whitespace': [ - (r'\n\s*#.*$', Comment.Preproc), - (r'\s+', Text), - (r'/\*', Comment.Multiline, 'comment'), - (r'//.*$', Comment.Single) - ], - 'comment': [ - (r'[^/\*]+', Comment.Multiline), - (r'/\*', Comment.Multiline, '#push'), - (r'\*/', Comment.Multiline, '#pop'), - (r'[\*/]', Comment.Multiline), - ], - 'litstring': [ - (r'[^"]+', String.Double), - (r'""', String.Escape), - (r'"', String.Double, '#pop'), - ], - 'string': [ - (r'[^\\"\n]+', String.Double), - include('escape-sequence'), - (r'["\n]', String.Double, '#pop'), - ], - 'char': [ - (r'[^\\\'\n]+', String.Char), - include('escape-sequence'), - (r'[\'\n]', String.Char, '#pop'), - ], - 'escape-sequence': [ - (r'\\[nrt\\\"\']', String.Escape), - (r'\\x[0-9a-fA-F]{2}', String.Escape), - (r'\\u[0-9a-fA-F]{4}', String.Escape), - # Yes, \U literals are 6 hex digits. - (r'\\U[0-9a-fA-F]{6}', String.Escape) - ] - } +__all__ = [] diff --git a/pygments/lexers/haskell.py b/pygments/lexers/haskell.py new file mode 100644 index 00000000..27593986 --- /dev/null +++ b/pygments/lexers/haskell.py @@ -0,0 +1,837 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.haskell + ~~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for Haskell and related languages. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import re + +from pygments.lexer import Lexer, RegexLexer, bygroups, do_insertions, \ + default, include +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation, Generic +from pygments import unistring as uni + +__all__ = ['HaskellLexer', 'IdrisLexer', 'AgdaLexer', 'CryptolLexer', + 'LiterateHaskellLexer', 'LiterateIdrisLexer', 'LiterateAgdaLexer', + 'LiterateCryptolLexer', 'KokaLexer'] + + +line_re = re.compile('.*?\n') + + +class HaskellLexer(RegexLexer): + """ + A Haskell lexer based on the lexemes defined in the Haskell 98 Report. + + .. 
versionadded:: 0.8 + """ + name = 'Haskell' + aliases = ['haskell', 'hs'] + filenames = ['*.hs'] + mimetypes = ['text/x-haskell'] + + flags = re.MULTILINE | re.UNICODE + + reserved = ('case', 'class', 'data', 'default', 'deriving', 'do', 'else', + 'if', 'in', 'infix[lr]?', 'instance', + 'let', 'newtype', 'of', 'then', 'type', 'where', '_') + ascii = ('NUL', 'SOH', '[SE]TX', 'EOT', 'ENQ', 'ACK', + 'BEL', 'BS', 'HT', 'LF', 'VT', 'FF', 'CR', 'S[OI]', 'DLE', + 'DC[1-4]', 'NAK', 'SYN', 'ETB', 'CAN', + 'EM', 'SUB', 'ESC', '[FGRU]S', 'SP', 'DEL') + + tokens = { + 'root': [ + # Whitespace: + (r'\s+', Text), + # (r'--\s*|.*$', Comment.Doc), + (r'--(?![!#$%&*+./<=>?@\^|_~:\\]).*?$', Comment.Single), + (r'{-', Comment.Multiline, 'comment'), + # Lexemes: + # Identifiers + (r'\bimport\b', Keyword.Reserved, 'import'), + (r'\bmodule\b', Keyword.Reserved, 'module'), + (r'\berror\b', Name.Exception), + (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved), + (r'^[_' + uni.Ll + r'][\w\']*', Name.Function), + (r"'?[_" + uni.Ll + r"'][\w']*", Name), + (r"('')?[" + uni.Lu + r"][\w\']*", Keyword.Type), + # Operators + (r'\\(?![:!#$%&*+.\\/<=>?@^|~-]+)', Name.Function), # lambda operator + (r'(<-|::|->|=>|=)(?![:!#$%&*+.\\/<=>?@^|~-]+)', Operator.Word), # specials + (r':[:!#$%&*+.\\/<=>?@^|~-]*', Keyword.Type), # Constructor operators + (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator), # Other operators + # Numbers + (r'\d+[eE][+-]?\d+', Number.Float), + (r'\d+\.\d+([eE][+-]?\d+)?', Number.Float), + (r'0[oO][0-7]+', Number.Oct), + (r'0[xX][\da-fA-F]+', Number.Hex), + (r'\d+', Number.Integer), + # Character/String Literals + (r"'", String.Char, 'character'), + (r'"', String, 'string'), + # Special + (r'\[\]', Keyword.Type), + (r'\(\)', Name.Builtin), + (r'[][(),;`{}]', Punctuation), + ], + 'import': [ + # Import statements + (r'\s+', Text), + (r'"', String, 'string'), + # after "funclist" state + (r'\)', Punctuation, '#pop'), + (r'qualified\b', Keyword), + # import X as Y + (r'([' + uni.Lu + r'][\w.]*)(\s+)(as)(\s+)([' + uni.Lu + r'][\w.]*)', + bygroups(Name.Namespace, Text, Keyword, Text, Name), '#pop'), + # import X hiding (functions) + (r'([' + uni.Lu + r'][\w.]*)(\s+)(hiding)(\s+)(\()', + bygroups(Name.Namespace, Text, Keyword, Text, Punctuation), 'funclist'), + # import X (functions) + (r'([' + uni.Lu + r'][\w.]*)(\s+)(\()', + bygroups(Name.Namespace, Text, Punctuation), 'funclist'), + # import X + (r'[\w.]+', Name.Namespace, '#pop'), + ], + 'module': [ + (r'\s+', Text), + (r'([' + uni.Lu + r'][\w.]*)(\s+)(\()', + bygroups(Name.Namespace, Text, Punctuation), 'funclist'), + (r'[' + uni.Lu + r'][\w.]*', Name.Namespace, '#pop'), + ], + 'funclist': [ + (r'\s+', Text), + (r'[' + uni.Lu + r']\w*', Keyword.Type), + (r'(_[\w\']+|[' + uni.Ll + r'][\w\']*)', Name.Function), + (r'--(?![!#$%&*+./<=>?@\^|_~:\\]).*?$', Comment.Single), + (r'{-', Comment.Multiline, 'comment'), + (r',', Punctuation), + (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator), + # (HACK, but it makes sense to push two instances, believe me) + (r'\(', Punctuation, ('funclist', 'funclist')), + (r'\)', Punctuation, '#pop:2'), + ], + # NOTE: the next four states are shared in the AgdaLexer; make sure + # any change is compatible with Agda as well or copy over and change + 'comment': [ + # Multiline Comments + (r'[^-{}]+', Comment.Multiline), + (r'{-', Comment.Multiline, '#push'), + (r'-}', Comment.Multiline, '#pop'), + (r'[-{}]', Comment.Multiline), + ], + 'character': [ + # Allows multi-chars, incorrectly. 
+ (r"[^\\']'", String.Char, '#pop'), + (r"\\", String.Escape, 'escape'), + ("'", String.Char, '#pop'), + ], + 'string': [ + (r'[^\\"]+', String), + (r"\\", String.Escape, 'escape'), + ('"', String, '#pop'), + ], + 'escape': [ + (r'[abfnrtv"\'&\\]', String.Escape, '#pop'), + (r'\^[][' + uni.Lu + r'@\^_]', String.Escape, '#pop'), + ('|'.join(ascii), String.Escape, '#pop'), + (r'o[0-7]+', String.Escape, '#pop'), + (r'x[\da-fA-F]+', String.Escape, '#pop'), + (r'\d+', String.Escape, '#pop'), + (r'\s+\\', String.Escape, '#pop'), + ], + } + + +class IdrisLexer(RegexLexer): + """ + A lexer for the dependently typed programming language Idris. + + Based on the Haskell and Agda Lexer. + + .. versionadded:: 2.0 + """ + name = 'Idris' + aliases = ['idris', 'idr'] + filenames = ['*.idr'] + mimetypes = ['text/x-idris'] + + reserved = ('case', 'class', 'data', 'default', 'using', 'do', 'else', + 'if', 'in', 'infix[lr]?', 'instance', 'rewrite', 'auto', + 'namespace', 'codata', 'mutual', 'private', 'public', 'abstract', + 'total', 'partial', + 'let', 'proof', 'of', 'then', 'static', 'where', '_', 'with', + 'pattern', 'term', 'syntax', 'prefix', + 'postulate', 'parameters', 'record', 'dsl', 'impossible', 'implicit', + 'tactics', 'intros', 'intro', 'compute', 'refine', 'exact', 'trivial') + + ascii = ('NUL', 'SOH', '[SE]TX', 'EOT', 'ENQ', 'ACK', + 'BEL', 'BS', 'HT', 'LF', 'VT', 'FF', 'CR', 'S[OI]', 'DLE', + 'DC[1-4]', 'NAK', 'SYN', 'ETB', 'CAN', + 'EM', 'SUB', 'ESC', '[FGRU]S', 'SP', 'DEL') + + directives = ('lib', 'link', 'flag', 'include', 'hide', 'freeze', 'access', + 'default', 'logging', 'dynamic', 'name', 'error_handlers', 'language') + + tokens = { + 'root': [ + # Comments + (r'^(\s*)(%%%s)' % '|'.join(directives), + bygroups(Text, Keyword.Reserved)), + (r'(\s*)(--(?![!#$%&*+./<=>?@\^|_~:\\]).*?)$', bygroups(Text, Comment.Single)), + (r'(\s*)(\|{3}.*?)$', bygroups(Text, Comment.Single)), + (r'(\s*)({-)', bygroups(Text, Comment.Multiline), 'comment'), + # Declaration + (r'^(\s*)([^\s\(\)\{\}]+)(\s*)(:)(\s*)', + bygroups(Text, Name.Function, Text, Operator.Word, Text)), + # Identifiers + (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved), + (r'(import|module)(\s+)', bygroups(Keyword.Reserved, Text), 'module'), + (r"('')?[A-Z][\w\']*", Keyword.Type), + (r'[a-z][\w\']*', Text), + # Special Symbols + (r'(<-|::|->|=>|=)', Operator.Word), # specials + (r'([\(\)\{\}\[\]:!#$%&*+.\\/<=>?@^|~-]+)', Operator.Word), # specials + # Numbers + (r'\d+[eE][+-]?\d+', Number.Float), + (r'\d+\.\d+([eE][+-]?\d+)?', Number.Float), + (r'0[xX][\da-fA-F]+', Number.Hex), + (r'\d+', Number.Integer), + # Strings + (r"'", String.Char, 'character'), + (r'"', String, 'string'), + (r'[^\s\(\)\{\}]+', Text), + (r'\s+?', Text), # Whitespace + ], + 'module': [ + (r'\s+', Text), + (r'([A-Z][\w.]*)(\s+)(\()', + bygroups(Name.Namespace, Text, Punctuation), 'funclist'), + (r'[A-Z][\w.]*', Name.Namespace, '#pop'), + ], + 'funclist': [ + (r'\s+', Text), + (r'[A-Z]\w*', Keyword.Type), + (r'(_[\w\']+|[a-z][\w\']*)', Name.Function), + (r'--.*$', Comment.Single), + (r'{-', Comment.Multiline, 'comment'), + (r',', Punctuation), + (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator), + # (HACK, but it makes sense to push two instances, believe me) + (r'\(', Punctuation, ('funclist', 'funclist')), + (r'\)', Punctuation, '#pop:2'), + ], + # NOTE: the next four states are shared in the AgdaLexer; make sure + # any change is compatible with Agda as well or copy over and change + 'comment': [ + # Multiline Comments + (r'[^-{}]+', Comment.Multiline), + (r'{-', 
Comment.Multiline, '#push'), + (r'-}', Comment.Multiline, '#pop'), + (r'[-{}]', Comment.Multiline), + ], + 'character': [ + # Allows multi-chars, incorrectly. + (r"[^\\']", String.Char), + (r"\\", String.Escape, 'escape'), + ("'", String.Char, '#pop'), + ], + 'string': [ + (r'[^\\"]+', String), + (r"\\", String.Escape, 'escape'), + ('"', String, '#pop'), + ], + 'escape': [ + (r'[abfnrtv"\'&\\]', String.Escape, '#pop'), + (r'\^[][A-Z@\^_]', String.Escape, '#pop'), + ('|'.join(ascii), String.Escape, '#pop'), + (r'o[0-7]+', String.Escape, '#pop'), + (r'x[\da-fA-F]+', String.Escape, '#pop'), + (r'\d+', String.Escape, '#pop'), + (r'\s+\\', String.Escape, '#pop') + ], + } + + +class AgdaLexer(RegexLexer): + """ + For the `Agda `_ + dependently typed functional programming language and proof assistant. + + .. versionadded:: 2.0 + """ + + name = 'Agda' + aliases = ['agda'] + filenames = ['*.agda'] + mimetypes = ['text/x-agda'] + + reserved = ['abstract', 'codata', 'coinductive', 'constructor', 'data', + 'field', 'forall', 'hiding', 'in', 'inductive', 'infix', + 'infixl', 'infixr', 'let', 'open', 'pattern', 'primitive', + 'private', 'mutual', 'quote', 'quoteGoal', 'quoteTerm', + 'record', 'syntax', 'rewrite', 'unquote', 'using', 'where', + 'with'] + + tokens = { + 'root': [ + # Declaration + (r'^(\s*)([^\s\(\)\{\}]+)(\s*)(:)(\s*)', + bygroups(Text, Name.Function, Text, Operator.Word, Text)), + # Comments + (r'--(?![!#$%&*+./<=>?@\^|_~:\\]).*?$', Comment.Single), + (r'{-', Comment.Multiline, 'comment'), + # Holes + (r'{!', Comment.Directive, 'hole'), + # Lexemes: + # Identifiers + (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved), + (r'(import|module)(\s+)', bygroups(Keyword.Reserved, Text), 'module'), + (r'\b(Set|Prop)\b', Keyword.Type), + # Special Symbols + (r'(\(|\)|\{|\})', Operator), + (u'(\\.{1,3}|\\||\u039B|\u2200|\u2192|:|=|->)', Operator.Word), + # Numbers + (r'\d+[eE][+-]?\d+', Number.Float), + (r'\d+\.\d+([eE][+-]?\d+)?', Number.Float), + (r'0[xX][\da-fA-F]+', Number.Hex), + (r'\d+', Number.Integer), + # Strings + (r"'", String.Char, 'character'), + (r'"', String, 'string'), + (r'[^\s\(\)\{\}]+', Text), + (r'\s+?', Text), # Whitespace + ], + 'hole': [ + # Holes + (r'[^!{}]+', Comment.Directive), + (r'{!', Comment.Directive, '#push'), + (r'!}', Comment.Directive, '#pop'), + (r'[!{}]', Comment.Directive), + ], + 'module': [ + (r'{-', Comment.Multiline, 'comment'), + (r'[a-zA-Z][\w.]*', Name, '#pop'), + (r'[^a-zA-Z]*', Text) + ], + 'comment': HaskellLexer.tokens['comment'], + 'character': HaskellLexer.tokens['character'], + 'string': HaskellLexer.tokens['string'], + 'escape': HaskellLexer.tokens['escape'] + } + + +class CryptolLexer(RegexLexer): + """ + FIXME: A Cryptol2 lexer based on the lexemes defined in the Haskell 98 Report. + + .. 
versionadded:: 2.0 + """ + name = 'Cryptol' + aliases = ['cryptol', 'cry'] + filenames = ['*.cry'] + mimetypes = ['text/x-cryptol'] + + reserved = ('Arith', 'Bit', 'Cmp', 'False', 'Inf', 'True', 'else', + 'export', 'extern', 'fin', 'if', 'import', 'inf', 'lg2', + 'max', 'min', 'module', 'newtype', 'pragma', 'property', + 'then', 'type', 'where', 'width') + ascii = ('NUL', 'SOH', '[SE]TX', 'EOT', 'ENQ', 'ACK', + 'BEL', 'BS', 'HT', 'LF', 'VT', 'FF', 'CR', 'S[OI]', 'DLE', + 'DC[1-4]', 'NAK', 'SYN', 'ETB', 'CAN', + 'EM', 'SUB', 'ESC', '[FGRU]S', 'SP', 'DEL') + + tokens = { + 'root': [ + # Whitespace: + (r'\s+', Text), + # (r'--\s*|.*$', Comment.Doc), + (r'//.*$', Comment.Single), + (r'/\*', Comment.Multiline, 'comment'), + # Lexemes: + # Identifiers + (r'\bimport\b', Keyword.Reserved, 'import'), + (r'\bmodule\b', Keyword.Reserved, 'module'), + (r'\berror\b', Name.Exception), + (r'\b(%s)(?!\')\b' % '|'.join(reserved), Keyword.Reserved), + (r'^[_a-z][\w\']*', Name.Function), + (r"'?[_a-z][\w']*", Name), + (r"('')?[A-Z][\w\']*", Keyword.Type), + # Operators + (r'\\(?![:!#$%&*+.\\/<=>?@^|~-]+)', Name.Function), # lambda operator + (r'(<-|::|->|=>|=)(?![:!#$%&*+.\\/<=>?@^|~-]+)', Operator.Word), # specials + (r':[:!#$%&*+.\\/<=>?@^|~-]*', Keyword.Type), # Constructor operators + (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator), # Other operators + # Numbers + (r'\d+[eE][+-]?\d+', Number.Float), + (r'\d+\.\d+([eE][+-]?\d+)?', Number.Float), + (r'0[oO][0-7]+', Number.Oct), + (r'0[xX][\da-fA-F]+', Number.Hex), + (r'\d+', Number.Integer), + # Character/String Literals + (r"'", String.Char, 'character'), + (r'"', String, 'string'), + # Special + (r'\[\]', Keyword.Type), + (r'\(\)', Name.Builtin), + (r'[][(),;`{}]', Punctuation), + ], + 'import': [ + # Import statements + (r'\s+', Text), + (r'"', String, 'string'), + # after "funclist" state + (r'\)', Punctuation, '#pop'), + (r'qualified\b', Keyword), + # import X as Y + (r'([A-Z][a-zA-Z0-9_.]*)(\s+)(as)(\s+)([A-Z][a-zA-Z0-9_.]*)', + bygroups(Name.Namespace, Text, Keyword, Text, Name), '#pop'), + # import X hiding (functions) + (r'([A-Z][a-zA-Z0-9_.]*)(\s+)(hiding)(\s+)(\()', + bygroups(Name.Namespace, Text, Keyword, Text, Punctuation), 'funclist'), + # import X (functions) + (r'([A-Z][a-zA-Z0-9_.]*)(\s+)(\()', + bygroups(Name.Namespace, Text, Punctuation), 'funclist'), + # import X + (r'[a-zA-Z0-9_.]+', Name.Namespace, '#pop'), + ], + 'module': [ + (r'\s+', Text), + (r'([A-Z][a-zA-Z0-9_.]*)(\s+)(\()', + bygroups(Name.Namespace, Text, Punctuation), 'funclist'), + (r'[A-Z][a-zA-Z0-9_.]*', Name.Namespace, '#pop'), + ], + 'funclist': [ + (r'\s+', Text), + (r'[A-Z][a-zA-Z0-9_]*', Keyword.Type), + (r'(_[\w\']+|[a-z][\w\']*)', Name.Function), + (r'--(?![!#$%&*+./<=>?@\^|_~:\\]).*?$', Comment.Single), + (r'{-', Comment.Multiline, 'comment'), + (r',', Punctuation), + (r'[:!#$%&*+.\\/<=>?@^|~-]+', Operator), + # (HACK, but it makes sense to push two instances, believe me) + (r'\(', Punctuation, ('funclist', 'funclist')), + (r'\)', Punctuation, '#pop:2'), + ], + 'comment': [ + # Multiline Comments + (r'[^/\*]+', Comment.Multiline), + (r'/\*', Comment.Multiline, '#push'), + (r'\*/', Comment.Multiline, '#pop'), + (r'[\*/]', Comment.Multiline), + ], + 'character': [ + # Allows multi-chars, incorrectly. 
+ (r"[^\\']'", String.Char, '#pop'), + (r"\\", String.Escape, 'escape'), + ("'", String.Char, '#pop'), + ], + 'string': [ + (r'[^\\"]+', String), + (r"\\", String.Escape, 'escape'), + ('"', String, '#pop'), + ], + 'escape': [ + (r'[abfnrtv"\'&\\]', String.Escape, '#pop'), + (r'\^[][A-Z@\^_]', String.Escape, '#pop'), + ('|'.join(ascii), String.Escape, '#pop'), + (r'o[0-7]+', String.Escape, '#pop'), + (r'x[\da-fA-F]+', String.Escape, '#pop'), + (r'\d+', String.Escape, '#pop'), + (r'\s+\\', String.Escape, '#pop'), + ], + } + + EXTRA_KEYWORDS = set(('join', 'split', 'reverse', 'transpose', 'width', + 'length', 'tail', '<<', '>>', '<<<', '>>>', 'const', + 'reg', 'par', 'seq', 'ASSERT', 'undefined', 'error', + 'trace')) + + def get_tokens_unprocessed(self, text): + stack = ['root'] + for index, token, value in \ + RegexLexer.get_tokens_unprocessed(self, text, stack): + if token is Name and value in self.EXTRA_KEYWORDS: + yield index, Name.Builtin, value + else: + yield index, token, value + + +class LiterateLexer(Lexer): + """ + Base class for lexers of literate file formats based on LaTeX or Bird-style + (prefixing each code line with ">"). + + Additional options accepted: + + `litstyle` + If given, must be ``"bird"`` or ``"latex"``. If not given, the style + is autodetected: if the first non-whitespace character in the source + is a backslash or percent character, LaTeX is assumed, else Bird. + """ + + bird_re = re.compile(r'(>[ \t]*)(.*\n)') + + def __init__(self, baselexer, **options): + self.baselexer = baselexer + Lexer.__init__(self, **options) + + def get_tokens_unprocessed(self, text): + style = self.options.get('litstyle') + if style is None: + style = (text.lstrip()[0:1] in '%\\') and 'latex' or 'bird' + + code = '' + insertions = [] + if style == 'bird': + # bird-style + for match in line_re.finditer(text): + line = match.group() + m = self.bird_re.match(line) + if m: + insertions.append((len(code), + [(0, Comment.Special, m.group(1))])) + code += m.group(2) + else: + insertions.append((len(code), [(0, Text, line)])) + else: + # latex-style + from pygments.lexers.text import TexLexer + lxlexer = TexLexer(**self.options) + codelines = 0 + latex = '' + for match in line_re.finditer(text): + line = match.group() + if codelines: + if line.lstrip().startswith('\\end{code}'): + codelines = 0 + latex += line + else: + code += line + elif line.lstrip().startswith('\\begin{code}'): + codelines = 1 + latex += line + insertions.append((len(code), + list(lxlexer.get_tokens_unprocessed(latex)))) + latex = '' + else: + latex += line + insertions.append((len(code), + list(lxlexer.get_tokens_unprocessed(latex)))) + for item in do_insertions(insertions, self.baselexer.get_tokens_unprocessed(code)): + yield item + + +class LiterateHaskellLexer(LiterateLexer): + """ + For Literate Haskell (Bird-style or LaTeX) source. + + Additional options accepted: + + `litstyle` + If given, must be ``"bird"`` or ``"latex"``. If not given, the style + is autodetected: if the first non-whitespace character in the source + is a backslash or percent character, LaTeX is assumed, else Bird. + + .. versionadded:: 0.9 + """ + name = 'Literate Haskell' + aliases = ['lhs', 'literate-haskell', 'lhaskell'] + filenames = ['*.lhs'] + mimetypes = ['text/x-literate-haskell'] + + def __init__(self, **options): + hslexer = HaskellLexer(**options) + LiterateLexer.__init__(self, hslexer, **options) + + +class LiterateIdrisLexer(LiterateLexer): + """ + For Literate Idris (Bird-style or LaTeX) source. 
+ + Additional options accepted: + + `litstyle` + If given, must be ``"bird"`` or ``"latex"``. If not given, the style + is autodetected: if the first non-whitespace character in the source + is a backslash or percent character, LaTeX is assumed, else Bird. + + .. versionadded:: 2.0 + """ + name = 'Literate Idris' + aliases = ['lidr', 'literate-idris', 'lidris'] + filenames = ['*.lidr'] + mimetypes = ['text/x-literate-idris'] + + def __init__(self, **options): + hslexer = IdrisLexer(**options) + LiterateLexer.__init__(self, hslexer, **options) + + +class LiterateAgdaLexer(LiterateLexer): + """ + For Literate Agda source. + + Additional options accepted: + + `litstyle` + If given, must be ``"bird"`` or ``"latex"``. If not given, the style + is autodetected: if the first non-whitespace character in the source + is a backslash or percent character, LaTeX is assumed, else Bird. + + .. versionadded:: 2.0 + """ + name = 'Literate Agda' + aliases = ['lagda', 'literate-agda'] + filenames = ['*.lagda'] + mimetypes = ['text/x-literate-agda'] + + def __init__(self, **options): + agdalexer = AgdaLexer(**options) + LiterateLexer.__init__(self, agdalexer, litstyle='latex', **options) + + +class LiterateCryptolLexer(LiterateLexer): + """ + For Literate Cryptol (Bird-style or LaTeX) source. + + Additional options accepted: + + `litstyle` + If given, must be ``"bird"`` or ``"latex"``. If not given, the style + is autodetected: if the first non-whitespace character in the source + is a backslash or percent character, LaTeX is assumed, else Bird. + + .. versionadded:: 2.0 + """ + name = 'Literate Cryptol' + aliases = ['lcry', 'literate-cryptol', 'lcryptol'] + filenames = ['*.lcry'] + mimetypes = ['text/x-literate-cryptol'] + + def __init__(self, **options): + crylexer = CryptolLexer(**options) + LiterateLexer.__init__(self, crylexer, **options) + + +class KokaLexer(RegexLexer): + """ + Lexer for the `Koka `_ + language. + + .. versionadded:: 1.6 + """ + + name = 'Koka' + aliases = ['koka'] + filenames = ['*.kk', '*.kki'] + mimetypes = ['text/x-koka'] + + keywords = [ + 'infix', 'infixr', 'infixl', + 'type', 'cotype', 'rectype', 'alias', + 'struct', 'con', + 'fun', 'function', 'val', 'var', + 'external', + 'if', 'then', 'else', 'elif', 'return', 'match', + 'private', 'public', 'private', + 'module', 'import', 'as', + 'include', 'inline', + 'rec', + 'try', 'yield', 'enum', + 'interface', 'instance', + ] + + # keywords that are followed by a type + typeStartKeywords = [ + 'type', 'cotype', 'rectype', 'alias', 'struct', 'enum', + ] + + # keywords valid in a type + typekeywords = [ + 'forall', 'exists', 'some', 'with', + ] + + # builtin names and special names + builtin = [ + 'for', 'while', 'repeat', + 'foreach', 'foreach-indexed', + 'error', 'catch', 'finally', + 'cs', 'js', 'file', 'ref', 'assigned', + ] + + # symbols that can be in an operator + symbols = '[\$%&\*\+@!/\\\^~=\.:\-\?\|<>]+' + + # symbol boundary: an operator keyword should not be followed by any of these + sboundary = '(?!'+symbols+')' + + # name boundary: a keyword should not be followed by any of these + boundary = '(?![\w/])' + + # koka token abstractions + tokenType = Name.Attribute + tokenTypeDef = Name.Class + tokenConstructor = Generic.Emph + + # main lexer + tokens = { + 'root': [ + include('whitespace'), + + # go into type mode + (r'::?' 
+ sboundary, tokenType, 'type'), + (r'(alias)(\s+)([a-z]\w*)?', bygroups(Keyword, Text, tokenTypeDef), + 'alias-type'), + (r'(struct)(\s+)([a-z]\w*)?', bygroups(Keyword, Text, tokenTypeDef), + 'struct-type'), + ((r'(%s)' % '|'.join(typeStartKeywords)) + + r'(\s+)([a-z]\w*)?', bygroups(Keyword, Text, tokenTypeDef), + 'type'), + + # special sequences of tokens (we use ?: for non-capturing group as + # required by 'bygroups') + (r'(module)(\s+)(interface\s+)?((?:[a-z]\w*/)*[a-z]\w*)', + bygroups(Keyword, Text, Keyword, Name.Namespace)), + (r'(import)(\s+)((?:[a-z]\w*/)*[a-z]\w*)' + r'(?:(\s*)(=)(\s*)((?:qualified\s*)?)' + r'((?:[a-z]\w*/)*[a-z]\w*))?', + bygroups(Keyword, Text, Name.Namespace, Text, Keyword, Text, + Keyword, Name.Namespace)), + + (r'(^(?:(?:public|private)\s*)?(?:function|fun|val))' + r'(\s+)([a-z]\w*|\((?:' + symbols + r'|/)\))', + bygroups(Keyword, Text, Name.Function)), + (r'(^(?:(?:public|private)\s*)?external)(\s+)(inline\s+)?' + r'([a-z]\w*|\((?:' + symbols + r'|/)\))', + bygroups(Keyword, Text, Keyword, Name.Function)), + + # keywords + (r'(%s)' % '|'.join(typekeywords) + boundary, Keyword.Type), + (r'(%s)' % '|'.join(keywords) + boundary, Keyword), + (r'(%s)' % '|'.join(builtin) + boundary, Keyword.Pseudo), + (r'::?|:=|\->|[=\.]' + sboundary, Keyword), + + # names + (r'((?:[a-z]\w*/)*)([A-Z]\w*)', + bygroups(Name.Namespace, tokenConstructor)), + (r'((?:[a-z]\w*/)*)([a-z]\w*)', bygroups(Name.Namespace, Name)), + (r'((?:[a-z]\w*/)*)(\((?:' + symbols + r'|/)\))', + bygroups(Name.Namespace, Name)), + (r'_\w*', Name.Variable), + + # literal string + (r'@"', String.Double, 'litstring'), + + # operators + (symbols + "|/(?![\*/])", Operator), + (r'`', Operator), + (r'[\{\}\(\)\[\];,]', Punctuation), + + # literals. No check for literal characters with len > 1 + (r'[0-9]+\.[0-9]+([eE][\-\+]?[0-9]+)?', Number.Float), + (r'0[xX][0-9a-fA-F]+', Number.Hex), + (r'[0-9]+', Number.Integer), + + (r"'", String.Char, 'char'), + (r'"', String.Double, 'string'), + ], + + # type started by alias + 'alias-type': [ + (r'=', Keyword), + include('type') + ], + + # type started by struct + 'struct-type': [ + (r'(?=\((?!,*\)))', Punctuation, '#pop'), + include('type') + ], + + # type started by colon + 'type': [ + (r'[\(\[<]', tokenType, 'type-nested'), + include('type-content') + ], + + # type nested in brackets: can contain parameters, comma etc. + 'type-nested': [ + (r'[\)\]>]', tokenType, '#pop'), + (r'[\(\[<]', tokenType, 'type-nested'), + (r',', tokenType), + (r'([a-z]\w*)(\s*)(:)(?!:)', + bygroups(Name, Text, tokenType)), # parameter name + include('type-content') + ], + + # shared contents of a type + 'type-content': [ + include('whitespace'), + + # keywords + (r'(%s)' % '|'.join(typekeywords) + boundary, Keyword), + (r'(?=((%s)' % '|'.join(keywords) + boundary + '))', + Keyword, '#pop'), # need to match because names overlap... 
+ + # kinds + (r'[EPHVX]' + boundary, tokenType), + + # type names + (r'[a-z][0-9]*(?![\w/])', tokenType), + (r'_\w*', tokenType.Variable), # Generic.Emph + (r'((?:[a-z]\w*/)*)([A-Z]\w*)', + bygroups(Name.Namespace, tokenType)), + (r'((?:[a-z]\w*/)*)([a-z]\w+)', + bygroups(Name.Namespace, tokenType)), + + # type keyword operators + (r'::|\->|[\.:|]', tokenType), + + # catchall + default('#pop') + ], + + # comments and literals + 'whitespace': [ + (r'\n\s*#.*$', Comment.Preproc), + (r'\s+', Text), + (r'/\*', Comment.Multiline, 'comment'), + (r'//.*$', Comment.Single) + ], + 'comment': [ + (r'[^/\*]+', Comment.Multiline), + (r'/\*', Comment.Multiline, '#push'), + (r'\*/', Comment.Multiline, '#pop'), + (r'[\*/]', Comment.Multiline), + ], + 'litstring': [ + (r'[^"]+', String.Double), + (r'""', String.Escape), + (r'"', String.Double, '#pop'), + ], + 'string': [ + (r'[^\\"\n]+', String.Double), + include('escape-sequence'), + (r'["\n]', String.Double, '#pop'), + ], + 'char': [ + (r'[^\\\'\n]+', String.Char), + include('escape-sequence'), + (r'[\'\n]', String.Char, '#pop'), + ], + 'escape-sequence': [ + (r'\\[nrt\\\"\']', String.Escape), + (r'\\x[0-9a-fA-F]{2}', String.Escape), + (r'\\u[0-9a-fA-F]{4}', String.Escape), + # Yes, \U literals are 6 hex digits. + (r'\\U[0-9a-fA-F]{6}', String.Escape) + ] + } diff --git a/pygments/lexers/lisp.py b/pygments/lexers/lisp.py index f09f2c8e..a1a8cbef 100644 --- a/pygments/lexers/lisp.py +++ b/pygments/lexers/lisp.py @@ -9,13 +9,337 @@ :license: BSD, see LICENSE for details. """ -from pygments.lexer import RegexLexer, include, bygroups, words +import re + +from pygments.lexer import RegexLexer, include, bygroups, words, default from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ - Number, Punctuation + Number, Punctuation, Literal, Error from pygments.lexers.python import PythonLexer -__all__ = ['HyLexer'] +__all__ = ['SchemeLexer', 'CommonLispLexer', 'HyLexer', 'RacketLexer', + 'NewLispLexer'] + + +class SchemeLexer(RegexLexer): + """ + A Scheme lexer, parsing a stream and outputting the tokens + needed to highlight scheme code. + This lexer could be most probably easily subclassed to parse + other LISP-Dialects like Common Lisp, Emacs Lisp or AutoLisp. + + This parser is checked with pastes from the LISP pastebin + at http://paste.lisp.org/ to cover as much syntax as possible. + + It supports the full Scheme syntax as defined in R5RS. + + .. versionadded:: 0.6 + """ + name = 'Scheme' + aliases = ['scheme', 'scm'] + filenames = ['*.scm', '*.ss'] + mimetypes = ['text/x-scheme', 'application/x-scheme'] + + # list of known keywords and builtins taken form vim 6.4 scheme.vim + # syntax file. 
+    keywords = (
+        'lambda', 'define', 'if', 'else', 'cond', 'and', 'or', 'case', 'let',
+        'let*', 'letrec', 'begin', 'do', 'delay', 'set!', '=>', 'quote',
+        'quasiquote', 'unquote', 'unquote-splicing', 'define-syntax',
+        'let-syntax', 'letrec-syntax', 'syntax-rules'
+    )
+    builtins = (
+        '*', '+', '-', '/', '<', '<=', '=', '>', '>=', 'abs', 'acos', 'angle',
+        'append', 'apply', 'asin', 'assoc', 'assq', 'assv', 'atan',
+        'boolean?', 'caaaar', 'caaadr', 'caaar', 'caadar', 'caaddr', 'caadr',
+        'caar', 'cadaar', 'cadadr', 'cadar', 'caddar', 'cadddr', 'caddr',
+        'cadr', 'call-with-current-continuation', 'call-with-input-file',
+        'call-with-output-file', 'call-with-values', 'call/cc', 'car',
+        'cdaaar', 'cdaadr', 'cdaar', 'cdadar', 'cdaddr', 'cdadr', 'cdar',
+        'cddaar', 'cddadr', 'cddar', 'cdddar', 'cddddr', 'cdddr', 'cddr',
+        'cdr', 'ceiling', 'char->integer', 'char-alphabetic?', 'char-ci<=?',
+        'char-ci<?', 'char-ci=?', 'char-ci>=?', 'char-ci>?', 'char-downcase',
+        'char-lower-case?', 'char-numeric?', 'char-ready?', 'char-upcase',
+        'char-upper-case?', 'char-whitespace?', 'char<=?', 'char<?', 'char=?',
+        'char>=?', 'char>?', 'char?', 'close-input-port', 'close-output-port',
+        'complex?', 'cons', 'cos', 'current-input-port', 'current-output-port',
+        'denominator', 'display', 'dynamic-wind', 'eof-object?', 'eq?',
+        'equal?', 'eqv?', 'eval', 'even?', 'exact->inexact', 'exact?', 'exp',
+        'expt', 'floor', 'for-each', 'force', 'gcd', 'imag-part',
+        'inexact->exact', 'inexact?', 'input-port?', 'integer->char',
+        'integer?', 'interaction-environment', 'lcm', 'length', 'list',
+        'list->string', 'list->vector', 'list-ref', 'list-tail', 'list?',
+        'load', 'log', 'magnitude', 'make-polar', 'make-rectangular',
+        'make-string', 'make-vector', 'map', 'max', 'member', 'memq', 'memv',
+        'min', 'modulo', 'negative?', 'newline', 'not', 'null-environment',
+        'null?', 'number->string', 'number?', 'numerator', 'odd?',
+        'open-input-file', 'open-output-file', 'output-port?', 'pair?',
+        'peek-char', 'port?', 'positive?', 'procedure?', 'quotient',
+        'rational?', 'rationalize', 'read', 'read-char', 'real-part', 'real?',
+        'remainder', 'reverse', 'round', 'scheme-report-environment',
+        'set-car!', 'set-cdr!', 'sin', 'sqrt', 'string', 'string->list',
+        'string->number', 'string->symbol', 'string-append', 'string-ci<=?',
+        'string-ci<?', 'string-ci=?', 'string-ci>=?', 'string-ci>?',
+        'string-copy', 'string-fill!', 'string-length', 'string-ref',
+        'string-set!', 'string<=?', 'string<?', 'string=?', 'string>=?',
+        'string>?', 'string?', 'substring', 'symbol->string', 'symbol?',
+        'tan', 'transcript-off', 'transcript-on', 'truncate', 'values',
+        'vector', 'vector->list', 'vector-fill!', 'vector-length',
+        'vector-ref', 'vector-set!', 'vector?', 'with-input-from-file',
+        'with-output-to-file', 'write', 'write-char', 'zero?'
+ ) + + # valid names for identifiers + # well, names can only not consist fully of numbers + # but this should be good enough for now + valid_name = r'[a-zA-Z0-9!$%&*+,/:<=>?@^_~|-]+' + + tokens = { + 'root': [ + # the comments + # and going to the end of the line + (r';.*$', Comment.Single), + # multi-line comment + (r'#\|', Comment.Multiline, 'multiline-comment'), + # commented form (entire sexpr folliwng) + (r'#;\s*\(', Comment, 'commented-form'), + # signifies that the program text that follows is written with the + # lexical and datum syntax described in r6rs + (r'#!r6rs', Comment), + + # whitespaces - usually not relevant + (r'\s+', Text), + + # numbers + (r'-?\d+\.\d+', Number.Float), + (r'-?\d+', Number.Integer), + # support for uncommon kinds of numbers - + # have to figure out what the characters mean + # (r'(#e|#i|#b|#o|#d|#x)[\d.]+', Number), + + # strings, symbols and characters + (r'"(\\\\|\\"|[^"])*"', String), + (r"'" + valid_name, String.Symbol), + (r"#\\([()/'\"._!§$%& ?=+-]{1}|[a-zA-Z0-9]+)", String.Char), + + # constants + (r'(#t|#f)', Name.Constant), + + # special operators + (r"('|#|`|,@|,|\.)", Operator), + + # highlight the keywords + ('(%s)' % '|'.join(re.escape(entry) + ' ' for entry in keywords), + Keyword), + + # first variable in a quoted string like + # '(this is syntactic sugar) + (r"(?<='\()" + valid_name, Name.Variable), + (r"(?<=#\()" + valid_name, Name.Variable), + + # highlight the builtins + ("(?<=\()(%s)" % '|'.join(re.escape(entry) + ' ' for entry in builtins), + Name.Builtin), + + # the remaining functions + (r'(?<=\()' + valid_name, Name.Function), + # find the remaining variables + (valid_name, Name.Variable), + + # the famous parentheses! + (r'(\(|\))', Punctuation), + (r'(\[|\])', Punctuation), + ], + 'multiline-comment': [ + (r'#\|', Comment.Multiline, '#push'), + (r'\|#', Comment.Multiline, '#pop'), + (r'[^|#]+', Comment.Multiline), + (r'[|#]', Comment.Multiline), + ], + 'commented-form': [ + (r'\(', Comment, '#push'), + (r'\)', Comment, '#pop'), + (r'[^()]+', Comment), + ], + } + + +class CommonLispLexer(RegexLexer): + """ + A Common Lisp lexer. + + .. versionadded:: 0.9 + """ + name = 'Common Lisp' + aliases = ['common-lisp', 'cl', 'lisp', 'elisp', 'emacs', 'emacs-lisp'] + filenames = ['*.cl', '*.lisp', '*.el'] # use for Elisp too + mimetypes = ['text/x-common-lisp'] + + flags = re.IGNORECASE | re.MULTILINE + + # couple of useful regexes + + # characters that are not macro-characters and can be used to begin a symbol + nonmacro = r'\\.|[\w!$%&*+-/<=>?@\[\]^{}~]' + constituent = nonmacro + '|[#.:]' + terminated = r'(?=[ "()\'\n,;`])' # whitespace or terminating macro characters + + # symbol token, reverse-engineered from hyperspec + # Take a deep breath... 
+ symbol = r'(\|[^|]+\||(?:%s)(?:%s)*)' % (nonmacro, constituent) + + def __init__(self, **options): + from pygments.lexers._clbuiltins import BUILTIN_FUNCTIONS, \ + SPECIAL_FORMS, MACROS, LAMBDA_LIST_KEYWORDS, DECLARATIONS, \ + BUILTIN_TYPES, BUILTIN_CLASSES + self.builtin_function = BUILTIN_FUNCTIONS + self.special_forms = SPECIAL_FORMS + self.macros = MACROS + self.lambda_list_keywords = LAMBDA_LIST_KEYWORDS + self.declarations = DECLARATIONS + self.builtin_types = BUILTIN_TYPES + self.builtin_classes = BUILTIN_CLASSES + RegexLexer.__init__(self, **options) + + def get_tokens_unprocessed(self, text): + stack = ['root'] + for index, token, value in RegexLexer.get_tokens_unprocessed(self, text, stack): + if token is Name.Variable: + if value in self.builtin_function: + yield index, Name.Builtin, value + continue + if value in self.special_forms: + yield index, Keyword, value + continue + if value in self.macros: + yield index, Name.Builtin, value + continue + if value in self.lambda_list_keywords: + yield index, Keyword, value + continue + if value in self.declarations: + yield index, Keyword, value + continue + if value in self.builtin_types: + yield index, Keyword.Type, value + continue + if value in self.builtin_classes: + yield index, Name.Class, value + continue + yield index, token, value + + tokens = { + 'root': [ + ('', Text, 'body'), + ], + 'multiline-comment': [ + (r'#\|', Comment.Multiline, '#push'), # (cf. Hyperspec 2.4.8.19) + (r'\|#', Comment.Multiline, '#pop'), + (r'[^|#]+', Comment.Multiline), + (r'[|#]', Comment.Multiline), + ], + 'commented-form': [ + (r'\(', Comment.Preproc, '#push'), + (r'\)', Comment.Preproc, '#pop'), + (r'[^()]+', Comment.Preproc), + ], + 'body': [ + # whitespace + (r'\s+', Text), + + # single-line comment + (r';.*$', Comment.Single), + + # multi-line comment + (r'#\|', Comment.Multiline, 'multiline-comment'), + + # encoding comment (?) + (r'#\d*Y.*$', Comment.Special), + + # strings and characters + (r'"(\\.|\\\n|[^"\\])*"', String), + # quoting + (r":" + symbol, String.Symbol), + (r"::" + symbol, String.Symbol), + (r":#" + symbol, String.Symbol), + (r"'" + symbol, String.Symbol), + (r"'", Operator), + (r"`", Operator), + + # decimal numbers + (r'[-+]?\d+\.?' + terminated, Number.Integer), + (r'[-+]?\d+/\d+' + terminated, Number), + (r'[-+]?(\d*\.\d+([defls][-+]?\d+)?|\d+(\.\d*)?[defls][-+]?\d+)' + + terminated, Number.Float), + + # sharpsign strings and characters + (r"#\\." 
+ terminated, String.Char), + (r"#\\" + symbol, String.Char), + + # vector + (r'#\(', Operator, 'body'), + + # bitstring + (r'#\d*\*[01]*', Literal.Other), + + # uninterned symbol + (r'#:' + symbol, String.Symbol), + + # read-time and load-time evaluation + (r'#[.,]', Operator), + + # function shorthand + (r'#\'', Name.Function), + + # binary rational + (r'#[bB][+-]?[01]+(/[01]+)?', Number.Bin), + + # octal rational + (r'#[oO][+-]?[0-7]+(/[0-7]+)?', Number.Oct), + + # hex rational + (r'#[xX][+-]?[0-9a-f]+(/[0-9a-f]+)?', Number.Hex), + + # radix rational + (r'#\d+[rR][+-]?[0-9a-z]+(/[0-9a-z]+)?', Number), + + # complex + (r'(#[cC])(\()', bygroups(Number, Punctuation), 'body'), + + # array + (r'(#\d+[aA])(\()', bygroups(Literal.Other, Punctuation), 'body'), + + # structure + (r'(#[sS])(\()', bygroups(Literal.Other, Punctuation), 'body'), + + # path + (r'#[pP]?"(\\.|[^"])*"', Literal.Other), + + # reference + (r'#\d+=', Operator), + (r'#\d+#', Operator), + + # read-time comment + (r'#+nil' + terminated + '\s*\(', Comment.Preproc, 'commented-form'), + + # read-time conditional + (r'#[+-]', Operator), + + # special operators that should have been parsed already + (r'(,@|,|\.)', Operator), + + # special constants + (r'(t|nil)' + terminated, Name.Constant), + + # functions and variables + (r'\*' + symbol + '\*', Name.Variable.Global), + (symbol, Name.Variable), + + # parentheses + (r'\(', Punctuation, 'body'), + (r'\)', Punctuation, '#pop'), + ], + } class HyLexer(RegexLexer): @@ -129,3 +453,1028 @@ class HyLexer(RegexLexer): def analyse_text(text): if '(import ' in text or '(defn ' in text: return 0.9 + + +class RacketLexer(RegexLexer): + """ + Lexer for `Racket `_ source code (formerly + known as PLT Scheme). + + .. versionadded:: 1.6 + """ + + name = 'Racket' + aliases = ['racket', 'rkt'] + filenames = ['*.rkt', '*.rktd', '*.rktl'] + mimetypes = ['text/x-racket', 'application/x-racket'] + + # Generated by example.rkt + _keywords = ( + '#%app', '#%datum', '#%declare', '#%expression', '#%module-begin', + '#%plain-app', '#%plain-lambda', '#%plain-module-begin', + '#%printing-module-begin', '#%provide', '#%require', + '#%stratified-body', '#%top', '#%top-interaction', + '#%variable-reference', '->', '->*', '->*m', '->d', '->dm', '->i', + '->m', '...', ':do-in', '==', '=>', '_', 'absent', 'abstract', + 'all-defined-out', 'all-from-out', 'and', 'any', 'augment', 'augment*', + 'augment-final', 'augment-final*', 'augride', 'augride*', 'begin', + 'begin-for-syntax', 'begin0', 'case', 'case->', 'case->m', + 'case-lambda', 'class', 'class*', 'class-field-accessor', + 'class-field-mutator', 'class/c', 'class/derived', 'combine-in', + 'combine-out', 'command-line', 'compound-unit', 'compound-unit/infer', + 'cond', 'contract', 'contract-out', 'contract-struct', 'contracted', + 'define', 'define-compound-unit', 'define-compound-unit/infer', + 'define-contract-struct', 'define-custom-hash-types', + 'define-custom-set-types', 'define-for-syntax', + 'define-local-member-name', 'define-logger', 'define-match-expander', + 'define-member-name', 'define-module-boundary-contract', + 'define-namespace-anchor', 'define-opt/c', 'define-sequence-syntax', + 'define-serializable-class', 'define-serializable-class*', + 'define-signature', 'define-signature-form', 'define-struct', + 'define-struct/contract', 'define-struct/derived', 'define-syntax', + 'define-syntax-rule', 'define-syntaxes', 'define-unit', + 'define-unit-binding', 'define-unit-from-context', + 'define-unit/contract', 'define-unit/new-import-export', + 
'define-unit/s', 'define-values', 'define-values-for-export', + 'define-values-for-syntax', 'define-values/invoke-unit', + 'define-values/invoke-unit/infer', 'define/augment', + 'define/augment-final', 'define/augride', 'define/contract', + 'define/final-prop', 'define/match', 'define/overment', + 'define/override', 'define/override-final', 'define/private', + 'define/public', 'define/public-final', 'define/pubment', + 'define/subexpression-pos-prop', 'delay', 'delay/idle', 'delay/name', + 'delay/strict', 'delay/sync', 'delay/thread', 'do', 'else', 'except', + 'except-in', 'except-out', 'export', 'extends', 'failure-cont', + 'false', 'false/c', 'field', 'field-bound?', 'file', + 'flat-murec-contract', 'flat-rec-contract', 'for', 'for*', 'for*/and', + 'for*/first', 'for*/fold', 'for*/fold/derived', 'for*/hash', + 'for*/hasheq', 'for*/hasheqv', 'for*/last', 'for*/list', 'for*/lists', + 'for*/mutable-set', 'for*/mutable-seteq', 'for*/mutable-seteqv', + 'for*/or', 'for*/product', 'for*/set', 'for*/seteq', 'for*/seteqv', + 'for*/sum', 'for*/vector', 'for*/weak-set', 'for*/weak-seteq', + 'for*/weak-seteqv', 'for-label', 'for-meta', 'for-syntax', + 'for-template', 'for/and', 'for/first', 'for/fold', 'for/fold/derived', + 'for/hash', 'for/hasheq', 'for/hasheqv', 'for/last', 'for/list', + 'for/lists', 'for/mutable-set', 'for/mutable-seteq', + 'for/mutable-seteqv', 'for/or', 'for/product', 'for/set', 'for/seteq', + 'for/seteqv', 'for/sum', 'for/vector', 'for/weak-set', + 'for/weak-seteq', 'for/weak-seteqv', 'gen:custom-write', 'gen:dict', + 'gen:equal+hash', 'gen:set', 'gen:stream', 'generic', 'get-field', + 'if', 'implies', 'import', 'include', 'include-at/relative-to', + 'include-at/relative-to/reader', 'include/reader', 'inherit', + 'inherit-field', 'inherit/inner', 'inherit/super', 'init', + 'init-depend', 'init-field', 'init-rest', 'inner', 'inspect', + 'instantiate', 'interface', 'interface*', 'invoke-unit', + 'invoke-unit/infer', 'lambda', 'lazy', 'let', 'let*', 'let*-values', + 'let-syntax', 'let-syntaxes', 'let-values', 'let/cc', 'let/ec', + 'letrec', 'letrec-syntax', 'letrec-syntaxes', 'letrec-syntaxes+values', + 'letrec-values', 'lib', 'link', 'local', 'local-require', 'log-debug', + 'log-error', 'log-fatal', 'log-info', 'log-warning', 'match', 'match*', + 'match*/derived', 'match-define', 'match-define-values', + 'match-lambda', 'match-lambda*', 'match-lambda**', 'match-let', + 'match-let*', 'match-let*-values', 'match-let-values', 'match-letrec', + 'match/derived', 'match/values', 'member-name-key', 'method-contract?', + 'mixin', 'module', 'module*', 'module+', 'nand', 'new', 'nor', + 'object-contract', 'object/c', 'only', 'only-in', 'only-meta-in', + 'open', 'opt/c', 'or', 'overment', 'overment*', 'override', + 'override*', 'override-final', 'override-final*', 'parameterize', + 'parameterize*', 'parameterize-break', 'parametric->/c', 'place', + 'place*', 'planet', 'prefix', 'prefix-in', 'prefix-out', 'private', + 'private*', 'prompt-tag/c', 'protect-out', 'provide', + 'provide-signature-elements', 'provide/contract', 'public', 'public*', + 'public-final', 'public-final*', 'pubment', 'pubment*', 'quasiquote', + 'quasisyntax', 'quasisyntax/loc', 'quote', 'quote-syntax', + 'quote-syntax/prune', 'recontract-out', 'recursive-contract', + 'relative-in', 'rename', 'rename-in', 'rename-inner', 'rename-out', + 'rename-super', 'require', 'send', 'send*', 'send+', 'send-generic', + 'send/apply', 'send/keyword-apply', 'set!', 'set!-values', + 'set-field!', 'shared', 'stream', 'stream-cons', 
'struct', 'struct*',
+        'struct-copy', 'struct-field-index', 'struct-out', 'struct/c',
+        'struct/ctc', 'struct/dc', 'submod', 'super', 'super-instantiate',
+        'super-make-object', 'super-new', 'syntax', 'syntax-case',
+        'syntax-case*', 'syntax-id-rules', 'syntax-rules', 'syntax/loc', 'tag',
+        'this', 'this%', 'thunk', 'thunk*', 'time', 'unconstrained-domain->',
+        'unit', 'unit-from-context', 'unit/c', 'unit/new-import-export',
+        'unit/s', 'unless', 'unquote', 'unquote-splicing', 'unsyntax',
+        'unsyntax-splicing', 'values/drop', 'when', 'with-continuation-mark',
+        'with-contract', 'with-handlers', 'with-handlers*', 'with-method',
+        'with-syntax', u'λ'
+    )
+
+    # Generated by example.rkt
+    _builtins = (
+        '*', '+', '-', '/', '<', '</c', '<=', '<=/c', '=', '=/c', '>', '>/c',
+        '>=', '>=/c', 'abort-current-continuation', 'abs', 'absolute-path?',
+        'acos', 'add-between', 'add1', 'alarm-evt', 'always-evt', 'and/c',
+        'andmap', 'angle', 'any/c', 'append', 'append*', 'append-map', 'apply',
+        'argmax', 'argmin', 'arithmetic-shift', 'arity-at-least',
+        'arity-at-least-value', 'arity-at-least?', 'arity-checking-wrapper',
+        'arity-includes?', 'arity=?', 'asin', 'assf', 'assoc', 'assq', 'assv',
+        'atan', 'bad-number-of-results', 'banner', 'base->-doms/c',
+        'base->-rngs/c', 'base->?', 'between/c', 'bitwise-and',
+        'bitwise-bit-field', 'bitwise-bit-set?', 'bitwise-ior', 'bitwise-not',
+        'bitwise-xor', 'blame-add-car-context', 'blame-add-cdr-context',
+        'blame-add-context', 'blame-add-missing-party',
+        'blame-add-nth-arg-context', 'blame-add-or-context',
+        'blame-add-range-context', 'blame-add-unknown-context',
+        'blame-context', 'blame-contract', 'blame-fmt->-string',
+        'blame-negative', 'blame-original?', 'blame-positive',
+        'blame-replace-negative', 'blame-source', 'blame-swap',
+        'blame-swapped?', 'blame-update', 'blame-value', 'blame?', 'boolean=?',
+        'boolean?', 'bound-identifier=?', 'box', 'box-cas!', 'box-immutable',
+        'box-immutable/c', 'box/c', 'box?', 'break-enabled', 'break-thread',
+        'build-chaperone-contract-property', 'build-compound-type-name',
+        'build-contract-property', 'build-flat-contract-property',
+        'build-list', 'build-path', 'build-path/convention-type',
+        'build-string', 'build-vector', 'byte-pregexp', 'byte-pregexp?',
+        'byte-ready?', 'byte-regexp', 'byte-regexp?', 'byte?', 'bytes',
+        'bytes->immutable-bytes', 'bytes->list', 'bytes->path',
+        'bytes->path-element', 'bytes->string/latin-1', 'bytes->string/locale',
+        'bytes->string/utf-8', 'bytes-append', 'bytes-append*',
+        'bytes-close-converter', 'bytes-convert', 'bytes-convert-end',
+        'bytes-converter?', 'bytes-copy', 'bytes-copy!',
+        'bytes-environment-variable-name?', 'bytes-fill!', 'bytes-join',
+        'bytes-length', 'bytes-no-nuls?', 'bytes-open-converter', 'bytes-ref',
+        'bytes-set!', 'bytes-utf-8-index', 'bytes-utf-8-length',
+        'bytes-utf-8-ref', 'bytes<?', 'bytes=?', 'bytes>?', 'bytes?', 'caaaar',
+        'caaadr', 'caaar', 'caadar', 'caaddr', 'caadr', 'caar', 'cadaar',
+        'cadadr', 'cadar', 'caddar', 'cadddr', 'caddr', 'cadr',
+        'call-in-nested-thread', 'call-with-atomic-output-file',
+        'call-with-break-parameterization',
+        'call-with-composable-continuation', 'call-with-continuation-barrier',
+        'call-with-continuation-prompt', 'call-with-current-continuation',
+        'call-with-default-reading-parameterization',
+        'call-with-escape-continuation', 'call-with-exception-handler',
+        'call-with-file-lock/timeout', 'call-with-immediate-continuation-mark',
+        'call-with-input-bytes', 'call-with-input-file',
+        'call-with-input-file*', 'call-with-input-string',
+        'call-with-output-bytes',
'call-with-output-file',
+        'call-with-output-file*', 'call-with-output-string',
+        'call-with-parameterization', 'call-with-semaphore',
+        'call-with-semaphore/enable-break', 'call-with-values', 'call/cc',
+        'call/ec', 'car', 'cdaaar', 'cdaadr', 'cdaar', 'cdadar', 'cdaddr',
+        'cdadr', 'cdar', 'cddaar', 'cddadr', 'cddar', 'cdddar', 'cddddr',
+        'cdddr', 'cddr', 'cdr', 'ceiling', 'channel-get', 'channel-put',
+        'channel-put-evt', 'channel-put-evt?', 'channel-try-get', 'channel/c',
+        'channel?', 'chaperone-box', 'chaperone-channel',
+        'chaperone-continuation-mark-key', 'chaperone-contract-property?',
+        'chaperone-contract?', 'chaperone-evt', 'chaperone-hash',
+        'chaperone-of?', 'chaperone-procedure', 'chaperone-prompt-tag',
+        'chaperone-struct', 'chaperone-struct-type', 'chaperone-vector',
+        'chaperone?', 'char->integer', 'char-alphabetic?', 'char-blank?',
+        'char-ci<=?', 'char-ci<?', 'char-ci=?', 'char-ci>=?', 'char-ci>?',
+        'char-downcase', 'char-foldcase', 'char-general-category',
+        'char-graphic?', 'char-iso-control?', 'char-lower-case?',
+        'char-numeric?', 'char-punctuation?', 'char-ready?', 'char-symbolic?',
+        'char-title-case?', 'char-titlecase', 'char-upcase',
+        'char-upper-case?', 'char-utf-8-length', 'char-whitespace?', 'char<=?',
+        'char<?', 'char=?', 'char>=?', 'char>?', 'char?',
+        'check-duplicate-identifier', 'checked-procedure-check-and-extract',
+        'choice-evt', 'class->interface', 'class-info', 'class?',
+        'cleanse-path', 'close-input-port', 'close-output-port',
+        'coerce-chaperone-contract', 'coerce-chaperone-contracts',
+        'coerce-contract', 'coerce-contract/f', 'coerce-contracts',
+        'coerce-flat-contract', 'coerce-flat-contracts', 'collect-garbage',
+        'collection-file-path', 'collection-path', 'compile',
+        'compile-allow-set!-undefined', 'compile-context-preservation-enabled',
+        'compile-enforce-module-constants', 'compile-syntax',
+        'compiled-expression?', 'compiled-module-expression?',
+        'complete-path?', 'complex?', 'compose', 'compose1', 'conjugate',
+        'cons', 'cons/c', 'cons?', 'const', 'continuation-mark-key/c',
+        'continuation-mark-key?', 'continuation-mark-set->context',
+        'continuation-mark-set->list', 'continuation-mark-set->list*',
+        'continuation-mark-set-first', 'continuation-mark-set?',
+        'continuation-marks', 'continuation-prompt-available?',
+        'continuation-prompt-tag?', 'continuation?',
+        'contract-continuation-mark-key', 'contract-first-order',
+        'contract-first-order-passes?', 'contract-name', 'contract-proc',
+        'contract-projection', 'contract-property?',
+        'contract-random-generate', 'contract-stronger?',
+        'contract-struct-exercise', 'contract-struct-generate',
+        'contract-val-first-projection', 'contract?', 'convert-stream',
+        'copy-directory/files', 'copy-file', 'copy-port', 'cos', 'cosh',
+        'count', 'current-blame-format', 'current-break-parameterization',
+        'current-code-inspector', 'current-command-line-arguments',
+        'current-compile', 'current-compiled-file-roots',
+        'current-continuation-marks', 'current-contract-region',
+        'current-custodian', 'current-directory', 'current-directory-for-user',
+        'current-drive', 'current-environment-variables', 'current-error-port',
+        'current-eval', 'current-evt-pseudo-random-generator',
+        'current-future', 'current-gc-milliseconds',
+        'current-get-interaction-input-port', 'current-inexact-milliseconds',
+        'current-input-port', 'current-inspector',
+        'current-library-collection-links', 'current-library-collection-paths',
+        'current-load', 'current-load-extension',
+        'current-load-relative-directory', 'current-load/use-compiled',
+        'current-locale',
'current-logger', 'current-memory-use', + 'current-milliseconds', 'current-module-declare-name', + 'current-module-declare-source', 'current-module-name-resolver', + 'current-module-path-for-load', 'current-namespace', + 'current-output-port', 'current-parameterization', + 'current-preserved-thread-cell-values', 'current-print', + 'current-process-milliseconds', 'current-prompt-read', + 'current-pseudo-random-generator', 'current-read-interaction', + 'current-reader-guard', 'current-readtable', 'current-seconds', + 'current-security-guard', 'current-subprocess-custodian-mode', + 'current-thread', 'current-thread-group', + 'current-thread-initial-stack-size', + 'current-write-relative-directory', 'curry', 'curryr', + 'custodian-box-value', 'custodian-box?', 'custodian-limit-memory', + 'custodian-managed-list', 'custodian-memory-accounting-available?', + 'custodian-require-memory', 'custodian-shutdown-all', 'custodian?', + 'custom-print-quotable-accessor', 'custom-print-quotable?', + 'custom-write-accessor', 'custom-write-property-proc', 'custom-write?', + 'date', 'date*', 'date*-nanosecond', 'date*-time-zone-name', 'date*?', + 'date-day', 'date-dst?', 'date-hour', 'date-minute', 'date-month', + 'date-second', 'date-time-zone-offset', 'date-week-day', 'date-year', + 'date-year-day', 'date?', 'datum->syntax', 'datum-intern-literal', + 'default-continuation-prompt-tag', 'degrees->radians', + 'delete-directory', 'delete-directory/files', 'delete-file', + 'denominator', 'dict->list', 'dict-can-functional-set?', + 'dict-can-remove-keys?', 'dict-clear', 'dict-clear!', 'dict-copy', + 'dict-count', 'dict-empty?', 'dict-for-each', 'dict-has-key?', + 'dict-implements/c', 'dict-implements?', 'dict-iter-contract', + 'dict-iterate-first', 'dict-iterate-key', 'dict-iterate-next', + 'dict-iterate-value', 'dict-key-contract', 'dict-keys', 'dict-map', + 'dict-mutable?', 'dict-ref', 'dict-ref!', 'dict-remove', + 'dict-remove!', 'dict-set', 'dict-set!', 'dict-set*', 'dict-set*!', + 'dict-update', 'dict-update!', 'dict-value-contract', 'dict-values', + 'dict?', 'directory-exists?', 'directory-list', 'display', + 'display-lines', 'display-lines-to-file', 'display-to-file', + 'displayln', 'double-flonum?', 'drop', 'drop-right', 'dropf', + 'dropf-right', 'dump-memory-stats', 'dup-input-port', + 'dup-output-port', 'dynamic-get-field', 'dynamic-place', + 'dynamic-place*', 'dynamic-require', 'dynamic-require-for-syntax', + 'dynamic-send', 'dynamic-set-field!', 'dynamic-wind', 'eighth', + 'empty', 'empty-sequence', 'empty-stream', 'empty?', + 'environment-variables-copy', 'environment-variables-names', + 'environment-variables-ref', 'environment-variables-set!', + 'environment-variables?', 'eof', 'eof-evt', 'eof-object?', + 'ephemeron-value', 'ephemeron?', 'eprintf', 'eq-contract-val', + 'eq-contract?', 'eq-hash-code', 'eq?', 'equal-contract-val', + 'equal-contract?', 'equal-hash-code', 'equal-secondary-hash-code', + 'equal<%>', 'equal?', 'equal?/recur', 'eqv-hash-code', 'eqv?', 'error', + 'error-display-handler', 'error-escape-handler', + 'error-print-context-length', 'error-print-source-location', + 'error-print-width', 'error-value->string-handler', 'eval', + 'eval-jit-enabled', 'eval-syntax', 'even?', 'evt/c', 'evt?', + 'exact->inexact', 'exact-ceiling', 'exact-floor', 'exact-integer?', + 'exact-nonnegative-integer?', 'exact-positive-integer?', 'exact-round', + 'exact-truncate', 'exact?', 'executable-yield-handler', 'exit', + 'exit-handler', 'exn', 'exn-continuation-marks', 'exn-message', + 'exn:break', 
'exn:break-continuation', 'exn:break:hang-up', + 'exn:break:hang-up?', 'exn:break:terminate', 'exn:break:terminate?', + 'exn:break?', 'exn:fail', 'exn:fail:contract', + 'exn:fail:contract:arity', 'exn:fail:contract:arity?', + 'exn:fail:contract:blame', 'exn:fail:contract:blame-object', + 'exn:fail:contract:blame?', 'exn:fail:contract:continuation', + 'exn:fail:contract:continuation?', 'exn:fail:contract:divide-by-zero', + 'exn:fail:contract:divide-by-zero?', + 'exn:fail:contract:non-fixnum-result', + 'exn:fail:contract:non-fixnum-result?', 'exn:fail:contract:variable', + 'exn:fail:contract:variable-id', 'exn:fail:contract:variable?', + 'exn:fail:contract?', 'exn:fail:filesystem', + 'exn:fail:filesystem:errno', 'exn:fail:filesystem:errno-errno', + 'exn:fail:filesystem:errno?', 'exn:fail:filesystem:exists', + 'exn:fail:filesystem:exists?', 'exn:fail:filesystem:missing-module', + 'exn:fail:filesystem:missing-module-path', + 'exn:fail:filesystem:missing-module?', 'exn:fail:filesystem:version', + 'exn:fail:filesystem:version?', 'exn:fail:filesystem?', + 'exn:fail:network', 'exn:fail:network:errno', + 'exn:fail:network:errno-errno', 'exn:fail:network:errno?', + 'exn:fail:network?', 'exn:fail:object', 'exn:fail:object?', + 'exn:fail:out-of-memory', 'exn:fail:out-of-memory?', 'exn:fail:read', + 'exn:fail:read-srclocs', 'exn:fail:read:eof', 'exn:fail:read:eof?', + 'exn:fail:read:non-char', 'exn:fail:read:non-char?', 'exn:fail:read?', + 'exn:fail:syntax', 'exn:fail:syntax-exprs', + 'exn:fail:syntax:missing-module', + 'exn:fail:syntax:missing-module-path', + 'exn:fail:syntax:missing-module?', 'exn:fail:syntax:unbound', + 'exn:fail:syntax:unbound?', 'exn:fail:syntax?', 'exn:fail:unsupported', + 'exn:fail:unsupported?', 'exn:fail:user', 'exn:fail:user?', + 'exn:fail?', 'exn:misc:match?', 'exn:missing-module-accessor', + 'exn:missing-module?', 'exn:srclocs-accessor', 'exn:srclocs?', 'exn?', + 'exp', 'expand', 'expand-once', 'expand-syntax', 'expand-syntax-once', + 'expand-syntax-to-top-form', 'expand-to-top-form', 'expand-user-path', + 'explode-path', 'expt', 'externalizable<%>', 'false?', 'field-names', + 'fifth', 'file->bytes', 'file->bytes-lines', 'file->lines', + 'file->list', 'file->string', 'file->value', 'file-exists?', + 'file-name-from-path', 'file-or-directory-identity', + 'file-or-directory-modify-seconds', 'file-or-directory-permissions', + 'file-position', 'file-position*', 'file-size', + 'file-stream-buffer-mode', 'file-stream-port?', 'file-truncate', + 'filename-extension', 'filesystem-change-evt', + 'filesystem-change-evt-cancel', 'filesystem-change-evt?', + 'filesystem-root-list', 'filter', 'filter-map', 'filter-not', + 'filter-read-input-port', 'find-executable-path', 'find-files', + 'find-library-collection-links', 'find-library-collection-paths', + 'find-relative-path', 'find-system-path', 'findf', 'first', 'fixnum?', + 'flat-contract', 'flat-contract-predicate', 'flat-contract-property?', + 'flat-contract?', 'flat-named-contract', 'flatten', + 'floating-point-bytes->real', 'flonum?', 'floor', 'flush-output', + 'fold-files', 'foldl', 'foldr', 'for-each', 'force', 'format', + 'fourth', 'fprintf', 'free-identifier=?', 'free-label-identifier=?', + 'free-template-identifier=?', 'free-transformer-identifier=?', + 'fsemaphore-count', 'fsemaphore-post', 'fsemaphore-try-wait?', + 'fsemaphore-wait', 'fsemaphore?', 'future', 'future?', + 'futures-enabled?', 'gcd', 'generate-member-key', + 'generate-temporaries', 'generic-set?', 'generic?', 'gensym', + 'get-output-bytes', 'get-output-string', 
'get-preference', + 'get/build-val-first-projection', 'getenv', + 'global-port-print-handler', 'group-execute-bit', 'group-read-bit', + 'group-write-bit', 'guard-evt', 'handle-evt', 'handle-evt?', + 'has-contract?', 'hash', 'hash->list', 'hash-clear', 'hash-clear!', + 'hash-copy', 'hash-copy-clear', 'hash-count', 'hash-empty?', + 'hash-eq?', 'hash-equal?', 'hash-eqv?', 'hash-for-each', + 'hash-has-key?', 'hash-iterate-first', 'hash-iterate-key', + 'hash-iterate-next', 'hash-iterate-value', 'hash-keys', 'hash-map', + 'hash-placeholder?', 'hash-ref', 'hash-ref!', 'hash-remove', + 'hash-remove!', 'hash-set', 'hash-set!', 'hash-set*', 'hash-set*!', + 'hash-update', 'hash-update!', 'hash-values', 'hash-weak?', 'hash/c', + 'hash?', 'hasheq', 'hasheqv', 'identifier-binding', + 'identifier-binding-symbol', 'identifier-label-binding', + 'identifier-prune-lexical-context', + 'identifier-prune-to-source-module', + 'identifier-remove-from-definition-context', + 'identifier-template-binding', 'identifier-transformer-binding', + 'identifier?', 'identity', 'imag-part', 'immutable?', + 'impersonate-box', 'impersonate-channel', + 'impersonate-continuation-mark-key', 'impersonate-hash', + 'impersonate-procedure', 'impersonate-prompt-tag', + 'impersonate-struct', 'impersonate-vector', 'impersonator-contract?', + 'impersonator-ephemeron', 'impersonator-of?', + 'impersonator-prop:application-mark', 'impersonator-prop:contracted', + 'impersonator-property-accessor-procedure?', 'impersonator-property?', + 'impersonator?', 'implementation?', 'implementation?/c', 'in-bytes', + 'in-bytes-lines', 'in-cycle', 'in-dict', 'in-dict-keys', + 'in-dict-pairs', 'in-dict-values', 'in-directory', 'in-hash', + 'in-hash-keys', 'in-hash-pairs', 'in-hash-values', 'in-indexed', + 'in-input-port-bytes', 'in-input-port-chars', 'in-lines', 'in-list', + 'in-mlist', 'in-naturals', 'in-parallel', 'in-permutations', 'in-port', + 'in-producer', 'in-range', 'in-sequences', 'in-set', 'in-stream', + 'in-string', 'in-value', 'in-values*-sequence', 'in-values-sequence', + 'in-vector', 'inexact->exact', 'inexact-real?', 'inexact?', + 'infinite?', 'input-port-append', 'input-port?', 'inspector?', + 'instanceof/c', 'integer->char', 'integer->integer-bytes', + 'integer-bytes->integer', 'integer-in', 'integer-length', + 'integer-sqrt', 'integer-sqrt/remainder', 'integer?', + 'interface->method-names', 'interface-extension?', 'interface?', + 'internal-definition-context-seal', 'internal-definition-context?', + 'is-a?', 'is-a?/c', 'keyword->string', 'keyword-apply', 'keywordbytes', 'list->mutable-set', 'list->mutable-seteq', + 'list->mutable-seteqv', 'list->set', 'list->seteq', 'list->seteqv', + 'list->string', 'list->vector', 'list->weak-set', 'list->weak-seteq', + 'list->weak-seteqv', 'list-ref', 'list-tail', 'list/c', 'list?', + 'listof', 'load', 'load-extension', 'load-on-demand-enabled', + 'load-relative', 'load-relative-extension', 'load/cd', + 'load/use-compiled', 'local-expand', 'local-expand/capture-lifts', + 'local-transformer-expand', 'local-transformer-expand/capture-lifts', + 'locale-string-encoding', 'log', 'log-level?', 'log-max-level', + 'log-message', 'log-receiver?', 'logger-name', 'logger?', 'magnitude', + 'make-arity-at-least', 'make-base-empty-namespace', + 'make-base-namespace', 'make-bytes', 'make-channel', + 'make-chaperone-contract', 'make-continuation-mark-key', + 'make-continuation-prompt-tag', 'make-contract', 'make-custodian', + 'make-custodian-box', 'make-custom-hash', 'make-custom-hash-types', + 'make-custom-set', 
'make-custom-set-types', 'make-date', 'make-date*', + 'make-derived-parameter', 'make-directory', 'make-directory*', + 'make-do-sequence', 'make-empty-namespace', + 'make-environment-variables', 'make-ephemeron', 'make-exn', + 'make-exn:break', 'make-exn:break:hang-up', 'make-exn:break:terminate', + 'make-exn:fail', 'make-exn:fail:contract', + 'make-exn:fail:contract:arity', 'make-exn:fail:contract:blame', + 'make-exn:fail:contract:continuation', + 'make-exn:fail:contract:divide-by-zero', + 'make-exn:fail:contract:non-fixnum-result', + 'make-exn:fail:contract:variable', 'make-exn:fail:filesystem', + 'make-exn:fail:filesystem:errno', 'make-exn:fail:filesystem:exists', + 'make-exn:fail:filesystem:missing-module', + 'make-exn:fail:filesystem:version', 'make-exn:fail:network', + 'make-exn:fail:network:errno', 'make-exn:fail:object', + 'make-exn:fail:out-of-memory', 'make-exn:fail:read', + 'make-exn:fail:read:eof', 'make-exn:fail:read:non-char', + 'make-exn:fail:syntax', 'make-exn:fail:syntax:missing-module', + 'make-exn:fail:syntax:unbound', 'make-exn:fail:unsupported', + 'make-exn:fail:user', 'make-file-or-directory-link', + 'make-flat-contract', 'make-fsemaphore', 'make-generic', + 'make-handle-get-preference-locked', 'make-hash', + 'make-hash-placeholder', 'make-hasheq', 'make-hasheq-placeholder', + 'make-hasheqv', 'make-hasheqv-placeholder', + 'make-immutable-custom-hash', 'make-immutable-hash', + 'make-immutable-hasheq', 'make-immutable-hasheqv', + 'make-impersonator-property', 'make-input-port', + 'make-input-port/read-to-peek', 'make-inspector', + 'make-keyword-procedure', 'make-known-char-range-list', + 'make-limited-input-port', 'make-list', 'make-lock-file-name', + 'make-log-receiver', 'make-logger', 'make-mixin-contract', + 'make-mutable-custom-set', 'make-none/c', 'make-object', + 'make-output-port', 'make-parameter', 'make-phantom-bytes', + 'make-pipe', 'make-pipe-with-specials', 'make-placeholder', + 'make-polar', 'make-prefab-struct', 'make-primitive-class', + 'make-proj-contract', 'make-pseudo-random-generator', + 'make-reader-graph', 'make-readtable', 'make-rectangular', + 'make-rename-transformer', 'make-resolved-module-path', + 'make-security-guard', 'make-semaphore', 'make-set!-transformer', + 'make-shared-bytes', 'make-sibling-inspector', 'make-special-comment', + 'make-srcloc', 'make-string', 'make-struct-field-accessor', + 'make-struct-field-mutator', 'make-struct-type', + 'make-struct-type-property', 'make-syntax-delta-introducer', + 'make-syntax-introducer', 'make-temporary-file', + 'make-tentative-pretty-print-output-port', 'make-thread-cell', + 'make-thread-group', 'make-vector', 'make-weak-box', + 'make-weak-custom-hash', 'make-weak-custom-set', 'make-weak-hash', + 'make-weak-hasheq', 'make-weak-hasheqv', 'make-will-executor', 'map', + 'match-equality-test', 'matches-arity-exactly?', 'max', 'mcar', 'mcdr', + 'mcons', 'member', 'member-name-key-hash-code', 'member-name-key=?', + 'member-name-key?', 'memf', 'memq', 'memv', 'merge-input', + 'method-in-interface?', 'min', 'mixin-contract', 'module->exports', + 'module->imports', 'module->language-info', 'module->namespace', + 'module-compiled-cross-phase-persistent?', 'module-compiled-exports', + 'module-compiled-imports', 'module-compiled-language-info', + 'module-compiled-name', 'module-compiled-submodules', + 'module-declared?', 'module-path-index-join', + 'module-path-index-resolve', 'module-path-index-split', + 'module-path-index-submodule', 'module-path-index?', 'module-path?', + 'module-predefined?', 
'module-provide-protected?', 'modulo', 'mpair?', + 'mutable-set', 'mutable-seteq', 'mutable-seteqv', 'n->th', + 'nack-guard-evt', 'namespace-anchor->empty-namespace', + 'namespace-anchor->namespace', 'namespace-anchor?', + 'namespace-attach-module', 'namespace-attach-module-declaration', + 'namespace-base-phase', 'namespace-mapped-symbols', + 'namespace-module-identifier', 'namespace-module-registry', + 'namespace-require', 'namespace-require/constant', + 'namespace-require/copy', 'namespace-require/expansion-time', + 'namespace-set-variable-value!', 'namespace-symbol->identifier', + 'namespace-syntax-introduce', 'namespace-undefine-variable!', + 'namespace-unprotect-module', 'namespace-variable-value', 'namespace?', + 'nan?', 'natural-number/c', 'negate', 'negative?', 'never-evt', + u'new-∀/c', u'new-∃/c', 'newline', 'ninth', 'non-empty-listof', + 'none/c', 'normal-case-path', 'normalize-arity', 'normalize-path', + 'normalized-arity?', 'not', 'not/c', 'null', 'null?', 'number->string', + 'number?', 'numerator', 'object%', 'object->vector', 'object-info', + 'object-interface', 'object-method-arity-includes?', 'object-name', + 'object=?', 'object?', 'odd?', 'one-of/c', 'open-input-bytes', + 'open-input-file', 'open-input-output-file', 'open-input-string', + 'open-output-bytes', 'open-output-file', 'open-output-nowhere', + 'open-output-string', 'or/c', 'order-of-magnitude', 'ormap', + 'other-execute-bit', 'other-read-bit', 'other-write-bit', + 'output-port?', 'pair?', 'parameter-procedure=?', 'parameter/c', + 'parameter?', 'parameterization?', 'parse-command-line', 'partition', + 'path->bytes', 'path->complete-path', 'path->directory-path', + 'path->string', 'path-add-suffix', 'path-convention-type', + 'path-element->bytes', 'path-element->string', 'path-element?', + 'path-for-some-system?', 'path-list-string->path-list', 'path-only', + 'path-replace-suffix', 'path-string?', 'pathbytes', 'port->bytes-lines', 'port->lines', + 'port->list', 'port->string', 'port-closed-evt', 'port-closed?', + 'port-commit-peeked', 'port-count-lines!', 'port-count-lines-enabled', + 'port-counts-lines?', 'port-display-handler', 'port-file-identity', + 'port-file-unlock', 'port-next-location', 'port-print-handler', + 'port-progress-evt', 'port-provides-progress-evts?', + 'port-read-handler', 'port-try-file-lock?', 'port-write-handler', + 'port-writes-atomic?', 'port-writes-special?', 'port?', 'positive?', + 'predicate/c', 'prefab-key->struct-type', 'prefab-key?', + 'prefab-struct-key', 'preferences-lock-file-mode', 'pregexp', + 'pregexp?', 'pretty-display', 'pretty-format', 'pretty-print', + 'pretty-print-.-symbol-without-bars', + 'pretty-print-abbreviate-read-macros', 'pretty-print-columns', + 'pretty-print-current-style-table', 'pretty-print-depth', + 'pretty-print-exact-as-decimal', 'pretty-print-extend-style-table', + 'pretty-print-handler', 'pretty-print-newline', + 'pretty-print-post-print-hook', 'pretty-print-pre-print-hook', + 'pretty-print-print-hook', 'pretty-print-print-line', + 'pretty-print-remap-stylable', 'pretty-print-show-inexactness', + 'pretty-print-size-hook', 'pretty-print-style-table?', + 'pretty-printing', 'pretty-write', 'primitive-closure?', + 'primitive-result-arity', 'primitive?', 'print', 'print-as-expression', + 'print-boolean-long-form', 'print-box', 'print-graph', + 'print-hash-table', 'print-mpair-curly-braces', + 'print-pair-curly-braces', 'print-reader-abbreviations', + 'print-struct', 'print-syntax-width', 'print-unreadable', + 'print-vector-length', 'printable/c', 
'printable<%>', 'printf', + 'procedure->method', 'procedure-arity', 'procedure-arity-includes/c', + 'procedure-arity-includes?', 'procedure-arity?', + 'procedure-closure-contents-eq?', 'procedure-extract-target', + 'procedure-keywords', 'procedure-reduce-arity', + 'procedure-reduce-keyword-arity', 'procedure-rename', + 'procedure-struct-type?', 'procedure?', 'process', 'process*', + 'process*/ports', 'process/ports', 'processor-count', 'progress-evt?', + 'promise-forced?', 'promise-running?', 'promise/c', 'promise?', + 'prop:arity-string', 'prop:chaperone-contract', + 'prop:checked-procedure', 'prop:contract', 'prop:contracted', + 'prop:custom-print-quotable', 'prop:custom-write', 'prop:dict', + 'prop:dict/contract', 'prop:equal+hash', 'prop:evt', + 'prop:exn:missing-module', 'prop:exn:srclocs', 'prop:flat-contract', + 'prop:impersonator-of', 'prop:input-port', + 'prop:liberal-define-context', 'prop:opt-chaperone-contract', + 'prop:opt-chaperone-contract-get-test', 'prop:opt-chaperone-contract?', + 'prop:output-port', 'prop:place-location', 'prop:procedure', + 'prop:rename-transformer', 'prop:sequence', 'prop:set!-transformer', + 'prop:stream', 'proper-subset?', 'pseudo-random-generator->vector', + 'pseudo-random-generator-vector?', 'pseudo-random-generator?', + 'put-preferences', 'putenv', 'quotient', 'quotient/remainder', + 'radians->degrees', 'raise', 'raise-argument-error', + 'raise-arguments-error', 'raise-arity-error', 'raise-blame-error', + 'raise-contract-error', 'raise-mismatch-error', + 'raise-not-cons-blame-error', 'raise-range-error', + 'raise-result-error', 'raise-syntax-error', 'raise-type-error', + 'raise-user-error', 'random', 'random-seed', 'range', 'rational?', + 'rationalize', 'read', 'read-accept-bar-quote', 'read-accept-box', + 'read-accept-compiled', 'read-accept-dot', 'read-accept-graph', + 'read-accept-infix-dot', 'read-accept-lang', 'read-accept-quasiquote', + 'read-accept-reader', 'read-byte', 'read-byte-or-special', + 'read-bytes', 'read-bytes!', 'read-bytes!-evt', 'read-bytes-avail!', + 'read-bytes-avail!*', 'read-bytes-avail!-evt', + 'read-bytes-avail!/enable-break', 'read-bytes-evt', 'read-bytes-line', + 'read-bytes-line-evt', 'read-case-sensitive', 'read-char', + 'read-char-or-special', 'read-curly-brace-as-paren', + 'read-decimal-as-inexact', 'read-eval-print-loop', 'read-language', + 'read-line', 'read-line-evt', 'read-on-demand-source', + 'read-square-bracket-as-paren', 'read-string', 'read-string!', + 'read-string!-evt', 'read-string-evt', 'read-syntax', + 'read-syntax/recursive', 'read/recursive', 'readtable-mapping', + 'readtable?', 'real->decimal-string', 'real->double-flonum', + 'real->floating-point-bytes', 'real->single-flonum', 'real-in', + 'real-part', 'real?', 'reencode-input-port', 'reencode-output-port', + 'regexp', 'regexp-match', 'regexp-match*', 'regexp-match-evt', + 'regexp-match-exact?', 'regexp-match-peek', + 'regexp-match-peek-immediate', 'regexp-match-peek-positions', + 'regexp-match-peek-positions*', + 'regexp-match-peek-positions-immediate', + 'regexp-match-peek-positions-immediate/end', + 'regexp-match-peek-positions/end', 'regexp-match-positions', + 'regexp-match-positions*', 'regexp-match-positions/end', + 'regexp-match/end', 'regexp-match?', 'regexp-max-lookbehind', + 'regexp-quote', 'regexp-replace', 'regexp-replace*', + 'regexp-replace-quote', 'regexp-replaces', 'regexp-split', + 'regexp-try-match', 'regexp?', 'relative-path?', 'relocate-input-port', + 'relocate-output-port', 'remainder', 'remove', 'remove*', + 
'remove-duplicates', 'remq', 'remq*', 'remv', 'remv*', + 'rename-file-or-directory', 'rename-transformer-target', + 'rename-transformer?', 'reroot-path', 'resolve-path', + 'resolved-module-path-name', 'resolved-module-path?', 'rest', + 'reverse', 'round', 'second', 'seconds->date', 'security-guard?', + 'semaphore-peek-evt', 'semaphore-peek-evt?', 'semaphore-post', + 'semaphore-try-wait?', 'semaphore-wait', 'semaphore-wait/enable-break', + 'semaphore?', 'sequence->list', 'sequence->stream', + 'sequence-add-between', 'sequence-andmap', 'sequence-append', + 'sequence-count', 'sequence-filter', 'sequence-fold', + 'sequence-for-each', 'sequence-generate', 'sequence-generate*', + 'sequence-length', 'sequence-map', 'sequence-ormap', 'sequence-ref', + 'sequence-tail', 'sequence?', 'set', 'set!-transformer-procedure', + 'set!-transformer?', 'set->list', 'set->stream', 'set-add', 'set-add!', + 'set-box!', 'set-clear', 'set-clear!', 'set-copy', 'set-copy-clear', + 'set-count', 'set-empty?', 'set-eq?', 'set-equal?', 'set-eqv?', + 'set-first', 'set-for-each', 'set-implements/c', 'set-implements?', + 'set-intersect', 'set-intersect!', 'set-map', 'set-mcar!', 'set-mcdr!', + 'set-member?', 'set-mutable?', 'set-phantom-bytes!', + 'set-port-next-location!', 'set-remove', 'set-remove!', 'set-rest', + 'set-subtract', 'set-subtract!', 'set-symmetric-difference', + 'set-symmetric-difference!', 'set-union', 'set-union!', 'set-weak?', + 'set/c', 'set=?', 'set?', 'seteq', 'seteqv', 'seventh', 'sgn', + 'shared-bytes', 'shell-execute', 'shrink-path-wrt', 'shuffle', + 'simple-form-path', 'simplify-path', 'sin', 'single-flonum?', 'sinh', + 'sixth', 'skip-projection-wrapper?', 'sleep', + 'some-system-path->string', 'sort', 'special-comment-value', + 'special-comment?', 'special-filter-input-port', 'split-at', + 'split-at-right', 'split-path', 'splitf-at', 'splitf-at-right', 'sqr', + 'sqrt', 'srcloc', 'srcloc->string', 'srcloc-column', 'srcloc-line', + 'srcloc-position', 'srcloc-source', 'srcloc-span', 'srcloc?', + 'stop-after', 'stop-before', 'stream->list', 'stream-add-between', + 'stream-andmap', 'stream-append', 'stream-count', 'stream-empty?', + 'stream-filter', 'stream-first', 'stream-fold', 'stream-for-each', + 'stream-length', 'stream-map', 'stream-ormap', 'stream-ref', + 'stream-rest', 'stream-tail', 'stream?', 'string', + 'string->bytes/latin-1', 'string->bytes/locale', 'string->bytes/utf-8', + 'string->immutable-string', 'string->keyword', 'string->list', + 'string->number', 'string->path', 'string->path-element', + 'string->some-system-path', 'string->symbol', + 'string->uninterned-symbol', 'string->unreadable-symbol', + 'string-append', 'string-append*', 'string-ci<=?', 'string-ci=?', 'string-ci>?', 'string-copy', + 'string-copy!', 'string-downcase', 'string-environment-variable-name?', + 'string-fill!', 'string-foldcase', 'string-join', 'string-len/c', + 'string-length', 'string-locale-ci?', 'string-locale-downcase', 'string-locale-upcase', + 'string-locale?', + 'string-no-nuls?', 'string-normalize-nfc', 'string-normalize-nfd', + 'string-normalize-nfkc', 'string-normalize-nfkd', + 'string-normalize-spaces', 'string-ref', 'string-replace', + 'string-set!', 'string-split', 'string-titlecase', 'string-trim', + 'string-upcase', 'string-utf-8-length', 'string<=?', 'string=?', 'string>?', 'string?', 'struct->vector', + 'struct-accessor-procedure?', 'struct-constructor-procedure?', + 'struct-info', 'struct-mutator-procedure?', + 'struct-predicate-procedure?', 'struct-type-info', + 'struct-type-make-constructor', 
'struct-type-make-predicate', + 'struct-type-property-accessor-procedure?', 'struct-type-property/c', + 'struct-type-property?', 'struct-type?', 'struct:arity-at-least', + 'struct:date', 'struct:date*', 'struct:exn', 'struct:exn:break', + 'struct:exn:break:hang-up', 'struct:exn:break:terminate', + 'struct:exn:fail', 'struct:exn:fail:contract', + 'struct:exn:fail:contract:arity', 'struct:exn:fail:contract:blame', + 'struct:exn:fail:contract:continuation', + 'struct:exn:fail:contract:divide-by-zero', + 'struct:exn:fail:contract:non-fixnum-result', + 'struct:exn:fail:contract:variable', 'struct:exn:fail:filesystem', + 'struct:exn:fail:filesystem:errno', + 'struct:exn:fail:filesystem:exists', + 'struct:exn:fail:filesystem:missing-module', + 'struct:exn:fail:filesystem:version', 'struct:exn:fail:network', + 'struct:exn:fail:network:errno', 'struct:exn:fail:object', + 'struct:exn:fail:out-of-memory', 'struct:exn:fail:read', + 'struct:exn:fail:read:eof', 'struct:exn:fail:read:non-char', + 'struct:exn:fail:syntax', 'struct:exn:fail:syntax:missing-module', + 'struct:exn:fail:syntax:unbound', 'struct:exn:fail:unsupported', + 'struct:exn:fail:user', 'struct:srcloc', + 'struct:wrapped-extra-arg-arrow', 'struct?', 'sub1', 'subbytes', + 'subclass?', 'subclass?/c', 'subprocess', 'subprocess-group-enabled', + 'subprocess-kill', 'subprocess-pid', 'subprocess-status', + 'subprocess-wait', 'subprocess?', 'subset?', 'substring', + 'symbol->string', 'symbol-interned?', 'symbol-unreadable?', 'symboldatum', + 'syntax->list', 'syntax-arm', 'syntax-column', 'syntax-disarm', + 'syntax-e', 'syntax-line', 'syntax-local-bind-syntaxes', + 'syntax-local-certifier', 'syntax-local-context', + 'syntax-local-expand-expression', 'syntax-local-get-shadower', + 'syntax-local-introduce', 'syntax-local-lift-context', + 'syntax-local-lift-expression', + 'syntax-local-lift-module-end-declaration', + 'syntax-local-lift-provide', 'syntax-local-lift-require', + 'syntax-local-lift-values-expression', + 'syntax-local-make-definition-context', + 'syntax-local-make-delta-introducer', + 'syntax-local-module-defined-identifiers', + 'syntax-local-module-exports', + 'syntax-local-module-required-identifiers', 'syntax-local-name', + 'syntax-local-phase-level', 'syntax-local-submodules', + 'syntax-local-transforming-module-provides?', 'syntax-local-value', + 'syntax-local-value/immediate', 'syntax-original?', 'syntax-position', + 'syntax-property', 'syntax-property-symbol-keys', 'syntax-protect', + 'syntax-rearm', 'syntax-recertify', 'syntax-shift-phase-level', + 'syntax-source', 'syntax-source-module', 'syntax-span', 'syntax-taint', + 'syntax-tainted?', 'syntax-track-origin', + 'syntax-transforming-module-expression?', 'syntax-transforming?', + 'syntax/c', 'syntax?', 'system', 'system*', 'system*/exit-code', + 'system-big-endian?', 'system-idle-evt', 'system-language+country', + 'system-library-subpath', 'system-path-convention-type', 'system-type', + 'system/exit-code', 'tail-marks-match?', 'take', 'take-right', 'takef', + 'takef-right', 'tan', 'tanh', 'tcp-abandon-port', 'tcp-accept', + 'tcp-accept-evt', 'tcp-accept-ready?', 'tcp-accept/enable-break', + 'tcp-addresses', 'tcp-close', 'tcp-connect', + 'tcp-connect/enable-break', 'tcp-listen', 'tcp-listener?', 'tcp-port?', + 'tentative-pretty-print-port-cancel', + 'tentative-pretty-print-port-transfer', 'tenth', 'terminal-port?', + 'the-unsupplied-arg', 'third', 'thread', 'thread-cell-ref', + 'thread-cell-set!', 'thread-cell-values?', 'thread-cell?', + 'thread-dead-evt', 'thread-dead?', 
'thread-group?', 'thread-receive', + 'thread-receive-evt', 'thread-resume', 'thread-resume-evt', + 'thread-rewind-receive', 'thread-running?', 'thread-send', + 'thread-suspend', 'thread-suspend-evt', 'thread-try-receive', + 'thread-wait', 'thread/suspend-to-kill', 'thread?', 'time-apply', + 'touch', 'transplant-input-port', 'transplant-output-port', 'true', + 'truncate', 'udp-addresses', 'udp-bind!', 'udp-bound?', 'udp-close', + 'udp-connect!', 'udp-connected?', 'udp-multicast-interface', + 'udp-multicast-join-group!', 'udp-multicast-leave-group!', + 'udp-multicast-loopback?', 'udp-multicast-set-interface!', + 'udp-multicast-set-loopback!', 'udp-multicast-set-ttl!', + 'udp-multicast-ttl', 'udp-open-socket', 'udp-receive!', + 'udp-receive!*', 'udp-receive!-evt', 'udp-receive!/enable-break', + 'udp-receive-ready-evt', 'udp-send', 'udp-send*', 'udp-send-evt', + 'udp-send-ready-evt', 'udp-send-to', 'udp-send-to*', 'udp-send-to-evt', + 'udp-send-to/enable-break', 'udp-send/enable-break', 'udp?', 'unbox', + 'uncaught-exception-handler', 'unit?', 'unspecified-dom', + 'unsupplied-arg?', 'use-collection-link-paths', + 'use-compiled-file-paths', 'use-user-specific-search-paths', + 'user-execute-bit', 'user-read-bit', 'user-write-bit', + 'value-contract', 'values', 'variable-reference->empty-namespace', + 'variable-reference->module-base-phase', + 'variable-reference->module-declaration-inspector', + 'variable-reference->module-path-index', + 'variable-reference->module-source', 'variable-reference->namespace', + 'variable-reference->phase', + 'variable-reference->resolved-module-path', + 'variable-reference-constant?', 'variable-reference?', 'vector', + 'vector->immutable-vector', 'vector->list', + 'vector->pseudo-random-generator', 'vector->pseudo-random-generator!', + 'vector->values', 'vector-append', 'vector-argmax', 'vector-argmin', + 'vector-copy', 'vector-copy!', 'vector-count', 'vector-drop', + 'vector-drop-right', 'vector-fill!', 'vector-filter', + 'vector-filter-not', 'vector-immutable', 'vector-immutable/c', + 'vector-immutableof', 'vector-length', 'vector-map', 'vector-map!', + 'vector-member', 'vector-memq', 'vector-memv', 'vector-ref', + 'vector-set!', 'vector-set*!', 'vector-set-performance-stats!', + 'vector-split-at', 'vector-split-at-right', 'vector-take', + 'vector-take-right', 'vector/c', 'vector?', 'vectorof', 'version', + 'void', 'void?', 'weak-box-value', 'weak-box?', 'weak-set', + 'weak-seteq', 'weak-seteqv', 'will-execute', 'will-executor?', + 'will-register', 'will-try-execute', 'with-input-from-bytes', + 'with-input-from-file', 'with-input-from-string', + 'with-output-to-bytes', 'with-output-to-file', 'with-output-to-string', + 'would-be-future', 'wrap-evt', 'wrapped-extra-arg-arrow', + 'wrapped-extra-arg-arrow-extra-neg-party-argument', + 'wrapped-extra-arg-arrow-real-func', 'wrapped-extra-arg-arrow?', + 'writable<%>', 'write', 'write-byte', 'write-bytes', + 'write-bytes-avail', 'write-bytes-avail*', 'write-bytes-avail-evt', + 'write-bytes-avail/enable-break', 'write-char', 'write-special', + 'write-special-avail*', 'write-special-evt', 'write-string', + 'write-to-file', 'xor', 'zero?', '~.a', '~.s', '~.v', '~a', '~e', '~r', + '~s', '~v' + ) + + _opening_parenthesis = r'[([{]' + _closing_parenthesis = r'[)\]}]' + _delimiters = r'()[\]{}",\'`;\s' + _symbol = r'(?u)(?:\|[^|]*\||\\[\w\W]|[^|\\%s]+)+' % _delimiters + _exact_decimal_prefix = r'(?:#e)?(?:#d)?(?:#e)?' 
+ _exponent = r'(?:[defls][-+]?\d+)' + _inexact_simple_no_hashes = r'(?:\d+(?:/\d+|\.\d*)?|\.\d+)' + _inexact_simple = (r'(?:%s|(?:\d+#+(?:\.#*|/\d+#*)?|\.\d+#+|' + r'\d+(?:\.\d*#+|/\d+#+)))' % _inexact_simple_no_hashes) + _inexact_normal_no_hashes = r'(?:%s%s?)' % (_inexact_simple_no_hashes, + _exponent) + _inexact_normal = r'(?:%s%s?)' % (_inexact_simple, _exponent) + _inexact_special = r'(?:(?:inf|nan)\.[0f])' + _inexact_real = r'(?:[-+]?%s|[-+]%s)' % (_inexact_normal, + _inexact_special) + _inexact_unsigned = r'(?:%s|%s)' % (_inexact_normal, _inexact_special) + + tokens = { + 'root': [ + (_closing_parenthesis, Error), + (r'(?!\Z)', Text, 'unquoted-datum') + ], + 'datum': [ + (r'(?s)#;|#![ /]([^\\\n]|\\.)*', Comment), + (u';[^\\n\\r\x85\u2028\u2029]*', Comment.Single), + (r'#\|', Comment.Multiline, 'block-comment'), + + # Whitespaces + (r'(?u)\s+', Text), + + # Numbers: Keep in mind Racket reader hash prefixes, which + # can denote the base or the type. These don't map neatly + # onto Pygments token types; some judgment calls here. + + # #d or no prefix + (r'(?i)%s[-+]?\d+(?=[%s])' % (_exact_decimal_prefix, _delimiters), + Number.Integer, '#pop'), + (r'(?i)%s[-+]?(\d+(\.\d*)?|\.\d+)([deflst][-+]?\d+)?(?=[%s])' % + (_exact_decimal_prefix, _delimiters), Number.Float, '#pop'), + (r'(?i)%s[-+]?(%s([-+]%s?i)?|[-+]%s?i)(?=[%s])' % + (_exact_decimal_prefix, _inexact_normal_no_hashes, + _inexact_normal_no_hashes, _inexact_normal_no_hashes, + _delimiters), Number, '#pop'), + + # Inexact without explicit #i + (r'(?i)(#d)?(%s([-+]%s?i)?|[-+]%s?i|%s@%s)(?=[%s])' % + (_inexact_real, _inexact_unsigned, _inexact_unsigned, + _inexact_real, _inexact_real, _delimiters), Number.Float, + '#pop'), + + # The remaining extflonums + (r'(?i)(([-+]?%st[-+]?\d+)|[-+](inf|nan)\.t)(?=[%s])' % + (_inexact_simple, _delimiters), Number.Float, '#pop'), + + # #b + (r'(?i)(#[ei])?#b%s' % _symbol, Number.Bin, '#pop'), + + # #o + (r'(?i)(#[ei])?#o%s' % _symbol, Number.Oct, '#pop'), + + # #x + (r'(?i)(#[ei])?#x%s' % _symbol, Number.Hex, '#pop'), + + # #i is always inexact, i.e. float + (r'(?i)(#d)?#i%s' % _symbol, Number.Float, '#pop'), + + # Strings and characters + (r'#?"', String.Double, ('#pop', 'string')), + (r'#<<(.+)\n(^(?!\1$).*$\n)*^\1$', String.Heredoc, '#pop'), + (r'#\\(u[\da-fA-F]{1,4}|U[\da-fA-F]{1,8})', String.Char, '#pop'), + (r'(?is)#\\([0-7]{3}|[a-z]+|.)', String.Char, '#pop'), + (r'(?s)#[pr]x#?"(\\?.)*?"', String.Regex, '#pop'), + + # Constants + (r'#(true|false|[tTfF])', Name.Constant, '#pop'), + + # Keyword argument names (e.g. #:keyword) + (r'#:%s' % _symbol, Keyword.Declaration, '#pop'), + + # Reader extensions + (r'(#lang |#!)(\S+)', + bygroups(Keyword.Namespace, Name.Namespace)), + (r'#reader', Keyword.Namespace, 'quoted-datum'), + + # Other syntax + (r"(?i)\.(?=[%s])|#c[is]|#['`]|#,@?" 
% _delimiters, Operator), + (r"'|#[s&]|#hash(eqv?)?|#\d*(?=%s)" % _opening_parenthesis, + Operator, ('#pop', 'quoted-datum')) + ], + 'datum*': [ + (r'`|,@?', Operator), + (_symbol, String.Symbol, '#pop'), + (r'[|\\]', Error), + default('#pop') + ], + 'list': [ + (_closing_parenthesis, Punctuation, '#pop') + ], + 'unquoted-datum': [ + include('datum'), + (r'quote(?=[%s])' % _delimiters, Keyword, + ('#pop', 'quoted-datum')), + (r'`', Operator, ('#pop', 'quasiquoted-datum')), + (r'quasiquote(?=[%s])' % _delimiters, Keyword, + ('#pop', 'quasiquoted-datum')), + (_opening_parenthesis, Punctuation, ('#pop', 'unquoted-list')), + (words(_keywords, prefix='(?u)', suffix='(?=[%s])' % _delimiters), + Keyword, '#pop'), + (words(_builtins, prefix='(?u)', suffix='(?=[%s])' % _delimiters), + Name.Builtin, '#pop'), + (_symbol, Name, '#pop'), + include('datum*') + ], + 'unquoted-list': [ + include('list'), + (r'(?!\Z)', Text, 'unquoted-datum') + ], + 'quasiquoted-datum': [ + include('datum'), + (r',@?', Operator, ('#pop', 'unquoted-datum')), + (r'unquote(-splicing)?(?=[%s])' % _delimiters, Keyword, + ('#pop', 'unquoted-datum')), + (_opening_parenthesis, Punctuation, ('#pop', 'quasiquoted-list')), + include('datum*') + ], + 'quasiquoted-list': [ + include('list'), + (r'(?!\Z)', Text, 'quasiquoted-datum') + ], + 'quoted-datum': [ + include('datum'), + (_opening_parenthesis, Punctuation, ('#pop', 'quoted-list')), + include('datum*') + ], + 'quoted-list': [ + include('list'), + (r'(?!\Z)', Text, 'quoted-datum') + ], + 'block-comment': [ + (r'#\|', Comment.Multiline, '#push'), + (r'\|#', Comment.Multiline, '#pop'), + (r'[^#|]+|.', Comment.Multiline) + ], + 'string': [ + (r'"', String.Double, '#pop'), + (r'(?s)\\([0-7]{1,3}|x[\da-fA-F]{1,2}|u[\da-fA-F]{1,4}|' + r'U[\da-fA-F]{1,8}|.)', String.Escape), + (r'[^\\"]+', String.Double) + ] + } + + +class NewLispLexer(RegexLexer): + """ + For `newLISP. `_ source code (version 10.3.0). + + .. 
versionadded:: 1.5 + """ + + name = 'NewLisp' + aliases = ['newlisp'] + filenames = ['*.lsp', '*.nl'] + mimetypes = ['text/x-newlisp', 'application/x-newlisp'] + + flags = re.IGNORECASE | re.MULTILINE | re.UNICODE + + # list of built-in functions for newLISP version 10.3 + builtins = ( + '^', '--', '-', ':', '!', '!=', '?', '@', '*', '/', '&', '%', '+', '++', + '<', '<<', '<=', '=', '>', '>=', '>>', '|', '~', '$', '$0', '$1', '$10', + '$11', '$12', '$13', '$14', '$15', '$2', '$3', '$4', '$5', '$6', '$7', + '$8', '$9', '$args', '$idx', '$it', '$main-args', 'abort', 'abs', + 'acos', 'acosh', 'add', 'address', 'amb', 'and', 'and', 'append-file', + 'append', 'apply', 'args', 'array-list', 'array?', 'array', 'asin', + 'asinh', 'assoc', 'atan', 'atan2', 'atanh', 'atom?', 'base64-dec', + 'base64-enc', 'bayes-query', 'bayes-train', 'begin', 'begin', 'begin', + 'beta', 'betai', 'bind', 'binomial', 'bits', 'callback', 'case', 'case', + 'case', 'catch', 'ceil', 'change-dir', 'char', 'chop', 'Class', 'clean', + 'close', 'command-event', 'cond', 'cond', 'cond', 'cons', 'constant', + 'context?', 'context', 'copy-file', 'copy', 'cos', 'cosh', 'count', + 'cpymem', 'crc32', 'crit-chi2', 'crit-z', 'current-line', 'curry', + 'date-list', 'date-parse', 'date-value', 'date', 'debug', 'dec', + 'def-new', 'default', 'define-macro', 'define-macro', 'define', + 'delete-file', 'delete-url', 'delete', 'destroy', 'det', 'device', + 'difference', 'directory?', 'directory', 'div', 'do-until', 'do-while', + 'doargs', 'dolist', 'dostring', 'dotimes', 'dotree', 'dump', 'dup', + 'empty?', 'encrypt', 'ends-with', 'env', 'erf', 'error-event', + 'eval-string', 'eval', 'exec', 'exists', 'exit', 'exp', 'expand', + 'explode', 'extend', 'factor', 'fft', 'file-info', 'file?', 'filter', + 'find-all', 'find', 'first', 'flat', 'float?', 'float', 'floor', 'flt', + 'fn', 'for-all', 'for', 'fork', 'format', 'fv', 'gammai', 'gammaln', + 'gcd', 'get-char', 'get-float', 'get-int', 'get-long', 'get-string', + 'get-url', 'global?', 'global', 'if-not', 'if', 'ifft', 'import', 'inc', + 'index', 'inf?', 'int', 'integer?', 'integer', 'intersect', 'invert', + 'irr', 'join', 'lambda-macro', 'lambda?', 'lambda', 'last-error', + 'last', 'legal?', 'length', 'let', 'let', 'let', 'letex', 'letn', + 'letn', 'letn', 'list?', 'list', 'load', 'local', 'log', 'lookup', + 'lower-case', 'macro?', 'main-args', 'MAIN', 'make-dir', 'map', 'mat', + 'match', 'max', 'member', 'min', 'mod', 'module', 'mul', 'multiply', + 'NaN?', 'net-accept', 'net-close', 'net-connect', 'net-error', + 'net-eval', 'net-interface', 'net-ipv', 'net-listen', 'net-local', + 'net-lookup', 'net-packet', 'net-peek', 'net-peer', 'net-ping', + 'net-receive-from', 'net-receive-udp', 'net-receive', 'net-select', + 'net-send-to', 'net-send-udp', 'net-send', 'net-service', + 'net-sessions', 'new', 'nil?', 'nil', 'normal', 'not', 'now', 'nper', + 'npv', 'nth', 'null?', 'number?', 'open', 'or', 'ostype', 'pack', + 'parse-date', 'parse', 'peek', 'pipe', 'pmt', 'pop-assoc', 'pop', + 'post-url', 'pow', 'prefix', 'pretty-print', 'primitive?', 'print', + 'println', 'prob-chi2', 'prob-z', 'process', 'prompt-event', + 'protected?', 'push', 'put-url', 'pv', 'quote?', 'quote', 'rand', + 'random', 'randomize', 'read', 'read-char', 'read-expr', 'read-file', + 'read-key', 'read-line', 'read-utf8', 'read', 'reader-event', + 'real-path', 'receive', 'ref-all', 'ref', 'regex-comp', 'regex', + 'remove-dir', 'rename-file', 'replace', 'reset', 'rest', 'reverse', + 'rotate', 'round', 'save', 'search', 'seed', 'seek', 
'select', 'self', + 'semaphore', 'send', 'sequence', 'series', 'set-locale', 'set-ref-all', + 'set-ref', 'set', 'setf', 'setq', 'sgn', 'share', 'signal', 'silent', + 'sin', 'sinh', 'sleep', 'slice', 'sort', 'source', 'spawn', 'sqrt', + 'starts-with', 'string?', 'string', 'sub', 'swap', 'sym', 'symbol?', + 'symbols', 'sync', 'sys-error', 'sys-info', 'tan', 'tanh', 'term', + 'throw-error', 'throw', 'time-of-day', 'time', 'timer', 'title-case', + 'trace-highlight', 'trace', 'transpose', 'Tree', 'trim', 'true?', + 'true', 'unicode', 'unify', 'unique', 'unless', 'unpack', 'until', + 'upper-case', 'utf8', 'utf8len', 'uuid', 'wait-pid', 'when', 'while', + 'write', 'write-char', 'write-file', 'write-line', 'write', + 'xfer-event', 'xml-error', 'xml-parse', 'xml-type-tags', 'zero?', + ) + + # valid names + valid_name = r'([\w!$%&*+.,/<=>?@^~|-])+|(\[.*?\])+' + + tokens = { + 'root': [ + # shebang + (r'#!(.*?)$', Comment.Preproc), + # comments starting with semicolon + (r';.*$', Comment.Single), + # comments starting with # + (r'#.*$', Comment.Single), + + # whitespace + (r'\s+', Text), + + # strings, symbols and characters + (r'"(\\\\|\\"|[^"])*"', String), + + # braces + (r"{", String, "bracestring"), + + # [text] ... [/text] delimited strings + (r'\[text\]*', String, "tagstring"), + + # 'special' operators... + (r"('|:)", Operator), + + # highlight the builtins + (words(builtins, suffix=r'\b'), + Keyword), + + # the remaining functions + (r'(?<=\()' + valid_name, Name.Variable), + + # the remaining variables + (valid_name, String.Symbol), + + # parentheses + (r'(\(|\))', Punctuation), + ], + + # braced strings... + 'bracestring': [ + ("{", String, "#push"), + ("}", String, "#pop"), + ("[^{}]+", String), + ], + + # tagged [text]...[/text] delimited strings... + 'tagstring': [ + (r'(?s)(.*?)(\[/text\])', String, '#pop'), + ], + } diff --git a/pygments/lexers/misc/basic.py b/pygments/lexers/misc/basic.py index 104ca47b..5faf205e 100644 --- a/pygments/lexers/misc/basic.py +++ b/pygments/lexers/misc/basic.py @@ -11,11 +11,12 @@ import re -from pygments.lexer import RegexLexer, bygroups, default, words +from pygments.lexer import RegexLexer, bygroups, default, words, include from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ Number, Punctuation -__all__ = ['BlitzBasicLexer', 'BlitzMaxLexer', 'MonkeyLexer', 'CbmBasicV2Lexer'] +__all__ = ['BlitzBasicLexer', 'BlitzMaxLexer', 'MonkeyLexer', 'CbmBasicV2Lexer', + 'QBasicLexer'] class BlitzMaxLexer(RegexLexer): @@ -354,3 +355,143 @@ class CbmBasicV2Lexer(RegexLexer): # like VB.net if re.match(r'\d+', text): return True + + +class QBasicLexer(RegexLexer): + """ + For + `QBasic `_ + source code. 
+ """ + + name = 'QBasic' + aliases = ['qbasic', 'basic'] + filenames = ['*.BAS', '*.bas'] + mimetypes = ['text/basic'] + + declarations = ('DATA', 'LET') + + functions = ( + 'ABS', 'ASC', 'ATN', 'CDBL', 'CHR$', 'CINT', 'CLNG', + 'COMMAND$', 'COS', 'CSNG', 'CSRLIN', 'CVD', 'CVDMBF', 'CVI', + 'CVL', 'CVS', 'CVSMBF', 'DATE$', 'ENVIRON$', 'EOF', 'ERDEV', + 'ERDEV$', 'ERL', 'ERR', 'EXP', 'FILEATTR', 'FIX', 'FRE', + 'FREEFILE', 'HEX$', 'INKEY$', 'INP', 'INPUT$', 'INSTR', 'INT', + 'IOCTL$', 'LBOUND', 'LCASE$', 'LEFT$', 'LEN', 'LOC', 'LOF', + 'LOG', 'LPOS', 'LTRIM$', 'MID$', 'MKD$', 'MKDMBF$', 'MKI$', + 'MKL$', 'MKS$', 'MKSMBF$', 'OCT$', 'PEEK', 'PEN', 'PLAY', + 'PMAP', 'POINT', 'POS', 'RIGHT$', 'RND', 'RTRIM$', 'SADD', + 'SCREEN', 'SEEK', 'SETMEM', 'SGN', 'SIN', 'SPACE$', 'SPC', + 'SQR', 'STICK', 'STR$', 'STRIG', 'STRING$', 'TAB', 'TAN', + 'TIME$', 'TIMER', 'UBOUND', 'UCASE$', 'VAL', 'VARPTR', + 'VARPTR$', 'VARSEG' + ) + + metacommands = ('$DYNAMIC', '$INCLUDE', '$STATIC') + + operators = ('AND', 'EQV', 'IMP', 'NOT', 'OR', 'XOR') + + statements = ( + 'BEEP', 'BLOAD', 'BSAVE', 'CALL', 'CALL ABSOLUTE', + 'CALL INTERRUPT', 'CALLS', 'CHAIN', 'CHDIR', 'CIRCLE', 'CLEAR', + 'CLOSE', 'CLS', 'COLOR', 'COM', 'COMMON', 'CONST', 'DATA', + 'DATE$', 'DECLARE', 'DEF FN', 'DEF SEG', 'DEFDBL', 'DEFINT', + 'DEFLNG', 'DEFSNG', 'DEFSTR', 'DEF', 'DIM', 'DO', 'LOOP', + 'DRAW', 'END', 'ENVIRON', 'ERASE', 'ERROR', 'EXIT', 'FIELD', + 'FILES', 'FOR', 'NEXT', 'FUNCTION', 'GET', 'GOSUB', 'GOTO', + 'IF', 'THEN', 'INPUT', 'INPUT #', 'IOCTL', 'KEY', 'KEY', + 'KILL', 'LET', 'LINE', 'LINE INPUT', 'LINE INPUT #', 'LOCATE', + 'LOCK', 'UNLOCK', 'LPRINT', 'LSET', 'MID$', 'MKDIR', 'NAME', + 'ON COM', 'ON ERROR', 'ON KEY', 'ON PEN', 'ON PLAY', + 'ON STRIG', 'ON TIMER', 'ON UEVENT', 'ON', 'OPEN', 'OPEN COM', + 'OPTION BASE', 'OUT', 'PAINT', 'PALETTE', 'PCOPY', 'PEN', + 'PLAY', 'POKE', 'PRESET', 'PRINT', 'PRINT #', 'PRINT USING', + 'PSET', 'PUT', 'PUT', 'RANDOMIZE', 'READ', 'REDIM', 'REM', + 'RESET', 'RESTORE', 'RESUME', 'RETURN', 'RMDIR', 'RSET', 'RUN', + 'SCREEN', 'SEEK', 'SELECT CASE', 'SHARED', 'SHELL', 'SLEEP', + 'SOUND', 'STATIC', 'STOP', 'STRIG', 'SUB', 'SWAP', 'SYSTEM', + 'TIME$', 'TIMER', 'TROFF', 'TRON', 'TYPE', 'UEVENT', 'UNLOCK', + 'VIEW', 'WAIT', 'WHILE', 'WEND', 'WIDTH', 'WINDOW', 'WRITE' + ) + + keywords = ( + 'ACCESS', 'ALIAS', 'ANY', 'APPEND', 'AS', 'BASE', 'BINARY', + 'BYVAL', 'CASE', 'CDECL', 'DOUBLE', 'ELSE', 'ELSEIF', 'ENDIF', + 'INTEGER', 'IS', 'LIST', 'LOCAL', 'LONG', 'LOOP', 'MOD', + 'NEXT', 'OFF', 'ON', 'OUTPUT', 'RANDOM', 'SIGNAL', 'SINGLE', + 'STEP', 'STRING', 'THEN', 'TO', 'UNTIL', 'USING', 'WEND' + ) + + tokens = { + 'root': [ + (r'\n+', Text), + (r'\s+', Text.Whitespace), + (r'^(\s*)(\d*)(\s*)(REM .*)$', + bygroups(Text.Whitespace, Name.Label, Text.Whitespace, + Comment.Single)), + (r'^(\s*)(\d+)(\s*)', + bygroups(Text.Whitespace, Name.Label, Text.Whitespace)), + (r'(?=[\s]*)(\w+)(?=[\s]*=)', Name.Variable.Global), + (r'(?=[^"]*)\'.*$', Comment.Single), + (r'"[^\n\"]*"', String.Double), + (r'(END)(\s+)(FUNCTION|IF|SELECT|SUB)', + bygroups(Keyword.Reserved, Text.Whitespace, Keyword.Reserved)), + (r'(DECLARE)(\s+)([A-Z]+)(\s+)(\S+)', + bygroups(Keyword.Declaration, Text.Whitespace, Name.Variable, + Text.Whitespace, Name)), + (r'(DIM)(\s+)(SHARED)(\s+)([^\s\(]+)', + bygroups(Keyword.Declaration, Text.Whitespace, Name.Variable, + Text.Whitespace, Name.Variable.Global)), + (r'(DIM)(\s+)([^\s\(]+)', + bygroups(Keyword.Declaration, Text.Whitespace, Name.Variable.Global)), + (r'^(\s*)([a-zA-Z_]+)(\s*)(\=)', + 
bygroups(Text.Whitespace, Name.Variable.Global, Text.Whitespace, + Operator)), + (r'(GOTO|GOSUB)(\s+)(\w+\:?)', + bygroups(Keyword.Reserved, Text.Whitespace, Name.Label)), + (r'(SUB)(\s+)(\w+\:?)', + bygroups(Keyword.Reserved, Text.Whitespace, Name.Label)), + include('declarations'), + include('functions'), + include('metacommands'), + include('operators'), + include('statements'), + include('keywords'), + (r'[a-zA-Z_]\w*[\$@#&!]', Name.Variable.Global), + (r'[a-zA-Z_]\w*\:', Name.Label), + (r'\-?\d*\.\d+[@|#]?', Number.Float), + (r'\-?\d+[@|#]', Number.Float), + (r'\-?\d+#?', Number.Integer.Long), + (r'\-?\d+#?', Number.Integer), + (r'!=|==|:=|\.=|<<|>>|[-~+/\\*%=<>&^|?:!.]', Operator), + (r'[\[\]{}(),;]', Punctuation), + (r'[\w]+', Name.Variable.Global), + ], + # can't use regular \b because of X$() + # XXX: use words() here + 'declarations': [ + (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, declarations)), + Keyword.Declaration), + ], + 'functions': [ + (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, functions)), + Keyword.Reserved), + ], + 'metacommands': [ + (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, metacommands)), + Keyword.Constant), + ], + 'operators': [ + (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, operators)), Operator.Word), + ], + 'statements': [ + (r'\b(%s)\b' % '|'.join(map(re.escape, statements)), + Keyword.Reserved), + ], + 'keywords': [ + (r'\b(%s)\b' % '|'.join(keywords), Keyword), + ], + } + + def analyse_text(text): + return 0.2 diff --git a/pygments/lexers/misc/erlang.py b/pygments/lexers/misc/erlang.py new file mode 100644 index 00000000..824cc055 --- /dev/null +++ b/pygments/lexers/misc/erlang.py @@ -0,0 +1,508 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.misc.erlang + ~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for Erlang. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import re + +from pygments.lexer import Lexer, RegexLexer, bygroups, words, do_insertions, \ + include +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation, Generic + +__all__ = ['ErlangLexer', 'ErlangShellLexer', 'ElixirConsoleLexer', + 'ElixirLexer'] + + +line_re = re.compile('.*?\n') + + +class ErlangLexer(RegexLexer): + """ + For the Erlang functional programming language. + + Blame Jeremy Thurgood (http://jerith.za.net/). + + .. 
versionadded:: 0.9 + """ + + name = 'Erlang' + aliases = ['erlang'] + filenames = ['*.erl', '*.hrl', '*.es', '*.escript'] + mimetypes = ['text/x-erlang'] + + keywords = ( + 'after', 'begin', 'case', 'catch', 'cond', 'end', 'fun', 'if', + 'let', 'of', 'query', 'receive', 'try', 'when', + ) + + builtins = ( # See erlang(3) man page + 'abs', 'append_element', 'apply', 'atom_to_list', 'binary_to_list', + 'bitstring_to_list', 'binary_to_term', 'bit_size', 'bump_reductions', + 'byte_size', 'cancel_timer', 'check_process_code', 'delete_module', + 'demonitor', 'disconnect_node', 'display', 'element', 'erase', 'exit', + 'float', 'float_to_list', 'fun_info', 'fun_to_list', + 'function_exported', 'garbage_collect', 'get', 'get_keys', + 'group_leader', 'hash', 'hd', 'integer_to_list', 'iolist_to_binary', + 'iolist_size', 'is_atom', 'is_binary', 'is_bitstring', 'is_boolean', + 'is_builtin', 'is_float', 'is_function', 'is_integer', 'is_list', + 'is_number', 'is_pid', 'is_port', 'is_process_alive', 'is_record', + 'is_reference', 'is_tuple', 'length', 'link', 'list_to_atom', + 'list_to_binary', 'list_to_bitstring', 'list_to_existing_atom', + 'list_to_float', 'list_to_integer', 'list_to_pid', 'list_to_tuple', + 'load_module', 'localtime_to_universaltime', 'make_tuple', 'md5', + 'md5_final', 'md5_update', 'memory', 'module_loaded', 'monitor', + 'monitor_node', 'node', 'nodes', 'open_port', 'phash', 'phash2', + 'pid_to_list', 'port_close', 'port_command', 'port_connect', + 'port_control', 'port_call', 'port_info', 'port_to_list', + 'process_display', 'process_flag', 'process_info', 'purge_module', + 'put', 'read_timer', 'ref_to_list', 'register', 'resume_process', + 'round', 'send', 'send_after', 'send_nosuspend', 'set_cookie', + 'setelement', 'size', 'spawn', 'spawn_link', 'spawn_monitor', + 'spawn_opt', 'split_binary', 'start_timer', 'statistics', + 'suspend_process', 'system_flag', 'system_info', 'system_monitor', + 'system_profile', 'term_to_binary', 'tl', 'trace', 'trace_delivered', + 'trace_info', 'trace_pattern', 'trunc', 'tuple_size', 'tuple_to_list', + 'universaltime_to_localtime', 'unlink', 'unregister', 'whereis' + ) + + operators = r'(\+\+?|--?|\*|/|<|>|/=|=:=|=/=|=<|>=|==?|<-|!|\?)' + word_operators = ( + 'and', 'andalso', 'band', 'bnot', 'bor', 'bsl', 'bsr', 'bxor', + 'div', 'not', 'or', 'orelse', 'rem', 'xor' + ) + + atom_re = r"(?:[a-z]\w*|'[^\n']*[^\\]')" + + variable_re = r'(?:[A-Z_]\w*)' + + escape_re = r'(?:\\(?:[bdefnrstv\'"\\/]|[0-7][0-7]?[0-7]?|\^[a-zA-Z]))' + + macro_re = r'(?:'+variable_re+r'|'+atom_re+r')' + + base_re = r'(?:[2-9]|[12][0-9]|3[0-6])' + + tokens = { + 'root': [ + (r'\s+', Text), + (r'%.*\n', Comment), + (words(keywords, suffix=r'\b'), Keyword), + (words(builtins, suffix=r'\b'), Name.Builtin), + (words(word_operators, suffix='\b'), Operator.Word), + (r'^-', Punctuation, 'directive'), + (operators, Operator), + (r'"', String, 'string'), + (r'<<', Name.Label), + (r'>>', Name.Label), + ('(' + atom_re + ')(:)', bygroups(Name.Namespace, Punctuation)), + ('(?:^|(?<=:))(' + atom_re + r')(\s*)(\()', + bygroups(Name.Function, Text, Punctuation)), + (r'[+-]?' 
+ base_re + r'#[0-9a-zA-Z]+', Number.Integer), + (r'[+-]?\d+', Number.Integer), + (r'[+-]?\d+.\d+', Number.Float), + (r'[]\[:_@\".{}()|;,]', Punctuation), + (variable_re, Name.Variable), + (atom_re, Name), + (r'\?'+macro_re, Name.Constant), + (r'\$(?:'+escape_re+r'|\\[ %]|[^\\])', String.Char), + (r'#'+atom_re+r'(:?\.'+atom_re+r')?', Name.Label), + ], + 'string': [ + (escape_re, String.Escape), + (r'"', String, '#pop'), + (r'~[0-9.*]*[~#+bBcdefginpPswWxX]', String.Interpol), + (r'[^"\\~]+', String), + (r'~', String), + ], + 'directive': [ + (r'(define)(\s*)(\()('+macro_re+r')', + bygroups(Name.Entity, Text, Punctuation, Name.Constant), '#pop'), + (r'(record)(\s*)(\()('+macro_re+r')', + bygroups(Name.Entity, Text, Punctuation, Name.Label), '#pop'), + (atom_re, Name.Entity, '#pop'), + ], + } + + +class ErlangShellLexer(Lexer): + """ + Shell sessions in erl (for Erlang code). + + .. versionadded:: 1.1 + """ + name = 'Erlang erl session' + aliases = ['erl'] + filenames = ['*.erl-sh'] + mimetypes = ['text/x-erl-shellsession'] + + _prompt_re = re.compile(r'\d+>(?=\s|\Z)') + + def get_tokens_unprocessed(self, text): + erlexer = ErlangLexer(**self.options) + + curcode = '' + insertions = [] + for match in line_re.finditer(text): + line = match.group() + m = self._prompt_re.match(line) + if m is not None: + end = m.end() + insertions.append((len(curcode), + [(0, Generic.Prompt, line[:end])])) + curcode += line[end:] + else: + if curcode: + for item in do_insertions(insertions, + erlexer.get_tokens_unprocessed(curcode)): + yield item + curcode = '' + insertions = [] + if line.startswith('*'): + yield match.start(), Generic.Traceback, line + else: + yield match.start(), Generic.Output, line + if curcode: + for item in do_insertions(insertions, + erlexer.get_tokens_unprocessed(curcode)): + yield item + + +def gen_elixir_string_rules(name, symbol, token): + states = {} + states['string_' + name] = [ + (r'[^#%s\\]+' % (symbol,), token), + include('escapes'), + (r'\\.', token), + (r'(%s)' % (symbol,), bygroups(token), "#pop"), + include('interpol') + ] + return states + + +def gen_elixir_sigstr_rules(term, token, interpol=True): + if interpol: + return [ + (r'[^#%s\\]+' % (term,), token), + include('escapes'), + (r'\\.', token), + (r'%s[a-zA-Z]*' % (term,), token, '#pop'), + include('interpol') + ] + else: + return [ + (r'[^%s\\]+' % (term,), token), + (r'\\.', token), + (r'%s[a-zA-Z]*' % (term,), token, '#pop'), + ] + + +class ElixirLexer(RegexLexer): + """ + For the `Elixir language `_. + + .. 
versionadded:: 1.5 + """ + + name = 'Elixir' + aliases = ['elixir', 'ex', 'exs'] + filenames = ['*.ex', '*.exs'] + mimetypes = ['text/x-elixir'] + + KEYWORD = ('fn', 'do', 'end', 'after', 'else', 'rescue', 'catch') + KEYWORD_OPERATOR = ('not', 'and', 'or', 'when', 'in') + BUILTIN = ( + 'case', 'cond', 'for', 'if', 'unless', 'try', 'receive', 'raise', + 'quote', 'unquote', 'unquote_splicing', 'throw', 'super' + ) + BUILTIN_DECLARATION = ( + 'def', 'defp', 'defmodule', 'defprotocol', 'defmacro', 'defmacrop', + 'defdelegate', 'defexception', 'defstruct', 'defimpl', 'defcallback' + ) + + BUILTIN_NAMESPACE = ('import', 'require', 'use', 'alias') + CONSTANT = ('nil', 'true', 'false') + + PSEUDO_VAR = ('_', '__MODULE__', '__DIR__', '__ENV__', '__CALLER__') + + OPERATORS3 = ( + '<<<', '>>>', '|||', '&&&', '^^^', '~~~', '===', '!==', + '~>>', '<~>', '|~>', '<|>', + ) + OPERATORS2 = ( + '==', '!=', '<=', '>=', '&&', '||', '<>', '++', '--', '|>', '=~', + '->', '<-', '|', '.', '=', '~>', '<~', + ) + OPERATORS1 = ('<', '>', '+', '-', '*', '/', '!', '^', '&') + + PUNCTUATION = ( + '\\\\', '<<', '>>', '=>', '(', ')', ':', ';', ',', '[', ']' + ) + + def get_tokens_unprocessed(self, text): + for index, token, value in RegexLexer.get_tokens_unprocessed(self, text): + if token is Name: + if value in self.KEYWORD: + yield index, Keyword, value + elif value in self.KEYWORD_OPERATOR: + yield index, Operator.Word, value + elif value in self.BUILTIN: + yield index, Keyword, value + elif value in self.BUILTIN_DECLARATION: + yield index, Keyword.Declaration, value + elif value in self.BUILTIN_NAMESPACE: + yield index, Keyword.Namespace, value + elif value in self.CONSTANT: + yield index, Name.Constant, value + elif value in self.PSEUDO_VAR: + yield index, Name.Builtin.Pseudo, value + else: + yield index, token, value + else: + yield index, token, value + + def gen_elixir_sigil_rules(): + # all valid sigil terminators (excluding heredocs) + terminators = [ + (r'\{', r'\}', 'cb'), + (r'\[', r'\]', 'sb'), + (r'\(', r'\)', 'pa'), + (r'\<', r'\>', 'ab'), + (r'/', r'/', 'slas'), + (r'\|', r'\|', 'pipe'), + ('"', '"', 'quot'), + ("'", "'", 'apos'), + ] + + # heredocs have slightly different rules + triquotes = [(r'"""', 'triquot'), (r"'''", 'triapos')] + + token = String.Other + states = {'sigils': []} + + for term, name in triquotes: + states['sigils'] += [ + (r'(~[a-z])(%s)' % (term,), bygroups(token, String.Heredoc), + (name + '-end', name + '-intp')), + (r'(~[A-Z])(%s)' % (term,), bygroups(token, String.Heredoc), + (name + '-end', name + '-no-intp')), + ] + + states[name + '-end'] = [(r'[a-zA-Z]*', token, '#pop')] + states[name + '-intp'] = [ + (r'^\s*' + term, String.Heredoc, '#pop'), + include('heredoc_interpol'), + ] + states[name + '-no-intp'] = [ + (r'^\s*' + term, String.Heredoc, '#pop'), + include('heredoc_no_interpol'), + ] + + for lterm, rterm, name in terminators: + states['sigils'] += [ + (r'~[a-z]' + lterm, token, name + '-intp'), + (r'~[A-Z]' + lterm, token, name + '-no-intp'), + ] + states[name + '-intp'] = gen_elixir_sigstr_rules(rterm, token) + states[name + '-no-intp'] = \ + gen_elixir_sigstr_rules(rterm, token, interpol=False) + + return states + + op3_re = "|".join(re.escape(s) for s in OPERATORS3) + op2_re = "|".join(re.escape(s) for s in OPERATORS2) + op1_re = "|".join(re.escape(s) for s in OPERATORS1) + ops_re = r'(?:%s|%s|%s)' % (op3_re, op2_re, op1_re) + punctuation_re = "|".join(re.escape(s) for s in PUNCTUATION) + alnum = '[A-Za-z_0-9]' + name_re = r'(?:\.\.\.|[a-z_]%s*[!\?]?)' % alnum + 
modname_re = r'[A-Z]%(alnum)s*(?:\.[A-Z]%(alnum)s*)*' % {'alnum': alnum} + complex_name_re = r'(?:%s|%s|%s)' % (name_re, modname_re, ops_re) + special_atom_re = r'(?:\.\.\.|<<>>|%{}|%|{})' + + long_hex_char_re = r'(\\x{)([\da-fA-F]+)(})' + hex_char_re = r'(\\x[\da-fA-F]{1,2})' + escape_char_re = r'(\\[abdefnrstv])' + + tokens = { + 'root': [ + (r'\s+', Text), + (r'#.*$', Comment.Single), + + # Various kinds of characters + (r'(\?)' + long_hex_char_re, + bygroups(String.Char, + String.Escape, Number.Hex, String.Escape)), + (r'(\?)' + hex_char_re, + bygroups(String.Char, String.Escape)), + (r'(\?)' + escape_char_re, + bygroups(String.Char, String.Escape)), + (r'\?\\?.', String.Char), + + # '::' has to go before atoms + (r':::', String.Symbol), + (r'::', Operator), + + # atoms + (r':' + special_atom_re, String.Symbol), + (r':' + complex_name_re, String.Symbol), + (r':"', String.Symbol, 'string_double_atom'), + (r":'", String.Symbol, 'string_single_atom'), + + # [keywords: ...] + (r'(%s|%s)(:)(?=\s|\n)' % (special_atom_re, complex_name_re), + bygroups(String.Symbol, Punctuation)), + + # @attributes + (r'@' + name_re, Name.Attribute), + + # identifiers + (name_re, Name), + (r'(%%?)(%s)' % (modname_re,), bygroups(Punctuation, Name.Class)), + + # operators and punctuation + (op3_re, Operator), + (op2_re, Operator), + (punctuation_re, Punctuation), + (r'&\d', Name.Entity), # anon func arguments + (op1_re, Operator), + + # numbers + (r'0b[01]+', Number.Bin), + (r'0o[0-7]+', Number.Oct), + (r'0x[\da-fA-F]+', Number.Hex), + (r'\d(_?\d)*\.\d(_?\d)*([eE][-+]?\d(_?\d)*)?', Number.Float), + (r'\d(_?\d)*', Number.Integer), + + # strings and heredocs + (r'"""\s*', String.Heredoc, 'heredoc_double'), + (r"'''\s*$", String.Heredoc, 'heredoc_single'), + (r'"', String.Double, 'string_double'), + (r"'", String.Single, 'string_single'), + + include('sigils'), + + (r'%{', Punctuation, 'map_key'), + (r'{', Punctuation, 'tuple'), + ], + 'heredoc_double': [ + (r'^\s*"""', String.Heredoc, '#pop'), + include('heredoc_interpol'), + ], + 'heredoc_single': [ + (r"^\s*'''", String.Heredoc, '#pop'), + include('heredoc_interpol'), + ], + 'heredoc_interpol': [ + (r'[^#\\\n]+', String.Heredoc), + include('escapes'), + (r'\\.', String.Heredoc), + (r'\n+', String.Heredoc), + include('interpol'), + ], + 'heredoc_no_interpol': [ + (r'[^\\\n]+', String.Heredoc), + (r'\\.', String.Heredoc), + (r'\n+', String.Heredoc), + ], + 'escapes': [ + (long_hex_char_re, + bygroups(String.Escape, Number.Hex, String.Escape)), + (hex_char_re, String.Escape), + (escape_char_re, String.Escape), + ], + 'interpol': [ + (r'#{', String.Interpol, 'interpol_string'), + ], + 'interpol_string': [ + (r'}', String.Interpol, "#pop"), + include('root') + ], + 'map_key': [ + include('root'), + (r':', Punctuation, 'map_val'), + (r'=>', Punctuation, 'map_val'), + (r'}', Punctuation, '#pop'), + ], + 'map_val': [ + include('root'), + (r',', Punctuation, '#pop'), + (r'(?=})', Punctuation, '#pop'), + ], + 'tuple': [ + include('root'), + (r'}', Punctuation, '#pop'), + ], + } + tokens.update(gen_elixir_string_rules('double', '"', String.Double)) + tokens.update(gen_elixir_string_rules('single', "'", String.Single)) + tokens.update(gen_elixir_string_rules('double_atom', '"', String.Symbol)) + tokens.update(gen_elixir_string_rules('single_atom', "'", String.Symbol)) + tokens.update(gen_elixir_sigil_rules()) + + +class ElixirConsoleLexer(Lexer): + """ + For Elixir interactive console (iex) output like: + + .. 
sourcecode:: iex + + iex> [head | tail] = [1,2,3] + [1,2,3] + iex> head + 1 + iex> tail + [2,3] + iex> [head | tail] + [1,2,3] + iex> length [head | tail] + 3 + + .. versionadded:: 1.5 + """ + + name = 'Elixir iex session' + aliases = ['iex'] + mimetypes = ['text/x-elixir-shellsession'] + + _prompt_re = re.compile('(iex|\.{3})(\(\d+\))?> ') + + def get_tokens_unprocessed(self, text): + exlexer = ElixirLexer(**self.options) + + curcode = '' + in_error = False + insertions = [] + for match in line_re.finditer(text): + line = match.group() + if line.startswith(u'** '): + in_error = True + insertions.append((len(curcode), + [(0, Generic.Error, line[:-1])])) + curcode += line[-1:] + else: + m = self._prompt_re.match(line) + if m is not None: + in_error = False + end = m.end() + insertions.append((len(curcode), + [(0, Generic.Prompt, line[:end])])) + curcode += line[end:] + else: + if curcode: + for item in do_insertions( + insertions, exlexer.get_tokens_unprocessed(curcode)): + yield item + curcode = '' + insertions = [] + token = Generic.Error if in_error else Generic.Output + yield match.start(), token, line + if curcode: + for item in do_insertions( + insertions, exlexer.get_tokens_unprocessed(curcode)): + yield item diff --git a/pygments/lexers/misc/ml.py b/pygments/lexers/misc/ml.py new file mode 100644 index 00000000..661b693a --- /dev/null +++ b/pygments/lexers/misc/ml.py @@ -0,0 +1,768 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.misc.ml + ~~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for ML family languages. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import re + +from pygments.lexer import RegexLexer, include, bygroups, default, words +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation, Error + +__all__ = ['SMLLexer', 'OcamlLexer', 'OpaLexer'] + + +class SMLLexer(RegexLexer): + """ + For the Standard ML language. + + .. versionadded:: 1.5 + """ + + name = 'Standard ML' + aliases = ['sml'] + filenames = ['*.sml', '*.sig', '*.fun'] + mimetypes = ['text/x-standardml', 'application/x-standardml'] + + alphanumid_reserved = set(( + # Core + 'abstype', 'and', 'andalso', 'as', 'case', 'datatype', 'do', 'else', + 'end', 'exception', 'fn', 'fun', 'handle', 'if', 'in', 'infix', + 'infixr', 'let', 'local', 'nonfix', 'of', 'op', 'open', 'orelse', + 'raise', 'rec', 'then', 'type', 'val', 'with', 'withtype', 'while', + # Modules + 'eqtype', 'functor', 'include', 'sharing', 'sig', 'signature', + 'struct', 'structure', 'where', + )) + + symbolicid_reserved = set(( + # Core + ':', '\|', '=', '=>', '->', '#', + # Modules + ':>', + )) + + nonid_reserved = set(('(', ')', '[', ']', '{', '}', ',', ';', '...', '_')) + + alphanumid_re = r"[a-zA-Z][\w']*" + symbolicid_re = r"[!%&$#+\-/:<=>?@\\~`^|*]+" + + # A character constant is a sequence of the form #s, where s is a string + # constant denoting a string of size one character. This setup just parses + # the entire string as either a String.Double or a String.Char (depending + # on the argument), even if the String.Char is an erronous + # multiple-character string. 
+ def stringy(whatkind): + return [ + (r'[^"\\]', whatkind), + (r'\\[\\\"abtnvfr]', String.Escape), + # Control-character notation is used for codes < 32, + # where \^@ == \000 + (r'\\\^[\x40-\x5e]', String.Escape), + # Docs say 'decimal digits' + (r'\\[0-9]{3}', String.Escape), + (r'\\u[0-9a-fA-F]{4}', String.Escape), + (r'\\\s+\\', String.Interpol), + (r'"', whatkind, '#pop'), + ] + + # Callbacks for distinguishing tokens and reserved words + def long_id_callback(self, match): + if match.group(1) in self.alphanumid_reserved: + token = Error + else: + token = Name.Namespace + yield match.start(1), token, match.group(1) + yield match.start(2), Punctuation, match.group(2) + + def end_id_callback(self, match): + if match.group(1) in self.alphanumid_reserved: + token = Error + elif match.group(1) in self.symbolicid_reserved: + token = Error + else: + token = Name + yield match.start(1), token, match.group(1) + + def id_callback(self, match): + str = match.group(1) + if str in self.alphanumid_reserved: + token = Keyword.Reserved + elif str in self.symbolicid_reserved: + token = Punctuation + else: + token = Name + yield match.start(1), token, str + + tokens = { + # Whitespace and comments are (almost) everywhere + 'whitespace': [ + (r'\s+', Text), + (r'\(\*', Comment.Multiline, 'comment'), + ], + + 'delimiters': [ + # This lexer treats these delimiters specially: + # Delimiters define scopes, and the scope is how the meaning of + # the `|' is resolved - is it a case/handle expression, or function + # definition by cases? (This is not how the Definition works, but + # it's how MLton behaves, see http://mlton.org/SMLNJDeviations) + (r'\(|\[|{', Punctuation, 'main'), + (r'\)|\]|}', Punctuation, '#pop'), + (r'\b(let|if|local)\b(?!\')', Keyword.Reserved, ('main', 'main')), + (r'\b(struct|sig|while)\b(?!\')', Keyword.Reserved, 'main'), + (r'\b(do|else|end|in|then)\b(?!\')', Keyword.Reserved, '#pop'), + ], + + 'core': [ + # Punctuation that doesn't overlap symbolic identifiers + (r'(%s)' % '|'.join(re.escape(z) for z in nonid_reserved), + Punctuation), + + # Special constants: strings, floats, numbers in decimal and hex + (r'#"', String.Char, 'char'), + (r'"', String.Double, 'string'), + (r'~?0x[0-9a-fA-F]+', Number.Hex), + (r'0wx[0-9a-fA-F]+', Number.Hex), + (r'0w\d+', Number.Integer), + (r'~?\d+\.\d+[eE]~?\d+', Number.Float), + (r'~?\d+\.\d+', Number.Float), + (r'~?\d+[eE]~?\d+', Number.Float), + (r'~?\d+', Number.Integer), + + # Labels + (r'#\s*[1-9][0-9]*', Name.Label), + (r'#\s*(%s)' % alphanumid_re, Name.Label), + (r'#\s+(%s)' % symbolicid_re, Name.Label), + # Some reserved words trigger a special, local lexer state change + (r'\b(datatype|abstype)\b(?!\')', Keyword.Reserved, 'dname'), + (r'(?=\b(exception)\b(?!\'))', Text, ('ename')), + (r'\b(functor|include|open|signature|structure)\b(?!\')', + Keyword.Reserved, 'sname'), + (r'\b(type|eqtype)\b(?!\')', Keyword.Reserved, 'tname'), + + # Regular identifiers, long and otherwise + (r'\'[\w\']*', Name.Decorator), + (r'(%s)(\.)' % alphanumid_re, long_id_callback, "dotted"), + (r'(%s)' % alphanumid_re, id_callback), + (r'(%s)' % symbolicid_re, id_callback), + ], + 'dotted': [ + (r'(%s)(\.)' % alphanumid_re, long_id_callback), + (r'(%s)' % alphanumid_re, end_id_callback, "#pop"), + (r'(%s)' % symbolicid_re, end_id_callback, "#pop"), + (r'\s+', Error), + (r'\S+', Error), + ], + + + # Main parser (prevents errors in files that have scoping errors) + 'root': [ + default('main') + ], + + # In this scope, I expect '|' to not be followed by a function 
name, + # and I expect 'and' to be followed by a binding site + 'main': [ + include('whitespace'), + + # Special behavior of val/and/fun + (r'\b(val|and)\b(?!\')', Keyword.Reserved, 'vname'), + (r'\b(fun)\b(?!\')', Keyword.Reserved, + ('#pop', 'main-fun', 'fname')), + + include('delimiters'), + include('core'), + (r'\S+', Error), + ], + + # In this scope, I expect '|' and 'and' to be followed by a function + 'main-fun': [ + include('whitespace'), + + (r'\s', Text), + (r'\(\*', Comment.Multiline, 'comment'), + + # Special behavior of val/and/fun + (r'\b(fun|and)\b(?!\')', Keyword.Reserved, 'fname'), + (r'\b(val)\b(?!\')', Keyword.Reserved, + ('#pop', 'main', 'vname')), + + # Special behavior of '|' and '|'-manipulating keywords + (r'\|', Punctuation, 'fname'), + (r'\b(case|handle)\b(?!\')', Keyword.Reserved, + ('#pop', 'main')), + + include('delimiters'), + include('core'), + (r'\S+', Error), + ], + + # Character and string parsers + 'char': stringy(String.Char), + 'string': stringy(String.Double), + + 'breakout': [ + (r'(?=\b(%s)\b(?!\'))' % '|'.join(alphanumid_reserved), Text, '#pop'), + ], + + # Dealing with what comes after module system keywords + 'sname': [ + include('whitespace'), + include('breakout'), + + (r'(%s)' % alphanumid_re, Name.Namespace), + default('#pop'), + ], + + # Dealing with what comes after the 'fun' (or 'and' or '|') keyword + 'fname': [ + include('whitespace'), + (r'\'[0-9a-zA-Z_\']*', Name.Decorator), + (r'\(', Punctuation, 'tyvarseq'), + + (r'(%s)' % alphanumid_re, Name.Function, '#pop'), + (r'(%s)' % symbolicid_re, Name.Function, '#pop'), + + # Ignore interesting function declarations like "fun (x + y) = ..." + default('#pop'), + ], + + # Dealing with what comes after the 'val' (or 'and') keyword + 'vname': [ + include('whitespace'), + (r'\'[0-9a-zA-Z_\']*', Name.Decorator), + (r'\(', Punctuation, 'tyvarseq'), + + (r'(%s)(\s*)(=(?!%s))' % (alphanumid_re, symbolicid_re), + bygroups(Name.Variable, Text, Punctuation), '#pop'), + (r'(%s)(\s*)(=(?!%s))' % (symbolicid_re, symbolicid_re), + bygroups(Name.Variable, Text, Punctuation), '#pop'), + (r'(%s)' % alphanumid_re, Name.Variable, '#pop'), + (r'(%s)' % symbolicid_re, Name.Variable, '#pop'), + + # Ignore interesting patterns like 'val (x, y)' + default('#pop'), + ], + + # Dealing with what comes after the 'type' (or 'and') keyword + 'tname': [ + include('whitespace'), + include('breakout'), + + (r'\'[0-9a-zA-Z_\']*', Name.Decorator), + (r'\(', Punctuation, 'tyvarseq'), + (r'=(?!%s)' % symbolicid_re, Punctuation, ('#pop', 'typbind')), + + (r'(%s)' % alphanumid_re, Keyword.Type), + (r'(%s)' % symbolicid_re, Keyword.Type), + (r'\S+', Error, '#pop'), + ], + + # A type binding includes most identifiers + 'typbind': [ + include('whitespace'), + + (r'\b(and)\b(?!\')', Keyword.Reserved, ('#pop', 'tname')), + + include('breakout'), + include('core'), + (r'\S+', Error, '#pop'), + ], + + # Dealing with what comes after the 'datatype' (or 'and') keyword + 'dname': [ + include('whitespace'), + include('breakout'), + + (r'\'[0-9a-zA-Z_\']*', Name.Decorator), + (r'\(', Punctuation, 'tyvarseq'), + (r'(=)(\s*)(datatype)', + bygroups(Punctuation, Text, Keyword.Reserved), '#pop'), + (r'=(?!%s)' % symbolicid_re, Punctuation, + ('#pop', 'datbind', 'datcon')), + + (r'(%s)' % alphanumid_re, Keyword.Type), + (r'(%s)' % symbolicid_re, Keyword.Type), + (r'\S+', Error, '#pop'), + ], + + # common case - A | B | C of int + 'datbind': [ + include('whitespace'), + + (r'\b(and)\b(?!\')', Keyword.Reserved, ('#pop', 'dname')), + 
(r'\b(withtype)\b(?!\')', Keyword.Reserved, ('#pop', 'tname')), + (r'\b(of)\b(?!\')', Keyword.Reserved), + + (r'(\|)(\s*)(%s)' % alphanumid_re, + bygroups(Punctuation, Text, Name.Class)), + (r'(\|)(\s+)(%s)' % symbolicid_re, + bygroups(Punctuation, Text, Name.Class)), + + include('breakout'), + include('core'), + (r'\S+', Error), + ], + + # Dealing with what comes after an exception + 'ename': [ + include('whitespace'), + + (r'(exception|and)\b(\s+)(%s)' % alphanumid_re, + bygroups(Keyword.Reserved, Text, Name.Class)), + (r'(exception|and)\b(\s*)(%s)' % symbolicid_re, + bygroups(Keyword.Reserved, Text, Name.Class)), + (r'\b(of)\b(?!\')', Keyword.Reserved), + + include('breakout'), + include('core'), + (r'\S+', Error), + ], + + 'datcon': [ + include('whitespace'), + (r'(%s)' % alphanumid_re, Name.Class, '#pop'), + (r'(%s)' % symbolicid_re, Name.Class, '#pop'), + (r'\S+', Error, '#pop'), + ], + + # Series of type variables + 'tyvarseq': [ + (r'\s', Text), + (r'\(\*', Comment.Multiline, 'comment'), + + (r'\'[0-9a-zA-Z_\']*', Name.Decorator), + (alphanumid_re, Name), + (r',', Punctuation), + (r'\)', Punctuation, '#pop'), + (symbolicid_re, Name), + ], + + 'comment': [ + (r'[^(*)]', Comment.Multiline), + (r'\(\*', Comment.Multiline, '#push'), + (r'\*\)', Comment.Multiline, '#pop'), + (r'[(*)]', Comment.Multiline), + ], + } + + +class OcamlLexer(RegexLexer): + """ + For the OCaml language. + + .. versionadded:: 0.7 + """ + + name = 'OCaml' + aliases = ['ocaml'] + filenames = ['*.ml', '*.mli', '*.mll', '*.mly'] + mimetypes = ['text/x-ocaml'] + + keywords = ( + 'as', 'assert', 'begin', 'class', 'constraint', 'do', 'done', + 'downto', 'else', 'end', 'exception', 'external', 'false', + 'for', 'fun', 'function', 'functor', 'if', 'in', 'include', + 'inherit', 'initializer', 'lazy', 'let', 'match', 'method', + 'module', 'mutable', 'new', 'object', 'of', 'open', 'private', + 'raise', 'rec', 'sig', 'struct', 'then', 'to', 'true', 'try', + 'type', 'value', 'val', 'virtual', 'when', 'while', 'with', + ) + keyopts = ( + '!=', '#', '&', '&&', '\(', '\)', '\*', '\+', ',', '-', + '-\.', '->', '\.', '\.\.', ':', '::', ':=', ':>', ';', ';;', '<', + '<-', '=', '>', '>]', '>}', '\?', '\?\?', '\[', '\[<', '\[>', '\[\|', + ']', '_', '`', '{', '{<', '\|', '\|]', '}', '~' + ) + + operators = r'[!$%&*+\./:<=>?@^|~-]' + word_operators = ('and', 'asr', 'land', 'lor', 'lsl', 'lxor', 'mod', 'or') + prefix_syms = r'[!?~]' + infix_syms = r'[=<>@^|&+\*/$%-]' + primitives = ('unit', 'int', 'float', 'bool', 'string', 'char', 'list', 'array') + + tokens = { + 'escape-sequence': [ + (r'\\[\\\"\'ntbr]', String.Escape), + (r'\\[0-9]{3}', String.Escape), + (r'\\x[0-9a-fA-F]{2}', String.Escape), + ], + 'root': [ + (r'\s+', Text), + (r'false|true|\(\)|\[\]', Name.Builtin.Pseudo), + (r'\b([A-Z][\w\']*)(?=\s*\.)', Name.Namespace, 'dotted'), + (r'\b([A-Z][\w\']*)', Name.Class), + (r'\(\*(?![)])', Comment, 'comment'), + (r'\b(%s)\b' % '|'.join(keywords), Keyword), + (r'(%s)' % '|'.join(keyopts[::-1]), Operator), + (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator), + (r'\b(%s)\b' % '|'.join(word_operators), Operator.Word), + (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type), + + (r"[^\W\d][\w']*", Name), + + (r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float), + (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex), + (r'0[oO][0-7][0-7_]*', Number.Oct), + (r'0[bB][01][01_]*', Number.Bin), + (r'\d[\d_]*', Number.Integer), + + (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'", + String.Char), + (r"'.'", String.Char), 
+            (r"'", Keyword),  # a stray quote is another syntax element
+
+            (r'"', String.Double, 'string'),
+
+            (r'[~?][a-z][\w\']*:', Name.Variable),
+        ],
+        'comment': [
+            (r'[^(*)]+', Comment),
+            (r'\(\*', Comment, '#push'),
+            (r'\*\)', Comment, '#pop'),
+            (r'[(*)]', Comment),
+        ],
+        'string': [
+            (r'[^\\"]+', String.Double),
+            include('escape-sequence'),
+            (r'\\\n', String.Double),
+            (r'"', String.Double, '#pop'),
+        ],
+        'dotted': [
+            (r'\s+', Text),
+            (r'\.', Punctuation),
+            (r'[A-Z][\w\']*(?=\s*\.)', Name.Namespace),
+            (r'[A-Z][\w\']*', Name.Class, '#pop'),
+            (r'[a-z_][\w\']*', Name, '#pop'),
+        ],
+    }
+
+
+class OpaLexer(RegexLexer):
+    """
+    Lexer for the Opa language (http://opalang.org).
+
+    .. versionadded:: 1.5
+    """
+
+    name = 'Opa'
+    aliases = ['opa']
+    filenames = ['*.opa']
+    mimetypes = ['text/x-opa']
+
+    # most of these aren't strictly keywords
+    # but if you color only real keywords, you might just
+    # as well not color anything
+    keywords = (
+        'and', 'as', 'begin', 'case', 'client', 'css', 'database', 'db', 'do',
+        'else', 'end', 'external', 'forall', 'function', 'if', 'import',
+        'match', 'module', 'or', 'package', 'parser', 'rec', 'server', 'then',
+        'type', 'val', 'with', 'xml_parser',
+    )
+
+    # matches both stuff and `stuff`
+    ident_re = r'(([a-zA-Z_]\w*)|(`[^`]*`))'
+
+    op_re = r'[.=\-<>,@~%/+?*&^!]'
+    punc_re = r'[()\[\],;|]'  # '{' and '}' are treated elsewhere
+                              # because they are also used for inserts
+
+    tokens = {
+        # copied from the caml lexer, should be adapted
+        'escape-sequence': [
+            (r'\\[\\\"\'ntr}]', String.Escape),
+            (r'\\[0-9]{3}', String.Escape),
+            (r'\\x[0-9a-fA-F]{2}', String.Escape),
+        ],
+
+        # factorizing these rules, because they are inserted many times
+        'comments': [
+            (r'/\*', Comment, 'nested-comment'),
+            (r'//.*?$', Comment),
+        ],
+        'comments-and-spaces': [
+            include('comments'),
+            (r'\s+', Text),
+        ],
+
+        'root': [
+            include('comments-and-spaces'),
+            # keywords
+            (words(keywords, prefix=r'\b', suffix=r'\b'), Keyword),
+            # directives
+            # we could parse the actual set of directives instead of anything
+            # starting with @, but this is troublesome
+            # because it needs to be adjusted all the time
+            # and assuming we parse only sources that compile, it is useless
+            (r'@' + ident_re + r'\b', Name.Builtin.Pseudo),
+
+            # number literals
+            (r'-?.[\d]+([eE][+\-]?\d+)', Number.Float),
+            (r'-?\d+.\d*([eE][+\-]?\d+)', Number.Float),
+            (r'-?\d+[eE][+\-]?\d+', Number.Float),
+            (r'0[xX][\da-fA-F]+', Number.Hex),
+            (r'0[oO][0-7]+', Number.Oct),
+            (r'0[bB][01]+', Number.Bin),
+            (r'\d+', Number.Integer),
+            # color literals
+            (r'#[\da-fA-F]{3,6}', Number.Integer),
+
+            # string literals
+            (r'"', String.Double, 'string'),
+            # char literal, should be checked because this is the regexp from
+            # the caml lexer
+            (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2})|.)'",
+             String.Char),
+
+            # this is meant to deal with embedded exprs in strings
+            # every time we find a '}' we pop a state so that if we were
+            # inside a string, we are back in the string state
+            # as a consequence, we must also push a state every time we find a
+            # '{' or else we will have errors when parsing {} for instance
+            (r'{', Operator, '#push'),
+            (r'}', Operator, '#pop'),
+
+            # html literals
+            # this is much stricter than the actual parser,
+            # since a<b would not be parsed as html
+            # (interpreted as a lesser-than comparison instead)
+            (r'<(?=[a-zA-Z>])', String.Single, 'html-open-tag'),
+
+            # db path
+            # matching the '[_]' in '/a[_]' because it is a part
+            # of the syntax of the db path definition
+            # unfortunately, i don't know how to match the ']' in
+            # /a[1], so this is somewhat
inconsistent + (r'[@?!]?(/\w+)+(\[_\])?', Name.Variable), + # putting the same color on <- as on db path, since + # it can be used only to mean Db.write + (r'<-(?!'+op_re+r')', Name.Variable), + + # 'modules' + # although modules are not distinguished by their names as in caml + # the standard library seems to follow the convention that modules + # only area capitalized + (r'\b([A-Z]\w*)(?=\.)', Name.Namespace), + + # operators + # = has a special role because this is the only + # way to syntactic distinguish binding constructions + # unfortunately, this colors the equal in {x=2} too + (r'=(?!'+op_re+r')', Keyword), + (r'(%s)+' % op_re, Operator), + (r'(%s)+' % punc_re, Operator), + + # coercions + (r':', Operator, 'type'), + # type variables + # we need this rule because we don't parse specially type + # definitions so in "type t('a) = ...", "'a" is parsed by 'root' + ("'"+ident_re, Keyword.Type), + + # id literal, #something, or #{expr} + (r'#'+ident_re, String.Single), + (r'#(?={)', String.Single), + + # identifiers + # this avoids to color '2' in 'a2' as an integer + (ident_re, Text), + + # default, not sure if that is needed or not + # (r'.', Text), + ], + + # it is quite painful to have to parse types to know where they end + # this is the general rule for a type + # a type is either: + # * -> ty + # * type-with-slash + # * type-with-slash -> ty + # * type-with-slash (, type-with-slash)+ -> ty + # + # the code is pretty funky in here, but this code would roughly + # translate in caml to: + # let rec type stream = + # match stream with + # | [< "->"; stream >] -> type stream + # | [< ""; stream >] -> + # type_with_slash stream + # type_lhs_1 stream; + # and type_1 stream = ... + 'type': [ + include('comments-and-spaces'), + (r'->', Keyword.Type), + default(('#pop', 'type-lhs-1', 'type-with-slash')), + ], + + # parses all the atomic or closed constructions in the syntax of type + # expressions: record types, tuple types, type constructors, basic type + # and type variables + 'type-1': [ + include('comments-and-spaces'), + (r'\(', Keyword.Type, ('#pop', 'type-tuple')), + (r'~?{', Keyword.Type, ('#pop', 'type-record')), + (ident_re+r'\(', Keyword.Type, ('#pop', 'type-tuple')), + (ident_re, Keyword.Type, '#pop'), + ("'"+ident_re, Keyword.Type), + # this case is not in the syntax but sometimes + # we think we are parsing types when in fact we are parsing + # some css, so we just pop the states until we get back into + # the root state + default('#pop'), + ], + + # type-with-slash is either: + # * type-1 + # * type-1 (/ type-1)+ + 'type-with-slash': [ + include('comments-and-spaces'), + default(('#pop', 'slash-type-1', 'type-1')), + ], + 'slash-type-1': [ + include('comments-and-spaces'), + ('/', Keyword.Type, ('#pop', 'type-1')), + # same remark as above + default('#pop'), + ], + + # we go in this state after having parsed a type-with-slash + # while trying to parse a type + # and at this point we must determine if we are parsing an arrow + # type (in which case we must continue parsing) or not (in which + # case we stop) + 'type-lhs-1': [ + include('comments-and-spaces'), + (r'->', Keyword.Type, ('#pop', 'type')), + (r'(?=,)', Keyword.Type, ('#pop', 'type-arrow')), + default('#pop'), + ], + 'type-arrow': [ + include('comments-and-spaces'), + # the look ahead here allows to parse f(x : int, y : float -> truc) + # correctly + (r',(?=[^:]*?->)', Keyword.Type, 'type-with-slash'), + (r'->', Keyword.Type, ('#pop', 'type')), + # same remark as above + default('#pop'), + ], + + # no need to do 
 precise parsing for tuples and records
+        # because they are closed constructions, so we can simply
+        # find the closing delimiter
+        # note that this function would not work if the source
+        # contained identifiers like `{)` (although it could be patched
+        # to support it)
+        'type-tuple': [
+            include('comments-and-spaces'),
+            (r'[^\(\)/*]+', Keyword.Type),
+            (r'[/*]', Keyword.Type),
+            (r'\(', Keyword.Type, '#push'),
+            (r'\)', Keyword.Type, '#pop'),
+        ],
+        'type-record': [
+            include('comments-and-spaces'),
+            (r'[^{}/*]+', Keyword.Type),
+            (r'[/*]', Keyword.Type),
+            (r'{', Keyword.Type, '#push'),
+            (r'}', Keyword.Type, '#pop'),
+        ],
+
+        # 'type-tuple': [
+        #     include('comments-and-spaces'),
+        #     (r'\)', Keyword.Type, '#pop'),
+        #     default(('#pop', 'type-tuple-1', 'type-1')),
+        # ],
+        # 'type-tuple-1': [
+        #     include('comments-and-spaces'),
+        #     (r',?\s*\)', Keyword.Type, '#pop'),  # ,) is a valid end of tuple, in (1,)
+        #     (r',', Keyword.Type, 'type-1'),
+        # ],
+        # 'type-record':[
+        #     include('comments-and-spaces'),
+        #     (r'}', Keyword.Type, '#pop'),
+        #     (r'~?(?:\w+|`[^`]*`)', Keyword.Type, 'type-record-field-expr'),
+        # ],
+        # 'type-record-field-expr': [
+        #
+        # ],
+
+        'nested-comment': [
+            (r'[^/*]+', Comment),
+            (r'/\*', Comment, '#push'),
+            (r'\*/', Comment, '#pop'),
+            (r'[/*]', Comment),
+        ],
+
+        # the copy pasting between string and single-string
+        # is kinda sad. Is there a way to avoid that??
+        'string': [
+            (r'[^\\"{]+', String.Double),
+            (r'"', String.Double, '#pop'),
+            (r'{', Operator, 'root'),
+            include('escape-sequence'),
+        ],
+        'single-string': [
+            (r'[^\\\'{]+', String.Double),
+            (r'\'', String.Double, '#pop'),
+            (r'{', Operator, 'root'),
+            include('escape-sequence'),
+        ],
+
+        # all the html stuff
+        # can't really reuse some existing html parser
+        # because we must be able to parse embedded expressions
+
+        # we are in this state after someone parsed the '<' that
+        # started the html literal
+        'html-open-tag': [
+            (r'[\w\-:]+', String.Single, ('#pop', 'html-attr')),
+            (r'>', String.Single, ('#pop', 'html-content')),
+        ],
+
+        # we are in this state after someone parsed the '</' that
+        # started the end of the closing tag
+        'html-end-tag': [
+            # this is a star, because </> is allowed
+            (r'[\w\-:]*>', String.Single, '#pop'),
+        ],
+
+        # we are in this state after having parsed '<ident(:ident)?'
+        # we thus parse a possibly empty list of attributes
+        'html-attr': [
+            (r'\s+', Text),
+            (r'[\w\-:]+=', String.Single, 'html-attr-value'),
+            (r'/>', String.Single, '#pop'),
+            (r'>', String.Single, ('#pop', 'html-content')),
+        ],
+
+        'html-attr-value': [
+            (r"'", String.Single, ('#pop', 'single-string')),
+            (r'"', String.Single, ('#pop', 'string')),
+            (r'#'+ident_re, String.Single, '#pop'),
+            (r'#(?={)', String.Single, ('#pop', 'root')),
+            (r'[^"\'{`=<>]+', String.Single, '#pop'),
+            (r'{', Operator, ('#pop', 'root')),  # this is a tail call!
+        ],
+
+        # we should probably deal with '\' escapes here
+        'html-content': [
+            (r'<!--', Comment, 'html-comment'),
+            (r'</', String.Single, ('#pop', 'html-end-tag')),
+            (r'<', String.Single, 'html-open-tag'),
+            (r'{', Operator, 'root'),
+            (r'[^<{]+', String.Single),
+        ],
+
+        'html-comment': [
+            (r'-->', Comment, '#pop'),
+            (r'[^\-]+|-', Comment),
+        ],
+    }
diff --git a/pygments/lexers/misc/nix.py b/pygments/lexers/misc/nix.py
new file mode 100644
index 00000000..c1799638
--- /dev/null
+++ b/pygments/lexers/misc/nix.py
@@ -0,0 +1,140 @@
+# -*- coding: utf-8 -*-
+"""
+    pygments.lexers.misc.nix
+    ~~~~~~~~~~~~~~~~~~~~~~~~
+
+    Lexers for the NixOS Nix language.
+
+    :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import re
+
+from pygments.lexer import RegexLexer, include
+from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
+    Number, Punctuation, Literal
+
+__all__ = ['NixLexer']
+
+
+class NixLexer(RegexLexer):
+    """
+    For the `Nix language `_.
+
+    ..
versionadded:: 2.0 + """ + + name = 'Nix' + aliases = ['nixos', 'nix'] + filenames = ['*.nix'] + mimetypes = ['text/x-nix'] + + flags = re.MULTILINE | re.UNICODE + + keywords = ['rec', 'with', 'let', 'in', 'inherit', 'assert', 'if', + 'else', 'then', '...'] + builtins = ['import', 'abort', 'baseNameOf', 'dirOf', 'isNull', 'builtins', + 'map', 'removeAttrs', 'throw', 'toString', 'derivation'] + operators = ['++', '+', '?', '.', '!', '//', '==', + '!=', '&&', '||', '->', '='] + + punctuations = ["(", ")", "[", "]", ";", "{", "}", ":", ",", "@"] + + tokens = { + 'root': [ + # comments starting with # + (r'#.*$', Comment.Single), + + # multiline comments + (r'/\*', Comment.Multiline, 'comment'), + + # whitespace + (r'\s+', Text), + + # keywords + ('(%s)' % '|'.join(re.escape(entry) + '\\b' for entry in keywords), Keyword), + + # highlight the builtins + ('(%s)' % '|'.join(re.escape(entry) + '\\b' for entry in builtins), + Name.Builtin), + + (r'\b(true|false|null)\b', Name.Constant), + + # operators + ('(%s)' % '|'.join(re.escape(entry) for entry in operators), + Operator), + + # word operators + (r'\b(or|and)\b', Operator.Word), + + # punctuations + ('(%s)' % '|'.join(re.escape(entry) for entry in punctuations), Punctuation), + + # integers + (r'[0-9]+', Number.Integer), + + # strings + (r'"', String.Double, 'doublequote'), + (r"''", String.Single, 'singlequote'), + + # paths + (r'[\w.+-]*(\/[\w.+-]+)+', Literal), + (r'\<[\w.+-]+(\/[\w.+-]+)*\>', Literal), + + # urls + (r'[a-zA-Z][a-zA-Z0-9\+\-\.]*\:[\w%/?:@&=+$,\\.!~*\'-]+', Literal), + + # names of variables + (r'[\w-]+\s*=', String.Symbol), + (r'[a-zA-Z_][\w\'-]*', Text), + + ], + 'comment': [ + (r'[^/\*]+', Comment.Multiline), + (r'/\*', Comment.Multiline, '#push'), + (r'\*/', Comment.Multiline, '#pop'), + (r'[\*/]', Comment.Multiline), + ], + 'singlequote': [ + (r"'''", String.Escape), + (r"''\$\{", String.Escape), + (r"''\n", String.Escape), + (r"''\r", String.Escape), + (r"''\t", String.Escape), + (r"''", String.Single, '#pop'), + (r'\$\{', String.Interpol, 'antiquote'), + (r"[^']", String.Single), + ], + 'doublequote': [ + (r'\\', String.Escape), + (r'\\"', String.Escape), + (r'\\${', String.Escape), + (r'"', String.Double, '#pop'), + (r'\$\{', String.Interpol, 'antiquote'), + (r'[^"]', String.Double), + ], + 'antiquote': [ + (r"}", String.Interpol, '#pop'), + # TODO: we should probably escape also here ''${ \${ + (r"\$\{", String.Interpol, '#push'), + include('root'), + ], + } + + def analyse_text(text): + rv = 0.0 + # TODO: let/in + if re.search(r'import.+?<[^>]+>', text): + rv += 0.4 + if re.search(r'mkDerivation\s+(\(|\{|rec)', text): + rv += 0.4 + if re.search(r'with\s+[a-zA-Z\.]+;', text): + rv += 0.2 + if re.search(r'inherit\s+[a-zA-Z()\.];', text): + rv += 0.2 + if re.search(r'=\s+mkIf\s+', text): + rv += 0.4 + if re.search(r'\{[a-zA-Z,\s]+\}:', text): + rv += 0.1 + return rv diff --git a/pygments/lexers/qbasic.py b/pygments/lexers/qbasic.py deleted file mode 100644 index 80b80f9f..00000000 --- a/pygments/lexers/qbasic.py +++ /dev/null @@ -1,157 +0,0 @@ -# -*- coding: utf-8 -*- -""" - pygments.lexers.qbasic - ~~~~~~~~~~~~~~~~~~~~~~ - - Simple lexer for Microsoft QBasic source code. - - :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. - :license: BSD, see LICENSE for details. 
-""" - -import re - -from pygments.lexer import RegexLexer, include, bygroups -from pygments.token import Text, Name, Comment, String, Keyword, Punctuation, \ - Number, Operator - -__all__ = ['QBasicLexer'] - - -class QBasicLexer(RegexLexer): - """ - For - `QBasic `_ - source code. - """ - - name = 'QBasic' - aliases = ['qbasic', 'basic'] - filenames = ['*.BAS', '*.bas'] - mimetypes = ['text/basic'] - - declarations = ['DATA', 'LET'] - - functions = [ - 'ABS', 'ASC', 'ATN', 'CDBL', 'CHR$', 'CINT', 'CLNG', - 'COMMAND$', 'COS', 'CSNG', 'CSRLIN', 'CVD', 'CVDMBF', 'CVI', - 'CVL', 'CVS', 'CVSMBF', 'DATE$', 'ENVIRON$', 'EOF', 'ERDEV', - 'ERDEV$', 'ERL', 'ERR', 'EXP', 'FILEATTR', 'FIX', 'FRE', - 'FREEFILE', 'HEX$', 'INKEY$', 'INP', 'INPUT$', 'INSTR', 'INT', - 'IOCTL$', 'LBOUND', 'LCASE$', 'LEFT$', 'LEN', 'LOC', 'LOF', - 'LOG', 'LPOS', 'LTRIM$', 'MID$', 'MKD$', 'MKDMBF$', 'MKI$', - 'MKL$', 'MKS$', 'MKSMBF$', 'OCT$', 'PEEK', 'PEN', 'PLAY', - 'PMAP', 'POINT', 'POS', 'RIGHT$', 'RND', 'RTRIM$', 'SADD', - 'SCREEN', 'SEEK', 'SETMEM', 'SGN', 'SIN', 'SPACE$', 'SPC', - 'SQR', 'STICK', 'STR$', 'STRIG', 'STRING$', 'TAB', 'TAN', - 'TIME$', 'TIMER', 'UBOUND', 'UCASE$', 'VAL', 'VARPTR', - 'VARPTR$', 'VARSEG' - ] - - metacommands = ['$DYNAMIC', '$INCLUDE', '$STATIC'] - - operators = ['AND', 'EQV', 'IMP', 'NOT', 'OR', 'XOR'] - - statements = [ - 'BEEP', 'BLOAD', 'BSAVE', 'CALL', 'CALL ABSOLUTE', - 'CALL INTERRUPT', 'CALLS', 'CHAIN', 'CHDIR', 'CIRCLE', 'CLEAR', - 'CLOSE', 'CLS', 'COLOR', 'COM', 'COMMON', 'CONST', 'DATA', - 'DATE$', 'DECLARE', 'DEF FN', 'DEF SEG', 'DEFDBL', 'DEFINT', - 'DEFLNG', 'DEFSNG', 'DEFSTR', 'DEF', 'DIM', 'DO', 'LOOP', - 'DRAW', 'END', 'ENVIRON', 'ERASE', 'ERROR', 'EXIT', 'FIELD', - 'FILES', 'FOR', 'NEXT', 'FUNCTION', 'GET', 'GOSUB', 'GOTO', - 'IF', 'THEN', 'INPUT', 'INPUT #', 'IOCTL', 'KEY', 'KEY', - 'KILL', 'LET', 'LINE', 'LINE INPUT', 'LINE INPUT #', 'LOCATE', - 'LOCK', 'UNLOCK', 'LPRINT', 'LSET', 'MID$', 'MKDIR', 'NAME', - 'ON COM', 'ON ERROR', 'ON KEY', 'ON PEN', 'ON PLAY', - 'ON STRIG', 'ON TIMER', 'ON UEVENT', 'ON', 'OPEN', 'OPEN COM', - 'OPTION BASE', 'OUT', 'PAINT', 'PALETTE', 'PCOPY', 'PEN', - 'PLAY', 'POKE', 'PRESET', 'PRINT', 'PRINT #', 'PRINT USING', - 'PSET', 'PUT', 'PUT', 'RANDOMIZE', 'READ', 'REDIM', 'REM', - 'RESET', 'RESTORE', 'RESUME', 'RETURN', 'RMDIR', 'RSET', 'RUN', - 'SCREEN', 'SEEK', 'SELECT CASE', 'SHARED', 'SHELL', 'SLEEP', - 'SOUND', 'STATIC', 'STOP', 'STRIG', 'SUB', 'SWAP', 'SYSTEM', - 'TIME$', 'TIMER', 'TROFF', 'TRON', 'TYPE', 'UEVENT', 'UNLOCK', - 'VIEW', 'WAIT', 'WHILE', 'WEND', 'WIDTH', 'WINDOW', 'WRITE' - ] - - keywords = [ - 'ACCESS', 'ALIAS', 'ANY', 'APPEND', 'AS', 'BASE', 'BINARY', - 'BYVAL', 'CASE', 'CDECL', 'DOUBLE', 'ELSE', 'ELSEIF', 'ENDIF', - 'INTEGER', 'IS', 'LIST', 'LOCAL', 'LONG', 'LOOP', 'MOD', - 'NEXT', 'OFF', 'ON', 'OUTPUT', 'RANDOM', 'SIGNAL', 'SINGLE', - 'STEP', 'STRING', 'THEN', 'TO', 'UNTIL', 'USING', 'WEND' - ] - - tokens = { - 'root': [ - (r'\n+', Text), - (r'\s+', Text.Whitespace), - (r'^(\s*)(\d*)(\s*)(REM .*)$', - bygroups(Text.Whitespace, Name.Label, Text.Whitespace, - Comment.Single)), - (r'^(\s*)(\d+)(\s*)', - bygroups(Text.Whitespace, Name.Label, Text.Whitespace)), - (r'(?=[\s]*)(\w+)(?=[\s]*=)', Name.Variable.Global), - (r'(?=[^"]*)\'.*$', Comment.Single), - (r'"[^\n\"]*"', String.Double), - (r'(END)(\s+)(FUNCTION|IF|SELECT|SUB)', - bygroups(Keyword.Reserved, Text.Whitespace, Keyword.Reserved)), - (r'(DECLARE)(\s+)([A-Z]+)(\s+)(\S+)', - bygroups(Keyword.Declaration, Text.Whitespace, Name.Variable, - Text.Whitespace, Name)), - 
(r'(DIM)(\s+)(SHARED)(\s+)([^\s\(]+)', - bygroups(Keyword.Declaration, Text.Whitespace, Name.Variable, - Text.Whitespace, Name.Variable.Global)), - (r'(DIM)(\s+)([^\s\(]+)', - bygroups(Keyword.Declaration, Text.Whitespace, Name.Variable.Global)), - (r'^(\s*)([a-zA-Z_]+)(\s*)(\=)', - bygroups(Text.Whitespace, Name.Variable.Global, Text.Whitespace, - Operator)), - (r'(GOTO|GOSUB)(\s+)(\w+\:?)', - bygroups(Keyword.Reserved, Text.Whitespace, Name.Label)), - (r'(SUB)(\s+)(\w+\:?)', - bygroups(Keyword.Reserved, Text.Whitespace, Name.Label)), - include('declarations'), - include('functions'), - include('metacommands'), - include('operators'), - include('statements'), - include('keywords'), - (r'[a-zA-Z_]\w*[\$@#&!]', Name.Variable.Global), - (r'[a-zA-Z_]\w*\:', Name.Label), - (r'\-?\d*\.\d+[@|#]?', Number.Float), - (r'\-?\d+[@|#]', Number.Float), - (r'\-?\d+#?', Number.Integer.Long), - (r'\-?\d+#?', Number.Integer), - (r'!=|==|:=|\.=|<<|>>|[-~+/\\*%=<>&^|?:!.]', Operator), - (r'[\[\]{}(),;]', Punctuation), - (r'[\w]+', Name.Variable.Global), - ], - # can't use regular \b because of X$() - 'declarations': [ - (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, declarations)), - Keyword.Declaration), - ], - 'functions': [ - (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, functions)), - Keyword.Reserved), - ], - 'metacommands': [ - (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, metacommands)), - Keyword.Constant), - ], - 'operators': [ - (r'\b(%s)(?=\(|\b)' % '|'.join(map(re.escape, operators)), Operator.Word), - ], - 'statements': [ - (r'\b(%s)\b' % '|'.join(map(re.escape, statements)), - Keyword.Reserved), - ], - 'keywords': [ - (r'\b(%s)\b' % '|'.join(keywords), Keyword), - ], - } - - def analyse_text(text): - return 0.2 diff --git a/pygments/lexers/theorem.py b/pygments/lexers/theorem.py new file mode 100644 index 00000000..293af0ec --- /dev/null +++ b/pygments/lexers/theorem.py @@ -0,0 +1,156 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.functl + ~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for theorem-proving languages. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +from pygments.lexer import RegexLexer, default, words +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation + +__all__ = ['CoqLexer'] + + +class CoqLexer(RegexLexer): + """ + For the `Coq `_ theorem prover. + + .. 
versionadded:: 1.5 + """ + + name = 'Coq' + aliases = ['coq'] + filenames = ['*.v'] + mimetypes = ['text/x-coq'] + + keywords1 = ( + # Vernacular commands + 'Section', 'Module', 'End', 'Require', 'Import', 'Export', 'Variable', + 'Variables', 'Parameter', 'Parameters', 'Axiom', 'Hypothesis', + 'Hypotheses', 'Notation', 'Local', 'Tactic', 'Reserved', 'Scope', + 'Open', 'Close', 'Bind', 'Delimit', 'Definition', 'Let', 'Ltac', + 'Fixpoint', 'CoFixpoint', 'Morphism', 'Relation', 'Implicit', + 'Arguments', 'Set', 'Unset', 'Contextual', 'Strict', 'Prenex', + 'Implicits', 'Inductive', 'CoInductive', 'Record', 'Structure', + 'Canonical', 'Coercion', 'Theorem', 'Lemma', 'Corollary', + 'Proposition', 'Fact', 'Remark', 'Example', 'Proof', 'Goal', 'Save', + 'Qed', 'Defined', 'Hint', 'Resolve', 'Rewrite', 'View', 'Search', + 'Show', 'Print', 'Printing', 'All', 'Graph', 'Projections', 'inside', + 'outside', + ) + keywords2 = ( + # Gallina + 'forall', 'exists', 'exists2', 'fun', 'fix', 'cofix', 'struct', + 'match', 'end', 'in', 'return', 'let', 'if', 'is', 'then', 'else', + 'for', 'of', 'nosimpl', 'with', 'as', + ) + keywords3 = ( + # Sorts + 'Type', 'Prop', + ) + keywords4 = ( + # Tactics + 'pose', 'set', 'move', 'case', 'elim', 'apply', 'clear', 'hnf', 'intro', + 'intros', 'generalize', 'rename', 'pattern', 'after', 'destruct', + 'induction', 'using', 'refine', 'inversion', 'injection', 'rewrite', + 'congr', 'unlock', 'compute', 'ring', 'field', 'replace', 'fold', + 'unfold', 'change', 'cutrewrite', 'simpl', 'have', 'suff', 'wlog', + 'suffices', 'without', 'loss', 'nat_norm', 'assert', 'cut', 'trivial', + 'revert', 'bool_congr', 'nat_congr', 'symmetry', 'transitivity', 'auto', + 'split', 'left', 'right', 'autorewrite', + ) + keywords5 = ( + # Terminators + 'by', 'done', 'exact', 'reflexivity', 'tauto', 'romega', 'omega', + 'assumption', 'solve', 'contradiction', 'discriminate', + ) + keywords6 = ( + # Control + 'do', 'last', 'first', 'try', 'idtac', 'repeat', + ) + # 'as', 'assert', 'begin', 'class', 'constraint', 'do', 'done', + # 'downto', 'else', 'end', 'exception', 'external', 'false', + # 'for', 'fun', 'function', 'functor', 'if', 'in', 'include', + # 'inherit', 'initializer', 'lazy', 'let', 'match', 'method', + # 'module', 'mutable', 'new', 'object', 'of', 'open', 'private', + # 'raise', 'rec', 'sig', 'struct', 'then', 'to', 'true', 'try', + # 'type', 'val', 'virtual', 'when', 'while', 'with' + keyopts = ( + '!=', '#', '&', '&&', r'\(', r'\)', r'\*', r'\+', ',', '-', + r'-\.', '->', r'\.', r'\.\.', ':', '::', ':=', ':>', ';', ';;', '<', + '<-', '=', '>', '>]', '>}', r'\?', r'\?\?', r'\[', r'\[<', r'\[>', + r'\[\|', ']', '_', '`', '{', '{<', r'\|', r'\|]', '}', '~', '=>', + r'/\\', r'\\/', + u'Π', u'λ', + ) + operators = r'[!$%&*+\./:<=>?@^|~-]' + word_operators = ('and', 'asr', 'land', 'lor', 'lsl', 'lxor', 'mod', 'or') + prefix_syms = r'[!?~]' + infix_syms = r'[=<>@^|&+\*/$%-]' + primitives = ('unit', 'int', 'float', 'bool', 'string', 'char', 'list', + 'array') + + tokens = { + 'root': [ + (r'\s+', Text), + (r'false|true|\(\)|\[\]', Name.Builtin.Pseudo), + (r'\(\*', Comment, 'comment'), + (words(keywords1, prefix=r'\b', suffix=r'\b'), Keyword.Namespace), + (words(keywords2, prefix=r'\b', suffix=r'\b'), Keyword), + (words(keywords3, prefix=r'\b', suffix=r'\b'), Keyword.Type), + (words(keywords4, prefix=r'\b', suffix=r'\b'), Keyword), + (words(keywords5, prefix=r'\b', suffix=r'\b'), Keyword.Pseudo), + (words(keywords6, prefix=r'\b', suffix=r'\b'), Keyword.Reserved), + 
(r'\b([A-Z][\w\']*)(?=\s*\.)', Name.Namespace, 'dotted'), + (r'\b([A-Z][\w\']*)', Name.Class), + (r'(%s)' % '|'.join(keyopts[::-1]), Operator), + (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator), + (r'\b(%s)\b' % '|'.join(word_operators), Operator.Word), + (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type), + + (r"[^\W\d][\w']*", Name), + + (r'\d[\d_]*', Number.Integer), + (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex), + (r'0[oO][0-7][0-7_]*', Number.Oct), + (r'0[bB][01][01_]*', Number.Bin), + (r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float), + + (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'", + String.Char), + (r"'.'", String.Char), + (r"'", Keyword), # a stray quote is another syntax element + + (r'"', String.Double, 'string'), + + (r'[~?][a-z][\w\']*:', Name.Variable), + ], + 'comment': [ + (r'[^(*)]+', Comment), + (r'\(\*', Comment, '#push'), + (r'\*\)', Comment, '#pop'), + (r'[(*)]', Comment), + ], + 'string': [ + (r'[^"]+', String.Double), + (r'""', String.Double), + (r'"', String.Double, '#pop'), + ], + 'dotted': [ + (r'\s+', Text), + (r'\.', Punctuation), + (r'[A-Z][\w\']*(?=\s*\.)', Name.Namespace), + (r'[A-Z][\w\']*', Name.Class, '#pop'), + (r'[a-z][a-z0-9_\']*', Name, '#pop'), + default('#pop') + ], + } + + def analyse_text(text): + if text.startswith('(*'): + return True -- cgit v1.2.1 From c45439c1ef32348ed9476c441d81adbb84e71ec4 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Fri, 19 Sep 2014 22:15:24 +0200 Subject: Make all keyword lists in special modules into tuples. Tuples of strings are stored as prebuild constants in the .pyc file instead of having to build a list element by element at runtime. --- pygments/lexers/_asybuiltins.py | 8 +- pygments/lexers/_clbuiltins.py | 28 +- pygments/lexers/_cocoabuiltins.py | 6 +- pygments/lexers/_lassobuiltins.py | 24 +- pygments/lexers/_luabuiltins.py | 37 +- pygments/lexers/_openedgebuiltins.py | 4 +- pygments/lexers/_phpbuiltins.py | 678 +++++++++++++++++----------------- pygments/lexers/_postgres_builtins.py | 19 +- pygments/lexers/_scilab_builtins.py | 8 +- pygments/lexers/_sourcemodbuiltins.py | 4 +- pygments/lexers/_stan_builtins.py | 21 +- pygments/lexers/_vimbuiltins.py | 6 +- pygments/lexers/sql.py | 6 +- tests/test_qbasiclexer.py | 4 +- 14 files changed, 425 insertions(+), 428 deletions(-) diff --git a/pygments/lexers/_asybuiltins.py b/pygments/lexers/_asybuiltins.py index 5472cb63..b1c65890 100644 --- a/pygments/lexers/_asybuiltins.py +++ b/pygments/lexers/_asybuiltins.py @@ -14,7 +14,7 @@ :license: BSD, see LICENSE for details. """ -ASYFUNCNAME = set([ +ASYFUNCNAME = set(( 'AND', 'Arc', 'ArcArrow', @@ -1038,9 +1038,9 @@ ASYFUNCNAME = set([ 'ztick', 'ztick3', 'ztrans' -]) +)) -ASYVARNAME = set([ +ASYVARNAME = set(( 'AliceBlue', 'Align', 'Allow', @@ -1642,4 +1642,4 @@ ASYVARNAME = set([ 'ylabelwidth', 'zerotickfuzz', 'zerowinding' -]) +)) diff --git a/pygments/lexers/_clbuiltins.py b/pygments/lexers/_clbuiltins.py index 3f9adf2f..81e58234 100644 --- a/pygments/lexers/_clbuiltins.py +++ b/pygments/lexers/_clbuiltins.py @@ -9,7 +9,7 @@ :license: BSD, see LICENSE for details. 
""" -BUILTIN_FUNCTIONS = set([ # 638 functions +BUILTIN_FUNCTIONS = set(( # 638 functions '<', '<=', '=', '>', '>=', '-', '/', '/=', '*', '+', '1-', '1+', 'abort', 'abs', 'acons', 'acos', 'acosh', 'add-method', 'adjoin', 'adjustable-array-p', 'adjust-array', 'allocate-instance', @@ -157,17 +157,17 @@ BUILTIN_FUNCTIONS = set([ # 638 functions 'wild-pathname-p', 'write', 'write-byte', 'write-char', 'write-line', 'write-sequence', 'write-string', 'write-to-string', 'yes-or-no-p', 'y-or-n-p', 'zerop', -]) +)) -SPECIAL_FORMS = set([ +SPECIAL_FORMS = set(( 'block', 'catch', 'declare', 'eval-when', 'flet', 'function', 'go', 'if', 'labels', 'lambda', 'let', 'let*', 'load-time-value', 'locally', 'macrolet', 'multiple-value-call', 'multiple-value-prog1', 'progn', 'progv', 'quote', 'return-from', 'setq', 'symbol-macrolet', 'tagbody', 'the', 'throw', 'unwind-protect', -]) +)) -MACROS = set([ +MACROS = set(( 'and', 'assert', 'call-method', 'case', 'ccase', 'check-type', 'cond', 'ctypecase', 'decf', 'declaim', 'defclass', 'defconstant', 'defgeneric', 'define-compiler-macro', 'define-condition', 'define-method-combination', @@ -188,19 +188,19 @@ MACROS = set([ 'with-input-from-string', 'with-open-file', 'with-open-stream', 'with-output-to-string', 'with-package-iterator', 'with-simple-restart', 'with-slots', 'with-standard-io-syntax', -]) +)) -LAMBDA_LIST_KEYWORDS = set([ +LAMBDA_LIST_KEYWORDS = set(( '&allow-other-keys', '&aux', '&body', '&environment', '&key', '&optional', '&rest', '&whole', -]) +)) -DECLARATIONS = set([ +DECLARATIONS = set(( 'dynamic-extent', 'ignore', 'optimize', 'ftype', 'inline', 'special', 'ignorable', 'notinline', 'type', -]) +)) -BUILTIN_TYPES = set([ +BUILTIN_TYPES = set(( 'atom', 'boolean', 'base-char', 'base-string', 'bignum', 'bit', 'compiled-function', 'extended-char', 'fixnum', 'keyword', 'nil', 'signed-byte', 'short-float', 'single-float', 'double-float', 'long-float', @@ -217,9 +217,9 @@ BUILTIN_TYPES = set([ 'simple-type-error', 'simple-warning', 'stream-error', 'storage-condition', 'style-warning', 'type-error', 'unbound-variable', 'unbound-slot', 'undefined-function', 'warning', -]) +)) -BUILTIN_CLASSES = set([ +BUILTIN_CLASSES = set(( 'array', 'broadcast-stream', 'bit-vector', 'built-in-class', 'character', 'class', 'complex', 'concatenated-stream', 'cons', 'echo-stream', 'file-stream', 'float', 'function', 'generic-function', 'hash-table', @@ -229,4 +229,4 @@ BUILTIN_CLASSES = set([ 'standard-generic-function', 'standard-method', 'standard-object', 'string-stream', 'stream', 'string', 'structure-class', 'structure-object', 'symbol', 'synonym-stream', 't', 'two-way-stream', 'vector', -]) +)) diff --git a/pygments/lexers/_cocoabuiltins.py b/pygments/lexers/_cocoabuiltins.py index 26179594..5550b1a6 100644 --- a/pygments/lexers/_cocoabuiltins.py +++ b/pygments/lexers/_cocoabuiltins.py @@ -14,9 +14,9 @@ from __future__ import print_function -COCOA_INTERFACES = set(['UITableViewCell', 'NSURLSessionDataTask', 'NSLinguisticTagger', 'NSStream', 'UIPrintInfo', 'SKPaymentTransaction', 'SKPhysicsWorld', 'NSString', 'CMAttitude', 'SKSpriteNode', 'JSContext', 'UICollectionReusableView', 'AVMutableCompositionTrack', 'GKLeaderboard', 'NSFetchedResultsController', 'MKTileOverlayRenderer', 'MIDINetworkSession', 'UITextSelectionRect', 'MKRoute', 'MPVolumeView', 'UIKeyCommand', 'AVMutableAudioMix', 'GLKEffectPropertyLight', 'UICollectionViewLayout', 'NSMutableCharacterSet', 'UIAccessibilityElement', 'NSShadow', 'NSAtomicStoreCacheNode', 'UIPushBehavior', 'CBCharacteristic', 'CBUUID', 
'CMStepCounter', 'NSNetService', 'UICollectionView', 'UIViewPrintFormatter', 'CAShapeLayer', 'MCPeerID', 'NSFileVersion', 'CMGyroData', 'SKPhysicsJointSpring', 'CIFilter', 'UIView', 'MKMapItem', 'PKPass', 'MKPolygonRenderer', 'JSValue', 'CLGeocoder', 'NSByteCountFormatter', 'AVCaptureScreenInput', 'CAAnimation', 'MKOverlayPathView', 'UIActionSheet', 'UIMotionEffectGroup', 'UIBarItem', 'SKProduct', 'AVAssetExportSession', 'NSKeyedUnarchiver', 'NSMutableSet', 'MKMapView', 'CATransition', 'CLCircularRegion', 'MKTileOverlay', 'UICollisionBehavior', 'ACAccountCredential', 'SKPhysicsJointLimit', 'AVMediaSelectionGroup', 'NSIndexSet', 'AVAudioRecorder', 'NSURL', 'CBCentral', 'NSNumber', 'UITableView', 'AVCaptureStillImageOutput', 'GCController', 'NSAssertionHandler', 'AVAudioSessionPortDescription', 'NSHTTPURLResponse', 'NSPropertyListSerialization', 'AVPlayerItemAccessLogEvent', 'UISwipeGestureRecognizer', 'MKOverlayRenderer', 'NSDecimalNumber', 'EKReminder', 'MKPolylineView', 'AVCaptureMovieFileOutput', 'UIImagePickerController', 'GKAchievementDescription', 'EKParticipant', 'NSBlockOperation', 'UIActivityItemProvider', 'CLLocation', 'GKLeaderboardViewController', 'MPMoviePlayerController', 'GKScore', 'NSURLConnection', 'ABUnknownPersonViewController', 'UIMenuController', 'NSEvent', 'SKTextureAtlas', 'NSKeyedArchiver', 'GKLeaderboardSet', 'NSSimpleCString', 'CBATTRequest', 'GKMatchRequest', 'AVMetadataObject', 'UIAlertView', 'NSIncrementalStore', 'MFMailComposeViewController', 'SSReadingList', 'MPMovieAccessLog', 'NSManagedObjectContext', 'AVCaptureAudioDataOutput', 'ACAccount', 'AVMetadataItem', 'AVCaptureDeviceInputSource', 'CLLocationManager', 'UIStepper', 'UIRefreshControl', 'GKTurnBasedParticipant', 'UICollectionViewTransitionLayout', 'CBCentralManager', 'NSPurgeableData', 'SLComposeViewController', 'NSHashTable', 'MKUserTrackingBarButtonItem', 'UITabBarController', 'CMMotionActivity', 'SKAction', 'AVPlayerItemOutput', 'UIDocumentInteractionController', 'UIDynamicItemBehavior', 'NSMutableDictionary', 'UILabel', 'AVCaptureInputPort', 'NSExpression', 'SKMutablePayment', 'UIStoryboardSegue', 'NSOrderedSet', 'UIPopoverBackgroundView', 'UIToolbar', 'NSNotificationCenter', 'NSEntityMigrationPolicy', 'NSLocale', 'NSURLSession', 'NSTimeZone', 'UIManagedDocument', 'AVMutableVideoCompositionLayerInstruction', 'AVAssetTrackGroup', 'NSInvocationOperation', 'ALAssetRepresentation', 'AVQueuePlayer', 'UIPasteboard', 'NSLayoutManager', 'EKCalendarChooser', 'EKObject', 'CATiledLayer', 'GLKReflectionMapEffect', 'NSManagedObjectID', 'NSUserDefaults', 'SLRequest', 'AVPlayerLayer', 'NSPointerArray', 'AVAudioMix', 'MCAdvertiserAssistant', 'MKMapSnapshotOptions', 'GKMatch', 'AVTimedMetadataGroup', 'CBMutableCharacteristic', 'NSFetchRequest', 'UIDevice', 'NSManagedObject', 'NKAssetDownload', 'AVOutputSettingsAssistant', 'SKPhysicsJointPin', 'UITabBar', 'UITextInputMode', 'NSFetchRequestExpression', 'NSPipe', 'AVComposition', 'ADBannerView', 'AVPlayerItem', 'AVSynchronizedLayer', 'MKDirectionsRequest', 'NSMetadataItem', 'UINavigationItem', 'CBPeripheralManager', 'UIStoryboardPopoverSegue', 'SKProductsRequest', 'UIGravityBehavior', 'UIWindow', 'CBMutableDescriptor', 'UIBezierPath', 'UINavigationController', 'ABPeoplePickerNavigationController', 'EKSource', 'AVAssetWriterInput', 'AVPlayerItemTrack', 'GLKEffectPropertyTexture', 'NSURLResponse', 'SKPaymentQueue', 'MKReverseGeocoder', 'GCControllerAxisInput', 'MKMapSnapshotter', 'NSOrthography', 'NSURLSessionUploadTask', 'NSCharacterSet', 'AVAssetReaderOutput', 
'EAGLContext', 'UICollectionViewController', 'AVAssetTrack', 'SKEmitterNode', 'AVCaptureDeviceInput', 'AVVideoCompositionCoreAnimationTool', 'NSURLRequest', 'CMAccelerometerData', 'NSNetServiceBrowser', 'AVAsynchronousVideoCompositionRequest', 'CAGradientLayer', 'NSFormatter', 'CATransaction', 'MPMovieAccessLogEvent', 'UIStoryboard', 'MPMediaLibrary', 'UITapGestureRecognizer', 'MPMediaItemArtwork', 'NSURLSessionTask', 'MCBrowserViewController', 'NSRelationshipDescription', 'NSMutableAttributedString', 'MPNowPlayingInfoCenter', 'MKLocalSearch', 'EAAccessory', 'MKETAResponse', 'CATextLayer', 'NSNotificationQueue', 'NSValue', 'NSMutableIndexSet', 'SKPhysicsContact', 'NSProgress', 'CAScrollLayer', 'NSTextCheckingResult', 'NSEntityDescription', 'NSURLCredentialStorage', 'UIApplication', 'SKDownload', 'MKLocalSearchRequest', 'SKScene', 'UISearchDisplayController', 'CAReplicatorLayer', 'UIPrintPageRenderer', 'EKCalendarItem', 'NSUUID', 'EAAccessoryManager', 'AVAssetResourceLoader', 'AVMutableVideoCompositionInstruction', 'CTCall', 'CIVector', 'UINavigationBar', 'UIPanGestureRecognizer', 'MPMediaQuery', 'ABNewPersonViewController', 'ACAccountType', 'GKSession', 'SKVideoNode', 'GCExtendedGamepadSnapshot', 'GCExtendedGamepad', 'CAValueFunction', 'UIActivityIndicatorView', 'NSNotification', 'SKReceiptRefreshRequest', 'AVCaptureDeviceFormat', 'AVPlayerItemErrorLog', 'NSMapTable', 'NSSet', 'CMMotionManager', 'GKVoiceChatService', 'UIPageControl', 'MKGeodesicPolyline', 'AVMutableComposition', 'NSLayoutConstraint', 'UIWebView', 'NSIncrementalStoreNode', 'EKEventStore', 'UISlider', 'AVAssetResourceLoadingRequest', 'AVCaptureInput', 'SKPhysicsBody', 'NSOperation', 'MKMapCamera', 'SKProductsResponse', 'GLKEffectPropertyMaterial', 'AVCaptureDevice', 'CTCallCenter', 'CBMutableService', 'SKTransition', 'UIDynamicAnimator', 'NSMutableArray', 'MCNearbyServiceBrowser', 'NSOperationQueue', 'MKPolylineRenderer', 'UICollectionViewLayoutAttributes', 'NSValueTransformer', 'UICollectionViewFlowLayout', 'NSEntityMapping', 'SKTexture', 'NSMergePolicy', 'UITextInputStringTokenizer', 'NSRecursiveLock', 'AVAsset', 'NSUndoManager', 'MPMediaPickerController', 'NSFileCoordinator', 'NSFileHandle', 'NSConditionLock', 'UISegmentedControl', 'NSManagedObjectModel', 'UITabBarItem', 'MPMediaItem', 'EKRecurrenceRule', 'UIEvent', 'UITouch', 'UIPrintInteractionController', 'CMDeviceMotion', 'NSCompoundPredicate', 'MKMultiPoint', 'UIPrintFormatter', 'SKView', 'NSConstantString', 'UIPopoverController', 'AVMetadataFaceObject', 'EKEventViewController', 'NSPort', 'MKCircleRenderer', 'AVCompositionTrack', 'UINib', 'NSUbiquitousKeyValueStore', 'NSMetadataQueryResultGroup', 'AVAssetResourceLoadingDataRequest', 'UITableViewHeaderFooterView', 'UISplitViewController', 'AVAudioSession', 'CAEmitterLayer', 'NSNull', 'MKCircleView', 'UIColor', 'UIAttachmentBehavior', 'CLBeacon', 'NSInputStream', 'NSURLCache', 'GKPlayer', 'NSMappingModel', 'NSHTTPCookie', 'AVMutableVideoComposition', 'NSAttributeDescription', 'AVPlayer', 'MKAnnotationView', 'UIFontDescriptor', 'NSTimer', 'CBDescriptor', 'MKOverlayView', 'EKEventEditViewController', 'NSSaveChangesRequest', 'UIReferenceLibraryViewController', 'SKPhysicsJointFixed', 'UILocalizedIndexedCollation', 'UIInterpolatingMotionEffect', 'AVAssetWriter', 'NSBundle', 'SKStoreProductViewController', 'GLKViewController', 'NSMetadataQueryAttributeValueTuple', 'GKTurnBasedMatch', 'UIActivity', 'MKShape', 'NSMergeConflict', 'CIImage', 'UIRotationGestureRecognizer', 'AVPlayerItemLegibleOutput', 'AVAssetImageGenerator', 
'GCControllerButtonInput', 'NSSortDescriptor', 'MPTimedMetadata', 'NKIssue', 'UIScreenMode', 'GKTurnBasedEventHandler', 'MKPolyline', 'JSVirtualMachine', 'AVAssetReader', 'NSAttributedString', 'GKMatchmakerViewController', 'NSCountedSet', 'UIButton', 'GKLocalPlayer', 'MPMovieErrorLog', 'AVSpeechUtterance', 'AVURLAsset', 'CBPeripheral', 'AVAssetWriterInputGroup', 'AVAssetReaderAudioMixOutput', 'NSEnumerator', 'UIDocument', 'MKLocalSearchResponse', 'UISimpleTextPrintFormatter', 'CBService', 'MCSession', 'QLPreviewController', 'CAMediaTimingFunction', 'UITextPosition', 'NSNumberFormatter', 'UIPinchGestureRecognizer', 'UIMarkupTextPrintFormatter', 'MKRouteStep', 'NSMetadataQuery', 'AVAssetResourceLoadingContentInformationRequest', 'CTSubscriber', 'CTCarrier', 'NSFileSecurity', 'UIAcceleration', 'UIMotionEffect', 'CLHeading', 'NSFileWrapper', 'MKDirectionsResponse', 'UILocalNotification', 'UICollectionViewCell', 'UITextView', 'CMMagnetometerData', 'UIProgressView', 'GKInvite', 'UISearchBar', 'MKPlacemark', 'AVCaptureConnection', 'ALAssetsFilter', 'AVPlayerItemErrorLogEvent', 'NSJSONSerialization', 'AVAssetReaderVideoCompositionOutput', 'ABPersonViewController', 'CIDetector', 'GKTurnBasedMatchmakerViewController', 'MPMediaItemCollection', 'NSCondition', 'NSURLCredential', 'MIDINetworkConnection', 'NSDecimalNumberHandler', 'NSURLSessionConfiguration', 'EKCalendar', 'NSDictionary', 'CAPropertyAnimation', 'UIPercentDrivenInteractiveTransition', 'MKPolygon', 'AVAssetTrackSegment', 'NSExpressionDescription', 'UIViewController', 'NSURLAuthenticationChallenge', 'NSDirectoryEnumerator', 'MKDistanceFormatter', 'GCControllerElement', 'GKPeerPickerController', 'UITableViewController', 'GKNotificationBanner', 'MKPointAnnotation', 'NSCache', 'SKPhysicsJoint', 'NSXMLParser', 'MFMessageComposeViewController', 'AVCaptureSession', 'NSDataDetector', 'AVCaptureVideoPreviewLayer', 'NSURLComponents', 'UISnapBehavior', 'AVMetadataMachineReadableCodeObject', 'GLKTextureLoader', 'NSTextAttachment', 'NSException', 'UIMenuItem', 'CMMotionActivityManager', 'MKUserLocation', 'CIFeature', 'NSMachPort', 'ALAsset', 'NSURLSessionDownloadTask', 'MPMoviePlayerViewController', 'NSMutableOrderedSet', 'AVCaptureVideoDataOutput', 'NSCachedURLResponse', 'ALAssetsLibrary', 'NSInvocation', 'UILongPressGestureRecognizer', 'NSTextStorage', 'CIFaceFeature', 'MKMapSnapshot', 'GLKEffectPropertyFog', 'NSPersistentStoreRequest', 'AVAudioMixInputParameters', 'CAEmitterBehavior', 'PKPassLibrary', 'NSLock', 'UIDynamicBehavior', 'AVPlayerMediaSelectionCriteria', 'CALayer', 'UIBarButtonItem', 'AVAudioSessionRouteDescription', 'CLBeaconRegion', 'SKEffectNode', 'CABasicAnimation', 'AVVideoCompositionInstruction', 'AVMutableTimedMetadataGroup', 'EKRecurrenceEnd', 'NSTextContainer', 'TWTweetComposeViewController', 'UIScrollView', 'EKRecurrenceDayOfWeek', 'ASIdentifierManager', 'UIScreen', 'CLRegion', 'NSProcessInfo', 'GLKTextureInfo', 'AVCaptureMetadataOutput', 'NSTextTab', 'JSManagedValue', 'NSDate', 'UITextChecker', 'NSData', 'NSParagraphStyle', 'AVMutableMetadataItem', 'EKAlarm', 'NSMutableURLRequest', 'UIVideoEditorController', 'NSAtomicStore', 'UIResponder', 'AVCompositionTrackSegment', 'GCGamepadSnapshot', 'MPMediaEntity', 'GLKSkyboxEffect', 'UISwitch', 'EKStructuredLocation', 'UIGestureRecognizer', 'NSProxy', 'GLKBaseEffect', 'GKScoreChallenge', 'NSCoder', 'MPMediaPlaylist', 'NSDateComponents', 'EKEvent', 'NSDateFormatter', 'AVAssetWriterInputPixelBufferAdaptor', 'UICollectionViewFlowLayoutInvalidationContext', 'UITextField', 'CLPlacemark', 
'AVCaptureOutput', 'NSPropertyDescription', 'GCGamepad', 'NSPersistentStoreCoordinator', 'GKMatchmaker', 'CIContext', 'NSThread', 'SKRequest', 'SKPhysicsJointSliding', 'NSPredicate', 'GKVoiceChat', 'SKCropNode', 'AVCaptureAudioPreviewOutput', 'NSStringDrawingContext', 'GKGameCenterViewController', 'UIPrintPaper', 'UICollectionViewLayoutInvalidationContext', 'GLKEffectPropertyTransform', 'UIDatePicker', 'MKDirections', 'ALAssetsGroup', 'CAEmitterCell', 'UIFont', 'MKPinAnnotationView', 'UIPickerView', 'UIImageView', 'SKNode', 'MPMediaQuerySection', 'GKFriendRequestComposeViewController', 'NSError', 'CTSubscriberInfo', 'AVPlayerItemAccessLog', 'MPMediaPropertyPredicate', 'CMLogItem', 'NSAutoreleasePool', 'NSSocketPort', 'AVAssetReaderTrackOutput', 'AVSpeechSynthesisVoice', 'UIImage', 'AVCaptureAudioChannel', 'GKTurnBasedExchangeReply', 'AVVideoCompositionLayerInstruction', 'AVSpeechSynthesizer', 'GKChallengeEventHandler', 'AVCaptureFileOutput', 'UIControl', 'SKPayment', 'ADInterstitialAd', 'AVAudioSessionDataSourceDescription', 'NSArray', 'GCControllerDirectionPad', 'NSFileManager', 'AVMutableAudioMixInputParameters', 'UIScreenEdgePanGestureRecognizer', 'CAKeyframeAnimation', 'EASession', 'UIInputView', 'NSHTTPCookieStorage', 'NSPointerFunctions', 'AVMediaSelectionOption', 'NSRunLoop', 'CAAnimationGroup', 'MKCircle', 'NSMigrationManager', 'UICollectionViewUpdateItem', 'NSMutableData', 'NSMutableParagraphStyle', 'GLKEffectProperty', 'SKShapeNode', 'MPMovieErrorLogEvent', 'MKPolygonView', 'UIAccelerometer', 'NSScanner', 'GKAchievementChallenge', 'AVAudioPlayer', 'AVVideoComposition', 'NKLibrary', 'NSPersistentStore', 'NSPropertyMapping', 'GKChallenge', 'NSURLProtectionSpace', 'ACAccountStore', 'UITextRange', 'NSComparisonPredicate', 'NSOutputStream', 'PKAddPassesViewController', 'CTTelephonyNetworkInfo', 'AVTextStyleRule', 'NSFetchedPropertyDescription', 'UIPageViewController', 'CATransformLayer', 'MCNearbyServiceAdvertiser', 'NSObject', 'MPMusicPlayerController', 'MKOverlayPathRenderer', 'GKAchievement', 'AVCaptureAudioFileOutput', 'TWRequest', 'SKLabelNode', 'MIDINetworkHost', 'MPMediaPredicate', 'AVFrameRateRange', 'NSIndexPath', 'AVVideoCompositionRenderContext', 'CADisplayLink', 'CAEAGLLayer', 'NSMutableString', 'NSMessagePort', 'AVAudioSessionChannelDescription', 'GLKView', 'UIActivityViewController', 'GKAchievementViewController', 'NSURLProtocol', 'NSCalendar', 'SKKeyframeSequence', 'AVMetadataItemFilter', 'NSMethodSignature', 'NSRegularExpression', 'EAGLSharegroup', 'AVPlayerItemVideoOutput', 'CIColor', 'UIDictationPhrase']) -COCOA_PROTOCOLS = set(['SKStoreProductViewControllerDelegate', 'AVVideoCompositionInstruction', 'AVAudioSessionDelegate', 'GKMatchDelegate', 'NSFileManagerDelegate', 'UILayoutSupport', 'NSCopying', 'UIPrintInteractionControllerDelegate', 'QLPreviewControllerDataSource', 'SKProductsRequestDelegate', 'NSTextStorageDelegate', 'MCBrowserViewControllerDelegate', 'UIViewControllerTransitionCoordinatorContext', 'NSTextAttachmentContainer', 'NSDecimalNumberBehaviors', 'NSMutableCopying', 'UIViewControllerTransitioningDelegate', 'UIAlertViewDelegate', 'AVAudioPlayerDelegate', 'MKReverseGeocoderDelegate', 'NSCoding', 'UITextInputTokenizer', 'GKFriendRequestComposeViewControllerDelegate', 'UIActivityItemSource', 'NSCacheDelegate', 'UITableViewDelegate', 'GKAchievementViewControllerDelegate', 'EKEventEditViewDelegate', 'NSURLConnectionDelegate', 'GKPeerPickerControllerDelegate', 'UIGuidedAccessRestrictionDelegate', 'AVSpeechSynthesizerDelegate', 
'MFMailComposeViewControllerDelegate', 'AVPlayerItemLegibleOutputPushDelegate', 'ADInterstitialAdDelegate', 'AVAssetResourceLoaderDelegate', 'UITabBarControllerDelegate', 'SKPaymentTransactionObserver', 'AVCaptureAudioDataOutputSampleBufferDelegate', 'UIInputViewAudioFeedback', 'GKChallengeListener', 'UIPickerViewDelegate', 'UIWebViewDelegate', 'UIApplicationDelegate', 'GKInviteEventListener', 'MPMediaPlayback', 'AVAsynchronousKeyValueLoading', 'QLPreviewItem', 'NSPortDelegate', 'SKRequestDelegate', 'SKPhysicsContactDelegate', 'UIPageViewControllerDataSource', 'AVPlayerItemOutputPushDelegate', 'UICollectionViewDelegate', 'UIImagePickerControllerDelegate', 'UIToolbarDelegate', 'UIViewControllerTransitionCoordinator', 'NSURLConnectionDataDelegate', 'MKOverlay', 'CBCentralManagerDelegate', 'JSExport', 'NSTextLayoutOrientationProvider', 'UIPickerViewDataSource', 'UITextInputTraits', 'NSLayoutManagerDelegate', 'NSFetchedResultsControllerDelegate', 'ABPeoplePickerNavigationControllerDelegate', 'NSDiscardableContent', 'UITextFieldDelegate', 'GKGameCenterControllerDelegate', 'MPMediaPickerControllerDelegate', 'UIAppearance', 'UIPickerViewAccessibilityDelegate', 'UIScrollViewAccessibilityDelegate', 'ADBannerViewDelegate', 'NSURLSessionDelegate', 'NSXMLParserDelegate', 'UIViewControllerRestoration', 'UISearchBarDelegate', 'UIBarPositioning', 'CBPeripheralDelegate', 'UISearchDisplayDelegate', 'CAAction', 'PKAddPassesViewControllerDelegate', 'MCNearbyServiceAdvertiserDelegate', 'GKTurnBasedMatchmakerViewControllerDelegate', 'UIActionSheetDelegate', 'AVCaptureVideoDataOutputSampleBufferDelegate', 'UIAppearanceContainer', 'UIStateRestoring', 'NSURLSessionTaskDelegate', 'NSFilePresenter', 'UIViewControllerContextTransitioning', 'UITextInput', 'CBPeripheralManagerDelegate', 'UITextInputDelegate', 'NSFastEnumeration', 'NSURLAuthenticationChallengeSender', 'AVVideoCompositing', 'NSSecureCoding', 'MCAdvertiserAssistantDelegate', 'GKLocalPlayerListener', 'GLKNamedEffect', 'UIPopoverControllerDelegate', 'AVCaptureMetadataOutputObjectsDelegate', 'MFMessageComposeViewControllerDelegate', 'UITextSelecting', 'NSURLProtocolClient', 'UIVideoEditorControllerDelegate', 'UITableViewDataSource', 'UIDynamicAnimatorDelegate', 'NSURLSessionDataDelegate', 'UICollisionBehaviorDelegate', 'NSStreamDelegate', 'MCNearbyServiceBrowserDelegate', 'UINavigationControllerDelegate', 'MCSessionDelegate', 'UIViewControllerInteractiveTransitioning', 'GKTurnBasedEventListener', 'GLKViewDelegate', 'EAAccessoryDelegate', 'NSKeyedUnarchiverDelegate', 'NSMachPortDelegate', 'UIBarPositioningDelegate', 'ABPersonViewControllerDelegate', 'NSNetServiceBrowserDelegate', 'EKEventViewDelegate', 'UIScrollViewDelegate', 'NSURLConnectionDownloadDelegate', 'UIGestureRecognizerDelegate', 'UINavigationBarDelegate', 'GKVoiceChatClient', 'NSFetchedResultsSectionInfo', 'UIDocumentInteractionControllerDelegate', 'QLPreviewControllerDelegate', 'UIAccessibilityReadingContent', 'ABUnknownPersonViewControllerDelegate', 'GLKViewControllerDelegate', 'UICollectionViewDelegateFlowLayout', 'UISplitViewControllerDelegate', 'MKAnnotation', 'UIAccessibilityIdentification', 'ABNewPersonViewControllerDelegate', 'CAMediaTiming', 'AVCaptureFileOutputRecordingDelegate', 'UITextViewDelegate', 'UITabBarDelegate', 'GKLeaderboardViewControllerDelegate', 'MKMapViewDelegate', 'UIKeyInput', 'UICollectionViewDataSource', 'NSLocking', 'AVCaptureFileOutputDelegate', 'GKChallengeEventHandlerDelegate', 'UIObjectRestoration', 'CIFilterConstructor', 'AVPlayerItemOutputPullDelegate', 
'EAGLDrawable', 'AVVideoCompositionValidationHandling', 'UIViewControllerAnimatedTransitioning', 'NSURLSessionDownloadDelegate', 'UIAccelerometerDelegate', 'UIPageViewControllerDelegate', 'UIDataSourceModelAssociation', 'AVAudioRecorderDelegate', 'GKSessionDelegate', 'NSKeyedArchiverDelegate', 'UIDynamicItem', 'CLLocationManagerDelegate', 'NSMetadataQueryDelegate', 'NSNetServiceDelegate', 'GKMatchmakerViewControllerDelegate', 'EKCalendarChooserDelegate', 'NSTextField', 'NSInteger', 'NSUInteger']) -COCOA_PRIMITIVES = set(['ROTAHeader', '__CFBundle', 'MortSubtable', 'AudioFilePacketTableInfo', 'CGPDFOperatorTable', 'KerxStateEntry', 'ExtendedTempoEvent', 'CTParagraphStyleSetting', 'OpaqueMIDIPort', 'CFStreamErrorHTTP', '__CFMachPort', '_GLKMatrix4', 'ExtendedControlEvent', 'CAFAudioDescription', 'KernVersion0Header', 'CGTextDrawingMode', 'EKErrorCode', 'gss_buffer_desc_struct', 'AudioUnitParameterInfo', '__SCPreferences', '__CTFrame', '__CTLine', 'CFStreamSocketSecurityProtocol', 'gss_krb5_lucid_context_v1', 'OpaqueJSValue', 'TrakTableEntry', 'AudioFramePacketTranslation', 'CGImageSource', 'OpaqueJSPropertyNameAccumulator', 'JustPCGlyphRepeatAddAction', 'BslnFormat0Part', 'OpaqueMIDIThruConnection', 'opaqueCMBufferQueue', 'OpaqueMusicSequence', 'MortRearrangementSubtable', 'MixerDistanceParams', 'MorxSubtable', 'MIDIObjectPropertyChangeNotification', '__CFDictionary', 'CGImageMetadataErrors', 'CGPath', 'OpaqueMIDIEndpoint', 'ALMXHeader', 'AudioComponentPlugInInterface', 'gss_ctx_id_t_desc_struct', 'sfntFontFeatureSetting', 'OpaqueJSContextGroup', '__SCNetworkConnection', 'AudioUnitParameterValueTranslation', 'CGImageMetadataType', 'CGPattern', 'AudioFileTypeAndFormatID', 'CGContext', 'AUNodeInteraction', 'SFNTLookupTable', 'JustPCDecompositionAction', 'KerxControlPointHeader', 'PKErrorCode', 'AudioStreamPacketDescription', 'KernSubtableHeader', '__CFNull', 'AUMIDIOutputCallbackStruct', 'MIDIMetaEvent', 'AudioQueueChannelAssignment', '__CFString', 'AnchorPoint', 'JustTable', '__CFNetService', 'gss_krb5_lucid_key', 'CGPDFDictionary', 'MIDIThruConnectionParams', 'CAF_UUID_ChunkHeader', 'gss_krb5_cfx_keydata', '_GLKMatrix3', 'CGGradient', 'OpaqueMIDISetup', '_GLKMatrix2', 'JustPostcompTable', '__CTParagraphStyle', 'AudioUnitParameterHistoryInfo', 'OpaqueJSContext', 'CGShading', '__CFBinaryHeap', 'SFNTLookupSingle', '__CFHost', '__SecRandom', '__CTFontDescriptor', '_NSRange', 'sfntDirectory', 'AudioQueueLevelMeterState', 'CAFPositionPeak', '__CFBoolean', 'PropLookupSegment', '__CVOpenGLESTextureCache', 'sfntInstance', '_GLKQuaternion', 'KernStateEntry', '__SCNetworkProtocol', 'CAFFileHeader', 'KerxOrderedListHeader', 'CGBlendMode', 'STXEntryOne', 'CAFRegion', 'SFNTLookupTrimmedArrayHeader', 'KerxControlPointEntry', '__CFCharacterSet', 'OpaqueMusicTrack', '_GLKVector4', 'gss_OID_set_desc_struct', 'OpaqueMusicPlayer', '_CFHTTPAuthentication', 'CGAffineTransform', 'CAFMarkerChunk', 'AUHostIdentifier', 'ROTAGlyphEntry', 'BslnTable', 'gss_krb5_lucid_context_version', '_GLKMatrixStack', 'CGImage', 'AnkrTable', 'SFNTLookupSingleHeader', 'MortLigatureSubtable', 'AudioFile_SMPTE_Time', 'CAFUMIDChunk', 'SMPTETime', 'CAFDataChunk', 'CGPDFStream', 'AudioFileRegionList', 'STEntryTwo', 'SFNTLookupBinarySearchHeader', 'OpbdTable', '__CTGlyphInfo', 'BslnFormat2Part', 'KerxIndexArrayHeader', 'TrakTable', 'KerxKerningPair', '__CFBitVector', 'KernVersion0SubtableHeader', 'OpaqueAudioComponentInstance', 'AudioChannelLayout', '__CFUUID', 'MIDISysexSendRequest', '__CFNumberFormatter', 'CGImageSourceStatus', '__CFURL', 
'AudioFileMarkerList', 'AUSamplerBankPresetData', 'CGDataProvider', 'AudioFormatInfo', '__SecIdentity', 'sfntCMapExtendedSubHeader', 'MIDIChannelMessage', 'KernOffsetTable', 'CGColorSpaceModel', 'MFMailComposeErrorCode', 'CGFunction', '__SecTrust', 'CFHostInfoType', 'KernSimpleArrayHeader', 'CGFontPostScriptFormat', 'KernStateHeader', 'AudioUnitCocoaViewInfo', 'CGDataConsumer', 'OpaqueMIDIDevice', 'OpaqueCMBlockBuffer', 'AnchorPointTable', 'CGImageDestination', 'CAFInstrumentChunk', 'AudioUnitMeterClipping', '__CFNumber', 'MorxChain', '__CTFontCollection', 'STEntryOne', 'STXEntryTwo', 'ExtendedNoteOnEvent', '__CFArray', 'CGColorRenderingIntent', 'KerxSimpleArrayHeader', 'MorxTable', '_GLKVector3', '_GLKVector2', 'MortTable', 'CGPDFBox', 'AudioUnitParameterValueFromString', '__CFSocket', 'ALCdevice_struct', 'MIDINoteMessage', 'sfntFeatureHeader', 'CGRect', '__SCNetworkInterface', '__CFTree', 'MusicEventUserData', 'TrakTableData', 'MortContextualSubtable', '__CTRun', 'AudioUnitFrequencyResponseBin', 'MortChain', 'MorxInsertionSubtable', 'CGImageMetadata', 'gss_auth_identity', 'AudioUnitMIDIControlMapping', 'CAFChunkHeader', 'PropTable', 'CGPDFScanner', 'OpaqueMusicEventIterator', '__CFFileSecurity', 'AudioUnitNodeConnection', 'OpaqueMIDIDeviceList', 'ExtendedAudioFormatInfo', 'CGRectEdge', 'sfntFontDescriptor', '__CFRunLoopObserver', 'CGPatternTiling', 'MIDINotification', 'MorxLigatureSubtable', 'SFNTLookupSegment', 'MessageComposeResult', 'MIDIThruConnectionEndpoint', 'MusicDeviceStdNoteParams', 'opaqueCMSimpleQueue', 'ALCcontext_struct', 'OpaqueAudioQueue', 'PropLookupSingle', 'CGColor', 'AudioOutputUnitStartAtTimeParams', 'gss_name_t_desc_struct', 'CGFunctionCallbacks', 'CAFPacketTableHeader', 'AudioChannelDescription', 'sfntFeatureName', 'MorxContextualSubtable', 'CVSMPTETime', 'AudioValueRange', 'CGTextEncoding', 'AudioStreamBasicDescription', 'AUNodeRenderCallback', 'AudioPanningInfo', '__CFData', '__CFDate', 'KerxOrderedListEntry', '__CFAllocator', 'OpaqueJSPropertyNameArray', '__SCDynamicStore', 'OpaqueMIDIEntity', 'CFHostClientContext', 'CFNetServiceClientContext', 'AudioUnitPresetMAS_SettingData', 'opaqueCMBufferQueueTriggerToken', 'AudioUnitProperty', 'CAFRegionChunk', 'CGPDFString', '__CFWriteStream', '__CFAttributedString', '__CFStringTokenizer', 'JustWidthDeltaEntry', '__CFSet', 'sfntVariationAxis', '__CFNetDiagnostic', 'CAFOverviewSample', 'sfntCMapEncoding', 'CGVector', '__SCNetworkService', 'opaqueCMSampleBuffer', 'AUHostVersionIdentifier', 'AudioBalanceFade', 'sfntFontRunFeature', 'KerxCoordinateAction', 'sfntCMapSubHeader', 'CVPlanarPixelBufferInfo', 'AUNumVersion', '__CFTimeZone', 'AUSamplerInstrumentData', 'AUPreset', '__CTRunDelegate', 'OpaqueAudioQueueProcessingTap', 'KerxTableHeader', '_NSZone', 'OpaqueExtAudioFile', '__CFRunLoopSource', 'KerxAnchorPointAction', 'OpaqueJSString', 'AudioQueueParameterEvent', '__CFHTTPMessage', 'OpaqueCMClock', 'ScheduledAudioFileRegion', 'STEntryZero', 'gss_channel_bindings_struct', 'sfntVariationHeader', 'AUChannelInfo', 'UIOffset', 'GLKEffectPropertyPrv', 'KerxStateHeader', 'CGLineJoin', 'CGPDFDocument', '__CFBag', 'CFStreamErrorHTTPAuthentication', 'KernOrderedListHeader', '__SCNetworkSet', '__SecKey', 'MIDIObjectAddRemoveNotification', 'sfntDescriptorHeader', 'AudioUnitParameter', 'JustPCActionSubrecord', 'AudioComponentDescription', 'AudioUnitParameterValueName', 'AudioUnitParameterEvent', 'KerxControlPointAction', 'AudioTimeStamp', 'KernKerningPair', 'gss_buffer_set_desc_struct', 'MortFeatureEntry', 'FontVariation', 
'CAFStringID', 'LcarCaretClassEntry', 'AudioUnitParameterStringFromValue', 'ACErrorCode', 'ALMXGlyphEntry', 'LtagTable', '__CTTypesetter', 'AuthorizationOpaqueRef', 'UIEdgeInsets', 'CGPathElement', 'CAFMarker', 'KernTableHeader', 'NoteParamsControlValue', 'SSLContext', 'gss_cred_id_t_desc_struct', 'AudioUnitParameterNameInfo', '__SecCertificate', 'CGDataConsumerCallbacks', 'CGInterpolationQuality', 'CGLineCap', 'MIDIControlTransform', 'BslnFormat1Part', 'CGPDFArray', '__SecPolicy', 'AudioConverterPrimeInfo', '__CTTextTab', '__CFNetServiceMonitor', 'AUInputSamplesInOutputCallbackStruct', '__CTFramesetter', 'CGPDFDataFormat', 'STHeader', 'CVPlanarPixelBufferInfo_YCbCrPlanar', 'MIDIValueMap', 'JustDirectionTable', '__SCBondStatus', 'SFNTLookupSegmentHeader', 'OpaqueCMMemoryPool', 'CGPathDrawingMode', 'CGFont', '__SCNetworkReachability', 'AudioClassDescription', 'CGPoint', 'CAFStrings', '__CFNetServiceBrowser', 'opaqueMTAudioProcessingTap', 'sfntNameRecord', 'CGPDFPage', 'CGLayer', 'ComponentInstanceRecord', 'CAFInfoStrings', 'HostCallbackInfo', 'MusicDeviceNoteParams', 'KernIndexArrayHeader', 'CVPlanarPixelBufferInfo_YCbCrBiPlanar', 'MusicTrackLoopInfo', 'opaqueCMFormatDescription', 'STClassTable', 'sfntDirectoryEntry', 'OpaqueCMTimebase', 'CGDataProviderDirectCallbacks', 'MIDIPacketList', 'CAFOverviewChunk', 'MIDIPacket', 'ScheduledAudioSlice', 'CGDataProviderSequentialCallbacks', 'AudioBuffer', 'MorxRearrangementSubtable', 'CGPatternCallbacks', 'AUDistanceAttenuationData', 'MIDIIOErrorNotification', 'CGPDFContentStream', 'IUnknownVTbl', 'MIDITransform', 'MortInsertionSubtable', 'CABarBeatTime', 'AudioBufferList', 'KerxSubtableHeader', '__CVBuffer', 'AURenderCallbackStruct', 'STXEntryZero', 'JustPCDuctilityAction', 'OpaqueAudioQueueTimeline', 'OpaqueMIDIClient', '__CFPlugInInstance', 'AudioQueueBuffer', '__CFFileDescriptor', 'AudioUnitConnection', '_GKTurnBasedExchangeStatus', 'LcarCaretTable', 'CVPlanarComponentInfo', 'JustWidthDeltaGroup', 'OpaqueAudioComponent', 'ParameterEvent', '__CVPixelBufferPool', '__CTFont', 'OpaqueJSClass', 'CGColorSpace', 'CGSize', 'AUDependentParameter', 'MIDIDriverInterface', 'gss_krb5_rfc1964_keydata', '__CFDateFormatter', 'LtagStringRange', 'CFNetServiceMonitorType', 'gss_iov_buffer_desc_struct', 'AUPresetEvent', 'CFNetServicesError', 'KernOrderedListEntry', '__CFLocale', 'gss_OID_desc_struct', 'AudioUnitPresetMAS_Settings', 'AudioFileMarker', 'JustPCConditionalAddAction', 'BslnFormat3Part', '__CFNotificationCenter', 'MortSwashSubtable', 'AUParameterMIDIMapping', 'OpaqueAudioConverter', 'MIDIRawData', 'CFNetDiagnosticStatusValues', 'sfntNameHeader', '__CFRunLoop', 'MFMailComposeResult', 'CATransform3D', 'OpbdSideValues', 'CAF_SMPTE_Time', 'JustPCAction', 'CGPathElementType', '__CFRunLoopTimer', '__CFError', 'AudioFormatListItem', '__CFReadStream', 'AudioUnitExternalBuffer', 'AudioFileRegion', 'AudioValueTranslation', 'CGImageMetadataTag', 'CAFPeakChunk', 'AudioBytePacketTranslation', 'CFNetworkErrors', 'sfntCMapHeader', '__CFURLEnumerator', '__CFCalendar', '__CFMessagePort', 'STXHeader', 'CGPDFObjectType', 'SFNTLookupArrayHeader']) +COCOA_INTERFACES = set(('UITableViewCell', 'NSURLSessionDataTask', 'NSLinguisticTagger', 'NSStream', 'UIPrintInfo', 'SKPaymentTransaction', 'SKPhysicsWorld', 'NSString', 'CMAttitude', 'SKSpriteNode', 'JSContext', 'UICollectionReusableView', 'AVMutableCompositionTrack', 'GKLeaderboard', 'NSFetchedResultsController', 'MKTileOverlayRenderer', 'MIDINetworkSession', 'UITextSelectionRect', 'MKRoute', 'MPVolumeView', 'UIKeyCommand', 
'AVMutableAudioMix', 'GLKEffectPropertyLight', 'UICollectionViewLayout', 'NSMutableCharacterSet', 'UIAccessibilityElement', 'NSShadow', 'NSAtomicStoreCacheNode', 'UIPushBehavior', 'CBCharacteristic', 'CBUUID', 'CMStepCounter', 'NSNetService', 'UICollectionView', 'UIViewPrintFormatter', 'CAShapeLayer', 'MCPeerID', 'NSFileVersion', 'CMGyroData', 'SKPhysicsJointSpring', 'CIFilter', 'UIView', 'MKMapItem', 'PKPass', 'MKPolygonRenderer', 'JSValue', 'CLGeocoder', 'NSByteCountFormatter', 'AVCaptureScreenInput', 'CAAnimation', 'MKOverlayPathView', 'UIActionSheet', 'UIMotionEffectGroup', 'UIBarItem', 'SKProduct', 'AVAssetExportSession', 'NSKeyedUnarchiver', 'NSMutableSet', 'MKMapView', 'CATransition', 'CLCircularRegion', 'MKTileOverlay', 'UICollisionBehavior', 'ACAccountCredential', 'SKPhysicsJointLimit', 'AVMediaSelectionGroup', 'NSIndexSet', 'AVAudioRecorder', 'NSURL', 'CBCentral', 'NSNumber', 'UITableView', 'AVCaptureStillImageOutput', 'GCController', 'NSAssertionHandler', 'AVAudioSessionPortDescription', 'NSHTTPURLResponse', 'NSPropertyListSerialization', 'AVPlayerItemAccessLogEvent', 'UISwipeGestureRecognizer', 'MKOverlayRenderer', 'NSDecimalNumber', 'EKReminder', 'MKPolylineView', 'AVCaptureMovieFileOutput', 'UIImagePickerController', 'GKAchievementDescription', 'EKParticipant', 'NSBlockOperation', 'UIActivityItemProvider', 'CLLocation', 'GKLeaderboardViewController', 'MPMoviePlayerController', 'GKScore', 'NSURLConnection', 'ABUnknownPersonViewController', 'UIMenuController', 'NSEvent', 'SKTextureAtlas', 'NSKeyedArchiver', 'GKLeaderboardSet', 'NSSimpleCString', 'CBATTRequest', 'GKMatchRequest', 'AVMetadataObject', 'UIAlertView', 'NSIncrementalStore', 'MFMailComposeViewController', 'SSReadingList', 'MPMovieAccessLog', 'NSManagedObjectContext', 'AVCaptureAudioDataOutput', 'ACAccount', 'AVMetadataItem', 'AVCaptureDeviceInputSource', 'CLLocationManager', 'UIStepper', 'UIRefreshControl', 'GKTurnBasedParticipant', 'UICollectionViewTransitionLayout', 'CBCentralManager', 'NSPurgeableData', 'SLComposeViewController', 'NSHashTable', 'MKUserTrackingBarButtonItem', 'UITabBarController', 'CMMotionActivity', 'SKAction', 'AVPlayerItemOutput', 'UIDocumentInteractionController', 'UIDynamicItemBehavior', 'NSMutableDictionary', 'UILabel', 'AVCaptureInputPort', 'NSExpression', 'SKMutablePayment', 'UIStoryboardSegue', 'NSOrderedSet', 'UIPopoverBackgroundView', 'UIToolbar', 'NSNotificationCenter', 'NSEntityMigrationPolicy', 'NSLocale', 'NSURLSession', 'NSTimeZone', 'UIManagedDocument', 'AVMutableVideoCompositionLayerInstruction', 'AVAssetTrackGroup', 'NSInvocationOperation', 'ALAssetRepresentation', 'AVQueuePlayer', 'UIPasteboard', 'NSLayoutManager', 'EKCalendarChooser', 'EKObject', 'CATiledLayer', 'GLKReflectionMapEffect', 'NSManagedObjectID', 'NSUserDefaults', 'SLRequest', 'AVPlayerLayer', 'NSPointerArray', 'AVAudioMix', 'MCAdvertiserAssistant', 'MKMapSnapshotOptions', 'GKMatch', 'AVTimedMetadataGroup', 'CBMutableCharacteristic', 'NSFetchRequest', 'UIDevice', 'NSManagedObject', 'NKAssetDownload', 'AVOutputSettingsAssistant', 'SKPhysicsJointPin', 'UITabBar', 'UITextInputMode', 'NSFetchRequestExpression', 'NSPipe', 'AVComposition', 'ADBannerView', 'AVPlayerItem', 'AVSynchronizedLayer', 'MKDirectionsRequest', 'NSMetadataItem', 'UINavigationItem', 'CBPeripheralManager', 'UIStoryboardPopoverSegue', 'SKProductsRequest', 'UIGravityBehavior', 'UIWindow', 'CBMutableDescriptor', 'UIBezierPath', 'UINavigationController', 'ABPeoplePickerNavigationController', 'EKSource', 'AVAssetWriterInput', 'AVPlayerItemTrack', 
'GLKEffectPropertyTexture', 'NSURLResponse', 'SKPaymentQueue', 'MKReverseGeocoder', 'GCControllerAxisInput', 'MKMapSnapshotter', 'NSOrthography', 'NSURLSessionUploadTask', 'NSCharacterSet', 'AVAssetReaderOutput', 'EAGLContext', 'UICollectionViewController', 'AVAssetTrack', 'SKEmitterNode', 'AVCaptureDeviceInput', 'AVVideoCompositionCoreAnimationTool', 'NSURLRequest', 'CMAccelerometerData', 'NSNetServiceBrowser', 'AVAsynchronousVideoCompositionRequest', 'CAGradientLayer', 'NSFormatter', 'CATransaction', 'MPMovieAccessLogEvent', 'UIStoryboard', 'MPMediaLibrary', 'UITapGestureRecognizer', 'MPMediaItemArtwork', 'NSURLSessionTask', 'MCBrowserViewController', 'NSRelationshipDescription', 'NSMutableAttributedString', 'MPNowPlayingInfoCenter', 'MKLocalSearch', 'EAAccessory', 'MKETAResponse', 'CATextLayer', 'NSNotificationQueue', 'NSValue', 'NSMutableIndexSet', 'SKPhysicsContact', 'NSProgress', 'CAScrollLayer', 'NSTextCheckingResult', 'NSEntityDescription', 'NSURLCredentialStorage', 'UIApplication', 'SKDownload', 'MKLocalSearchRequest', 'SKScene', 'UISearchDisplayController', 'CAReplicatorLayer', 'UIPrintPageRenderer', 'EKCalendarItem', 'NSUUID', 'EAAccessoryManager', 'AVAssetResourceLoader', 'AVMutableVideoCompositionInstruction', 'CTCall', 'CIVector', 'UINavigationBar', 'UIPanGestureRecognizer', 'MPMediaQuery', 'ABNewPersonViewController', 'ACAccountType', 'GKSession', 'SKVideoNode', 'GCExtendedGamepadSnapshot', 'GCExtendedGamepad', 'CAValueFunction', 'UIActivityIndicatorView', 'NSNotification', 'SKReceiptRefreshRequest', 'AVCaptureDeviceFormat', 'AVPlayerItemErrorLog', 'NSMapTable', 'NSSet', 'CMMotionManager', 'GKVoiceChatService', 'UIPageControl', 'MKGeodesicPolyline', 'AVMutableComposition', 'NSLayoutConstraint', 'UIWebView', 'NSIncrementalStoreNode', 'EKEventStore', 'UISlider', 'AVAssetResourceLoadingRequest', 'AVCaptureInput', 'SKPhysicsBody', 'NSOperation', 'MKMapCamera', 'SKProductsResponse', 'GLKEffectPropertyMaterial', 'AVCaptureDevice', 'CTCallCenter', 'CBMutableService', 'SKTransition', 'UIDynamicAnimator', 'NSMutableArray', 'MCNearbyServiceBrowser', 'NSOperationQueue', 'MKPolylineRenderer', 'UICollectionViewLayoutAttributes', 'NSValueTransformer', 'UICollectionViewFlowLayout', 'NSEntityMapping', 'SKTexture', 'NSMergePolicy', 'UITextInputStringTokenizer', 'NSRecursiveLock', 'AVAsset', 'NSUndoManager', 'MPMediaPickerController', 'NSFileCoordinator', 'NSFileHandle', 'NSConditionLock', 'UISegmentedControl', 'NSManagedObjectModel', 'UITabBarItem', 'MPMediaItem', 'EKRecurrenceRule', 'UIEvent', 'UITouch', 'UIPrintInteractionController', 'CMDeviceMotion', 'NSCompoundPredicate', 'MKMultiPoint', 'UIPrintFormatter', 'SKView', 'NSConstantString', 'UIPopoverController', 'AVMetadataFaceObject', 'EKEventViewController', 'NSPort', 'MKCircleRenderer', 'AVCompositionTrack', 'UINib', 'NSUbiquitousKeyValueStore', 'NSMetadataQueryResultGroup', 'AVAssetResourceLoadingDataRequest', 'UITableViewHeaderFooterView', 'UISplitViewController', 'AVAudioSession', 'CAEmitterLayer', 'NSNull', 'MKCircleView', 'UIColor', 'UIAttachmentBehavior', 'CLBeacon', 'NSInputStream', 'NSURLCache', 'GKPlayer', 'NSMappingModel', 'NSHTTPCookie', 'AVMutableVideoComposition', 'NSAttributeDescription', 'AVPlayer', 'MKAnnotationView', 'UIFontDescriptor', 'NSTimer', 'CBDescriptor', 'MKOverlayView', 'EKEventEditViewController', 'NSSaveChangesRequest', 'UIReferenceLibraryViewController', 'SKPhysicsJointFixed', 'UILocalizedIndexedCollation', 'UIInterpolatingMotionEffect', 'AVAssetWriter', 'NSBundle', 'SKStoreProductViewController', 
'GLKViewController', 'NSMetadataQueryAttributeValueTuple', 'GKTurnBasedMatch', 'UIActivity', 'MKShape', 'NSMergeConflict', 'CIImage', 'UIRotationGestureRecognizer', 'AVPlayerItemLegibleOutput', 'AVAssetImageGenerator', 'GCControllerButtonInput', 'NSSortDescriptor', 'MPTimedMetadata', 'NKIssue', 'UIScreenMode', 'GKTurnBasedEventHandler', 'MKPolyline', 'JSVirtualMachine', 'AVAssetReader', 'NSAttributedString', 'GKMatchmakerViewController', 'NSCountedSet', 'UIButton', 'GKLocalPlayer', 'MPMovieErrorLog', 'AVSpeechUtterance', 'AVURLAsset', 'CBPeripheral', 'AVAssetWriterInputGroup', 'AVAssetReaderAudioMixOutput', 'NSEnumerator', 'UIDocument', 'MKLocalSearchResponse', 'UISimpleTextPrintFormatter', 'CBService', 'MCSession', 'QLPreviewController', 'CAMediaTimingFunction', 'UITextPosition', 'NSNumberFormatter', 'UIPinchGestureRecognizer', 'UIMarkupTextPrintFormatter', 'MKRouteStep', 'NSMetadataQuery', 'AVAssetResourceLoadingContentInformationRequest', 'CTSubscriber', 'CTCarrier', 'NSFileSecurity', 'UIAcceleration', 'UIMotionEffect', 'CLHeading', 'NSFileWrapper', 'MKDirectionsResponse', 'UILocalNotification', 'UICollectionViewCell', 'UITextView', 'CMMagnetometerData', 'UIProgressView', 'GKInvite', 'UISearchBar', 'MKPlacemark', 'AVCaptureConnection', 'ALAssetsFilter', 'AVPlayerItemErrorLogEvent', 'NSJSONSerialization', 'AVAssetReaderVideoCompositionOutput', 'ABPersonViewController', 'CIDetector', 'GKTurnBasedMatchmakerViewController', 'MPMediaItemCollection', 'NSCondition', 'NSURLCredential', 'MIDINetworkConnection', 'NSDecimalNumberHandler', 'NSURLSessionConfiguration', 'EKCalendar', 'NSDictionary', 'CAPropertyAnimation', 'UIPercentDrivenInteractiveTransition', 'MKPolygon', 'AVAssetTrackSegment', 'NSExpressionDescription', 'UIViewController', 'NSURLAuthenticationChallenge', 'NSDirectoryEnumerator', 'MKDistanceFormatter', 'GCControllerElement', 'GKPeerPickerController', 'UITableViewController', 'GKNotificationBanner', 'MKPointAnnotation', 'NSCache', 'SKPhysicsJoint', 'NSXMLParser', 'MFMessageComposeViewController', 'AVCaptureSession', 'NSDataDetector', 'AVCaptureVideoPreviewLayer', 'NSURLComponents', 'UISnapBehavior', 'AVMetadataMachineReadableCodeObject', 'GLKTextureLoader', 'NSTextAttachment', 'NSException', 'UIMenuItem', 'CMMotionActivityManager', 'MKUserLocation', 'CIFeature', 'NSMachPort', 'ALAsset', 'NSURLSessionDownloadTask', 'MPMoviePlayerViewController', 'NSMutableOrderedSet', 'AVCaptureVideoDataOutput', 'NSCachedURLResponse', 'ALAssetsLibrary', 'NSInvocation', 'UILongPressGestureRecognizer', 'NSTextStorage', 'CIFaceFeature', 'MKMapSnapshot', 'GLKEffectPropertyFog', 'NSPersistentStoreRequest', 'AVAudioMixInputParameters', 'CAEmitterBehavior', 'PKPassLibrary', 'NSLock', 'UIDynamicBehavior', 'AVPlayerMediaSelectionCriteria', 'CALayer', 'UIBarButtonItem', 'AVAudioSessionRouteDescription', 'CLBeaconRegion', 'SKEffectNode', 'CABasicAnimation', 'AVVideoCompositionInstruction', 'AVMutableTimedMetadataGroup', 'EKRecurrenceEnd', 'NSTextContainer', 'TWTweetComposeViewController', 'UIScrollView', 'EKRecurrenceDayOfWeek', 'ASIdentifierManager', 'UIScreen', 'CLRegion', 'NSProcessInfo', 'GLKTextureInfo', 'AVCaptureMetadataOutput', 'NSTextTab', 'JSManagedValue', 'NSDate', 'UITextChecker', 'NSData', 'NSParagraphStyle', 'AVMutableMetadataItem', 'EKAlarm', 'NSMutableURLRequest', 'UIVideoEditorController', 'NSAtomicStore', 'UIResponder', 'AVCompositionTrackSegment', 'GCGamepadSnapshot', 'MPMediaEntity', 'GLKSkyboxEffect', 'UISwitch', 'EKStructuredLocation', 'UIGestureRecognizer', 'NSProxy', 'GLKBaseEffect', 
'GKScoreChallenge', 'NSCoder', 'MPMediaPlaylist', 'NSDateComponents', 'EKEvent', 'NSDateFormatter', 'AVAssetWriterInputPixelBufferAdaptor', 'UICollectionViewFlowLayoutInvalidationContext', 'UITextField', 'CLPlacemark', 'AVCaptureOutput', 'NSPropertyDescription', 'GCGamepad', 'NSPersistentStoreCoordinator', 'GKMatchmaker', 'CIContext', 'NSThread', 'SKRequest', 'SKPhysicsJointSliding', 'NSPredicate', 'GKVoiceChat', 'SKCropNode', 'AVCaptureAudioPreviewOutput', 'NSStringDrawingContext', 'GKGameCenterViewController', 'UIPrintPaper', 'UICollectionViewLayoutInvalidationContext', 'GLKEffectPropertyTransform', 'UIDatePicker', 'MKDirections', 'ALAssetsGroup', 'CAEmitterCell', 'UIFont', 'MKPinAnnotationView', 'UIPickerView', 'UIImageView', 'SKNode', 'MPMediaQuerySection', 'GKFriendRequestComposeViewController', 'NSError', 'CTSubscriberInfo', 'AVPlayerItemAccessLog', 'MPMediaPropertyPredicate', 'CMLogItem', 'NSAutoreleasePool', 'NSSocketPort', 'AVAssetReaderTrackOutput', 'AVSpeechSynthesisVoice', 'UIImage', 'AVCaptureAudioChannel', 'GKTurnBasedExchangeReply', 'AVVideoCompositionLayerInstruction', 'AVSpeechSynthesizer', 'GKChallengeEventHandler', 'AVCaptureFileOutput', 'UIControl', 'SKPayment', 'ADInterstitialAd', 'AVAudioSessionDataSourceDescription', 'NSArray', 'GCControllerDirectionPad', 'NSFileManager', 'AVMutableAudioMixInputParameters', 'UIScreenEdgePanGestureRecognizer', 'CAKeyframeAnimation', 'EASession', 'UIInputView', 'NSHTTPCookieStorage', 'NSPointerFunctions', 'AVMediaSelectionOption', 'NSRunLoop', 'CAAnimationGroup', 'MKCircle', 'NSMigrationManager', 'UICollectionViewUpdateItem', 'NSMutableData', 'NSMutableParagraphStyle', 'GLKEffectProperty', 'SKShapeNode', 'MPMovieErrorLogEvent', 'MKPolygonView', 'UIAccelerometer', 'NSScanner', 'GKAchievementChallenge', 'AVAudioPlayer', 'AVVideoComposition', 'NKLibrary', 'NSPersistentStore', 'NSPropertyMapping', 'GKChallenge', 'NSURLProtectionSpace', 'ACAccountStore', 'UITextRange', 'NSComparisonPredicate', 'NSOutputStream', 'PKAddPassesViewController', 'CTTelephonyNetworkInfo', 'AVTextStyleRule', 'NSFetchedPropertyDescription', 'UIPageViewController', 'CATransformLayer', 'MCNearbyServiceAdvertiser', 'NSObject', 'MPMusicPlayerController', 'MKOverlayPathRenderer', 'GKAchievement', 'AVCaptureAudioFileOutput', 'TWRequest', 'SKLabelNode', 'MIDINetworkHost', 'MPMediaPredicate', 'AVFrameRateRange', 'NSIndexPath', 'AVVideoCompositionRenderContext', 'CADisplayLink', 'CAEAGLLayer', 'NSMutableString', 'NSMessagePort', 'AVAudioSessionChannelDescription', 'GLKView', 'UIActivityViewController', 'GKAchievementViewController', 'NSURLProtocol', 'NSCalendar', 'SKKeyframeSequence', 'AVMetadataItemFilter', 'NSMethodSignature', 'NSRegularExpression', 'EAGLSharegroup', 'AVPlayerItemVideoOutput', 'CIColor', 'UIDictationPhrase')) +COCOA_PROTOCOLS = set(('SKStoreProductViewControllerDelegate', 'AVVideoCompositionInstruction', 'AVAudioSessionDelegate', 'GKMatchDelegate', 'NSFileManagerDelegate', 'UILayoutSupport', 'NSCopying', 'UIPrintInteractionControllerDelegate', 'QLPreviewControllerDataSource', 'SKProductsRequestDelegate', 'NSTextStorageDelegate', 'MCBrowserViewControllerDelegate', 'UIViewControllerTransitionCoordinatorContext', 'NSTextAttachmentContainer', 'NSDecimalNumberBehaviors', 'NSMutableCopying', 'UIViewControllerTransitioningDelegate', 'UIAlertViewDelegate', 'AVAudioPlayerDelegate', 'MKReverseGeocoderDelegate', 'NSCoding', 'UITextInputTokenizer', 'GKFriendRequestComposeViewControllerDelegate', 'UIActivityItemSource', 'NSCacheDelegate', 'UITableViewDelegate', 
'GKAchievementViewControllerDelegate', 'EKEventEditViewDelegate', 'NSURLConnectionDelegate', 'GKPeerPickerControllerDelegate', 'UIGuidedAccessRestrictionDelegate', 'AVSpeechSynthesizerDelegate', 'MFMailComposeViewControllerDelegate', 'AVPlayerItemLegibleOutputPushDelegate', 'ADInterstitialAdDelegate', 'AVAssetResourceLoaderDelegate', 'UITabBarControllerDelegate', 'SKPaymentTransactionObserver', 'AVCaptureAudioDataOutputSampleBufferDelegate', 'UIInputViewAudioFeedback', 'GKChallengeListener', 'UIPickerViewDelegate', 'UIWebViewDelegate', 'UIApplicationDelegate', 'GKInviteEventListener', 'MPMediaPlayback', 'AVAsynchronousKeyValueLoading', 'QLPreviewItem', 'NSPortDelegate', 'SKRequestDelegate', 'SKPhysicsContactDelegate', 'UIPageViewControllerDataSource', 'AVPlayerItemOutputPushDelegate', 'UICollectionViewDelegate', 'UIImagePickerControllerDelegate', 'UIToolbarDelegate', 'UIViewControllerTransitionCoordinator', 'NSURLConnectionDataDelegate', 'MKOverlay', 'CBCentralManagerDelegate', 'JSExport', 'NSTextLayoutOrientationProvider', 'UIPickerViewDataSource', 'UITextInputTraits', 'NSLayoutManagerDelegate', 'NSFetchedResultsControllerDelegate', 'ABPeoplePickerNavigationControllerDelegate', 'NSDiscardableContent', 'UITextFieldDelegate', 'GKGameCenterControllerDelegate', 'MPMediaPickerControllerDelegate', 'UIAppearance', 'UIPickerViewAccessibilityDelegate', 'UIScrollViewAccessibilityDelegate', 'ADBannerViewDelegate', 'NSURLSessionDelegate', 'NSXMLParserDelegate', 'UIViewControllerRestoration', 'UISearchBarDelegate', 'UIBarPositioning', 'CBPeripheralDelegate', 'UISearchDisplayDelegate', 'CAAction', 'PKAddPassesViewControllerDelegate', 'MCNearbyServiceAdvertiserDelegate', 'GKTurnBasedMatchmakerViewControllerDelegate', 'UIActionSheetDelegate', 'AVCaptureVideoDataOutputSampleBufferDelegate', 'UIAppearanceContainer', 'UIStateRestoring', 'NSURLSessionTaskDelegate', 'NSFilePresenter', 'UIViewControllerContextTransitioning', 'UITextInput', 'CBPeripheralManagerDelegate', 'UITextInputDelegate', 'NSFastEnumeration', 'NSURLAuthenticationChallengeSender', 'AVVideoCompositing', 'NSSecureCoding', 'MCAdvertiserAssistantDelegate', 'GKLocalPlayerListener', 'GLKNamedEffect', 'UIPopoverControllerDelegate', 'AVCaptureMetadataOutputObjectsDelegate', 'MFMessageComposeViewControllerDelegate', 'UITextSelecting', 'NSURLProtocolClient', 'UIVideoEditorControllerDelegate', 'UITableViewDataSource', 'UIDynamicAnimatorDelegate', 'NSURLSessionDataDelegate', 'UICollisionBehaviorDelegate', 'NSStreamDelegate', 'MCNearbyServiceBrowserDelegate', 'UINavigationControllerDelegate', 'MCSessionDelegate', 'UIViewControllerInteractiveTransitioning', 'GKTurnBasedEventListener', 'GLKViewDelegate', 'EAAccessoryDelegate', 'NSKeyedUnarchiverDelegate', 'NSMachPortDelegate', 'UIBarPositioningDelegate', 'ABPersonViewControllerDelegate', 'NSNetServiceBrowserDelegate', 'EKEventViewDelegate', 'UIScrollViewDelegate', 'NSURLConnectionDownloadDelegate', 'UIGestureRecognizerDelegate', 'UINavigationBarDelegate', 'GKVoiceChatClient', 'NSFetchedResultsSectionInfo', 'UIDocumentInteractionControllerDelegate', 'QLPreviewControllerDelegate', 'UIAccessibilityReadingContent', 'ABUnknownPersonViewControllerDelegate', 'GLKViewControllerDelegate', 'UICollectionViewDelegateFlowLayout', 'UISplitViewControllerDelegate', 'MKAnnotation', 'UIAccessibilityIdentification', 'ABNewPersonViewControllerDelegate', 'CAMediaTiming', 'AVCaptureFileOutputRecordingDelegate', 'UITextViewDelegate', 'UITabBarDelegate', 'GKLeaderboardViewControllerDelegate', 'MKMapViewDelegate', 'UIKeyInput', 
'UICollectionViewDataSource', 'NSLocking', 'AVCaptureFileOutputDelegate', 'GKChallengeEventHandlerDelegate', 'UIObjectRestoration', 'CIFilterConstructor', 'AVPlayerItemOutputPullDelegate', 'EAGLDrawable', 'AVVideoCompositionValidationHandling', 'UIViewControllerAnimatedTransitioning', 'NSURLSessionDownloadDelegate', 'UIAccelerometerDelegate', 'UIPageViewControllerDelegate', 'UIDataSourceModelAssociation', 'AVAudioRecorderDelegate', 'GKSessionDelegate', 'NSKeyedArchiverDelegate', 'UIDynamicItem', 'CLLocationManagerDelegate', 'NSMetadataQueryDelegate', 'NSNetServiceDelegate', 'GKMatchmakerViewControllerDelegate', 'EKCalendarChooserDelegate', 'NSTextField', 'NSInteger', 'NSUInteger')) +COCOA_PRIMITIVES = set(('ROTAHeader', '__CFBundle', 'MortSubtable', 'AudioFilePacketTableInfo', 'CGPDFOperatorTable', 'KerxStateEntry', 'ExtendedTempoEvent', 'CTParagraphStyleSetting', 'OpaqueMIDIPort', 'CFStreamErrorHTTP', '__CFMachPort', '_GLKMatrix4', 'ExtendedControlEvent', 'CAFAudioDescription', 'KernVersion0Header', 'CGTextDrawingMode', 'EKErrorCode', 'gss_buffer_desc_struct', 'AudioUnitParameterInfo', '__SCPreferences', '__CTFrame', '__CTLine', 'CFStreamSocketSecurityProtocol', 'gss_krb5_lucid_context_v1', 'OpaqueJSValue', 'TrakTableEntry', 'AudioFramePacketTranslation', 'CGImageSource', 'OpaqueJSPropertyNameAccumulator', 'JustPCGlyphRepeatAddAction', 'BslnFormat0Part', 'OpaqueMIDIThruConnection', 'opaqueCMBufferQueue', 'OpaqueMusicSequence', 'MortRearrangementSubtable', 'MixerDistanceParams', 'MorxSubtable', 'MIDIObjectPropertyChangeNotification', '__CFDictionary', 'CGImageMetadataErrors', 'CGPath', 'OpaqueMIDIEndpoint', 'ALMXHeader', 'AudioComponentPlugInInterface', 'gss_ctx_id_t_desc_struct', 'sfntFontFeatureSetting', 'OpaqueJSContextGroup', '__SCNetworkConnection', 'AudioUnitParameterValueTranslation', 'CGImageMetadataType', 'CGPattern', 'AudioFileTypeAndFormatID', 'CGContext', 'AUNodeInteraction', 'SFNTLookupTable', 'JustPCDecompositionAction', 'KerxControlPointHeader', 'PKErrorCode', 'AudioStreamPacketDescription', 'KernSubtableHeader', '__CFNull', 'AUMIDIOutputCallbackStruct', 'MIDIMetaEvent', 'AudioQueueChannelAssignment', '__CFString', 'AnchorPoint', 'JustTable', '__CFNetService', 'gss_krb5_lucid_key', 'CGPDFDictionary', 'MIDIThruConnectionParams', 'CAF_UUID_ChunkHeader', 'gss_krb5_cfx_keydata', '_GLKMatrix3', 'CGGradient', 'OpaqueMIDISetup', '_GLKMatrix2', 'JustPostcompTable', '__CTParagraphStyle', 'AudioUnitParameterHistoryInfo', 'OpaqueJSContext', 'CGShading', '__CFBinaryHeap', 'SFNTLookupSingle', '__CFHost', '__SecRandom', '__CTFontDescriptor', '_NSRange', 'sfntDirectory', 'AudioQueueLevelMeterState', 'CAFPositionPeak', '__CFBoolean', 'PropLookupSegment', '__CVOpenGLESTextureCache', 'sfntInstance', '_GLKQuaternion', 'KernStateEntry', '__SCNetworkProtocol', 'CAFFileHeader', 'KerxOrderedListHeader', 'CGBlendMode', 'STXEntryOne', 'CAFRegion', 'SFNTLookupTrimmedArrayHeader', 'KerxControlPointEntry', '__CFCharacterSet', 'OpaqueMusicTrack', '_GLKVector4', 'gss_OID_set_desc_struct', 'OpaqueMusicPlayer', '_CFHTTPAuthentication', 'CGAffineTransform', 'CAFMarkerChunk', 'AUHostIdentifier', 'ROTAGlyphEntry', 'BslnTable', 'gss_krb5_lucid_context_version', '_GLKMatrixStack', 'CGImage', 'AnkrTable', 'SFNTLookupSingleHeader', 'MortLigatureSubtable', 'AudioFile_SMPTE_Time', 'CAFUMIDChunk', 'SMPTETime', 'CAFDataChunk', 'CGPDFStream', 'AudioFileRegionList', 'STEntryTwo', 'SFNTLookupBinarySearchHeader', 'OpbdTable', '__CTGlyphInfo', 'BslnFormat2Part', 'KerxIndexArrayHeader', 'TrakTable', 'KerxKerningPair', 
'__CFBitVector', 'KernVersion0SubtableHeader', 'OpaqueAudioComponentInstance', 'AudioChannelLayout', '__CFUUID', 'MIDISysexSendRequest', '__CFNumberFormatter', 'CGImageSourceStatus', '__CFURL', 'AudioFileMarkerList', 'AUSamplerBankPresetData', 'CGDataProvider', 'AudioFormatInfo', '__SecIdentity', 'sfntCMapExtendedSubHeader', 'MIDIChannelMessage', 'KernOffsetTable', 'CGColorSpaceModel', 'MFMailComposeErrorCode', 'CGFunction', '__SecTrust', 'CFHostInfoType', 'KernSimpleArrayHeader', 'CGFontPostScriptFormat', 'KernStateHeader', 'AudioUnitCocoaViewInfo', 'CGDataConsumer', 'OpaqueMIDIDevice', 'OpaqueCMBlockBuffer', 'AnchorPointTable', 'CGImageDestination', 'CAFInstrumentChunk', 'AudioUnitMeterClipping', '__CFNumber', 'MorxChain', '__CTFontCollection', 'STEntryOne', 'STXEntryTwo', 'ExtendedNoteOnEvent', '__CFArray', 'CGColorRenderingIntent', 'KerxSimpleArrayHeader', 'MorxTable', '_GLKVector3', '_GLKVector2', 'MortTable', 'CGPDFBox', 'AudioUnitParameterValueFromString', '__CFSocket', 'ALCdevice_struct', 'MIDINoteMessage', 'sfntFeatureHeader', 'CGRect', '__SCNetworkInterface', '__CFTree', 'MusicEventUserData', 'TrakTableData', 'MortContextualSubtable', '__CTRun', 'AudioUnitFrequencyResponseBin', 'MortChain', 'MorxInsertionSubtable', 'CGImageMetadata', 'gss_auth_identity', 'AudioUnitMIDIControlMapping', 'CAFChunkHeader', 'PropTable', 'CGPDFScanner', 'OpaqueMusicEventIterator', '__CFFileSecurity', 'AudioUnitNodeConnection', 'OpaqueMIDIDeviceList', 'ExtendedAudioFormatInfo', 'CGRectEdge', 'sfntFontDescriptor', '__CFRunLoopObserver', 'CGPatternTiling', 'MIDINotification', 'MorxLigatureSubtable', 'SFNTLookupSegment', 'MessageComposeResult', 'MIDIThruConnectionEndpoint', 'MusicDeviceStdNoteParams', 'opaqueCMSimpleQueue', 'ALCcontext_struct', 'OpaqueAudioQueue', 'PropLookupSingle', 'CGColor', 'AudioOutputUnitStartAtTimeParams', 'gss_name_t_desc_struct', 'CGFunctionCallbacks', 'CAFPacketTableHeader', 'AudioChannelDescription', 'sfntFeatureName', 'MorxContextualSubtable', 'CVSMPTETime', 'AudioValueRange', 'CGTextEncoding', 'AudioStreamBasicDescription', 'AUNodeRenderCallback', 'AudioPanningInfo', '__CFData', '__CFDate', 'KerxOrderedListEntry', '__CFAllocator', 'OpaqueJSPropertyNameArray', '__SCDynamicStore', 'OpaqueMIDIEntity', 'CFHostClientContext', 'CFNetServiceClientContext', 'AudioUnitPresetMAS_SettingData', 'opaqueCMBufferQueueTriggerToken', 'AudioUnitProperty', 'CAFRegionChunk', 'CGPDFString', '__CFWriteStream', '__CFAttributedString', '__CFStringTokenizer', 'JustWidthDeltaEntry', '__CFSet', 'sfntVariationAxis', '__CFNetDiagnostic', 'CAFOverviewSample', 'sfntCMapEncoding', 'CGVector', '__SCNetworkService', 'opaqueCMSampleBuffer', 'AUHostVersionIdentifier', 'AudioBalanceFade', 'sfntFontRunFeature', 'KerxCoordinateAction', 'sfntCMapSubHeader', 'CVPlanarPixelBufferInfo', 'AUNumVersion', '__CFTimeZone', 'AUSamplerInstrumentData', 'AUPreset', '__CTRunDelegate', 'OpaqueAudioQueueProcessingTap', 'KerxTableHeader', '_NSZone', 'OpaqueExtAudioFile', '__CFRunLoopSource', 'KerxAnchorPointAction', 'OpaqueJSString', 'AudioQueueParameterEvent', '__CFHTTPMessage', 'OpaqueCMClock', 'ScheduledAudioFileRegion', 'STEntryZero', 'gss_channel_bindings_struct', 'sfntVariationHeader', 'AUChannelInfo', 'UIOffset', 'GLKEffectPropertyPrv', 'KerxStateHeader', 'CGLineJoin', 'CGPDFDocument', '__CFBag', 'CFStreamErrorHTTPAuthentication', 'KernOrderedListHeader', '__SCNetworkSet', '__SecKey', 'MIDIObjectAddRemoveNotification', 'sfntDescriptorHeader', 'AudioUnitParameter', 'JustPCActionSubrecord', 'AudioComponentDescription', 
'AudioUnitParameterValueName', 'AudioUnitParameterEvent', 'KerxControlPointAction', 'AudioTimeStamp', 'KernKerningPair', 'gss_buffer_set_desc_struct', 'MortFeatureEntry', 'FontVariation', 'CAFStringID', 'LcarCaretClassEntry', 'AudioUnitParameterStringFromValue', 'ACErrorCode', 'ALMXGlyphEntry', 'LtagTable', '__CTTypesetter', 'AuthorizationOpaqueRef', 'UIEdgeInsets', 'CGPathElement', 'CAFMarker', 'KernTableHeader', 'NoteParamsControlValue', 'SSLContext', 'gss_cred_id_t_desc_struct', 'AudioUnitParameterNameInfo', '__SecCertificate', 'CGDataConsumerCallbacks', 'CGInterpolationQuality', 'CGLineCap', 'MIDIControlTransform', 'BslnFormat1Part', 'CGPDFArray', '__SecPolicy', 'AudioConverterPrimeInfo', '__CTTextTab', '__CFNetServiceMonitor', 'AUInputSamplesInOutputCallbackStruct', '__CTFramesetter', 'CGPDFDataFormat', 'STHeader', 'CVPlanarPixelBufferInfo_YCbCrPlanar', 'MIDIValueMap', 'JustDirectionTable', '__SCBondStatus', 'SFNTLookupSegmentHeader', 'OpaqueCMMemoryPool', 'CGPathDrawingMode', 'CGFont', '__SCNetworkReachability', 'AudioClassDescription', 'CGPoint', 'CAFStrings', '__CFNetServiceBrowser', 'opaqueMTAudioProcessingTap', 'sfntNameRecord', 'CGPDFPage', 'CGLayer', 'ComponentInstanceRecord', 'CAFInfoStrings', 'HostCallbackInfo', 'MusicDeviceNoteParams', 'KernIndexArrayHeader', 'CVPlanarPixelBufferInfo_YCbCrBiPlanar', 'MusicTrackLoopInfo', 'opaqueCMFormatDescription', 'STClassTable', 'sfntDirectoryEntry', 'OpaqueCMTimebase', 'CGDataProviderDirectCallbacks', 'MIDIPacketList', 'CAFOverviewChunk', 'MIDIPacket', 'ScheduledAudioSlice', 'CGDataProviderSequentialCallbacks', 'AudioBuffer', 'MorxRearrangementSubtable', 'CGPatternCallbacks', 'AUDistanceAttenuationData', 'MIDIIOErrorNotification', 'CGPDFContentStream', 'IUnknownVTbl', 'MIDITransform', 'MortInsertionSubtable', 'CABarBeatTime', 'AudioBufferList', 'KerxSubtableHeader', '__CVBuffer', 'AURenderCallbackStruct', 'STXEntryZero', 'JustPCDuctilityAction', 'OpaqueAudioQueueTimeline', 'OpaqueMIDIClient', '__CFPlugInInstance', 'AudioQueueBuffer', '__CFFileDescriptor', 'AudioUnitConnection', '_GKTurnBasedExchangeStatus', 'LcarCaretTable', 'CVPlanarComponentInfo', 'JustWidthDeltaGroup', 'OpaqueAudioComponent', 'ParameterEvent', '__CVPixelBufferPool', '__CTFont', 'OpaqueJSClass', 'CGColorSpace', 'CGSize', 'AUDependentParameter', 'MIDIDriverInterface', 'gss_krb5_rfc1964_keydata', '__CFDateFormatter', 'LtagStringRange', 'CFNetServiceMonitorType', 'gss_iov_buffer_desc_struct', 'AUPresetEvent', 'CFNetServicesError', 'KernOrderedListEntry', '__CFLocale', 'gss_OID_desc_struct', 'AudioUnitPresetMAS_Settings', 'AudioFileMarker', 'JustPCConditionalAddAction', 'BslnFormat3Part', '__CFNotificationCenter', 'MortSwashSubtable', 'AUParameterMIDIMapping', 'OpaqueAudioConverter', 'MIDIRawData', 'CFNetDiagnosticStatusValues', 'sfntNameHeader', '__CFRunLoop', 'MFMailComposeResult', 'CATransform3D', 'OpbdSideValues', 'CAF_SMPTE_Time', 'JustPCAction', 'CGPathElementType', '__CFRunLoopTimer', '__CFError', 'AudioFormatListItem', '__CFReadStream', 'AudioUnitExternalBuffer', 'AudioFileRegion', 'AudioValueTranslation', 'CGImageMetadataTag', 'CAFPeakChunk', 'AudioBytePacketTranslation', 'CFNetworkErrors', 'sfntCMapHeader', '__CFURLEnumerator', '__CFCalendar', '__CFMessagePort', 'STXHeader', 'CGPDFObjectType', 'SFNTLookupArrayHeader')) if __name__ == '__main__': diff --git a/pygments/lexers/_lassobuiltins.py b/pygments/lexers/_lassobuiltins.py index 9a0a89da..aa9d2343 100644 --- a/pygments/lexers/_lassobuiltins.py +++ b/pygments/lexers/_lassobuiltins.py @@ -10,7 +10,7 @@ """ 
BUILTINS = { - 'Types': [ + 'Types': ( 'null', 'void', 'tag', @@ -270,8 +270,8 @@ BUILTINS = { 'web_error_atend', 'web_response_impl', 'web_router' - ], - 'Traits': [ + ), + 'Traits': ( 'trait_asstring', 'any', 'trait_generator', @@ -344,8 +344,8 @@ BUILTINS = { 'web_node_content_html_specialized', 'web_node_content_css_specialized', 'web_node_content_js_specialized' - ], - 'Unbound Methods': [ + ), + 'Unbound Methods': ( 'fail_now', 'register', 'register_thread', @@ -1845,8 +1845,8 @@ BUILTINS = { 'web_response', 'web_router_database', 'web_router_initialize' - ], - 'Lasso 8 Tags': [ + ), + 'Lasso 8 Tags': ( '__char', '__sync_timestamp__', '_admin_addgroup', @@ -3030,10 +3030,10 @@ BUILTINS = { 'xsd_processsimpletype', 'xsd_ref', 'xsd_type' - ] + ) } MEMBERS = { - 'Member Methods': [ + 'Member Methods': ( 'escape_member', 'oncompare', 'sameas', @@ -4720,8 +4720,8 @@ MEMBERS = { 'acceptpost', 'csscontent', 'jscontent' - ], - 'Lasso 8 Member Tags': [ + ), + 'Lasso 8 Member Tags': ( 'accept', 'add', 'addattachment', @@ -5178,5 +5178,5 @@ MEMBERS = { 'xmllang', 'xmlschematype', 'year' - ] + ) } diff --git a/pygments/lexers/_luabuiltins.py b/pygments/lexers/_luabuiltins.py index 40037357..87bfb26e 100644 --- a/pygments/lexers/_luabuiltins.py +++ b/pygments/lexers/_luabuiltins.py @@ -16,7 +16,7 @@ from __future__ import print_function -MODULES = {'basic': ['_G', +MODULES = {'basic': ('_G', '_VERSION', 'assert', 'collectgarbage', @@ -42,14 +42,14 @@ MODULES = {'basic': ['_G', 'tostring', 'type', 'unpack', - 'xpcall'], - 'coroutine': ['coroutine.create', + 'xpcall'), + 'coroutine': ('coroutine.create', 'coroutine.resume', 'coroutine.running', 'coroutine.status', 'coroutine.wrap', - 'coroutine.yield'], - 'debug': ['debug.debug', + 'coroutine.yield'), + 'debug': ('debug.debug', 'debug.getfenv', 'debug.gethook', 'debug.getinfo', @@ -62,8 +62,8 @@ MODULES = {'basic': ['_G', 'debug.setlocal', 'debug.setmetatable', 'debug.setupvalue', - 'debug.traceback'], - 'io': ['io.close', + 'debug.traceback'), + 'io': ('io.close', 'io.flush', 'io.input', 'io.lines', @@ -73,8 +73,8 @@ MODULES = {'basic': ['_G', 'io.read', 'io.tmpfile', 'io.type', - 'io.write'], - 'math': ['math.abs', + 'io.write'), + 'math': ('math.abs', 'math.acos', 'math.asin', 'math.atan2', @@ -103,16 +103,16 @@ MODULES = {'basic': ['_G', 'math.sin', 'math.sqrt', 'math.tanh', - 'math.tan'], - 'modules': ['module', + 'math.tan'), + 'modules': ('module', 'require', 'package.cpath', 'package.loaded', 'package.loadlib', 'package.path', 'package.preload', - 'package.seeall'], - 'os': ['os.clock', + 'package.seeall'), + 'os': ('os.clock', 'os.date', 'os.difftime', 'os.execute', @@ -122,8 +122,8 @@ MODULES = {'basic': ['_G', 'os.rename', 'os.setlocale', 'os.time', - 'os.tmpname'], - 'string': ['string.byte', + 'os.tmpname'), + 'string': ('string.byte', 'string.char', 'string.dump', 'string.find', @@ -136,12 +136,12 @@ MODULES = {'basic': ['_G', 'string.rep', 'string.reverse', 'string.sub', - 'string.upper'], - 'table': ['table.concat', + 'string.upper'), + 'table': ('table.concat', 'table.insert', 'table.maxn', 'table.remove', - 'table.sort']} + 'table.sort')} if __name__ == '__main__': import re @@ -251,5 +251,4 @@ if __name__ == '__main__': regenerate(__file__, modules) - run() diff --git a/pygments/lexers/_openedgebuiltins.py b/pygments/lexers/_openedgebuiltins.py index 4750e80e..76bad1e6 100644 --- a/pygments/lexers/_openedgebuiltins.py +++ b/pygments/lexers/_openedgebuiltins.py @@ -9,7 +9,7 @@ :license: BSD, see LICENSE for details. 
""" -OPENEDGEKEYWORDS = [ +OPENEDGEKEYWORDS = ( 'ABSOLUTE', 'ABS', 'ABSO', 'ABSOL', 'ABSOLU', 'ABSOLUT', 'ACCELERATOR', 'ACCUM', 'ACCUMULATE', 'ACCUM', 'ACCUMU', 'ACCUMUL', 'ACCUMULA', 'ACCUMULAT', 'ACTIVE-FORM', 'ACTIVE-WINDOW', 'ADD', 'ADD-BUFFER', @@ -559,4 +559,4 @@ OPENEDGEKEYWORDS = [ 'XML-DATA-TYPE', 'XML-NODE-TYPE', 'XML-SCHEMA-PATH', 'XML-SUPPRESS-NAMESPACE-PROCESSING', 'X-OF', 'XREF', 'XREF-XML', 'Y', 'YEAR', 'YEAR-OFFSET', 'YES', 'YES-NO', 'YES-NO-CANCEL', 'Y-OF' -] +) diff --git a/pygments/lexers/_phpbuiltins.py b/pygments/lexers/_phpbuiltins.py index 354e995c..6ee91eb0 100644 --- a/pygments/lexers/_phpbuiltins.py +++ b/pygments/lexers/_phpbuiltins.py @@ -18,8 +18,8 @@ from __future__ import print_function -MODULES = {'.NET': ['dotnet_load'], - 'APC': ['apc_add', +MODULES = {'.NET': ('dotnet_load',), + 'APC': ('apc_add', 'apc_bin_dump', 'apc_bin_dumpfile', 'apc_bin_load', @@ -37,8 +37,8 @@ MODULES = {'.NET': ['dotnet_load'], 'apc_inc', 'apc_load_constants', 'apc_sma_info', - 'apc_store'], - 'APD': ['apd_breakpoint', + 'apc_store'), + 'APD': ('apd_breakpoint', 'apd_callstack', 'apd_clunk', 'apd_continue', @@ -53,8 +53,8 @@ MODULES = {'.NET': ['dotnet_load'], 'apd_set_session_trace', 'apd_set_session', 'override_function', - 'rename_function'], - 'Aliases and deprecated Mysqli': ['mysqli_bind_param', + 'rename_function'), + 'Aliases and deprecated Mysqli': ('mysqli_bind_param', 'mysqli_bind_result', 'mysqli_client_encoding', 'mysqli_connect', @@ -72,8 +72,8 @@ MODULES = {'.NET': ['dotnet_load'], 'mysqli_rpl_parse_enabled', 'mysqli_rpl_probe', 'mysqli_send_long_data', - 'mysqli_slave_query'], - 'Apache': ['apache_child_terminate', + 'mysqli_slave_query'), + 'Apache': ('apache_child_terminate', 'apache_get_modules', 'apache_get_version', 'apache_getenv', @@ -84,8 +84,8 @@ MODULES = {'.NET': ['dotnet_load'], 'apache_response_headers', 'apache_setenv', 'getallheaders', - 'virtual'], - 'Array': ['array_change_key_case', + 'virtual'), + 'Array': ('array_change_key_case', 'array_chunk', 'array_column', 'array_combine', @@ -163,15 +163,15 @@ MODULES = {'.NET': ['dotnet_load'], 'sort', 'uasort', 'uksort', - 'usort'], - 'BBCode': ['bbcode_add_element', + 'usort'), + 'BBCode': ('bbcode_add_element', 'bbcode_add_smiley', 'bbcode_create', 'bbcode_destroy', 'bbcode_parse', 'bbcode_set_arg_parser', - 'bbcode_set_flags'], - 'BC Math': ['bcadd', + 'bbcode_set_flags'), + 'BC Math': ('bcadd', 'bccomp', 'bcdiv', 'bcmod', @@ -180,9 +180,9 @@ MODULES = {'.NET': ['dotnet_load'], 'bcpowmod', 'bcscale', 'bcsqrt', - 'bcsub'], - 'Blenc': ['blenc_encrypt'], - 'Bzip2': ['bzclose', + 'bcsub'), + 'Blenc': ('blenc_encrypt',), + 'Bzip2': ('bzclose', 'bzcompress', 'bzdecompress', 'bzerrno', @@ -191,8 +191,8 @@ MODULES = {'.NET': ['dotnet_load'], 'bzflush', 'bzopen', 'bzread', - 'bzwrite'], - 'COM': ['com_addref', + 'bzwrite'), + 'COM': ('com_addref', 'com_create_guid', 'com_event_sink', 'com_get_active_object', @@ -233,8 +233,8 @@ MODULES = {'.NET': ['dotnet_load'], 'variant_set_type', 'variant_set', 'variant_sub', - 'variant_xor'], - 'CUBRID': ['cubrid_bind', + 'variant_xor'), + 'CUBRID': ('cubrid_bind', 'cubrid_close_prepare', 'cubrid_close_request', 'cubrid_col_get', @@ -301,8 +301,8 @@ MODULES = {'.NET': ['dotnet_load'], 'cubrid_set_db_parameter', 'cubrid_set_drop', 'cubrid_set_query_timeout', - 'cubrid_version'], - 'Cairo': ['cairo_create', + 'cubrid_version'), + 'Cairo': ('cairo_create', 'cairo_font_face_get_type', 'cairo_font_face_status', 'cairo_font_options_create', @@ -396,8 +396,8 @@ MODULES = {'.NET': 
['dotnet_load'], 'cairo_surface_write_to_png', 'cairo_svg_surface_create', 'cairo_svg_surface_restrict_to_version', - 'cairo_svg_version_to_string'], - 'Calendar': ['cal_days_in_month', + 'cairo_svg_version_to_string'), + 'Calendar': ('cal_days_in_month', 'cal_from_jd', 'cal_info', 'cal_to_jd', @@ -414,8 +414,8 @@ MODULES = {'.NET': ['dotnet_load'], 'jdtounix', 'JewishToJD', 'JulianToJD', - 'unixtojd'], - 'Classes/Object': ['__autoload', + 'unixtojd'), + 'Classes/Object': ('__autoload', 'call_user_method_array', 'call_user_method', 'class_alias', @@ -434,18 +434,18 @@ MODULES = {'.NET': ['dotnet_load'], 'is_subclass_of', 'method_exists', 'property_exists', - 'trait_exists'], - 'Classkit': ['classkit_import', + 'trait_exists'), + 'Classkit': ('classkit_import', 'classkit_method_add', 'classkit_method_copy', 'classkit_method_redefine', 'classkit_method_remove', - 'classkit_method_rename'], - 'Crack': ['crack_check', + 'classkit_method_rename'), + 'Crack': ('crack_check', 'crack_closedict', 'crack_getlastmessage', - 'crack_opendict'], - 'Ctype': ['ctype_alnum', + 'crack_opendict'), + 'Ctype': ('ctype_alnum', 'ctype_alpha', 'ctype_cntrl', 'ctype_digit', @@ -455,14 +455,14 @@ MODULES = {'.NET': ['dotnet_load'], 'ctype_punct', 'ctype_space', 'ctype_upper', - 'ctype_xdigit'], - 'Cyrus': ['cyrus_authenticate', + 'ctype_xdigit'), + 'Cyrus': ('cyrus_authenticate', 'cyrus_bind', 'cyrus_close', 'cyrus_connect', 'cyrus_query', - 'cyrus_unbind'], - 'DB++': ['dbplus_add', + 'cyrus_unbind'), + 'DB++': ('dbplus_add', 'dbplus_aql', 'dbplus_chdir', 'dbplus_close', @@ -508,8 +508,8 @@ MODULES = {'.NET': ['dotnet_load'], 'dbplus_unselect', 'dbplus_update', 'dbplus_xlockrel', - 'dbplus_xunlockrel'], - 'DBA': ['dba_close', + 'dbplus_xunlockrel'), + 'DBA': ('dba_close', 'dba_delete', 'dba_exists', 'dba_fetch', @@ -523,9 +523,9 @@ MODULES = {'.NET': ['dotnet_load'], 'dba_optimize', 'dba_popen', 'dba_replace', - 'dba_sync'], - 'DOM': ['dom_import_simplexml'], - 'Date/Time': ['checkdate', + 'dba_sync'), + 'DOM': ('dom_import_simplexml',), + 'Date/Time': ('checkdate', 'date_add', 'date_create_from_format', 'date_create_immutable_from_format', @@ -575,8 +575,8 @@ MODULES = {'.NET': ['dotnet_load'], 'timezone_offset_get', 'timezone_open', 'timezone_transitions_get', - 'timezone_version_get'], - 'Direct IO': ['dio_close', + 'timezone_version_get'), + 'Direct IO': ('dio_close', 'dio_fcntl', 'dio_open', 'dio_read', @@ -584,8 +584,8 @@ MODULES = {'.NET': ['dotnet_load'], 'dio_stat', 'dio_tcsetattr', 'dio_truncate', - 'dio_write'], - 'Directory': ['chdir', + 'dio_write'), + 'Directory': ('chdir', 'chroot', 'closedir', 'dir', @@ -593,8 +593,8 @@ MODULES = {'.NET': ['dotnet_load'], 'opendir', 'readdir', 'rewinddir', - 'scandir'], - 'Eio': ['eio_busy', + 'scandir'), + 'Eio': ('eio_busy', 'eio_cancel', 'eio_chmod', 'eio_chown', @@ -652,8 +652,8 @@ MODULES = {'.NET': ['dotnet_load'], 'eio_truncate', 'eio_unlink', 'eio_utime', - 'eio_write'], - 'Enchant': ['enchant_broker_describe', + 'eio_write'), + 'Enchant': ('enchant_broker_describe', 'enchant_broker_dict_exists', 'enchant_broker_free_dict', 'enchant_broker_free', @@ -671,8 +671,8 @@ MODULES = {'.NET': ['dotnet_load'], 'enchant_dict_is_in_session', 'enchant_dict_quick_check', 'enchant_dict_store_replacement', - 'enchant_dict_suggest'], - 'Error Handling': ['debug_backtrace', + 'enchant_dict_suggest'), + 'Error Handling': ('debug_backtrace', 'debug_print_backtrace', 'error_get_last', 'error_log', @@ -682,14 +682,14 @@ MODULES = {'.NET': ['dotnet_load'], 'set_error_handler', 
'set_exception_handler', 'trigger_error', - 'user_error'], - 'Exif': ['exif_imagetype', + 'user_error'), + 'Exif': ('exif_imagetype', 'exif_read_data', 'exif_tagname', 'exif_thumbnail', - 'read_exif_data'], - 'Expect': ['expect_expectl', 'expect_popen'], - 'FAM': ['fam_cancel_monitor', + 'read_exif_data'), + 'Expect': ('expect_expectl', 'expect_popen'), + 'FAM': ('fam_cancel_monitor', 'fam_close', 'fam_monitor_collection', 'fam_monitor_directory', @@ -698,8 +698,8 @@ MODULES = {'.NET': ['dotnet_load'], 'fam_open', 'fam_pending', 'fam_resume_monitor', - 'fam_suspend_monitor'], - 'FDF': ['fdf_add_doc_javascript', + 'fam_suspend_monitor'), + 'FDF': ('fdf_add_doc_javascript', 'fdf_add_template', 'fdf_close', 'fdf_create', @@ -733,9 +733,9 @@ MODULES = {'.NET': ['dotnet_load'], 'fdf_set_submit_form_action', 'fdf_set_target_frame', 'fdf_set_value', - 'fdf_set_version'], - 'FPM': ['fastcgi_finish_request'], - 'FTP': ['ftp_alloc', + 'fdf_set_version'), + 'FPM': ('fastcgi_finish_request',), + 'FTP': ('ftp_alloc', 'ftp_cdup', 'ftp_chdir', 'ftp_chmod', @@ -768,8 +768,8 @@ MODULES = {'.NET': ['dotnet_load'], 'ftp_site', 'ftp_size', 'ftp_ssl_connect', - 'ftp_systype'], - 'Fann': ['fann_cascadetrain_on_data', + 'ftp_systype'), + 'Fann': ('fann_cascadetrain_on_data', 'fann_cascadetrain_on_file', 'fann_clear_scaling_params', 'fann_copy', @@ -909,14 +909,14 @@ MODULES = {'.NET': ['dotnet_load'], 'fann_train_epoch', 'fann_train_on_data', 'fann_train_on_file', - 'fann_train'], - 'Fileinfo': ['finfo_buffer', + 'fann_train'), + 'Fileinfo': ('finfo_buffer', 'finfo_close', 'finfo_file', 'finfo_open', 'finfo_set_flags', - 'mime_content_type'], - 'Filesystem': ['basename', + 'mime_content_type'), + 'Filesystem': ('basename', 'chgrp', 'chmod', 'chown', @@ -995,15 +995,15 @@ MODULES = {'.NET': ['dotnet_load'], 'tmpfile', 'touch', 'umask', - 'unlink'], - 'Filter': ['filter_has_var', + 'unlink'), + 'Filter': ('filter_has_var', 'filter_id', 'filter_input_array', 'filter_input', 'filter_list', 'filter_var_array', - 'filter_var'], - 'Firebird/InterBase': ['ibase_add_user', + 'filter_var'), + 'Firebird/InterBase': ('ibase_add_user', 'ibase_affected_rows', 'ibase_backup', 'ibase_blob_add', @@ -1050,9 +1050,9 @@ MODULES = {'.NET': ['dotnet_load'], 'ibase_service_detach', 'ibase_set_event_handler', 'ibase_trans', - 'ibase_wait_event'], - 'FriBiDi': ['fribidi_log2vis'], - 'FrontBase': ['fbsql_affected_rows', + 'ibase_wait_event'), + 'FriBiDi': ('fribidi_log2vis',), + 'FrontBase': ('fbsql_affected_rows', 'fbsql_autocommit', 'fbsql_blob_size', 'fbsql_change_user', @@ -1111,8 +1111,8 @@ MODULES = {'.NET': ['dotnet_load'], 'fbsql_table_name', 'fbsql_tablename', 'fbsql_username', - 'fbsql_warnings'], - 'Function handling': ['call_user_func_array', + 'fbsql_warnings'), + 'Function handling': ('call_user_func_array', 'call_user_func', 'create_function', 'forward_static_call_array', @@ -1124,8 +1124,8 @@ MODULES = {'.NET': ['dotnet_load'], 'get_defined_functions', 'register_shutdown_function', 'register_tick_function', - 'unregister_tick_function'], - 'GD and Image': ['gd_info', + 'unregister_tick_function'), + 'GD and Image': ('gd_info', 'getimagesize', 'getimagesizefromstring', 'image_type_to_extension', @@ -1238,8 +1238,8 @@ MODULES = {'.NET': ['dotnet_load'], 'iptcembed', 'iptcparse', 'jpeg2wbmp', - 'png2wbmp'], - 'GMP': ['gmp_abs', + 'png2wbmp'), + 'GMP': ('gmp_abs', 'gmp_add', 'gmp_and', 'gmp_clrbit', @@ -1279,8 +1279,8 @@ MODULES = {'.NET': ['dotnet_load'], 'gmp_strval', 'gmp_sub', 'gmp_testbit', - 'gmp_xor'], - 'GeoIP': 
['geoip_asnum_by_name', + 'gmp_xor'), + 'GeoIP': ('geoip_asnum_by_name', 'geoip_continent_code_by_name', 'geoip_country_code_by_name', 'geoip_country_code3_by_name', @@ -1298,8 +1298,8 @@ MODULES = {'.NET': ['dotnet_load'], 'geoip_region_by_name', 'geoip_region_name_by_code', 'geoip_setup_custom_directory', - 'geoip_time_zone_by_country_and_region'], - 'Gettext': ['bind_textdomain_codeset', + 'geoip_time_zone_by_country_and_region'), + 'Gettext': ('bind_textdomain_codeset', 'bindtextdomain', 'dcgettext', 'dcngettext', @@ -1307,8 +1307,8 @@ MODULES = {'.NET': ['dotnet_load'], 'dngettext', 'gettext', 'ngettext', - 'textdomain'], - 'GnuPG': ['gnupg_adddecryptkey', + 'textdomain'), + 'GnuPG': ('gnupg_adddecryptkey', 'gnupg_addencryptkey', 'gnupg_addsignkey', 'gnupg_cleardecryptkeys', @@ -1328,9 +1328,9 @@ MODULES = {'.NET': ['dotnet_load'], 'gnupg_seterrormode', 'gnupg_setsignmode', 'gnupg_sign', - 'gnupg_verify'], - 'Gopher': ['gopher_parsedir'], - 'Grapheme': ['grapheme_extract', + 'gnupg_verify'), + 'Gopher': ('gopher_parsedir',), + 'Grapheme': ('grapheme_extract', 'grapheme_stripos', 'grapheme_stristr', 'grapheme_strlen', @@ -1338,8 +1338,8 @@ MODULES = {'.NET': ['dotnet_load'], 'grapheme_strripos', 'grapheme_strrpos', 'grapheme_strstr', - 'grapheme_substr'], - 'Gupnp': ['gupnp_context_get_host_ip', + 'grapheme_substr'), + 'Gupnp': ('gupnp_context_get_host_ip', 'gupnp_context_get_port', 'gupnp_context_get_subscription_timeout', 'gupnp_context_host_path', @@ -1376,8 +1376,8 @@ MODULES = {'.NET': ['dotnet_load'], 'gupnp_service_proxy_get_subscribed', 'gupnp_service_proxy_remove_notify', 'gupnp_service_proxy_set_subscribed', - 'gupnp_service_thaw_notify'], - 'HTTP': ['http_cache_etag', + 'gupnp_service_thaw_notify'), + 'HTTP': ('http_cache_etag', 'http_cache_last_modified', 'http_chunked_decode', 'http_deflate', @@ -1427,8 +1427,8 @@ MODULES = {'.NET': ['dotnet_load'], 'http_send_stream', 'http_throttle', 'http_build_str', - 'http_build_url'], - 'Hash': ['hash_algos', + 'http_build_url'), + 'Hash': ('hash_algos', 'hash_copy', 'hash_file', 'hash_final', @@ -1439,8 +1439,8 @@ MODULES = {'.NET': ['dotnet_load'], 'hash_update_file', 'hash_update_stream', 'hash_update', - 'hash'], - 'Hyperwave': ['hw_Array2Objrec', + 'hash'), + 'Hyperwave': ('hw_Array2Objrec', 'hw_changeobject', 'hw_Children', 'hw_ChildrenObj', @@ -1501,12 +1501,12 @@ MODULES = {'.NET': ['dotnet_load'], 'hw_setlinkroot', 'hw_stat', 'hw_Unlock', - 'hw_Who'], - 'Hyperwave API': ['hwapi_attribute_new', + 'hw_Who'), + 'Hyperwave API': ('hwapi_attribute_new', 'hwapi_content_new', 'hwapi_hgcsp', - 'hwapi_object_new'], - 'IBM DB2': ['db2_autocommit', + 'hwapi_object_new'), + 'IBM DB2': ('db2_autocommit', 'db2_bind_param', 'db2_client_info', 'db2_close', @@ -1556,8 +1556,8 @@ MODULES = {'.NET': ['dotnet_load'], 'db2_stmt_error', 'db2_stmt_errormsg', 'db2_table_privileges', - 'db2_tables'], - 'ID3': ['id3_get_frame_long_name', + 'db2_tables'), + 'ID3': ('id3_get_frame_long_name', 'id3_get_frame_short_name', 'id3_get_genre_id', 'id3_get_genre_list', @@ -1565,9 +1565,9 @@ MODULES = {'.NET': ['dotnet_load'], 'id3_get_tag', 'id3_get_version', 'id3_remove_tag', - 'id3_set_tag'], - 'IDN': ['grapheme_substr', 'idn_to_ascii', 'idn_to_unicode', 'idn_to_utf8'], - 'IIS': ['iis_add_server', + 'id3_set_tag'), + 'IDN': ('grapheme_substr', 'idn_to_ascii', 'idn_to_unicode', 'idn_to_utf8'), + 'IIS': ('iis_add_server', 'iis_get_dir_security', 'iis_get_script_map', 'iis_get_server_by_comment', @@ -1582,8 +1582,8 @@ MODULES = {'.NET': ['dotnet_load'], 
'iis_start_server', 'iis_start_service', 'iis_stop_server', - 'iis_stop_service'], - 'IMAP': ['imap_8bit', + 'iis_stop_service'), + 'IMAP': ('imap_8bit', 'imap_alerts', 'imap_append', 'imap_base64', @@ -1655,8 +1655,8 @@ MODULES = {'.NET': ['dotnet_load'], 'imap_unsubscribe', 'imap_utf7_decode', 'imap_utf7_encode', - 'imap_utf8'], - 'Informix': ['ifx_affected_rows', + 'imap_utf8'), + 'Informix': ('ifx_affected_rows', 'ifx_blobinfile_mode', 'ifx_byteasvarchar', 'ifx_close', @@ -1693,8 +1693,8 @@ MODULES = {'.NET': ['dotnet_load'], 'ifxus_read_slob', 'ifxus_seek_slob', 'ifxus_tell_slob', - 'ifxus_write_slob'], - 'Ingres': ['ingres_autocommit_state', + 'ifxus_write_slob'), + 'Ingres': ('ingres_autocommit_state', 'ingres_autocommit', 'ingres_charset', 'ingres_close', @@ -1727,19 +1727,19 @@ MODULES = {'.NET': ['dotnet_load'], 'ingres_result_seek', 'ingres_rollback', 'ingres_set_environment', - 'ingres_unbuffered_query'], - 'Inotify': ['inotify_add_watch', + 'ingres_unbuffered_query'), + 'Inotify': ('inotify_add_watch', 'inotify_init', 'inotify_queue_len', 'inotify_read', - 'inotify_rm_watch'], - 'JSON': ['json_decode', + 'inotify_rm_watch'), + 'JSON': ('json_decode', 'json_encode', 'json_last_error_msg', - 'json_last_error'], - 'Java': ['java_last_exception_clear', 'java_last_exception_get'], - 'Judy': ['judy_type', 'judy_version'], - 'KADM5': ['kadm5_chpass_principal', + 'json_last_error'), + 'Java': ('java_last_exception_clear', 'java_last_exception_get'), + 'Judy': ('judy_type', 'judy_version'), + 'KADM5': ('kadm5_chpass_principal', 'kadm5_create_principal', 'kadm5_delete_principal', 'kadm5_destroy', @@ -1748,8 +1748,8 @@ MODULES = {'.NET': ['dotnet_load'], 'kadm5_get_principal', 'kadm5_get_principals', 'kadm5_init_with_password', - 'kadm5_modify_principal'], - 'LDAP': ['ldap_8859_to_t61', + 'kadm5_modify_principal'), + 'LDAP': ('ldap_8859_to_t61', 'ldap_add', 'ldap_bind', 'ldap_close', @@ -1793,9 +1793,9 @@ MODULES = {'.NET': ['dotnet_load'], 'ldap_sort', 'ldap_start_tls', 'ldap_t61_to_8859', - 'ldap_unbind'], - 'LZF': ['lzf_compress', 'lzf_decompress', 'lzf_optimized_for'], - 'Libevent': ['event_add', + 'ldap_unbind'), + 'LZF': ('lzf_compress', 'lzf_decompress', 'lzf_optimized_for'), + 'Libevent': ('event_add', 'event_base_free', 'event_base_loop', 'event_base_loopbreak', @@ -1818,8 +1818,8 @@ MODULES = {'.NET': ['dotnet_load'], 'event_del', 'event_free', 'event_new', - 'event_set'], - 'Lotus Notes': ['notes_body', + 'event_set'), + 'Lotus Notes': ('notes_body', 'notes_copy_db', 'notes_create_db', 'notes_create_note', @@ -1832,8 +1832,8 @@ MODULES = {'.NET': ['dotnet_load'], 'notes_nav_create', 'notes_search', 'notes_unread', - 'notes_version'], - 'MCVE': ['m_checkstatus', + 'notes_version'), + 'MCVE': ('m_checkstatus', 'm_completeauthorizations', 'm_connect', 'm_connectionerror', @@ -1871,9 +1871,9 @@ MODULES = {'.NET': ['dotnet_load'], 'm_uwait', 'm_validateidentifier', 'm_verifyconnection', - 'm_verifysslcert'], - 'Mail': ['ezmlm_hash', 'mail'], - 'Mailparse': ['mailparse_determine_best_xfer_encoding', + 'm_verifysslcert'), + 'Mail': ('ezmlm_hash', 'mail'), + 'Mailparse': ('mailparse_determine_best_xfer_encoding', 'mailparse_msg_create', 'mailparse_msg_extract_part_file', 'mailparse_msg_extract_part', @@ -1886,8 +1886,8 @@ MODULES = {'.NET': ['dotnet_load'], 'mailparse_msg_parse', 'mailparse_rfc822_parse_addresses', 'mailparse_stream_encode', - 'mailparse_uudecode_all'], - 'Math': ['abs', + 'mailparse_uudecode_all'), + 'Math': ('abs', 'acos', 'acosh', 'asin', @@ -1934,8 +1934,8 @@ 
MODULES = {'.NET': ['dotnet_load'], 'sqrt', 'srand', 'tan', - 'tanh'], - 'MaxDB': ['maxdb_affected_rows', + 'tanh'), + 'MaxDB': ('maxdb_affected_rows', 'maxdb_autocommit', 'maxdb_bind_param', 'maxdb_bind_result', @@ -2036,8 +2036,8 @@ MODULES = {'.NET': ['dotnet_load'], 'maxdb_thread_id', 'maxdb_thread_safe', 'maxdb_use_result', - 'maxdb_warning_count'], - 'Mcrypt': ['mcrypt_cbc', + 'maxdb_warning_count'), + 'Mcrypt': ('mcrypt_cbc', 'mcrypt_cfb', 'mcrypt_create_iv', 'mcrypt_decrypt', @@ -2073,20 +2073,20 @@ MODULES = {'.NET': ['dotnet_load'], 'mcrypt_module_open', 'mcrypt_module_self_test', 'mcrypt_ofb', - 'mdecrypt_generic'], - 'Memcache': ['memcache_debug'], - 'Mhash': ['mhash_count', + 'mdecrypt_generic'), + 'Memcache': ('memcache_debug',), + 'Mhash': ('mhash_count', 'mhash_get_block_size', 'mhash_get_hash_name', 'mhash_keygen_s2k', - 'mhash'], - 'Ming': ['ming_keypress', + 'mhash'), + 'Ming': ('ming_keypress', 'ming_setcubicthreshold', 'ming_setscale', 'ming_setswfcompression', 'ming_useconstants', - 'ming_useswfversion'], - 'Misc.': ['connection_aborted', + 'ming_useswfversion'), + 'Misc.': ('connection_aborted', 'connection_status', 'connection_timeout', 'constant', @@ -2110,9 +2110,9 @@ MODULES = {'.NET': ['dotnet_load'], 'time_sleep_until', 'uniqid', 'unpack', - 'usleep'], - 'Mongo': ['bson_decode', 'bson_encode'], - 'Msession': ['msession_connect', + 'usleep'), + 'Mongo': ('bson_decode', 'bson_encode'), + 'Msession': ('msession_connect', 'msession_count', 'msession_create', 'msession_destroy', @@ -2132,8 +2132,8 @@ MODULES = {'.NET': ['dotnet_load'], 'msession_set', 'msession_timeout', 'msession_uniq', - 'msession_unlock'], - 'Mssql': ['mssql_bind', + 'msession_unlock'), + 'Mssql': ('mssql_bind', 'mssql_close', 'mssql_connect', 'mssql_data_seek', @@ -2162,8 +2162,8 @@ MODULES = {'.NET': ['dotnet_load'], 'mssql_query', 'mssql_result', 'mssql_rows_affected', - 'mssql_select_db'], - 'Multibyte String': ['mb_check_encoding', + 'mssql_select_db'), + 'Multibyte String': ('mb_check_encoding', 'mb_convert_case', 'mb_convert_encoding', 'mb_convert_kana', @@ -2217,8 +2217,8 @@ MODULES = {'.NET': ['dotnet_load'], 'mb_strwidth', 'mb_substitute_character', 'mb_substr_count', - 'mb_substr'], - 'MySQL': ['mysql_affected_rows', + 'mb_substr'), + 'MySQL': ('mysql_affected_rows', 'mysql_client_encoding', 'mysql_close', 'mysql_connect', @@ -2265,9 +2265,9 @@ MODULES = {'.NET': ['dotnet_load'], 'mysql_stat', 'mysql_tablename', 'mysql_thread_id', - 'mysql_unbuffered_query'], - 'Mysqlnd_memcache': ['mysqlnd_memcache_get_config', 'mysqlnd_memcache_set'], - 'Mysqlnd_ms': ['mysqlnd_ms_dump_servers', + 'mysql_unbuffered_query'), + 'Mysqlnd_memcache': ('mysqlnd_memcache_get_config', 'mysqlnd_memcache_set'), + 'Mysqlnd_ms': ('mysqlnd_ms_dump_servers', 'mysqlnd_ms_fabric_select_global', 'mysqlnd_ms_fabric_select_shard', 'mysqlnd_ms_get_last_gtid', @@ -2276,12 +2276,12 @@ MODULES = {'.NET': ['dotnet_load'], 'mysqlnd_ms_match_wild', 'mysqlnd_ms_query_is_select', 'mysqlnd_ms_set_qos', - 'mysqlnd_ms_set_user_pick_server'], - 'Mysqlnd_uh': ['mysqlnd_uh_convert_to_mysqlnd', + 'mysqlnd_ms_set_user_pick_server'), + 'Mysqlnd_uh': ('mysqlnd_uh_convert_to_mysqlnd', 'mysqlnd_uh_set_connection_proxy', - 'mysqlnd_uh_set_statement_proxy'], - 'NSAPI': ['nsapi_request_headers', 'nsapi_response_headers', 'nsapi_virtual'], - 'Ncurses': ['ncurses_addch', + 'mysqlnd_uh_set_statement_proxy'), + 'NSAPI': ('nsapi_request_headers', 'nsapi_response_headers', 'nsapi_virtual'), + 'Ncurses': ('ncurses_addch', 'ncurses_addchnstr', 
'ncurses_addchstr', 'ncurses_addnstr', @@ -2440,8 +2440,8 @@ MODULES = {'.NET': ['dotnet_load'], 'ncurses_wrefresh', 'ncurses_wstandend', 'ncurses_wstandout', - 'ncurses_wvline'], - 'Network': ['checkdnsrr', + 'ncurses_wvline'), + 'Network': ('checkdnsrr', 'closelog', 'define_syslog_variables', 'dns_check_record', @@ -2474,8 +2474,8 @@ MODULES = {'.NET': ['dotnet_load'], 'socket_get_status', 'socket_set_blocking', 'socket_set_timeout', - 'syslog'], - 'Newt': ['newt_bell', + 'syslog'), + 'Newt': ('newt_bell', 'newt_button_bar', 'newt_button', 'newt_centered_window', @@ -2590,9 +2590,9 @@ MODULES = {'.NET': ['dotnet_load'], 'newt_win_menu', 'newt_win_message', 'newt_win_messagev', - 'newt_win_ternary'], - 'OAuth': ['oauth_get_sbs', 'oauth_urlencode'], - 'OCI8': ['oci_bind_array_by_name', + 'newt_win_ternary'), + 'OAuth': ('oauth_get_sbs', 'oauth_urlencode'), + 'OCI8': ('oci_bind_array_by_name', 'oci_bind_by_name', 'oci_cancel', 'oci_client_version', @@ -2639,8 +2639,8 @@ MODULES = {'.NET': ['dotnet_load'], 'oci_set_edition', 'oci_set_module_name', 'oci_set_prefetch', - 'oci_statement_type'], - 'ODBC': ['odbc_autocommit', + 'oci_statement_type'), + 'ODBC': ('odbc_autocommit', 'odbc_binmode', 'odbc_close_all', 'odbc_close', @@ -2684,13 +2684,13 @@ MODULES = {'.NET': ['dotnet_load'], 'odbc_specialcolumns', 'odbc_statistics', 'odbc_tableprivileges', - 'odbc_tables'], - 'OPcache': ['opcache_compile_file', + 'odbc_tables'), + 'OPcache': ('opcache_compile_file', 'opcache_get_configuration', 'opcache_get_status', 'opcache_invalidate', - 'opcache_reset'], - 'Object Aggregation': ['aggregate_info', + 'opcache_reset'), + 'Object Aggregation': ('aggregate_info', 'aggregate_methods_by_list', 'aggregate_methods_by_regexp', 'aggregate_methods', @@ -2699,8 +2699,8 @@ MODULES = {'.NET': ['dotnet_load'], 'aggregate_properties', 'aggregate', 'aggregation_info', - 'deaggregate'], - 'OpenAL': ['openal_buffer_create', + 'deaggregate'), + 'OpenAL': ('openal_buffer_create', 'openal_buffer_data', 'openal_buffer_destroy', 'openal_buffer_get', @@ -2722,8 +2722,8 @@ MODULES = {'.NET': ['dotnet_load'], 'openal_source_rewind', 'openal_source_set', 'openal_source_stop', - 'openal_stream'], - 'OpenSSL': ['openssl_cipher_iv_length', + 'openal_stream'), + 'OpenSSL': ('openssl_cipher_iv_length', 'openssl_csr_export_to_file', 'openssl_csr_export', 'openssl_csr_get_public_key', @@ -2774,8 +2774,8 @@ MODULES = {'.NET': ['dotnet_load'], 'openssl_x509_export', 'openssl_x509_free', 'openssl_x509_parse', - 'openssl_x509_read'], - 'Output Control': ['flush', + 'openssl_x509_read'), + 'Output Control': ('flush', 'ob_clean', 'ob_end_clean', 'ob_end_flush', @@ -2791,8 +2791,8 @@ MODULES = {'.NET': ['dotnet_load'], 'ob_list_handlers', 'ob_start', 'output_add_rewrite_var', - 'output_reset_rewrite_vars'], - 'Ovrimos SQL': ['ovrimos_close', + 'output_reset_rewrite_vars'), + 'Ovrimos SQL': ('ovrimos_close', 'ovrimos_commit', 'ovrimos_connect', 'ovrimos_cursor', @@ -2811,8 +2811,8 @@ MODULES = {'.NET': ['dotnet_load'], 'ovrimos_prepare', 'ovrimos_result_all', 'ovrimos_result', - 'ovrimos_rollback'], - 'PCNTL': ['pcntl_alarm', + 'ovrimos_rollback'), + 'PCNTL': ('pcntl_alarm', 'pcntl_errno', 'pcntl_exec', 'pcntl_fork', @@ -2832,8 +2832,8 @@ MODULES = {'.NET': ['dotnet_load'], 'pcntl_wifsignaled', 'pcntl_wifstopped', 'pcntl_wstopsig', - 'pcntl_wtermsig'], - 'PCRE': ['preg_filter', + 'pcntl_wtermsig'), + 'PCRE': ('preg_filter', 'preg_grep', 'preg_last_error', 'preg_match_all', @@ -2841,8 +2841,8 @@ MODULES = {'.NET': ['dotnet_load'], 
'preg_quote', 'preg_replace_callback', 'preg_replace', - 'preg_split'], - 'PDF': ['PDF_activate_item', + 'preg_split'), + 'PDF': ('PDF_activate_item', 'PDF_add_annotation', 'PDF_add_bookmark', 'PDF_add_launchlink', @@ -3020,8 +3020,8 @@ MODULES = {'.NET': ['dotnet_load'], 'PDF_translate', 'PDF_utf16_to_utf8', 'PDF_utf32_to_utf16', - 'PDF_utf8_to_utf16'], - 'PHP Options/Info': ['assert_options', + 'PDF_utf8_to_utf16'), + 'PHP Options/Info': ('assert_options', 'assert', 'cli_get_process_title', 'cli_set_process_title', @@ -3074,8 +3074,8 @@ MODULES = {'.NET': ['dotnet_load'], 'version_compare', 'zend_logo_guid', 'zend_thread_id', - 'zend_version'], - 'POSIX': ['posix_access', + 'zend_version'), + 'POSIX': ('posix_access', 'posix_ctermid', 'posix_errno', 'posix_get_last_error', @@ -3110,15 +3110,15 @@ MODULES = {'.NET': ['dotnet_load'], 'posix_strerror', 'posix_times', 'posix_ttyname', - 'posix_uname'], - 'POSIX Regex': ['ereg_replace', + 'posix_uname'), + 'POSIX Regex': ('ereg_replace', 'ereg', 'eregi_replace', 'eregi', 'split', 'spliti', - 'sql_regcase'], - 'PS': ['ps_add_bookmark', + 'sql_regcase'), + 'PS': ('ps_add_bookmark', 'ps_add_launchlink', 'ps_add_locallink', 'ps_add_note', @@ -3195,8 +3195,8 @@ MODULES = {'.NET': ['dotnet_load'], 'ps_symbol_name', 'ps_symbol_width', 'ps_symbol', - 'ps_translate'], - 'Paradox': ['px_close', + 'ps_translate'), + 'Paradox': ('px_close', 'px_create_fp', 'px_date2string', 'px_delete_record', @@ -3220,15 +3220,15 @@ MODULES = {'.NET': ['dotnet_load'], 'px_set_targetencoding', 'px_set_value', 'px_timestamp2string', - 'px_update_record'], - 'Parsekit': ['parsekit_compile_file', + 'px_update_record'), + 'Parsekit': ('parsekit_compile_file', 'parsekit_compile_string', - 'parsekit_func_arginfo'], - 'Password Hashing': ['password_get_info', + 'parsekit_func_arginfo'), + 'Password Hashing': ('password_get_info', 'password_hash', 'password_needs_rehash', - 'password_verify'], - 'PostgreSQL': ['pg_affected_rows', + 'password_verify'), + 'PostgreSQL': ('pg_affected_rows', 'pg_cancel_query', 'pg_client_encoding', 'pg_close', @@ -3312,8 +3312,8 @@ MODULES = {'.NET': ['dotnet_load'], 'pg_unescape_bytea', 'pg_untrace', 'pg_update', - 'pg_version'], - 'Printer': ['printer_abort', + 'pg_version'), + 'Printer': ('printer_abort', 'printer_close', 'printer_create_brush', 'printer_create_dc', @@ -3343,9 +3343,9 @@ MODULES = {'.NET': ['dotnet_load'], 'printer_set_option', 'printer_start_doc', 'printer_start_page', - 'printer_write'], - 'Proctitle': ['setproctitle', 'setthreadtitle'], - 'Program execution': ['escapeshellarg', + 'printer_write'), + 'Proctitle': ('setproctitle', 'setthreadtitle'), + 'Program execution': ('escapeshellarg', 'escapeshellcmd', 'exec', 'passthru', @@ -3355,8 +3355,8 @@ MODULES = {'.NET': ['dotnet_load'], 'proc_open', 'proc_terminate', 'shell_exec', - 'system'], - 'Pspell': ['pspell_add_to_personal', + 'system'), + 'Pspell': ('pspell_add_to_personal', 'pspell_add_to_session', 'pspell_check', 'pspell_clear_session', @@ -3374,13 +3374,13 @@ MODULES = {'.NET': ['dotnet_load'], 'pspell_new', 'pspell_save_wordlist', 'pspell_store_replacement', - 'pspell_suggest'], - 'RPM Reader': ['rpm_close', + 'pspell_suggest'), + 'RPM Reader': ('rpm_close', 'rpm_get_tag', 'rpm_is_valid', 'rpm_open', - 'rpm_version'], - 'RRD': ['rrd_create', + 'rpm_version'), + 'RRD': ('rrd_create', 'rrd_error', 'rrd_fetch', 'rrd_first', @@ -3393,8 +3393,8 @@ MODULES = {'.NET': ['dotnet_load'], 'rrd_update', 'rrd_version', 'rrd_xport', - 'rrdc_disconnect'], - 'Radius': 
['radius_acct_open', + 'rrdc_disconnect'), + 'Radius': ('radius_acct_open', 'radius_add_server', 'radius_auth_open', 'radius_close', @@ -3421,9 +3421,9 @@ MODULES = {'.NET': ['dotnet_load'], 'radius_salt_encrypt_attr', 'radius_send_request', 'radius_server_secret', - 'radius_strerror'], - 'Rar': ['rar_wrapper_cache_stats'], - 'Readline': ['readline_add_history', + 'radius_strerror'), + 'Rar': ('rar_wrapper_cache_stats',), + 'Readline': ('readline_add_history', 'readline_callback_handler_install', 'readline_callback_handler_remove', 'readline_callback_read_char', @@ -3435,9 +3435,9 @@ MODULES = {'.NET': ['dotnet_load'], 'readline_read_history', 'readline_redisplay', 'readline_write_history', - 'readline'], - 'Recode': ['recode_file', 'recode_string', 'recode'], - 'SNMP': ['snmp_get_quick_print', + 'readline'), + 'Recode': ('recode_file', 'recode_string', 'recode'), + 'SNMP': ('snmp_get_quick_print', 'snmp_get_valueretrieval', 'snmp_read_mib', 'snmp_set_enum_print', @@ -3460,9 +3460,9 @@ MODULES = {'.NET': ['dotnet_load'], 'snmprealwalk', 'snmpset', 'snmpwalk', - 'snmpwalkoid'], - 'SOAP': ['is_soap_fault', 'use_soap_error_handler'], - 'SPL': ['class_implements', + 'snmpwalkoid'), + 'SOAP': ('is_soap_fault', 'use_soap_error_handler'), + 'SPL': ('class_implements', 'class_parents', 'class_uses', 'iterator_apply', @@ -3475,9 +3475,9 @@ MODULES = {'.NET': ['dotnet_load'], 'spl_autoload_unregister', 'spl_autoload', 'spl_classes', - 'spl_object_hash'], - 'SPPLUS': ['calcul_hmac', 'calculhmac', 'nthmac', 'signeurlpaiement'], - 'SQLSRV': ['sqlsrv_begin_transaction', + 'spl_object_hash'), + 'SPPLUS': ('calcul_hmac', 'calculhmac', 'nthmac', 'signeurlpaiement'), + 'SQLSRV': ('sqlsrv_begin_transaction', 'sqlsrv_cancel', 'sqlsrv_client_info', 'sqlsrv_close', @@ -3502,8 +3502,8 @@ MODULES = {'.NET': ['dotnet_load'], 'sqlsrv_rollback', 'sqlsrv_rows_affected', 'sqlsrv_send_stream_data', - 'sqlsrv_server_info'], - 'SQLite': ['sqlite_array_query', + 'sqlsrv_server_info'), + 'SQLite': ('sqlite_array_query', 'sqlite_busy_timeout', 'sqlite_changes', 'sqlite_close', @@ -3542,8 +3542,8 @@ MODULES = {'.NET': ['dotnet_load'], 'sqlite_udf_decode_binary', 'sqlite_udf_encode_binary', 'sqlite_unbuffered_query', - 'sqlite_valid'], - 'SSH2': ['ssh2_auth_agent', + 'sqlite_valid'), + 'SSH2': ('ssh2_auth_agent', 'ssh2_auth_hostbased_file', 'ssh2_auth_none', 'ssh2_auth_password', @@ -3571,8 +3571,8 @@ MODULES = {'.NET': ['dotnet_load'], 'ssh2_sftp_unlink', 'ssh2_sftp', 'ssh2_shell', - 'ssh2_tunnel'], - 'SVN': ['svn_add', + 'ssh2_tunnel'), + 'SVN': ('svn_add', 'svn_auth_get_parameter', 'svn_auth_set_parameter', 'svn_blame', @@ -3619,8 +3619,8 @@ MODULES = {'.NET': ['dotnet_load'], 'svn_repos_recover', 'svn_revert', 'svn_status', - 'svn_update'], - 'SWF': ['swf_actiongeturl', + 'svn_update'), + 'SWF': ('swf_actiongeturl', 'swf_actiongotoframe', 'swf_actiongotolabel', 'swf_actionnextframe', @@ -3686,8 +3686,8 @@ MODULES = {'.NET': ['dotnet_load'], 'swf_startsymbol', 'swf_textwidth', 'swf_translate', - 'swf_viewport'], - 'Semaphore': ['ftok', + 'swf_viewport'), + 'Semaphore': ('ftok', 'msg_get_queue', 'msg_queue_exists', 'msg_receive', @@ -3705,8 +3705,8 @@ MODULES = {'.NET': ['dotnet_load'], 'shm_has_var', 'shm_put_var', 'shm_remove_var', - 'shm_remove'], - 'Session': ['session_cache_expire', + 'shm_remove'), + 'Session': ('session_cache_expire', 'session_cache_limiter', 'session_commit', 'session_decode', @@ -3727,23 +3727,23 @@ MODULES = {'.NET': ['dotnet_load'], 'session_status', 'session_unregister', 'session_unset', - 
'session_write_close'], - 'Session PgSQL': ['session_pgsql_add_error', + 'session_write_close'), + 'Session PgSQL': ('session_pgsql_add_error', 'session_pgsql_get_error', 'session_pgsql_get_field', 'session_pgsql_reset', 'session_pgsql_set_field', - 'session_pgsql_status'], - 'Shared Memory': ['shmop_close', + 'session_pgsql_status'), + 'Shared Memory': ('shmop_close', 'shmop_delete', 'shmop_open', 'shmop_read', 'shmop_size', - 'shmop_write'], - 'SimpleXML': ['simplexml_import_dom', + 'shmop_write'), + 'SimpleXML': ('simplexml_import_dom', 'simplexml_load_file', - 'simplexml_load_string'], - 'Socket': ['socket_accept', + 'simplexml_load_string'), + 'Socket': ('socket_accept', 'socket_bind', 'socket_clear_error', 'socket_close', @@ -3771,9 +3771,9 @@ MODULES = {'.NET': ['dotnet_load'], 'socket_set_option', 'socket_shutdown', 'socket_strerror', - 'socket_write'], - 'Solr': ['solr_get_version'], - 'Statistic': ['stats_absolute_deviation', + 'socket_write'), + 'Solr': ('solr_get_version',), + 'Statistic': ('stats_absolute_deviation', 'stats_cdf_beta', 'stats_cdf_binomial', 'stats_cdf_cauchy', @@ -3840,9 +3840,9 @@ MODULES = {'.NET': ['dotnet_load'], 'stats_stat_paired_t', 'stats_stat_percentile', 'stats_stat_powersum', - 'stats_variance'], - 'Stomp': ['stomp_connect_error', 'stomp_version'], - 'Stream': ['set_socket_blocking', + 'stats_variance'), + 'Stomp': ('stomp_connect_error', 'stomp_version'), + 'Stream': ('set_socket_blocking', 'stream_bucket_append', 'stream_bucket_make_writeable', 'stream_bucket_new', @@ -3888,8 +3888,8 @@ MODULES = {'.NET': ['dotnet_load'], 'stream_supports_lock', 'stream_wrapper_register', 'stream_wrapper_restore', - 'stream_wrapper_unregister'], - 'String': ['addcslashes', + 'stream_wrapper_unregister'), + 'String': ('addcslashes', 'addslashes', 'bin2hex', 'chop', @@ -3986,8 +3986,8 @@ MODULES = {'.NET': ['dotnet_load'], 'vfprintf', 'vprintf', 'vsprintf', - 'wordwrap'], - 'Sybase': ['sybase_affected_rows', + 'wordwrap'), + 'Sybase': ('sybase_affected_rows', 'sybase_close', 'sybase_connect', 'sybase_data_seek', @@ -4011,10 +4011,10 @@ MODULES = {'.NET': ['dotnet_load'], 'sybase_result', 'sybase_select_db', 'sybase_set_message_handler', - 'sybase_unbuffered_query'], - 'TCP': ['tcpwrap_check'], - 'Taint': ['is_tainted', 'taint', 'untaint'], - 'Tidy': ['ob_tidyhandler', + 'sybase_unbuffered_query'), + 'TCP': ('tcpwrap_check',), + 'Taint': ('is_tainted', 'taint', 'untaint'), + 'Tidy': ('ob_tidyhandler', 'tidy_access_count', 'tidy_config_count', 'tidy_error_count', @@ -4024,9 +4024,9 @@ MODULES = {'.NET': ['dotnet_load'], 'tidy_save_config', 'tidy_set_encoding', 'tidy_setopt', - 'tidy_warning_count'], - 'Tokenizer': ['token_get_all', 'token_name'], - 'Trader': ['trader_acos', + 'tidy_warning_count'), + 'Tokenizer': ('token_get_all', 'token_name'), + 'Trader': ('trader_acos', 'trader_ad', 'trader_add', 'trader_adosc', @@ -4188,8 +4188,8 @@ MODULES = {'.NET': ['dotnet_load'], 'trader_var', 'trader_wclprice', 'trader_willr', - 'trader_wma'], - 'URL': ['base64_decode', + 'trader_wma'), + 'URL': ('base64_decode', 'base64_encode', 'get_headers', 'get_meta_tags', @@ -4198,8 +4198,8 @@ MODULES = {'.NET': ['dotnet_load'], 'rawurldecode', 'rawurlencode', 'urldecode', - 'urlencode'], - 'Uopz': ['uopz_backup', + 'urlencode'), + 'Uopz': ('uopz_backup', 'uopz_compose', 'uopz_copy', 'uopz_delete', @@ -4211,8 +4211,8 @@ MODULES = {'.NET': ['dotnet_load'], 'uopz_redefine', 'uopz_rename', 'uopz_restore', - 'uopz_undefine'], - 'Variable handling': ['boolval', + 'uopz_undefine'), + 
'Variable handling': ('boolval', 'debug_zval_dump', 'doubleval', 'empty', @@ -4245,19 +4245,19 @@ MODULES = {'.NET': ['dotnet_load'], 'unserialize', 'unset', 'var_dump', - 'var_export'], - 'W32api': ['w32api_deftype', + 'var_export'), + 'W32api': ('w32api_deftype', 'w32api_init_dtype', 'w32api_invoke_function', 'w32api_register_function', - 'w32api_set_call_method'], - 'WDDX': ['wddx_add_vars', + 'w32api_set_call_method'), + 'WDDX': ('wddx_add_vars', 'wddx_deserialize', 'wddx_packet_end', 'wddx_packet_start', 'wddx_serialize_value', - 'wddx_serialize_vars'], - 'WinCache': ['wincache_fcache_fileinfo', + 'wddx_serialize_vars'), + 'WinCache': ('wincache_fcache_fileinfo', 'wincache_fcache_meminfo', 'wincache_lock', 'wincache_ocache_fileinfo', @@ -4278,8 +4278,8 @@ MODULES = {'.NET': ['dotnet_load'], 'wincache_ucache_info', 'wincache_ucache_meminfo', 'wincache_ucache_set', - 'wincache_unlock'], - 'XML Parser': ['utf8_decode', + 'wincache_unlock'), + 'XML Parser': ('utf8_decode', 'utf8_encode', 'xml_error_string', 'xml_get_current_byte_index', @@ -4302,8 +4302,8 @@ MODULES = {'.NET': ['dotnet_load'], 'xml_set_object', 'xml_set_processing_instruction_handler', 'xml_set_start_namespace_decl_handler', - 'xml_set_unparsed_entity_decl_handler'], - 'XML-RPC': ['xmlrpc_decode_request', + 'xml_set_unparsed_entity_decl_handler'), + 'XML-RPC': ('xmlrpc_decode_request', 'xmlrpc_decode', 'xmlrpc_encode_request', 'xmlrpc_encode', @@ -4316,8 +4316,8 @@ MODULES = {'.NET': ['dotnet_load'], 'xmlrpc_server_destroy', 'xmlrpc_server_register_introspection_callback', 'xmlrpc_server_register_method', - 'xmlrpc_set_type'], - 'XSLT (PHP 4)': ['xslt_backend_info', + 'xmlrpc_set_type'), + 'XSLT (PHP 4)': ('xslt_backend_info', 'xslt_backend_name', 'xslt_backend_version', 'xslt_create', @@ -4335,12 +4335,12 @@ MODULES = {'.NET': ['dotnet_load'], 'xslt_set_sax_handlers', 'xslt_set_scheme_handler', 'xslt_set_scheme_handlers', - 'xslt_setopt'], - 'Xhprof': ['xhprof_disable', + 'xslt_setopt'), + 'Xhprof': ('xhprof_disable', 'xhprof_enable', 'xhprof_sample_disable', - 'xhprof_sample_enable'], - 'YAZ': ['yaz_addinfo', + 'xhprof_sample_enable'), + 'YAZ': ('yaz_addinfo', 'yaz_ccl_conf', 'yaz_ccl_parse', 'yaz_close', @@ -4364,8 +4364,8 @@ MODULES = {'.NET': ['dotnet_load'], 'yaz_set_option', 'yaz_sort', 'yaz_syntax', - 'yaz_wait'], - 'YP/NIS': ['yp_all', + 'yaz_wait'), + 'YP/NIS': ('yp_all', 'yp_cat', 'yp_err_string', 'yp_errno', @@ -4374,13 +4374,13 @@ MODULES = {'.NET': ['dotnet_load'], 'yp_master', 'yp_match', 'yp_next', - 'yp_order'], - 'Yaml': ['yaml_emit_file', + 'yp_order'), + 'Yaml': ('yaml_emit_file', 'yaml_emit', 'yaml_parse_file', 'yaml_parse_url', - 'yaml_parse'], - 'Zip': ['zip_close', + 'yaml_parse'), + 'Zip': ('zip_close', 'zip_entry_close', 'zip_entry_compressedsize', 'zip_entry_compressionmethod', @@ -4389,8 +4389,8 @@ MODULES = {'.NET': ['dotnet_load'], 'zip_entry_open', 'zip_entry_read', 'zip_open', - 'zip_read'], - 'Zlib': ['gzclose', + 'zip_read'), + 'Zlib': ('gzclose', 'gzcompress', 'gzdecode', 'gzdeflate', @@ -4413,8 +4413,8 @@ MODULES = {'.NET': ['dotnet_load'], 'readgzfile', 'zlib_decode', 'zlib_encode', - 'zlib_get_coding_type'], - 'bcompiler': ['bcompiler_load_exe', + 'zlib_get_coding_type'), + 'bcompiler': ('bcompiler_load_exe', 'bcompiler_load', 'bcompiler_parse_class', 'bcompiler_read', @@ -4426,8 +4426,8 @@ MODULES = {'.NET': ['dotnet_load'], 'bcompiler_write_function', 'bcompiler_write_functions_from_file', 'bcompiler_write_header', - 'bcompiler_write_included_filename'], - 'cURL': ['curl_close', + 
'bcompiler_write_included_filename'), + 'cURL': ('curl_close', 'curl_copy_handle', 'curl_errno', 'curl_error', @@ -4455,9 +4455,9 @@ MODULES = {'.NET': ['dotnet_load'], 'curl_share_setopt', 'curl_strerror', 'curl_unescape', - 'curl_version'], - 'chdb': ['chdb_create'], - 'dBase': ['dbase_add_record', + 'curl_version'), + 'chdb': ('chdb_create',), + 'dBase': ('dbase_add_record', 'dbase_close', 'dbase_create', 'dbase_delete_record', @@ -4468,23 +4468,23 @@ MODULES = {'.NET': ['dotnet_load'], 'dbase_numrecords', 'dbase_open', 'dbase_pack', - 'dbase_replace_record'], - 'dbx': ['dbx_close', + 'dbase_replace_record'), + 'dbx': ('dbx_close', 'dbx_compare', 'dbx_connect', 'dbx_error', 'dbx_escape_string', 'dbx_fetch_row', 'dbx_query', - 'dbx_sort'], - 'filePro': ['filepro_fieldcount', + 'dbx_sort'), + 'filePro': ('filepro_fieldcount', 'filepro_fieldname', 'filepro_fieldtype', 'filepro_fieldwidth', 'filepro_retrieve', 'filepro_rowcount', - 'filepro'], - 'iconv': ['iconv_get_encoding', + 'filepro'), + 'iconv': ('iconv_get_encoding', 'iconv_mime_decode_headers', 'iconv_mime_decode', 'iconv_mime_encode', @@ -4494,20 +4494,20 @@ MODULES = {'.NET': ['dotnet_load'], 'iconv_strrpos', 'iconv_substr', 'iconv', - 'ob_iconv_handler'], - 'inclued': ['inclued_get_data'], - 'intl': ['intl_error_name', + 'ob_iconv_handler'), + 'inclued': ('inclued_get_data',), + 'intl': ('intl_error_name', 'intl_get_error_code', 'intl_get_error_message', - 'intl_is_failure'], - 'libxml': ['libxml_clear_errors', + 'intl_is_failure'), + 'libxml': ('libxml_clear_errors', 'libxml_disable_entity_loader', 'libxml_get_errors', 'libxml_get_last_error', 'libxml_set_external_entity_loader', 'libxml_set_streams_context', - 'libxml_use_internal_errors'], - 'mSQL': ['msql_affected_rows', + 'libxml_use_internal_errors'), + 'mSQL': ('msql_affected_rows', 'msql_close', 'msql_connect', 'msql_create_db', @@ -4546,8 +4546,8 @@ MODULES = {'.NET': ['dotnet_load'], 'msql_result', 'msql_select_db', 'msql_tablename', - 'msql'], - 'mnoGoSearch': ['udm_add_search_limit', + 'msql'), + 'mnoGoSearch': ('udm_add_search_limit', 'udm_alloc_agent_array', 'udm_alloc_agent', 'udm_api_version', @@ -4570,8 +4570,8 @@ MODULES = {'.NET': ['dotnet_load'], 'udm_hash32', 'udm_load_ispell_data', 'udm_open_stored', - 'udm_set_agent_param'], - 'mqseries': ['mqseries_back', + 'udm_set_agent_param'), + 'mqseries': ('mqseries_back', 'mqseries_begin', 'mqseries_close', 'mqseries_cmit', @@ -4584,8 +4584,8 @@ MODULES = {'.NET': ['dotnet_load'], 'mqseries_put1', 'mqseries_put', 'mqseries_set', - 'mqseries_strerror'], - 'mysqlnd_qc': ['mysqlnd_qc_clear_cache', + 'mqseries_strerror'), + 'mysqlnd_qc': ('mysqlnd_qc_clear_cache', 'mysqlnd_qc_get_available_handlers', 'mysqlnd_qc_get_cache_info', 'mysqlnd_qc_get_core_stats', @@ -4594,9 +4594,9 @@ MODULES = {'.NET': ['dotnet_load'], 'mysqlnd_qc_set_cache_condition', 'mysqlnd_qc_set_is_select', 'mysqlnd_qc_set_storage_handler', - 'mysqlnd_qc_set_user_handlers'], - 'qtdom': ['qdom_error', 'qdom_tree'], - 'runkit': ['runkit_class_adopt', + 'mysqlnd_qc_set_user_handlers'), + 'qtdom': ('qdom_error', 'qdom_tree'), + 'runkit': ('runkit_class_adopt', 'runkit_class_emancipate', 'runkit_constant_add', 'runkit_constant_redefine', @@ -4616,11 +4616,11 @@ MODULES = {'.NET': ['dotnet_load'], 'runkit_method_rename', 'runkit_return_value_used', 'runkit_sandbox_output_handler', - 'runkit_superglobals'], - 'ssdeep': ['ssdeep_fuzzy_compare', + 'runkit_superglobals'), + 'ssdeep': ('ssdeep_fuzzy_compare', 'ssdeep_fuzzy_hash_filename', - 'ssdeep_fuzzy_hash'], 
- 'vpopmail': ['vpopmail_add_alias_domain_ex', + 'ssdeep_fuzzy_hash'), + 'vpopmail': ('vpopmail_add_alias_domain_ex', 'vpopmail_add_alias_domain', 'vpopmail_add_domain_ex', 'vpopmail_add_domain', @@ -4636,9 +4636,9 @@ MODULES = {'.NET': ['dotnet_load'], 'vpopmail_del_user', 'vpopmail_error', 'vpopmail_passwd', - 'vpopmail_set_user_quota'], - 'win32ps': ['win32_ps_list_procs', 'win32_ps_stat_mem', 'win32_ps_stat_proc'], - 'win32service': ['win32_continue_service', + 'vpopmail_set_user_quota'), + 'win32ps': ('win32_ps_list_procs', 'win32_ps_stat_mem', 'win32_ps_stat_proc'), + 'win32service': ('win32_continue_service', 'win32_create_service', 'win32_delete_service', 'win32_get_last_control_message', @@ -4647,13 +4647,13 @@ MODULES = {'.NET': ['dotnet_load'], 'win32_set_service_status', 'win32_start_service_ctrl_dispatcher', 'win32_start_service', - 'win32_stop_service'], - 'xattr': ['xattr_get', + 'win32_stop_service'), + 'xattr': ('xattr_get', 'xattr_list', 'xattr_remove', 'xattr_set', - 'xattr_supported'], - 'xdiff': ['xdiff_file_bdiff_size', + 'xattr_supported'), + 'xdiff': ('xdiff_file_bdiff_size', 'xdiff_file_bdiff', 'xdiff_file_bpatch', 'xdiff_file_diff_binary', @@ -4670,7 +4670,7 @@ MODULES = {'.NET': ['dotnet_load'], 'xdiff_string_merge3', 'xdiff_string_patch_binary', 'xdiff_string_patch', - 'xdiff_string_rabdiff']} + 'xdiff_string_rabdiff')} if __name__ == '__main__': import glob diff --git a/pygments/lexers/_postgres_builtins.py b/pygments/lexers/_postgres_builtins.py index 11dc6dec..a11dd6d3 100644 --- a/pygments/lexers/_postgres_builtins.py +++ b/pygments/lexers/_postgres_builtins.py @@ -136,7 +136,7 @@ def update_consts(filename, constname, content): # Autogenerated: please edit them if you like wasting your time. -KEYWORDS = [ +KEYWORDS = ( 'ABORT', 'ABSOLUTE', 'ACCESS', 'ACTION', 'ADD', 'ADMIN', 'AFTER', 'AGGREGATE', 'ALL', 'ALSO', 'ALTER', 'ALWAYS', 'ANALYSE', 'ANALYZE', 'AND', 'ANY', 'ARRAY', 'AS', 'ASC', 'ASSERTION', 'ASSIGNMENT', @@ -199,9 +199,9 @@ KEYWORDS = [ 'XML', 'XMLATTRIBUTES', 'XMLCONCAT', 'XMLELEMENT', 'XMLEXISTS', 'XMLFOREST', 'XMLPARSE', 'XMLPI', 'XMLROOT', 'XMLSERIALIZE', 'YEAR', 'YES', 'ZONE', - ] +) -DATATYPES = [ +DATATYPES = ( 'bigint', 'bigserial', 'bit', 'bit varying', 'bool', 'boolean', 'box', 'bytea', 'char', 'character', 'character varying', 'cidr', 'circle', 'date', 'decimal', 'double precision', 'float4', 'float8', 'inet', @@ -211,23 +211,22 @@ DATATYPES = [ 'smallserial', 'text', 'time', 'timestamp', 'timestamptz', 'timetz', 'tsquery', 'tsvector', 'txid_snapshot', 'uuid', 'varbit', 'varchar', 'with time zone', 'without time zone', 'xml', - ] +) -PSEUDO_TYPES = [ +PSEUDO_TYPES = ( 'any', 'anyelement', 'anyarray', 'anynonarray', 'anyenum', 'anyrange', 'cstring', 'internal', 'language_handler', 'fdw_handler', 'record', 'trigger', 'void', 'opaque', - ] +) # Remove 'trigger' from types -PSEUDO_TYPES = sorted(set(PSEUDO_TYPES) - set(map(str.lower, KEYWORDS))) +PSEUDO_TYPES = tuple(sorted(set(PSEUDO_TYPES) - set(map(str.lower, KEYWORDS)))) -PLPGSQL_KEYWORDS = [ +PLPGSQL_KEYWORDS = ( 'ALIAS', 'CONSTANT', 'DIAGNOSTICS', 'ELSIF', 'EXCEPTION', 'EXIT', 'FOREACH', 'GET', 'LOOP', 'NOTICE', 'OPEN', 'PERFORM', 'QUERY', 'RAISE', 'RETURN', 'REVERSE', 'SQLSTATE', 'WHILE', - ] +) if __name__ == '__main__': update_myself() - diff --git a/pygments/lexers/_scilab_builtins.py b/pygments/lexers/_scilab_builtins.py index 7b27daab..80556352 100644 --- a/pygments/lexers/_scilab_builtins.py +++ b/pygments/lexers/_scilab_builtins.py @@ -31,10 +31,10 @@ # # Then replace "$" by 
"\\$" manually. -functions_kw = ["%XMLAttr_6","%XMLAttr_e","%XMLAttr_i_XMLElem","%XMLAttr_length","%XMLAttr_p","%XMLAttr_size","%XMLDoc_6","%XMLDoc_e","%XMLDoc_i_XMLList","%XMLDoc_p","%XMLElem_6","%XMLElem_e","%XMLElem_i_XMLDoc","%XMLElem_i_XMLElem","%XMLElem_i_XMLList","%XMLElem_p","%XMLList_6","%XMLList_e","%XMLList_i_XMLElem","%XMLList_i_XMLList","%XMLList_length","%XMLList_p","%XMLList_size","%XMLNs_6","%XMLNs_e","%XMLNs_i_XMLElem","%XMLNs_p","%XMLSet_6","%XMLSet_e","%XMLSet_length","%XMLSet_p","%XMLSet_size","%XMLValid_p","%b_i_XMLList","%c_i_XMLAttr","%c_i_XMLDoc","%c_i_XMLElem","%c_i_XMLList","%ce_i_XMLList","%fptr_i_XMLList","%h_i_XMLList","%hm_i_XMLList","%i_abs","%i_cumprod","%i_cumsum","%i_diag","%i_i_XMLList","%i_matrix","%i_max","%i_maxi","%i_min","%i_mini","%i_mput","%i_p","%i_prod","%i_sum","%i_tril","%i_triu","%ip_i_XMLList","%l_i_XMLList","%lss_i_XMLList","%mc_i_XMLList","%msp_full","%msp_i_XMLList","%msp_spget","%p_i_XMLList","%ptr_i_XMLList","%r_i_XMLList","%s_i_XMLList","%sp_i_XMLList","%spb_i_XMLList","%st_i_XMLList","Calendar","ClipBoard","Matplot","Matplot1","PlaySound","TCL_DeleteInterp","TCL_DoOneEvent","TCL_EvalFile","TCL_EvalStr","TCL_ExistArray","TCL_ExistInterp","TCL_ExistVar","TCL_GetVar","TCL_GetVersion","TCL_SetVar","TCL_UnsetVar","TCL_UpVar","_","_code2str","_str2code","about","abs","acos","addcb","addf","addhistory","addinter","amell","and","argn","arl2_ius","ascii","asin","atan","backslash","balanc","banner","base2dec","basename","bdiag","beep","besselh","besseli","besselj","besselk","bessely","beta","bezout","bfinit","blkfc1i","blkslvi","bool2s","browsehistory","browsevar","bsplin3val","buildDocv2","buildouttb","bvode","c_link","calerf","call","callblk","captions","cd","cdfbet","cdfbin","cdfchi","cdfchn","cdff","cdffnc","cdfgam","cdfnbn","cdfnor","cdfpoi","cdft","ceil","champ","champ1","chdir","chol","clc","clean","clear","clear_pixmap","clearfun","clearglobal","closeEditor","closeXcos","code2str","coeff","comp","completion","conj","contour2di","contr","conv2","convstr","copy","copyfile","corr","cos","coserror","createdir","cshep2d","ctree2","ctree3","ctree4","cumprod","cumsum","curblock","curblockc","dasrt","dassl","data2sig","debug","dec2base","deff","definedfields","degree","delbpt","delete","deletefile","delip","delmenu","det","dgettext","dhinf","diag","diary","diffobjs","disp","dispbpt","displayhistory","disposefftwlibrary","dlgamma","dnaupd","dneupd","double","draw","drawaxis","drawlater","drawnow","dsaupd","dsearch","dseupd","duplicate","editor","editvar","emptystr","end_scicosim","ereduc","errcatch","errclear","error","eval_cshep2d","exec","execstr","exists","exit","exp","expm","exportUI","export_to_hdf5","eye","fadj2sp","fec","feval","fft","fftw","fftw_flags","fftw_forget_wisdom","fftwlibraryisloaded","file","filebrowser","fileext","fileinfo","fileparts","filesep","find","findBD","findfiles","floor","format","fort","fprintfMat","freq","frexp","fromc","fromjava","fscanfMat","fsolve","fstair","full","fullpath","funcprot","funptr","gamma","gammaln","geom3d","get","get_absolute_file_path","get_fftw_wisdom","getblocklabel","getcallbackobject","getdate","getdebuginfo","getdefaultlanguage","getdrives","getdynlibext","getenv","getfield","gethistory","gethistoryfile","getinstalledlookandfeels","getio","getlanguage","getlongpathname","getlookandfeel","getmd5","getmemory","getmodules","getos","getpid","getrelativefilename","getscicosvars","getscilabmode","getshortpathname","gettext","getvariablesonstack","getversion","glist","global","glue","grand","grayplo
t","grep","gsort","gstacksize","havewindow","helpbrowser","hess","hinf","historymanager","historysize","host","iconvert","iconvert","ieee","ilib_verbose","imag","impl","import_from_hdf5","imult","inpnvi","int","int16","int2d","int32","int3d","int8","interp","interp2d","interp3d","intg","intppty","inttype","inv","is_handle_valid","isalphanum","isascii","isdef","isdigit","isdir","isequal","isequalbitwise","iserror","isfile","isglobal","isletter","isreal","iswaitingforinput","javaclasspath","javalibrarypath","kron","lasterror","ldiv","ldivf","legendre","length","lib","librarieslist","libraryinfo","linear_interpn","lines","link","linmeq","list","load","loadScicos","loadfftwlibrary","loadhistory","log","log1p","lsq","lsq_splin","lsqrsolve","lsslist","lstcat","lstsize","ltitr","lu","ludel","lufact","luget","lusolve","macr2lst","macr2tree","matfile_close","matfile_listvar","matfile_open","matfile_varreadnext","matfile_varwrite","matrix","max","maxfiles","mclearerr","mclose","meof","merror","messagebox","mfprintf","mfscanf","mget","mgeti","mgetl","mgetstr","min","mlist","mode","model2blk","mopen","move","movefile","mprintf","mput","mputl","mputstr","mscanf","mseek","msprintf","msscanf","mtell","mtlb_mode","mtlb_sparse","mucomp","mulf","nearfloat","newaxes","newest","newfun","nnz","notify","number_properties","ode","odedc","ones","opentk","optim","or","ordmmd","parallel_concurrency","parallel_run","param3d","param3d1","part","pathconvert","pathsep","phase_simulation","plot2d","plot2d1","plot2d2","plot2d3","plot2d4","plot3d","plot3d1","pointer_xproperty","poly","ppol","pppdiv","predef","print","printf","printfigure","printsetupbox","prod","progressionbar","prompt","pwd","qld","qp_solve","qr","raise_window","rand","rankqr","rat","rcond","rdivf","read","read4b","readb","readgateway","readmps","real","realtime","realtimeinit","regexp","relocate_handle","remez","removedir","removelinehistory","res_with_prec","resethistory","residu","resume","return","ricc","ricc_old","rlist","roots","rotate_axes","round","rpem","rtitr","rubberbox","save","saveafterncommands","saveconsecutivecommands","savehistory","schur","sci_haltscicos","sci_tree2","sci_tree3","sci_tree4","sciargs","scicos_debug","scicos_debug_count","scicos_time","scicosim","scinotes","sctree","semidef","set","set_blockerror","set_fftw_wisdom","set_xproperty","setbpt","setdefaultlanguage","setenv","setfield","sethistoryfile","setlanguage","setlookandfeel","setmenu","sfact","sfinit","show_pixmap","show_window","showalluimenushandles","sident","sig2data","sign","simp","simp_mode","sin","size","slash","sleep","sorder","sparse","spchol","spcompack","spec","spget","splin","splin2d","splin3d","spones","sprintf","sqrt","stacksize","str2code","strcat","strchr","strcmp","strcspn","strindex","string","stringbox","stripblanks","strncpy","strrchr","strrev","strsplit","strspn","strstr","strsubst","strtod","strtok","subf","sum","svd","swap_handles","symfcti","syredi","system_getproperty","system_setproperty","ta2lpd","tan","taucs_chdel","taucs_chfact","taucs_chget","taucs_chinfo","taucs_chsolve","tempname","testmatrix","timer","tlist","tohome","tokens","toolbar","toprint","tr_zer","tril","triu","type","typename","uiDisplayTree","uicontextmenu","uicontrol","uigetcolor","uigetdir","uigetfile","uigetfont","uimenu","uint16","uint32","uint8","uipopup","uiputfile","uiwait","ulink","umf_ludel","umf_lufact","umf_luget","umf_luinfo","umf_lusolve","umfpack","unglue","unix","unsetmenu","unzoom","updatebrowsevar","usecanvas","user","var2vec","varn","vec2var","waitbar","warnBlo
ckByUID","warning","what","where","whereis","who","winsid","with_embedded_jre","with_module","writb","write","write4b","x_choose","x_choose_modeless","x_dialog","x_mdialog","xarc","xarcs","xarrows","xchange","xchoicesi","xclick","xcos","xcosAddToolsMenu","xcosConfigureXmlFile","xcosDiagramToScilab","xcosPalCategoryAdd","xcosPalDelete","xcosPalDisable","xcosPalEnable","xcosPalGenerateIcon","xcosPalLoad","xcosPalMove","xcosUpdateBlock","xdel","xfarc","xfarcs","xfpoly","xfpolys","xfrect","xget","xgetech","xgetmouse","xgraduate","xgrid","xlfont","xls_open","xls_read","xmlAddNs","xmlAsNumber","xmlAsText","xmlDTD","xmlDelete","xmlDocument","xmlDump","xmlElement","xmlFormat","xmlGetNsByHref","xmlGetNsByPrefix","xmlGetOpenDocs","xmlIsValidObject","xmlNs","xmlRead","xmlReadStr","xmlRelaxNG","xmlRemove","xmlSchema","xmlSetAttributes","xmlValidate","xmlWrite","xmlXPath","xname","xpause","xpoly","xpolys","xrect","xrects","xs2bmp","xs2eps","xs2gif","xs2jpg","xs2pdf","xs2png","xs2ppm","xs2ps","xs2svg","xsegs","xset","xsetech","xstring","xstringb","xtitle","zeros","znaupd","zneupd","zoom_rect"] +functions_kw = ("%XMLAttr_6","%XMLAttr_e","%XMLAttr_i_XMLElem","%XMLAttr_length","%XMLAttr_p","%XMLAttr_size","%XMLDoc_6","%XMLDoc_e","%XMLDoc_i_XMLList","%XMLDoc_p","%XMLElem_6","%XMLElem_e","%XMLElem_i_XMLDoc","%XMLElem_i_XMLElem","%XMLElem_i_XMLList","%XMLElem_p","%XMLList_6","%XMLList_e","%XMLList_i_XMLElem","%XMLList_i_XMLList","%XMLList_length","%XMLList_p","%XMLList_size","%XMLNs_6","%XMLNs_e","%XMLNs_i_XMLElem","%XMLNs_p","%XMLSet_6","%XMLSet_e","%XMLSet_length","%XMLSet_p","%XMLSet_size","%XMLValid_p","%b_i_XMLList","%c_i_XMLAttr","%c_i_XMLDoc","%c_i_XMLElem","%c_i_XMLList","%ce_i_XMLList","%fptr_i_XMLList","%h_i_XMLList","%hm_i_XMLList","%i_abs","%i_cumprod","%i_cumsum","%i_diag","%i_i_XMLList","%i_matrix","%i_max","%i_maxi","%i_min","%i_mini","%i_mput","%i_p","%i_prod","%i_sum","%i_tril","%i_triu","%ip_i_XMLList","%l_i_XMLList","%lss_i_XMLList","%mc_i_XMLList","%msp_full","%msp_i_XMLList","%msp_spget","%p_i_XMLList","%ptr_i_XMLList","%r_i_XMLList","%s_i_XMLList","%sp_i_XMLList","%spb_i_XMLList","%st_i_XMLList","Calendar","ClipBoard","Matplot","Matplot1","PlaySound","TCL_DeleteInterp","TCL_DoOneEvent","TCL_EvalFile","TCL_EvalStr","TCL_ExistArray","TCL_ExistInterp","TCL_ExistVar","TCL_GetVar","TCL_GetVersion","TCL_SetVar","TCL_UnsetVar","TCL_UpVar","_","_code2str","_str2code","about","abs","acos","addcb","addf","addhistory","addinter","amell","and","argn","arl2_ius","ascii","asin","atan","backslash","balanc","banner","base2dec","basename","bdiag","beep","besselh","besseli","besselj","besselk","bessely","beta","bezout","bfinit","blkfc1i","blkslvi","bool2s","browsehistory","browsevar","bsplin3val","buildDocv2","buildouttb","bvode","c_link","calerf","call","callblk","captions","cd","cdfbet","cdfbin","cdfchi","cdfchn","cdff","cdffnc","cdfgam","cdfnbn","cdfnor","cdfpoi","cdft","ceil","champ","champ1","chdir","chol","clc","clean","clear","clear_pixmap","clearfun","clearglobal","closeEditor","closeXcos","code2str","coeff","comp","completion","conj","contour2di","contr","conv2","convstr","copy","copyfile","corr","cos","coserror","createdir","cshep2d","ctree2","ctree3","ctree4","cumprod","cumsum","curblock","curblockc","dasrt","dassl","data2sig","debug","dec2base","deff","definedfields","degree","delbpt","delete","deletefile","delip","delmenu","det","dgettext","dhinf","diag","diary","diffobjs","disp","dispbpt","displayhistory","disposefftwlibrary","dlgamma","dnaupd","dneupd","double","draw","drawaxis","drawlater",
"drawnow","dsaupd","dsearch","dseupd","duplicate","editor","editvar","emptystr","end_scicosim","ereduc","errcatch","errclear","error","eval_cshep2d","exec","execstr","exists","exit","exp","expm","exportUI","export_to_hdf5","eye","fadj2sp","fec","feval","fft","fftw","fftw_flags","fftw_forget_wisdom","fftwlibraryisloaded","file","filebrowser","fileext","fileinfo","fileparts","filesep","find","findBD","findfiles","floor","format","fort","fprintfMat","freq","frexp","fromc","fromjava","fscanfMat","fsolve","fstair","full","fullpath","funcprot","funptr","gamma","gammaln","geom3d","get","get_absolute_file_path","get_fftw_wisdom","getblocklabel","getcallbackobject","getdate","getdebuginfo","getdefaultlanguage","getdrives","getdynlibext","getenv","getfield","gethistory","gethistoryfile","getinstalledlookandfeels","getio","getlanguage","getlongpathname","getlookandfeel","getmd5","getmemory","getmodules","getos","getpid","getrelativefilename","getscicosvars","getscilabmode","getshortpathname","gettext","getvariablesonstack","getversion","glist","global","glue","grand","grayplot","grep","gsort","gstacksize","havewindow","helpbrowser","hess","hinf","historymanager","historysize","host","iconvert","iconvert","ieee","ilib_verbose","imag","impl","import_from_hdf5","imult","inpnvi","int","int16","int2d","int32","int3d","int8","interp","interp2d","interp3d","intg","intppty","inttype","inv","is_handle_valid","isalphanum","isascii","isdef","isdigit","isdir","isequal","isequalbitwise","iserror","isfile","isglobal","isletter","isreal","iswaitingforinput","javaclasspath","javalibrarypath","kron","lasterror","ldiv","ldivf","legendre","length","lib","librarieslist","libraryinfo","linear_interpn","lines","link","linmeq","list","load","loadScicos","loadfftwlibrary","loadhistory","log","log1p","lsq","lsq_splin","lsqrsolve","lsslist","lstcat","lstsize","ltitr","lu","ludel","lufact","luget","lusolve","macr2lst","macr2tree","matfile_close","matfile_listvar","matfile_open","matfile_varreadnext","matfile_varwrite","matrix","max","maxfiles","mclearerr","mclose","meof","merror","messagebox","mfprintf","mfscanf","mget","mgeti","mgetl","mgetstr","min","mlist","mode","model2blk","mopen","move","movefile","mprintf","mput","mputl","mputstr","mscanf","mseek","msprintf","msscanf","mtell","mtlb_mode","mtlb_sparse","mucomp","mulf","nearfloat","newaxes","newest","newfun","nnz","notify","number_properties","ode","odedc","ones","opentk","optim","or","ordmmd","parallel_concurrency","parallel_run","param3d","param3d1","part","pathconvert","pathsep","phase_simulation","plot2d","plot2d1","plot2d2","plot2d3","plot2d4","plot3d","plot3d1","pointer_xproperty","poly","ppol","pppdiv","predef","print","printf","printfigure","printsetupbox","prod","progressionbar","prompt","pwd","qld","qp_solve","qr","raise_window","rand","rankqr","rat","rcond","rdivf","read","read4b","readb","readgateway","readmps","real","realtime","realtimeinit","regexp","relocate_handle","remez","removedir","removelinehistory","res_with_prec","resethistory","residu","resume","return","ricc","ricc_old","rlist","roots","rotate_axes","round","rpem","rtitr","rubberbox","save","saveafterncommands","saveconsecutivecommands","savehistory","schur","sci_haltscicos","sci_tree2","sci_tree3","sci_tree4","sciargs","scicos_debug","scicos_debug_count","scicos_time","scicosim","scinotes","sctree","semidef","set","set_blockerror","set_fftw_wisdom","set_xproperty","setbpt","setdefaultlanguage","setenv","setfield","sethistoryfile","setlanguage","setlookandfeel","setmenu","sfact","sfinit","show_pix
map","show_window","showalluimenushandles","sident","sig2data","sign","simp","simp_mode","sin","size","slash","sleep","sorder","sparse","spchol","spcompack","spec","spget","splin","splin2d","splin3d","spones","sprintf","sqrt","stacksize","str2code","strcat","strchr","strcmp","strcspn","strindex","string","stringbox","stripblanks","strncpy","strrchr","strrev","strsplit","strspn","strstr","strsubst","strtod","strtok","subf","sum","svd","swap_handles","symfcti","syredi","system_getproperty","system_setproperty","ta2lpd","tan","taucs_chdel","taucs_chfact","taucs_chget","taucs_chinfo","taucs_chsolve","tempname","testmatrix","timer","tlist","tohome","tokens","toolbar","toprint","tr_zer","tril","triu","type","typename","uiDisplayTree","uicontextmenu","uicontrol","uigetcolor","uigetdir","uigetfile","uigetfont","uimenu","uint16","uint32","uint8","uipopup","uiputfile","uiwait","ulink","umf_ludel","umf_lufact","umf_luget","umf_luinfo","umf_lusolve","umfpack","unglue","unix","unsetmenu","unzoom","updatebrowsevar","usecanvas","user","var2vec","varn","vec2var","waitbar","warnBlockByUID","warning","what","where","whereis","who","winsid","with_embedded_jre","with_module","writb","write","write4b","x_choose","x_choose_modeless","x_dialog","x_mdialog","xarc","xarcs","xarrows","xchange","xchoicesi","xclick","xcos","xcosAddToolsMenu","xcosConfigureXmlFile","xcosDiagramToScilab","xcosPalCategoryAdd","xcosPalDelete","xcosPalDisable","xcosPalEnable","xcosPalGenerateIcon","xcosPalLoad","xcosPalMove","xcosUpdateBlock","xdel","xfarc","xfarcs","xfpoly","xfpolys","xfrect","xget","xgetech","xgetmouse","xgraduate","xgrid","xlfont","xls_open","xls_read","xmlAddNs","xmlAsNumber","xmlAsText","xmlDTD","xmlDelete","xmlDocument","xmlDump","xmlElement","xmlFormat","xmlGetNsByHref","xmlGetNsByPrefix","xmlGetOpenDocs","xmlIsValidObject","xmlNs","xmlRead","xmlReadStr","xmlRelaxNG","xmlRemove","xmlSchema","xmlSetAttributes","xmlValidate","xmlWrite","xmlXPath","xname","xpause","xpoly","xpolys","xrect","xrects","xs2bmp","xs2eps","xs2gif","xs2jpg","xs2pdf","xs2png","xs2ppm","xs2ps","xs2svg","xsegs","xset","xsetech","xstring","xstringb","xtitle","zeros","znaupd","zneupd","zoom_rect") -commands_kw = ["abort","apropos","break","case","catch","clc","clear","continue","do","else","elseif","end","endfunction","exit","for","function","help","if","pause","pwd","quit","resume","return","select","then","try","what","while","who"] +commands_kw = ("abort","apropos","break","case","catch","clc","clear","continue","do","else","elseif","end","endfunction","exit","for","function","help","if","pause","pwd","quit","resume","return","select","then","try","what","while","who") -macros_kw = 
["%0_i_st","%3d_i_h","%Block_xcosUpdateBlock","%TNELDER_p","%TNELDER_string","%TNMPLOT_p","%TNMPLOT_string","%TOPTIM_p","%TOPTIM_string","%TSIMPLEX_p","%TSIMPLEX_string","%_gsort","%_strsplit","%ar_p","%asn","%b_a_b","%b_a_s","%b_c_s","%b_c_spb","%b_cumprod","%b_cumsum","%b_d_s","%b_diag","%b_e","%b_f_s","%b_f_spb","%b_g_s","%b_g_spb","%b_h_s","%b_h_spb","%b_i_b","%b_i_ce","%b_i_h","%b_i_hm","%b_i_s","%b_i_sp","%b_i_spb","%b_i_st","%b_iconvert","%b_l_b","%b_l_s","%b_m_b","%b_m_s","%b_matrix","%b_n_hm","%b_o_hm","%b_p_s","%b_prod","%b_r_b","%b_r_s","%b_s_b","%b_s_s","%b_string","%b_sum","%b_tril","%b_triu","%b_x_b","%b_x_s","%c_a_c","%c_b_c","%c_b_s","%c_diag","%c_e","%c_eye","%c_f_s","%c_i_c","%c_i_ce","%c_i_h","%c_i_hm","%c_i_lss","%c_i_r","%c_i_s","%c_i_st","%c_matrix","%c_n_l","%c_n_st","%c_o_l","%c_o_st","%c_ones","%c_rand","%c_tril","%c_triu","%cblock_c_cblock","%cblock_c_s","%cblock_e","%cblock_f_cblock","%cblock_p","%cblock_size","%ce_6","%ce_c_ce","%ce_e","%ce_f_ce","%ce_i_ce","%ce_i_s","%ce_i_st","%ce_matrix","%ce_p","%ce_size","%ce_string","%ce_t","%champdat_i_h","%choose","%diagram_xcos","%dir_p","%fptr_i_st","%grayplot_i_h","%h_i_st","%hm_1_hm","%hm_1_s","%hm_2_hm","%hm_2_s","%hm_3_hm","%hm_3_s","%hm_4_hm","%hm_4_s","%hm_5","%hm_a_hm","%hm_a_r","%hm_a_s","%hm_abs","%hm_and","%hm_bool2s","%hm_c_hm","%hm_ceil","%hm_conj","%hm_cos","%hm_cumprod","%hm_cumsum","%hm_d_hm","%hm_d_s","%hm_degree","%hm_e","%hm_exp","%hm_f_hm","%hm_fft","%hm_find","%hm_floor","%hm_g_hm","%hm_h_hm","%hm_i_b","%hm_i_ce","%hm_i_hm","%hm_i_i","%hm_i_p","%hm_i_r","%hm_i_s","%hm_i_st","%hm_iconvert","%hm_imag","%hm_int","%hm_isnan","%hm_isreal","%hm_j_hm","%hm_j_s","%hm_k_hm","%hm_k_s","%hm_log","%hm_m_p","%hm_m_r","%hm_m_s","%hm_matrix","%hm_maxi","%hm_mean","%hm_median","%hm_mini","%hm_n_b","%hm_n_c","%hm_n_hm","%hm_n_i","%hm_n_p","%hm_n_s","%hm_o_b","%hm_o_c","%hm_o_hm","%hm_o_i","%hm_o_p","%hm_o_s","%hm_ones","%hm_or","%hm_p","%hm_prod","%hm_q_hm","%hm_r_s","%hm_rand","%hm_real","%hm_round","%hm_s","%hm_s_hm","%hm_s_r","%hm_s_s","%hm_sign","%hm_sin","%hm_size","%hm_sqrt","%hm_st_deviation","%hm_string","%hm_sum","%hm_x_hm","%hm_x_p","%hm_x_s","%hm_zeros","%i_1_s","%i_2_s","%i_3_s","%i_4_s","%i_Matplot","%i_a_i","%i_a_s","%i_and","%i_ascii","%i_b_s","%i_bezout","%i_champ","%i_champ1","%i_contour","%i_contour2d","%i_d_i","%i_d_s","%i_e","%i_fft","%i_g_i","%i_gcd","%i_h_i","%i_i_ce","%i_i_h","%i_i_hm","%i_i_i","%i_i_s","%i_i_st","%i_j_i","%i_j_s","%i_l_s","%i_lcm","%i_length","%i_m_i","%i_m_s","%i_mfprintf","%i_mprintf","%i_msprintf","%i_n_s","%i_o_s","%i_or","%i_p_i","%i_p_s","%i_plot2d","%i_plot2d1","%i_plot2d2","%i_q_s","%i_r_i","%i_r_s","%i_round","%i_s_i","%i_s_s","%i_sign","%i_string","%i_x_i","%i_x_s","%ip_a_s","%ip_i_st","%ip_m_s","%ip_n_ip","%ip_o_ip","%ip_p","%ip_s_s","%ip_string","%k","%l_i_h","%l_i_s","%l_i_st","%l_isequal","%l_n_c","%l_n_l","%l_n_m","%l_n_p","%l_n_s","%l_n_st","%l_o_c","%l_o_l","%l_o_m","%l_o_p","%l_o_s","%l_o_st","%lss_a_lss","%lss_a_p","%lss_a_r","%lss_a_s","%lss_c_lss","%lss_c_p","%lss_c_r","%lss_c_s","%lss_e","%lss_eye","%lss_f_lss","%lss_f_p","%lss_f_r","%lss_f_s","%lss_i_ce","%lss_i_lss","%lss_i_p","%lss_i_r","%lss_i_s","%lss_i_st","%lss_inv","%lss_l_lss","%lss_l_p","%lss_l_r","%lss_l_s","%lss_m_lss","%lss_m_p","%lss_m_r","%lss_m_s","%lss_n_lss","%lss_n_p","%lss_n_r","%lss_n_s","%lss_norm","%lss_o_lss","%lss_o_p","%lss_o_r","%lss_o_s","%lss_ones","%lss_r_lss","%lss_r_p","%lss_r_r","%lss_r_s","%lss_rand","%lss_s","%lss_s_lss","%lss_s_p","%lss_s_r","%lss_s_s","%lss_size","%lss
_t","%lss_v_lss","%lss_v_p","%lss_v_r","%lss_v_s","%lt_i_s","%m_n_l","%m_o_l","%mc_i_h","%mc_i_s","%mc_i_st","%mc_n_st","%mc_o_st","%mc_string","%mps_p","%mps_string","%msp_a_s","%msp_abs","%msp_e","%msp_find","%msp_i_s","%msp_i_st","%msp_length","%msp_m_s","%msp_maxi","%msp_n_msp","%msp_nnz","%msp_o_msp","%msp_p","%msp_sparse","%msp_spones","%msp_t","%p_a_lss","%p_a_r","%p_c_lss","%p_c_r","%p_cumprod","%p_cumsum","%p_d_p","%p_d_r","%p_d_s","%p_det","%p_e","%p_f_lss","%p_f_r","%p_i_ce","%p_i_h","%p_i_hm","%p_i_lss","%p_i_p","%p_i_r","%p_i_s","%p_i_st","%p_inv","%p_j_s","%p_k_p","%p_k_r","%p_k_s","%p_l_lss","%p_l_p","%p_l_r","%p_l_s","%p_m_hm","%p_m_lss","%p_m_r","%p_matrix","%p_n_l","%p_n_lss","%p_n_r","%p_o_l","%p_o_lss","%p_o_r","%p_o_sp","%p_p_s","%p_prod","%p_q_p","%p_q_r","%p_q_s","%p_r_lss","%p_r_p","%p_r_r","%p_r_s","%p_s_lss","%p_s_r","%p_simp","%p_string","%p_sum","%p_v_lss","%p_v_p","%p_v_r","%p_v_s","%p_x_hm","%p_x_r","%p_y_p","%p_y_r","%p_y_s","%p_z_p","%p_z_r","%p_z_s","%r_a_hm","%r_a_lss","%r_a_p","%r_a_r","%r_a_s","%r_c_lss","%r_c_p","%r_c_r","%r_c_s","%r_clean","%r_cumprod","%r_d_p","%r_d_r","%r_d_s","%r_det","%r_diag","%r_e","%r_eye","%r_f_lss","%r_f_p","%r_f_r","%r_f_s","%r_i_ce","%r_i_hm","%r_i_lss","%r_i_p","%r_i_r","%r_i_s","%r_i_st","%r_inv","%r_j_s","%r_k_p","%r_k_r","%r_k_s","%r_l_lss","%r_l_p","%r_l_r","%r_l_s","%r_m_hm","%r_m_lss","%r_m_p","%r_m_r","%r_m_s","%r_matrix","%r_n_lss","%r_n_p","%r_n_r","%r_n_s","%r_norm","%r_o_lss","%r_o_p","%r_o_r","%r_o_s","%r_ones","%r_p","%r_p_s","%r_prod","%r_q_p","%r_q_r","%r_q_s","%r_r_lss","%r_r_p","%r_r_r","%r_r_s","%r_rand","%r_s","%r_s_hm","%r_s_lss","%r_s_p","%r_s_r","%r_s_s","%r_simp","%r_size","%r_string","%r_sum","%r_t","%r_tril","%r_triu","%r_v_lss","%r_v_p","%r_v_r","%r_v_s","%r_x_p","%r_x_r","%r_x_s","%r_y_p","%r_y_r","%r_y_s","%r_z_p","%r_z_r","%r_z_s","%s_1_hm","%s_1_i","%s_2_hm","%s_2_i","%s_3_hm","%s_3_i","%s_4_hm","%s_4_i","%s_5","%s_a_b","%s_a_hm","%s_a_i","%s_a_ip","%s_a_lss","%s_a_msp","%s_a_r","%s_a_sp","%s_and","%s_b_i","%s_b_s","%s_c_b","%s_c_cblock","%s_c_lss","%s_c_r","%s_c_sp","%s_d_b","%s_d_i","%s_d_p","%s_d_r","%s_d_sp","%s_e","%s_f_b","%s_f_cblock","%s_f_lss","%s_f_r","%s_f_sp","%s_g_b","%s_g_s","%s_h_b","%s_h_s","%s_i_b","%s_i_c","%s_i_ce","%s_i_h","%s_i_hm","%s_i_i","%s_i_lss","%s_i_p","%s_i_r","%s_i_s","%s_i_sp","%s_i_spb","%s_i_st","%s_j_i","%s_k_hm","%s_k_p","%s_k_r","%s_k_sp","%s_l_b","%s_l_hm","%s_l_i","%s_l_lss","%s_l_p","%s_l_r","%s_l_s","%s_l_sp","%s_m_b","%s_m_hm","%s_m_i","%s_m_ip","%s_m_lss","%s_m_msp","%s_m_r","%s_matrix","%s_n_hm","%s_n_i","%s_n_l","%s_n_lss","%s_n_r","%s_n_st","%s_o_hm","%s_o_i","%s_o_l","%s_o_lss","%s_o_r","%s_o_st","%s_or","%s_p_b","%s_p_i","%s_pow","%s_q_hm","%s_q_i","%s_q_p","%s_q_r","%s_q_sp","%s_r_b","%s_r_i","%s_r_lss","%s_r_p","%s_r_r","%s_r_s","%s_r_sp","%s_s_b","%s_s_hm","%s_s_i","%s_s_ip","%s_s_lss","%s_s_r","%s_s_sp","%s_simp","%s_v_lss","%s_v_p","%s_v_r","%s_v_s","%s_x_b","%s_x_hm","%s_x_i","%s_x_r","%s_y_p","%s_y_r","%s_y_sp","%s_z_p","%s_z_r","%s_z_sp","%sn","%sp_a_s","%sp_a_sp","%sp_and","%sp_c_s","%sp_ceil","%sp_cos","%sp_cumprod","%sp_cumsum","%sp_d_s","%sp_d_sp","%sp_diag","%sp_e","%sp_exp","%sp_f_s","%sp_floor","%sp_gsort","%sp_i_ce","%sp_i_h","%sp_i_s","%sp_i_sp","%sp_i_st","%sp_int","%sp_inv","%sp_k_s","%sp_k_sp","%sp_l_s","%sp_l_sp","%sp_length","%sp_norm","%sp_or","%sp_p_s","%sp_prod","%sp_q_s","%sp_q_sp","%sp_r_s","%sp_r_sp","%sp_round","%sp_s_s","%sp_s_sp","%sp_sin","%sp_sqrt","%sp_string","%sp_sum","%sp_tril","%sp_triu","%sp_y_s","%sp_y_sp","%
sp_z_s","%sp_z_sp","%spb_and","%spb_c_b","%spb_cumprod","%spb_cumsum","%spb_diag","%spb_e","%spb_f_b","%spb_g_b","%spb_g_spb","%spb_h_b","%spb_h_spb","%spb_i_b","%spb_i_ce","%spb_i_h","%spb_i_st","%spb_or","%spb_prod","%spb_sum","%spb_tril","%spb_triu","%st_6","%st_c_st","%st_e","%st_f_st","%st_i_b","%st_i_c","%st_i_fptr","%st_i_h","%st_i_i","%st_i_ip","%st_i_lss","%st_i_msp","%st_i_p","%st_i_r","%st_i_s","%st_i_sp","%st_i_spb","%st_i_st","%st_matrix","%st_n_c","%st_n_l","%st_n_mc","%st_n_p","%st_n_s","%st_o_c","%st_o_l","%st_o_mc","%st_o_p","%st_o_s","%st_o_tl","%st_p","%st_size","%st_string","%st_t","%ticks_i_h","%xls_e","%xls_p","%xlssheet_e","%xlssheet_p","%xlssheet_size","%xlssheet_string","DominationRank","G_make","IsAScalar","NDcost","OS_Version","PlotSparse","ReadHBSparse","ReadmiMatrix","TCL_CreateSlave","WritemiMatrix","abcd","abinv","accept_func_default","accept_func_vfsa","acf","acosd","acosh","acoshm","acosm","acot","acotd","acoth","acsc","acscd","acsch","add_demo","add_help_chapter","add_module_help_chapter","add_param","add_profiling","adj2sp","aff2ab","ana_style","analpf","analyze","aplat","apropos","arhnk","arl2","arma2p","armac","armax","armax1","arobasestring2strings","arsimul","ascii2string","asciimat","asec","asecd","asech","asind","asinh","asinhm","asinm","assert_checkalmostequal","assert_checkequal","assert_checkerror","assert_checkfalse","assert_checkfilesequal","assert_checktrue","assert_comparecomplex","assert_computedigits","assert_cond2reltol","assert_cond2reqdigits","assert_generror","atand","atanh","atanhm","atanm","atomsAutoload","atomsAutoloadAdd","atomsAutoloadDel","atomsAutoloadList","atomsCategoryList","atomsCheckModule","atomsDepTreeShow","atomsGetConfig","atomsGetInstalled","atomsGetLoaded","atomsGetLoadedPath","atomsInstall","atomsIsInstalled","atomsIsLoaded","atomsList","atomsLoad","atomsRemove","atomsRepositoryAdd","atomsRepositoryDel","atomsRepositoryList","atomsRestoreConfig","atomsSaveConfig","atomsSearch","atomsSetConfig","atomsShow","atomsSystemInit","atomsSystemUpdate","atomsTest","atomsUpdate","atomsVersion","augment","auread","auwrite","balreal","bench_run","bilin","bilt","bin2dec","binomial","bitand","bitcmp","bitget","bitor","bitset","bitxor","black","blanks","bloc2exp","bloc2ss","block_parameter_error","bode","bstap","buttmag","bvodeS","bytecode","bytecodewalk","cainv","calendar","calfrq","canon","casc","cat","cat_code","cb_m2sci_gui","ccontrg","cell","cell2mat","cellstr","center","cepstrum","cfspec","char","chart","cheb1mag","cheb2mag","check_gateways","check_help","check_modules_xml","check_versions","chepol","chfact","chsolve","classmarkov","clean_help","clock","cls2dls","cmb_lin","cmndred","cmoment","coding_ga_binary","coding_ga_identity","coff","coffg","colcomp","colcompr","colinout","colregul","companion","complex","compute_initial_temp","cond","cond2sp","condestsp","config","configure_msifort","configure_msvc","cont_frm","cont_mat","contrss","conv","convert_to_float","convertindex","convol","convol2d","copfac","correl","cosd","cosh","coshm","cosm","cotd","cotg","coth","cothm","covar","createfun","createstruct","crossover_ga_binary","crossover_ga_default","csc","cscd","csch","csgn","csim","cspect","ctr_gram","czt","dae","daeoptions","damp","datafit","date","datenum","datevec","dbphi","dcf","ddp","dec2bin","dec2hex","dec2oct","del_help_chapter","del_module_help_chapter","demo_begin","demo_choose","demo_compiler","demo_end","demo_file_choice","demo_folder_choice","demo_function_choice","demo_gui","demo_mdialog","demo_message","demo_run"
,"demo_viewCode","denom","derivat","derivative","des2ss","des2tf","detectmsifort64tools","detectmsvc64tools","determ","detr","detrend","devtools_run_builder","dft","dhnorm","diff","diophant","dir","dirname","dispfiles","dllinfo","dscr","dsimul","dt_ility","dtsi","edit","edit_error","eigenmarkov","ell1mag","enlarge_shape","entropy","eomday","epred","eqfir","eqiir","equil","equil1","erf","erfc","erfcx","erfinv","etime","eval","evans","evstr","expression2code","extract_help_examples","factor","factorial","factors","faurre","ffilt","fft2","fftshift","fieldnames","filt_sinc","filter","findABCD","findAC","findBDK","findR","find_freq","find_links","find_scicos_version","findm","findmsifortcompiler","findmsvccompiler","findx0BD","firstnonsingleton","fit_dat","fix","fixedpointgcd","flipdim","flts","fminsearch","format_txt","fourplan","fprintf","frep2tf","freson","frfit","frmag","fscanf","fseek_origin","fsfirlin","fspec","fspecg","fstabst","ftest","ftuneq","fullfile","fullrf","fullrfk","fun2string","g_margin","gainplot","gamitg","gcare","gcd","gencompilationflags_unix","generateBlockImage","generateBlockImages","generic_i_ce","generic_i_h","generic_i_hm","generic_i_s","generic_i_st","genlib","genlib_old","genmarkov","geomean","getDiagramVersion","getModelicaPath","get_file_path","get_function_path","get_param","get_profile","get_scicos_version","getd","getscilabkeywords","getshell","gettklib","gfare","gfrancis","givens","glever","gmres","group","gschur","gspec","gtild","h2norm","h_cl","h_inf","h_inf_st","h_norm","hallchart","halt","hank","hankelsv","harmean","haveacompiler","head_comments","help","help_from_sci","help_skeleton","hermit","hex2dec","hilb","hilbert","horner","householder","hrmt","htrianr","hypermat","ifft","iir","iirgroup","iirlp","iirmod","ilib_build","ilib_compile","ilib_for_link","ilib_gen_Make","ilib_gen_Make_unix","ilib_gen_cleaner","ilib_gen_gateway","ilib_gen_loader","ilib_include_flag","ilib_mex_build","im_inv","importScicosDiagram","importScicosPal","importXcosDiagram","imrep2ss","ind2sub","inistate","init_ga_default","init_param","initial_scicos_tables","input","instruction2code","intc","intdec","integrate","interp1","interpln","intersect","intl","intsplin","inttrap","inv_coeff","invr","invrs","invsyslin","iqr","isLeapYear","is_absolute_path","is_param","iscell","iscellstr","isempty","isfield","isinf","isnan","isnum","issparse","isstruct","isvector","jmat","justify","kalm","karmarkar","kernel","kpure","krac2","kroneck","lattn","launchtest","lcf","lcm","lcmdiag","leastsq","leqe","leqr","lev","levin","lex_sort","lft","lin","lin2mu","lincos","lindquist","linf","linfn","linsolve","linspace","list2vec","list_param","listfiles","listfunctions","listvarinfile","lmisolver","lmitool","loadXcosLibs","loadmatfile","loadwave","log10","log2","logm","logspace","lqe","lqg","lqg2stan","lqg_ltr","lqr","ls","lyap","m2sci_gui","m_circle","macglov","macrovar","mad","makecell","manedit","mapsound","markp2ss","matfile2sci","mdelete","mean","meanf","median","mese","meshgrid","mfft","mfile2sci","minreal","minss","mkdir","modulo","moment","mrfit","msd","mstr2sci","mtlb","mtlb_0","mtlb_a","mtlb_all","mtlb_any","mtlb_axes","mtlb_axis","mtlb_beta","mtlb_box","mtlb_choices","mtlb_close","mtlb_colordef","mtlb_cond","mtlb_conv","mtlb_cov","mtlb_cumprod","mtlb_cumsum","mtlb_dec2hex","mtlb_delete","mtlb_diag","mtlb_diff","mtlb_dir","mtlb_double","mtlb_e","mtlb_echo","mtlb_error","mtlb_eval","mtlb_exist","mtlb_eye","mtlb_false","mtlb_fft","mtlb_fftshift","mtlb_filter","mtlb_find","mtlb_findstr","mtlb_fliplr","
mtlb_fopen","mtlb_format","mtlb_fprintf","mtlb_fread","mtlb_fscanf","mtlb_full","mtlb_fwrite","mtlb_get","mtlb_grid","mtlb_hold","mtlb_i","mtlb_ifft","mtlb_image","mtlb_imp","mtlb_int16","mtlb_int32","mtlb_int8","mtlb_is","mtlb_isa","mtlb_isfield","mtlb_isletter","mtlb_isspace","mtlb_l","mtlb_legendre","mtlb_linspace","mtlb_logic","mtlb_logical","mtlb_loglog","mtlb_lower","mtlb_max","mtlb_mean","mtlb_median","mtlb_mesh","mtlb_meshdom","mtlb_min","mtlb_more","mtlb_num2str","mtlb_ones","mtlb_pcolor","mtlb_plot","mtlb_prod","mtlb_qr","mtlb_qz","mtlb_rand","mtlb_randn","mtlb_rcond","mtlb_realmax","mtlb_realmin","mtlb_repmat","mtlb_s","mtlb_semilogx","mtlb_semilogy","mtlb_setstr","mtlb_size","mtlb_sort","mtlb_sortrows","mtlb_sprintf","mtlb_sscanf","mtlb_std","mtlb_strcmp","mtlb_strcmpi","mtlb_strfind","mtlb_strrep","mtlb_subplot","mtlb_sum","mtlb_t","mtlb_toeplitz","mtlb_tril","mtlb_triu","mtlb_true","mtlb_type","mtlb_uint16","mtlb_uint32","mtlb_uint8","mtlb_upper","mtlb_var","mtlb_zeros","mu2lin","mutation_ga_binary","mutation_ga_default","mvcorrel","mvvacov","nancumsum","nand2mean","nanmax","nanmean","nanmeanf","nanmedian","nanmin","nanstdev","nansum","narsimul","ndgrid","ndims","nehari","neigh_func_csa","neigh_func_default","neigh_func_fsa","neigh_func_vfsa","neldermead_cget","neldermead_configure","neldermead_costf","neldermead_defaultoutput","neldermead_destroy","neldermead_display","neldermead_function","neldermead_get","neldermead_log","neldermead_new","neldermead_restart","neldermead_search","neldermead_updatesimp","nextpow2","nfreq","nicholschart","nlev","nmplot_cget","nmplot_configure","nmplot_contour","nmplot_destroy","nmplot_display","nmplot_function","nmplot_get","nmplot_historyplot","nmplot_log","nmplot_new","nmplot_outputcmd","nmplot_restart","nmplot_search","nmplot_simplexhistory","noisegen","nonreg_test_run","norm","now","null","num2cell","numdiff","numer","nyquist","nyquistfrequencybounds","obs_gram","obscont","observer","obsv_mat","obsvss","oct2dec","odeoptions","optim_ga","optim_moga","optim_nsga","optim_nsga2","optim_sa","optimbase_cget","optimbase_checkbounds","optimbase_checkcostfun","optimbase_checkx0","optimbase_configure","optimbase_destroy","optimbase_display","optimbase_function","optimbase_get","optimbase_hasbounds","optimbase_hasconstraints","optimbase_hasnlcons","optimbase_histget","optimbase_histset","optimbase_incriter","optimbase_isfeasible","optimbase_isinbounds","optimbase_isinnonlincons","optimbase_log","optimbase_logshutdown","optimbase_logstartup","optimbase_new","optimbase_outputcmd","optimbase_outstruct","optimbase_proj2bnds","optimbase_set","optimbase_stoplog","optimbase_terminate","optimget","optimplotfunccount","optimplotfval","optimplotx","optimset","optimsimplex_center","optimsimplex_check","optimsimplex_compsomefv","optimsimplex_computefv","optimsimplex_deltafv","optimsimplex_deltafvmax","optimsimplex_destroy","optimsimplex_dirmat","optimsimplex_fvmean","optimsimplex_fvstdev","optimsimplex_fvvariance","optimsimplex_getall","optimsimplex_getallfv","optimsimplex_getallx","optimsimplex_getfv","optimsimplex_getn","optimsimplex_getnbve","optimsimplex_getve","optimsimplex_getx","optimsimplex_gradientfv","optimsimplex_log","optimsimplex_new","optimsimplex_print","optimsimplex_reflect","optimsimplex_setall","optimsimplex_setallfv","optimsimplex_setallx","optimsimplex_setfv","optimsimplex_setn","optimsimplex_setnbve","optimsimplex_setve","optimsimplex_setx","optimsimplex_shrink","optimsimplex_size","optimsimplex_sort","optimsimplex_tostring","optimsimplex_xb
ar","orth","p_margin","pack","pareto_filter","parrot","pbig","pca","pcg","pdiv","pen2ea","pencan","pencost","penlaur","perctl","perl","perms","permute","pertrans","pfactors","pfss","phasemag","phaseplot","phc","pinv","playsnd","plotprofile","plzr","pmodulo","pol2des","pol2str","polar","polfact","prbs_a","prettyprint","primes","princomp","profile","proj","projsl","projspec","psmall","pspect","qmr","qpsolve","quart","quaskro","rafiter","randpencil","range","rank","read_csv","readxls","recompilefunction","recons","reglin","regress","remezb","remove_param","remove_profiling","repfreq","replace_Ix_by_Fx","repmat","reset_profiling","resize_matrix","returntoscilab","rhs2code","ric_desc","riccati","rmdir","routh_t","rowcomp","rowcompr","rowinout","rowregul","rowshuff","rref","sample","samplef","samwr","savematfile","savewave","scanf","sci2exp","sciGUI_init","sci_sparse","scicos_getvalue","scicos_simulate","scicos_workspace_init","scisptdemo","scitest","sdiff","sec","secd","sech","selection_ga_elitist","selection_ga_random","sensi","set_param","setdiff","sgrid","show_margins","show_pca","showprofile","signm","sinc","sincd","sind","sinh","sinhm","sinm","sm2des","sm2ss","smga","smooth","solve","sound","soundsec","sp2adj","spaninter","spanplus","spantwo","specfact","speye","sprand","spzeros","sqroot","sqrtm","squarewave","squeeze","srfaur","srkf","ss2des","ss2ss","ss2tf","sscanf","sskf","ssprint","ssrand","st_deviation","st_i_generic","st_ility","stabil","statgain","stdev","stdevf","steadycos","strange","strcmpi","struct","sub2ind","sva","svplot","sylm","sylv","sysconv","sysdiag","sysfact","syslin","syssize","system","systmat","tabul","tand","tanh","tanhm","tanm","tbx_build_blocks","tbx_build_cleaner","tbx_build_gateway","tbx_build_gateway_clean","tbx_build_gateway_loader","tbx_build_help","tbx_build_help_loader","tbx_build_loader","tbx_build_macros","tbx_build_src","tbx_builder","tbx_builder_gateway","tbx_builder_gateway_lang","tbx_builder_help","tbx_builder_help_lang","tbx_builder_macros","tbx_builder_src","tbx_builder_src_lang","temp_law_csa","temp_law_default","temp_law_fsa","temp_law_huang","temp_law_vfsa","test_clean","test_on_columns","test_run","test_run_level","testexamples","tf2des","tf2ss","thrownan","tic","time_id","toc","toeplitz","tokenpos","toolboxes","trace","trans","translatepaths","tree2code","trfmod","trianfml","trimmean","trisolve","trzeros","typeof","ui_observer","union","unique","unit_test_run","unix_g","unix_s","unix_w","unix_x","unobs","unpack","variance","variancef","vec2list","vectorfind","ver","warnobsolete","wavread","wavwrite","wcenter","weekday","wfir","wfir_gui","whereami","who_user","whos","wiener","wigner","winclose","window","winlist","with_javasci","with_macros_source","with_modelica_compiler","with_pvm","with_texmacs","with_tk","write_csv","xcosBlockEval","xcosBlockInterface","xcosCodeGeneration","xcosConfigureModelica","xcosPal","xcosPalAdd","xcosPalAddBlock","xcosPalExport","xcosShowBlockWarning","xcosValidateBlockSet","xcosValidateCompareBlock","xcos_compile","xcos_run","xcos_simulate","xcos_workspace_init","xmltochm","xmltoformat","xmltohtml","xmltojar","xmltopdf","xmltops","xmltoweb","yulewalk","zeropen","zgrid","zpbutt","zpch1","zpch2","zpell"] +macros_kw = 
("%0_i_st","%3d_i_h","%Block_xcosUpdateBlock","%TNELDER_p","%TNELDER_string","%TNMPLOT_p","%TNMPLOT_string","%TOPTIM_p","%TOPTIM_string","%TSIMPLEX_p","%TSIMPLEX_string","%_gsort","%_strsplit","%ar_p","%asn","%b_a_b","%b_a_s","%b_c_s","%b_c_spb","%b_cumprod","%b_cumsum","%b_d_s","%b_diag","%b_e","%b_f_s","%b_f_spb","%b_g_s","%b_g_spb","%b_h_s","%b_h_spb","%b_i_b","%b_i_ce","%b_i_h","%b_i_hm","%b_i_s","%b_i_sp","%b_i_spb","%b_i_st","%b_iconvert","%b_l_b","%b_l_s","%b_m_b","%b_m_s","%b_matrix","%b_n_hm","%b_o_hm","%b_p_s","%b_prod","%b_r_b","%b_r_s","%b_s_b","%b_s_s","%b_string","%b_sum","%b_tril","%b_triu","%b_x_b","%b_x_s","%c_a_c","%c_b_c","%c_b_s","%c_diag","%c_e","%c_eye","%c_f_s","%c_i_c","%c_i_ce","%c_i_h","%c_i_hm","%c_i_lss","%c_i_r","%c_i_s","%c_i_st","%c_matrix","%c_n_l","%c_n_st","%c_o_l","%c_o_st","%c_ones","%c_rand","%c_tril","%c_triu","%cblock_c_cblock","%cblock_c_s","%cblock_e","%cblock_f_cblock","%cblock_p","%cblock_size","%ce_6","%ce_c_ce","%ce_e","%ce_f_ce","%ce_i_ce","%ce_i_s","%ce_i_st","%ce_matrix","%ce_p","%ce_size","%ce_string","%ce_t","%champdat_i_h","%choose","%diagram_xcos","%dir_p","%fptr_i_st","%grayplot_i_h","%h_i_st","%hm_1_hm","%hm_1_s","%hm_2_hm","%hm_2_s","%hm_3_hm","%hm_3_s","%hm_4_hm","%hm_4_s","%hm_5","%hm_a_hm","%hm_a_r","%hm_a_s","%hm_abs","%hm_and","%hm_bool2s","%hm_c_hm","%hm_ceil","%hm_conj","%hm_cos","%hm_cumprod","%hm_cumsum","%hm_d_hm","%hm_d_s","%hm_degree","%hm_e","%hm_exp","%hm_f_hm","%hm_fft","%hm_find","%hm_floor","%hm_g_hm","%hm_h_hm","%hm_i_b","%hm_i_ce","%hm_i_hm","%hm_i_i","%hm_i_p","%hm_i_r","%hm_i_s","%hm_i_st","%hm_iconvert","%hm_imag","%hm_int","%hm_isnan","%hm_isreal","%hm_j_hm","%hm_j_s","%hm_k_hm","%hm_k_s","%hm_log","%hm_m_p","%hm_m_r","%hm_m_s","%hm_matrix","%hm_maxi","%hm_mean","%hm_median","%hm_mini","%hm_n_b","%hm_n_c","%hm_n_hm","%hm_n_i","%hm_n_p","%hm_n_s","%hm_o_b","%hm_o_c","%hm_o_hm","%hm_o_i","%hm_o_p","%hm_o_s","%hm_ones","%hm_or","%hm_p","%hm_prod","%hm_q_hm","%hm_r_s","%hm_rand","%hm_real","%hm_round","%hm_s","%hm_s_hm","%hm_s_r","%hm_s_s","%hm_sign","%hm_sin","%hm_size","%hm_sqrt","%hm_st_deviation","%hm_string","%hm_sum","%hm_x_hm","%hm_x_p","%hm_x_s","%hm_zeros","%i_1_s","%i_2_s","%i_3_s","%i_4_s","%i_Matplot","%i_a_i","%i_a_s","%i_and","%i_ascii","%i_b_s","%i_bezout","%i_champ","%i_champ1","%i_contour","%i_contour2d","%i_d_i","%i_d_s","%i_e","%i_fft","%i_g_i","%i_gcd","%i_h_i","%i_i_ce","%i_i_h","%i_i_hm","%i_i_i","%i_i_s","%i_i_st","%i_j_i","%i_j_s","%i_l_s","%i_lcm","%i_length","%i_m_i","%i_m_s","%i_mfprintf","%i_mprintf","%i_msprintf","%i_n_s","%i_o_s","%i_or","%i_p_i","%i_p_s","%i_plot2d","%i_plot2d1","%i_plot2d2","%i_q_s","%i_r_i","%i_r_s","%i_round","%i_s_i","%i_s_s","%i_sign","%i_string","%i_x_i","%i_x_s","%ip_a_s","%ip_i_st","%ip_m_s","%ip_n_ip","%ip_o_ip","%ip_p","%ip_s_s","%ip_string","%k","%l_i_h","%l_i_s","%l_i_st","%l_isequal","%l_n_c","%l_n_l","%l_n_m","%l_n_p","%l_n_s","%l_n_st","%l_o_c","%l_o_l","%l_o_m","%l_o_p","%l_o_s","%l_o_st","%lss_a_lss","%lss_a_p","%lss_a_r","%lss_a_s","%lss_c_lss","%lss_c_p","%lss_c_r","%lss_c_s","%lss_e","%lss_eye","%lss_f_lss","%lss_f_p","%lss_f_r","%lss_f_s","%lss_i_ce","%lss_i_lss","%lss_i_p","%lss_i_r","%lss_i_s","%lss_i_st","%lss_inv","%lss_l_lss","%lss_l_p","%lss_l_r","%lss_l_s","%lss_m_lss","%lss_m_p","%lss_m_r","%lss_m_s","%lss_n_lss","%lss_n_p","%lss_n_r","%lss_n_s","%lss_norm","%lss_o_lss","%lss_o_p","%lss_o_r","%lss_o_s","%lss_ones","%lss_r_lss","%lss_r_p","%lss_r_r","%lss_r_s","%lss_rand","%lss_s","%lss_s_lss","%lss_s_p","%lss_s_r","%lss_s_s","%lss_size","%lss
_t","%lss_v_lss","%lss_v_p","%lss_v_r","%lss_v_s","%lt_i_s","%m_n_l","%m_o_l","%mc_i_h","%mc_i_s","%mc_i_st","%mc_n_st","%mc_o_st","%mc_string","%mps_p","%mps_string","%msp_a_s","%msp_abs","%msp_e","%msp_find","%msp_i_s","%msp_i_st","%msp_length","%msp_m_s","%msp_maxi","%msp_n_msp","%msp_nnz","%msp_o_msp","%msp_p","%msp_sparse","%msp_spones","%msp_t","%p_a_lss","%p_a_r","%p_c_lss","%p_c_r","%p_cumprod","%p_cumsum","%p_d_p","%p_d_r","%p_d_s","%p_det","%p_e","%p_f_lss","%p_f_r","%p_i_ce","%p_i_h","%p_i_hm","%p_i_lss","%p_i_p","%p_i_r","%p_i_s","%p_i_st","%p_inv","%p_j_s","%p_k_p","%p_k_r","%p_k_s","%p_l_lss","%p_l_p","%p_l_r","%p_l_s","%p_m_hm","%p_m_lss","%p_m_r","%p_matrix","%p_n_l","%p_n_lss","%p_n_r","%p_o_l","%p_o_lss","%p_o_r","%p_o_sp","%p_p_s","%p_prod","%p_q_p","%p_q_r","%p_q_s","%p_r_lss","%p_r_p","%p_r_r","%p_r_s","%p_s_lss","%p_s_r","%p_simp","%p_string","%p_sum","%p_v_lss","%p_v_p","%p_v_r","%p_v_s","%p_x_hm","%p_x_r","%p_y_p","%p_y_r","%p_y_s","%p_z_p","%p_z_r","%p_z_s","%r_a_hm","%r_a_lss","%r_a_p","%r_a_r","%r_a_s","%r_c_lss","%r_c_p","%r_c_r","%r_c_s","%r_clean","%r_cumprod","%r_d_p","%r_d_r","%r_d_s","%r_det","%r_diag","%r_e","%r_eye","%r_f_lss","%r_f_p","%r_f_r","%r_f_s","%r_i_ce","%r_i_hm","%r_i_lss","%r_i_p","%r_i_r","%r_i_s","%r_i_st","%r_inv","%r_j_s","%r_k_p","%r_k_r","%r_k_s","%r_l_lss","%r_l_p","%r_l_r","%r_l_s","%r_m_hm","%r_m_lss","%r_m_p","%r_m_r","%r_m_s","%r_matrix","%r_n_lss","%r_n_p","%r_n_r","%r_n_s","%r_norm","%r_o_lss","%r_o_p","%r_o_r","%r_o_s","%r_ones","%r_p","%r_p_s","%r_prod","%r_q_p","%r_q_r","%r_q_s","%r_r_lss","%r_r_p","%r_r_r","%r_r_s","%r_rand","%r_s","%r_s_hm","%r_s_lss","%r_s_p","%r_s_r","%r_s_s","%r_simp","%r_size","%r_string","%r_sum","%r_t","%r_tril","%r_triu","%r_v_lss","%r_v_p","%r_v_r","%r_v_s","%r_x_p","%r_x_r","%r_x_s","%r_y_p","%r_y_r","%r_y_s","%r_z_p","%r_z_r","%r_z_s","%s_1_hm","%s_1_i","%s_2_hm","%s_2_i","%s_3_hm","%s_3_i","%s_4_hm","%s_4_i","%s_5","%s_a_b","%s_a_hm","%s_a_i","%s_a_ip","%s_a_lss","%s_a_msp","%s_a_r","%s_a_sp","%s_and","%s_b_i","%s_b_s","%s_c_b","%s_c_cblock","%s_c_lss","%s_c_r","%s_c_sp","%s_d_b","%s_d_i","%s_d_p","%s_d_r","%s_d_sp","%s_e","%s_f_b","%s_f_cblock","%s_f_lss","%s_f_r","%s_f_sp","%s_g_b","%s_g_s","%s_h_b","%s_h_s","%s_i_b","%s_i_c","%s_i_ce","%s_i_h","%s_i_hm","%s_i_i","%s_i_lss","%s_i_p","%s_i_r","%s_i_s","%s_i_sp","%s_i_spb","%s_i_st","%s_j_i","%s_k_hm","%s_k_p","%s_k_r","%s_k_sp","%s_l_b","%s_l_hm","%s_l_i","%s_l_lss","%s_l_p","%s_l_r","%s_l_s","%s_l_sp","%s_m_b","%s_m_hm","%s_m_i","%s_m_ip","%s_m_lss","%s_m_msp","%s_m_r","%s_matrix","%s_n_hm","%s_n_i","%s_n_l","%s_n_lss","%s_n_r","%s_n_st","%s_o_hm","%s_o_i","%s_o_l","%s_o_lss","%s_o_r","%s_o_st","%s_or","%s_p_b","%s_p_i","%s_pow","%s_q_hm","%s_q_i","%s_q_p","%s_q_r","%s_q_sp","%s_r_b","%s_r_i","%s_r_lss","%s_r_p","%s_r_r","%s_r_s","%s_r_sp","%s_s_b","%s_s_hm","%s_s_i","%s_s_ip","%s_s_lss","%s_s_r","%s_s_sp","%s_simp","%s_v_lss","%s_v_p","%s_v_r","%s_v_s","%s_x_b","%s_x_hm","%s_x_i","%s_x_r","%s_y_p","%s_y_r","%s_y_sp","%s_z_p","%s_z_r","%s_z_sp","%sn","%sp_a_s","%sp_a_sp","%sp_and","%sp_c_s","%sp_ceil","%sp_cos","%sp_cumprod","%sp_cumsum","%sp_d_s","%sp_d_sp","%sp_diag","%sp_e","%sp_exp","%sp_f_s","%sp_floor","%sp_gsort","%sp_i_ce","%sp_i_h","%sp_i_s","%sp_i_sp","%sp_i_st","%sp_int","%sp_inv","%sp_k_s","%sp_k_sp","%sp_l_s","%sp_l_sp","%sp_length","%sp_norm","%sp_or","%sp_p_s","%sp_prod","%sp_q_s","%sp_q_sp","%sp_r_s","%sp_r_sp","%sp_round","%sp_s_s","%sp_s_sp","%sp_sin","%sp_sqrt","%sp_string","%sp_sum","%sp_tril","%sp_triu","%sp_y_s","%sp_y_sp","%
sp_z_s","%sp_z_sp","%spb_and","%spb_c_b","%spb_cumprod","%spb_cumsum","%spb_diag","%spb_e","%spb_f_b","%spb_g_b","%spb_g_spb","%spb_h_b","%spb_h_spb","%spb_i_b","%spb_i_ce","%spb_i_h","%spb_i_st","%spb_or","%spb_prod","%spb_sum","%spb_tril","%spb_triu","%st_6","%st_c_st","%st_e","%st_f_st","%st_i_b","%st_i_c","%st_i_fptr","%st_i_h","%st_i_i","%st_i_ip","%st_i_lss","%st_i_msp","%st_i_p","%st_i_r","%st_i_s","%st_i_sp","%st_i_spb","%st_i_st","%st_matrix","%st_n_c","%st_n_l","%st_n_mc","%st_n_p","%st_n_s","%st_o_c","%st_o_l","%st_o_mc","%st_o_p","%st_o_s","%st_o_tl","%st_p","%st_size","%st_string","%st_t","%ticks_i_h","%xls_e","%xls_p","%xlssheet_e","%xlssheet_p","%xlssheet_size","%xlssheet_string","DominationRank","G_make","IsAScalar","NDcost","OS_Version","PlotSparse","ReadHBSparse","ReadmiMatrix","TCL_CreateSlave","WritemiMatrix","abcd","abinv","accept_func_default","accept_func_vfsa","acf","acosd","acosh","acoshm","acosm","acot","acotd","acoth","acsc","acscd","acsch","add_demo","add_help_chapter","add_module_help_chapter","add_param","add_profiling","adj2sp","aff2ab","ana_style","analpf","analyze","aplat","apropos","arhnk","arl2","arma2p","armac","armax","armax1","arobasestring2strings","arsimul","ascii2string","asciimat","asec","asecd","asech","asind","asinh","asinhm","asinm","assert_checkalmostequal","assert_checkequal","assert_checkerror","assert_checkfalse","assert_checkfilesequal","assert_checktrue","assert_comparecomplex","assert_computedigits","assert_cond2reltol","assert_cond2reqdigits","assert_generror","atand","atanh","atanhm","atanm","atomsAutoload","atomsAutoloadAdd","atomsAutoloadDel","atomsAutoloadList","atomsCategoryList","atomsCheckModule","atomsDepTreeShow","atomsGetConfig","atomsGetInstalled","atomsGetLoaded","atomsGetLoadedPath","atomsInstall","atomsIsInstalled","atomsIsLoaded","atomsList","atomsLoad","atomsRemove","atomsRepositoryAdd","atomsRepositoryDel","atomsRepositoryList","atomsRestoreConfig","atomsSaveConfig","atomsSearch","atomsSetConfig","atomsShow","atomsSystemInit","atomsSystemUpdate","atomsTest","atomsUpdate","atomsVersion","augment","auread","auwrite","balreal","bench_run","bilin","bilt","bin2dec","binomial","bitand","bitcmp","bitget","bitor","bitset","bitxor","black","blanks","bloc2exp","bloc2ss","block_parameter_error","bode","bstap","buttmag","bvodeS","bytecode","bytecodewalk","cainv","calendar","calfrq","canon","casc","cat","cat_code","cb_m2sci_gui","ccontrg","cell","cell2mat","cellstr","center","cepstrum","cfspec","char","chart","cheb1mag","cheb2mag","check_gateways","check_help","check_modules_xml","check_versions","chepol","chfact","chsolve","classmarkov","clean_help","clock","cls2dls","cmb_lin","cmndred","cmoment","coding_ga_binary","coding_ga_identity","coff","coffg","colcomp","colcompr","colinout","colregul","companion","complex","compute_initial_temp","cond","cond2sp","condestsp","config","configure_msifort","configure_msvc","cont_frm","cont_mat","contrss","conv","convert_to_float","convertindex","convol","convol2d","copfac","correl","cosd","cosh","coshm","cosm","cotd","cotg","coth","cothm","covar","createfun","createstruct","crossover_ga_binary","crossover_ga_default","csc","cscd","csch","csgn","csim","cspect","ctr_gram","czt","dae","daeoptions","damp","datafit","date","datenum","datevec","dbphi","dcf","ddp","dec2bin","dec2hex","dec2oct","del_help_chapter","del_module_help_chapter","demo_begin","demo_choose","demo_compiler","demo_end","demo_file_choice","demo_folder_choice","demo_function_choice","demo_gui","demo_mdialog","demo_message","demo_run"
,"demo_viewCode","denom","derivat","derivative","des2ss","des2tf","detectmsifort64tools","detectmsvc64tools","determ","detr","detrend","devtools_run_builder","dft","dhnorm","diff","diophant","dir","dirname","dispfiles","dllinfo","dscr","dsimul","dt_ility","dtsi","edit","edit_error","eigenmarkov","ell1mag","enlarge_shape","entropy","eomday","epred","eqfir","eqiir","equil","equil1","erf","erfc","erfcx","erfinv","etime","eval","evans","evstr","expression2code","extract_help_examples","factor","factorial","factors","faurre","ffilt","fft2","fftshift","fieldnames","filt_sinc","filter","findABCD","findAC","findBDK","findR","find_freq","find_links","find_scicos_version","findm","findmsifortcompiler","findmsvccompiler","findx0BD","firstnonsingleton","fit_dat","fix","fixedpointgcd","flipdim","flts","fminsearch","format_txt","fourplan","fprintf","frep2tf","freson","frfit","frmag","fscanf","fseek_origin","fsfirlin","fspec","fspecg","fstabst","ftest","ftuneq","fullfile","fullrf","fullrfk","fun2string","g_margin","gainplot","gamitg","gcare","gcd","gencompilationflags_unix","generateBlockImage","generateBlockImages","generic_i_ce","generic_i_h","generic_i_hm","generic_i_s","generic_i_st","genlib","genlib_old","genmarkov","geomean","getDiagramVersion","getModelicaPath","get_file_path","get_function_path","get_param","get_profile","get_scicos_version","getd","getscilabkeywords","getshell","gettklib","gfare","gfrancis","givens","glever","gmres","group","gschur","gspec","gtild","h2norm","h_cl","h_inf","h_inf_st","h_norm","hallchart","halt","hank","hankelsv","harmean","haveacompiler","head_comments","help","help_from_sci","help_skeleton","hermit","hex2dec","hilb","hilbert","horner","householder","hrmt","htrianr","hypermat","ifft","iir","iirgroup","iirlp","iirmod","ilib_build","ilib_compile","ilib_for_link","ilib_gen_Make","ilib_gen_Make_unix","ilib_gen_cleaner","ilib_gen_gateway","ilib_gen_loader","ilib_include_flag","ilib_mex_build","im_inv","importScicosDiagram","importScicosPal","importXcosDiagram","imrep2ss","ind2sub","inistate","init_ga_default","init_param","initial_scicos_tables","input","instruction2code","intc","intdec","integrate","interp1","interpln","intersect","intl","intsplin","inttrap","inv_coeff","invr","invrs","invsyslin","iqr","isLeapYear","is_absolute_path","is_param","iscell","iscellstr","isempty","isfield","isinf","isnan","isnum","issparse","isstruct","isvector","jmat","justify","kalm","karmarkar","kernel","kpure","krac2","kroneck","lattn","launchtest","lcf","lcm","lcmdiag","leastsq","leqe","leqr","lev","levin","lex_sort","lft","lin","lin2mu","lincos","lindquist","linf","linfn","linsolve","linspace","list2vec","list_param","listfiles","listfunctions","listvarinfile","lmisolver","lmitool","loadXcosLibs","loadmatfile","loadwave","log10","log2","logm","logspace","lqe","lqg","lqg2stan","lqg_ltr","lqr","ls","lyap","m2sci_gui","m_circle","macglov","macrovar","mad","makecell","manedit","mapsound","markp2ss","matfile2sci","mdelete","mean","meanf","median","mese","meshgrid","mfft","mfile2sci","minreal","minss","mkdir","modulo","moment","mrfit","msd","mstr2sci","mtlb","mtlb_0","mtlb_a","mtlb_all","mtlb_any","mtlb_axes","mtlb_axis","mtlb_beta","mtlb_box","mtlb_choices","mtlb_close","mtlb_colordef","mtlb_cond","mtlb_conv","mtlb_cov","mtlb_cumprod","mtlb_cumsum","mtlb_dec2hex","mtlb_delete","mtlb_diag","mtlb_diff","mtlb_dir","mtlb_double","mtlb_e","mtlb_echo","mtlb_error","mtlb_eval","mtlb_exist","mtlb_eye","mtlb_false","mtlb_fft","mtlb_fftshift","mtlb_filter","mtlb_find","mtlb_findstr","mtlb_fliplr","
mtlb_fopen","mtlb_format","mtlb_fprintf","mtlb_fread","mtlb_fscanf","mtlb_full","mtlb_fwrite","mtlb_get","mtlb_grid","mtlb_hold","mtlb_i","mtlb_ifft","mtlb_image","mtlb_imp","mtlb_int16","mtlb_int32","mtlb_int8","mtlb_is","mtlb_isa","mtlb_isfield","mtlb_isletter","mtlb_isspace","mtlb_l","mtlb_legendre","mtlb_linspace","mtlb_logic","mtlb_logical","mtlb_loglog","mtlb_lower","mtlb_max","mtlb_mean","mtlb_median","mtlb_mesh","mtlb_meshdom","mtlb_min","mtlb_more","mtlb_num2str","mtlb_ones","mtlb_pcolor","mtlb_plot","mtlb_prod","mtlb_qr","mtlb_qz","mtlb_rand","mtlb_randn","mtlb_rcond","mtlb_realmax","mtlb_realmin","mtlb_repmat","mtlb_s","mtlb_semilogx","mtlb_semilogy","mtlb_setstr","mtlb_size","mtlb_sort","mtlb_sortrows","mtlb_sprintf","mtlb_sscanf","mtlb_std","mtlb_strcmp","mtlb_strcmpi","mtlb_strfind","mtlb_strrep","mtlb_subplot","mtlb_sum","mtlb_t","mtlb_toeplitz","mtlb_tril","mtlb_triu","mtlb_true","mtlb_type","mtlb_uint16","mtlb_uint32","mtlb_uint8","mtlb_upper","mtlb_var","mtlb_zeros","mu2lin","mutation_ga_binary","mutation_ga_default","mvcorrel","mvvacov","nancumsum","nand2mean","nanmax","nanmean","nanmeanf","nanmedian","nanmin","nanstdev","nansum","narsimul","ndgrid","ndims","nehari","neigh_func_csa","neigh_func_default","neigh_func_fsa","neigh_func_vfsa","neldermead_cget","neldermead_configure","neldermead_costf","neldermead_defaultoutput","neldermead_destroy","neldermead_display","neldermead_function","neldermead_get","neldermead_log","neldermead_new","neldermead_restart","neldermead_search","neldermead_updatesimp","nextpow2","nfreq","nicholschart","nlev","nmplot_cget","nmplot_configure","nmplot_contour","nmplot_destroy","nmplot_display","nmplot_function","nmplot_get","nmplot_historyplot","nmplot_log","nmplot_new","nmplot_outputcmd","nmplot_restart","nmplot_search","nmplot_simplexhistory","noisegen","nonreg_test_run","norm","now","null","num2cell","numdiff","numer","nyquist","nyquistfrequencybounds","obs_gram","obscont","observer","obsv_mat","obsvss","oct2dec","odeoptions","optim_ga","optim_moga","optim_nsga","optim_nsga2","optim_sa","optimbase_cget","optimbase_checkbounds","optimbase_checkcostfun","optimbase_checkx0","optimbase_configure","optimbase_destroy","optimbase_display","optimbase_function","optimbase_get","optimbase_hasbounds","optimbase_hasconstraints","optimbase_hasnlcons","optimbase_histget","optimbase_histset","optimbase_incriter","optimbase_isfeasible","optimbase_isinbounds","optimbase_isinnonlincons","optimbase_log","optimbase_logshutdown","optimbase_logstartup","optimbase_new","optimbase_outputcmd","optimbase_outstruct","optimbase_proj2bnds","optimbase_set","optimbase_stoplog","optimbase_terminate","optimget","optimplotfunccount","optimplotfval","optimplotx","optimset","optimsimplex_center","optimsimplex_check","optimsimplex_compsomefv","optimsimplex_computefv","optimsimplex_deltafv","optimsimplex_deltafvmax","optimsimplex_destroy","optimsimplex_dirmat","optimsimplex_fvmean","optimsimplex_fvstdev","optimsimplex_fvvariance","optimsimplex_getall","optimsimplex_getallfv","optimsimplex_getallx","optimsimplex_getfv","optimsimplex_getn","optimsimplex_getnbve","optimsimplex_getve","optimsimplex_getx","optimsimplex_gradientfv","optimsimplex_log","optimsimplex_new","optimsimplex_print","optimsimplex_reflect","optimsimplex_setall","optimsimplex_setallfv","optimsimplex_setallx","optimsimplex_setfv","optimsimplex_setn","optimsimplex_setnbve","optimsimplex_setve","optimsimplex_setx","optimsimplex_shrink","optimsimplex_size","optimsimplex_sort","optimsimplex_tostring","optimsimplex_xb
ar","orth","p_margin","pack","pareto_filter","parrot","pbig","pca","pcg","pdiv","pen2ea","pencan","pencost","penlaur","perctl","perl","perms","permute","pertrans","pfactors","pfss","phasemag","phaseplot","phc","pinv","playsnd","plotprofile","plzr","pmodulo","pol2des","pol2str","polar","polfact","prbs_a","prettyprint","primes","princomp","profile","proj","projsl","projspec","psmall","pspect","qmr","qpsolve","quart","quaskro","rafiter","randpencil","range","rank","read_csv","readxls","recompilefunction","recons","reglin","regress","remezb","remove_param","remove_profiling","repfreq","replace_Ix_by_Fx","repmat","reset_profiling","resize_matrix","returntoscilab","rhs2code","ric_desc","riccati","rmdir","routh_t","rowcomp","rowcompr","rowinout","rowregul","rowshuff","rref","sample","samplef","samwr","savematfile","savewave","scanf","sci2exp","sciGUI_init","sci_sparse","scicos_getvalue","scicos_simulate","scicos_workspace_init","scisptdemo","scitest","sdiff","sec","secd","sech","selection_ga_elitist","selection_ga_random","sensi","set_param","setdiff","sgrid","show_margins","show_pca","showprofile","signm","sinc","sincd","sind","sinh","sinhm","sinm","sm2des","sm2ss","smga","smooth","solve","sound","soundsec","sp2adj","spaninter","spanplus","spantwo","specfact","speye","sprand","spzeros","sqroot","sqrtm","squarewave","squeeze","srfaur","srkf","ss2des","ss2ss","ss2tf","sscanf","sskf","ssprint","ssrand","st_deviation","st_i_generic","st_ility","stabil","statgain","stdev","stdevf","steadycos","strange","strcmpi","struct","sub2ind","sva","svplot","sylm","sylv","sysconv","sysdiag","sysfact","syslin","syssize","system","systmat","tabul","tand","tanh","tanhm","tanm","tbx_build_blocks","tbx_build_cleaner","tbx_build_gateway","tbx_build_gateway_clean","tbx_build_gateway_loader","tbx_build_help","tbx_build_help_loader","tbx_build_loader","tbx_build_macros","tbx_build_src","tbx_builder","tbx_builder_gateway","tbx_builder_gateway_lang","tbx_builder_help","tbx_builder_help_lang","tbx_builder_macros","tbx_builder_src","tbx_builder_src_lang","temp_law_csa","temp_law_default","temp_law_fsa","temp_law_huang","temp_law_vfsa","test_clean","test_on_columns","test_run","test_run_level","testexamples","tf2des","tf2ss","thrownan","tic","time_id","toc","toeplitz","tokenpos","toolboxes","trace","trans","translatepaths","tree2code","trfmod","trianfml","trimmean","trisolve","trzeros","typeof","ui_observer","union","unique","unit_test_run","unix_g","unix_s","unix_w","unix_x","unobs","unpack","variance","variancef","vec2list","vectorfind","ver","warnobsolete","wavread","wavwrite","wcenter","weekday","wfir","wfir_gui","whereami","who_user","whos","wiener","wigner","winclose","window","winlist","with_javasci","with_macros_source","with_modelica_compiler","with_pvm","with_texmacs","with_tk","write_csv","xcosBlockEval","xcosBlockInterface","xcosCodeGeneration","xcosConfigureModelica","xcosPal","xcosPalAdd","xcosPalAddBlock","xcosPalExport","xcosShowBlockWarning","xcosValidateBlockSet","xcosValidateCompareBlock","xcos_compile","xcos_run","xcos_simulate","xcos_workspace_init","xmltochm","xmltoformat","xmltohtml","xmltojar","xmltopdf","xmltops","xmltoweb","yulewalk","zeropen","zgrid","zpbutt","zpch1","zpch2","zpell") -builtin_consts = 
["\\$","%F","%T","%e","%eps","%f","%fftw","%gui","%i","%inf","%io","%modalWarning","%nan","%pi","%s","%t","%tk","%toolboxes","%toolboxes_dir","%z","PWD","SCI","SCIHOME","TMPDIR","a","ans","assertlib","atomslib","cacsdlib","compatibility_functilib","corelib","data_structureslib","demo_toolslib","development_toolslib","differential_equationlib","dynamic_linklib","elementary_functionslib","fd","fileiolib","functionslib","genetic_algorithmslib","helptoolslib","home","i","integerlib","interpolationlib","iolib","j","linear_algebralib","m2scilib","matiolib","modules_managerlib","myStr","neldermeadlib","optimbaselib","optimizationlib","optimsimplexlib","output_streamlib","overloadinglib","parameterslib","polynomialslib","scicos_autolib","scicos_utilslib","scinoteslib","signal_processinglib","simulated_annealinglib","soundlib","sparselib","special_functionslib","spreadsheetlib","statisticslib","stringlib","tclscilib","timelib","umfpacklib","varType","xcoslib"] +builtin_consts = ("\\$","%F","%T","%e","%eps","%f","%fftw","%gui","%i","%inf","%io","%modalWarning","%nan","%pi","%s","%t","%tk","%toolboxes","%toolboxes_dir","%z","PWD","SCI","SCIHOME","TMPDIR","a","ans","assertlib","atomslib","cacsdlib","compatibility_functilib","corelib","data_structureslib","demo_toolslib","development_toolslib","differential_equationlib","dynamic_linklib","elementary_functionslib","fd","fileiolib","functionslib","genetic_algorithmslib","helptoolslib","home","i","integerlib","interpolationlib","iolib","j","linear_algebralib","m2scilib","matiolib","modules_managerlib","myStr","neldermeadlib","optimbaselib","optimizationlib","optimsimplexlib","output_streamlib","overloadinglib","parameterslib","polynomialslib","scicos_autolib","scicos_utilslib","scinoteslib","signal_processinglib","simulated_annealinglib","soundlib","sparselib","special_functionslib","spreadsheetlib","statisticslib","stringlib","tclscilib","timelib","umfpacklib","varType","xcoslib") diff --git a/pygments/lexers/_sourcemodbuiltins.py b/pygments/lexers/_sourcemodbuiltins.py index eee84d0b..2e68c506 100644 --- a/pygments/lexers/_sourcemodbuiltins.py +++ b/pygments/lexers/_sourcemodbuiltins.py @@ -14,7 +14,7 @@ from __future__ import print_function -FUNCTIONS = ['TopMenuHandler', +FUNCTIONS = ('TopMenuHandler', 'CreateTopMenu', 'LoadTopMenuConfig', 'AddToTopMenu', @@ -1008,7 +1008,7 @@ FUNCTIONS = ['TopMenuHandler', 'PrepSDKCall_SetReturnInfo', 'PrepSDKCall_AddParameter', 'EndPrepSDKCall', - 'SDKCall'] + 'SDKCall') if __name__ == '__main__': import pprint diff --git a/pygments/lexers/_stan_builtins.py b/pygments/lexers/_stan_builtins.py index 4c4a27c1..97e98352 100644 --- a/pygments/lexers/_stan_builtins.py +++ b/pygments/lexers/_stan_builtins.py @@ -10,7 +10,7 @@ :license: BSD, see LICENSE for details. 
""" -KEYWORDS = [ u'else', +KEYWORDS = ( u'else', u'for', u'if', u'in', @@ -18,9 +18,9 @@ KEYWORDS = [ u'else', u'lp__', u'print', u'return', - u'while'] + u'while') -TYPES = [ u'cholesky_factor_corr', +TYPES = ( u'cholesky_factor_corr', u'cholesky_factor_cov', u'corr_matrix', u'cov_matrix', @@ -33,9 +33,9 @@ TYPES = [ u'cholesky_factor_corr', u'simplex', u'unit_vector', u'vector', - u'void'] + u'void') -FUNCTIONS = [ u'Phi', +FUNCTIONS = ( u'Phi', u'Phi_approx', u'abs', u'acos', @@ -349,9 +349,9 @@ FUNCTIONS = [ u'Phi', u'weibull_log', u'weibull_rng', u'wishart_log', - u'wishart_rng'] + u'wishart_rng') -DISTRIBUTIONS = [ u'bernoulli', +DISTRIBUTIONS = ( u'bernoulli', u'bernoulli_logit', u'beta', u'beta_binomial', @@ -398,9 +398,9 @@ DISTRIBUTIONS = [ u'bernoulli', u'uniform', u'von_mises', u'weibull', - u'wishart'] + u'wishart') -RESERVED = [ u'alignas', +RESERVED = ( u'alignas', u'alignof', u'and', u'and_eq', @@ -485,5 +485,4 @@ RESERVED = [ u'alignas', u'volatile', u'wchar_t', u'xor', - u'xor_eq'] - + u'xor_eq') diff --git a/pygments/lexers/_vimbuiltins.py b/pygments/lexers/_vimbuiltins.py index e95a8ec5..f3571e8f 100644 --- a/pygments/lexers/_vimbuiltins.py +++ b/pygments/lexers/_vimbuiltins.py @@ -2,11 +2,11 @@ # per-method size limit. def _getauto(): - return [('BufAdd','BufAdd'),('BufCreate','BufCreate'),('BufDelete','BufDelete'),('BufEnter','BufEnter'),('BufFilePost','BufFilePost'),('BufFilePre','BufFilePre'),('BufHidden','BufHidden'),('BufLeave','BufLeave'),('BufNew','BufNew'),('BufNewFile','BufNewFile'),('BufRead','BufRead'),('BufReadCmd','BufReadCmd'),('BufReadPost','BufReadPost'),('BufReadPre','BufReadPre'),('BufUnload','BufUnload'),('BufWinEnter','BufWinEnter'),('BufWinLeave','BufWinLeave'),('BufWipeout','BufWipeout'),('BufWrite','BufWrite'),('BufWriteCmd','BufWriteCmd'),('BufWritePost','BufWritePost'),('BufWritePre','BufWritePre'),('Cmd','Cmd'),('CmdwinEnter','CmdwinEnter'),('CmdwinLeave','CmdwinLeave'),('ColorScheme','ColorScheme'),('CursorHold','CursorHold'),('CursorHoldI','CursorHoldI'),('CursorMoved','CursorMoved'),('CursorMovedI','CursorMovedI'),('EncodingChanged','EncodingChanged'),('FileAppendCmd','FileAppendCmd'),('FileAppendPost','FileAppendPost'),('FileAppendPre','FileAppendPre'),('FileChangedRO','FileChangedRO'),('FileChangedShell','FileChangedShell'),('FileChangedShellPost','FileChangedShellPost'),('FileEncoding','FileEncoding'),('FileReadCmd','FileReadCmd'),('FileReadPost','FileReadPost'),('FileReadPre','FileReadPre'),('FileType','FileType'),('FileWriteCmd','FileWriteCmd'),('FileWritePost','FileWritePost'),('FileWritePre','FileWritePre'),('FilterReadPost','FilterReadPost'),('FilterReadPre','FilterReadPre'),('FilterWritePost','FilterWritePost'),('FilterWritePre','FilterWritePre'),('FocusGained','FocusGained'),('FocusLost','FocusLost'),('FuncUndefined','FuncUndefined'),('GUIEnter','GUIEnter'),('GUIFailed','GUIFailed'),('InsertChange','InsertChange'),('InsertCharPre','InsertCharPre'),('InsertEnter','InsertEnter'),('InsertLeave','InsertLeave'),('MenuPopup','MenuPopup'),('QuickFixCmdPost','QuickFixCmdPost'),('QuickFixCmdPre','QuickFixCmdPre'),('RemoteReply','RemoteReply'),('SessionLoadPost','SessionLoadPost'),('ShellCmdPost','ShellCmdPost'),('ShellFilterPost','ShellFilterPost'),('SourceCmd','SourceCmd'),('SourcePre','SourcePre'),('SpellFileMissing','SpellFileMissing'),('StdinReadPost','StdinReadPost'),('StdinReadPre','StdinReadPre'),('SwapExists','SwapExists'),('Syntax','Syntax'),('TabEnter','TabEnter'),('TabLeave','TabLeave'),('TermChanged','TermChanged'),('TermRespon
se','TermResponse'),('User','User'),('UserGettingBored','UserGettingBored'),('VimEnter','VimEnter'),('VimLeave','VimLeave'),('VimLeavePre','VimLeavePre'),('VimResized','VimResized'),('WinEnter','WinEnter'),('WinLeave','WinLeave'),('event','event')] + return (('BufAdd','BufAdd'),('BufCreate','BufCreate'),('BufDelete','BufDelete'),('BufEnter','BufEnter'),('BufFilePost','BufFilePost'),('BufFilePre','BufFilePre'),('BufHidden','BufHidden'),('BufLeave','BufLeave'),('BufNew','BufNew'),('BufNewFile','BufNewFile'),('BufRead','BufRead'),('BufReadCmd','BufReadCmd'),('BufReadPost','BufReadPost'),('BufReadPre','BufReadPre'),('BufUnload','BufUnload'),('BufWinEnter','BufWinEnter'),('BufWinLeave','BufWinLeave'),('BufWipeout','BufWipeout'),('BufWrite','BufWrite'),('BufWriteCmd','BufWriteCmd'),('BufWritePost','BufWritePost'),('BufWritePre','BufWritePre'),('Cmd','Cmd'),('CmdwinEnter','CmdwinEnter'),('CmdwinLeave','CmdwinLeave'),('ColorScheme','ColorScheme'),('CursorHold','CursorHold'),('CursorHoldI','CursorHoldI'),('CursorMoved','CursorMoved'),('CursorMovedI','CursorMovedI'),('EncodingChanged','EncodingChanged'),('FileAppendCmd','FileAppendCmd'),('FileAppendPost','FileAppendPost'),('FileAppendPre','FileAppendPre'),('FileChangedRO','FileChangedRO'),('FileChangedShell','FileChangedShell'),('FileChangedShellPost','FileChangedShellPost'),('FileEncoding','FileEncoding'),('FileReadCmd','FileReadCmd'),('FileReadPost','FileReadPost'),('FileReadPre','FileReadPre'),('FileType','FileType'),('FileWriteCmd','FileWriteCmd'),('FileWritePost','FileWritePost'),('FileWritePre','FileWritePre'),('FilterReadPost','FilterReadPost'),('FilterReadPre','FilterReadPre'),('FilterWritePost','FilterWritePost'),('FilterWritePre','FilterWritePre'),('FocusGained','FocusGained'),('FocusLost','FocusLost'),('FuncUndefined','FuncUndefined'),('GUIEnter','GUIEnter'),('GUIFailed','GUIFailed'),('InsertChange','InsertChange'),('InsertCharPre','InsertCharPre'),('InsertEnter','InsertEnter'),('InsertLeave','InsertLeave'),('MenuPopup','MenuPopup'),('QuickFixCmdPost','QuickFixCmdPost'),('QuickFixCmdPre','QuickFixCmdPre'),('RemoteReply','RemoteReply'),('SessionLoadPost','SessionLoadPost'),('ShellCmdPost','ShellCmdPost'),('ShellFilterPost','ShellFilterPost'),('SourceCmd','SourceCmd'),('SourcePre','SourcePre'),('SpellFileMissing','SpellFileMissing'),('StdinReadPost','StdinReadPost'),('StdinReadPre','StdinReadPre'),('SwapExists','SwapExists'),('Syntax','Syntax'),('TabEnter','TabEnter'),('TabLeave','TabLeave'),('TermChanged','TermChanged'),('TermResponse','TermResponse'),('User','User'),('UserGettingBored','UserGettingBored'),('VimEnter','VimEnter'),('VimLeave','VimLeave'),('VimLeavePre','VimLeavePre'),('VimResized','VimResized'),('WinEnter','WinEnter'),('WinLeave','WinLeave'),('event','event')) def _getcommand(): - return 
[('Allargs','Allargs'),('DiffOrig','DiffOrig'),('Error','Error'),('Man','Man'),('MyCommand','MyCommand'),('Mycmd','Mycmd'),('N','N'),('N','Next'),('P','P'),('P','Print'),('Ren','Ren'),('Rena','Rena'),('Renu','Renu'),('TOhtml','TOhtml'),('X','X'),('XMLent','XMLent'),('XMLns','XMLns'),('a','a'),('ab','ab'),('abc','abclear'),('abo','aboveleft'),('al','all'),('ar','ar'),('ar','args'),('arga','argadd'),('argd','argdelete'),('argdo','argdo'),('arge','argedit'),('argg','argglobal'),('argl','arglocal'),('argu','argument'),('as','ascii'),('au','autocmd'),('b','buffer'),('bN','bNext'),('ba','ball'),('bad','badd'),('bar','bar'),('bd','bdelete'),('bel','belowright'),('bf','bfirst'),('bl','blast'),('bm','bmodified'),('bn','bnext'),('bo','botright'),('bp','bprevious'),('br','br'),('br','brewind'),('brea','break'),('breaka','breakadd'),('breakd','breakdel'),('breakl','breaklist'),('bro','browse'),('browseset','browseset'),('bu','bu'),('buf','buf'),('bufdo','bufdo'),('buffers','buffers'),('bun','bunload'),('bw','bwipeout'),('c','c'),('c','change'),('cN','cN'),('cN','cNext'),('cNf','cNf'),('cNf','cNfile'),('cabc','cabclear'),('cad','cad'),('cad','caddexpr'),('caddb','caddbuffer'),('caddf','caddfile'),('cal','call'),('cat','catch'),('cb','cbuffer'),('cc','cc'),('ccl','cclose'),('cd','cd'),('ce','center'),('cex','cexpr'),('cf','cfile'),('cfir','cfirst'),('cg','cgetfile'),('cgetb','cgetbuffer'),('cgete','cgetexpr'),('changes','changes'),('chd','chdir'),('che','checkpath'),('checkt','checktime'),('cl','cl'),('cl','clist'),('cla','clast'),('clo','close'),('cmapc','cmapclear'),('cmdname','cmdname'),('cn','cn'),('cn','cnext'),('cnew','cnewer'),('cnf','cnf'),('cnf','cnfile'),('co','copy'),('col','colder'),('colo','colorscheme'),('com','com'),('comc','comclear'),('comment','comment'),('comp','compiler'),('con','con'),('con','continue'),('conf','confirm'),('cope','copen'),('count','count'),('cp','cprevious'),('cpf','cpfile'),('cq','cquit'),('cr','crewind'),('cs','cs'),('cscope','cscope'),('cstag','cstag'),('cuna','cunabbrev'),('cw','cwindow'),('d','d'),('d','delete'),('de','de'),('debug','debug'),('debugg','debuggreedy'),('del','del'),('delc','delcommand'),('delf','delf'),('delf','delfunction'),('delm','delmarks'),('di','di'),('di','display'),('diffg','diffget'),('diffo','diffo'),('diffoff','diffoff'),('diffp','diffp'),('diffpatch','diffpatch'),('diffpu','diffput'),('diffsplit','diffsplit'),('difft','difft'),('diffthis','diffthis'),('diffu','diffupdate'),('dig','dig'),('dig','digraphs'),('dj','djump'),('dl','dlist'),('do','do'),('doau','doau'),('dr','drop'),('ds','dsearch'),('dsp','dsplit'),('dwim','dwim'),('e','e'),('e','e'),('e','e'),('e','e'),('e','e'),('e','e'),('e','e'),('e','e'),('e','e'),('e','edit'),('ea','ea'),('earlier','earlier'),('ec','ec'),('echoe','echoerr'),('echom','echomsg'),('echon','echon'),('el','else'),('elsei','elseif'),('em','emenu'),('emenu','emenu'),('en','en'),('en','endif'),('endf','endf'),('endf','endfunction'),('endfo','endfor'),('endfun','endfun'),('endt','endtry'),('endw','endwhile'),('ene','enew'),('ex','ex'),('exi','exit'),('exu','exusage'),('f','f'),('f','file'),('filename','filename'),('files','files'),('filet','filet'),('filetype','filetype'),('fin','fin'),('fin','find'),('fina','finally'),('fini','finish'),('fir','first'),('fix','fixdel'),('fo','fold'),('foldc','foldclose'),('foldd','folddoopen'),('folddoc','folddoclosed'),('foldo','foldopen'),('for','for'),('fu','fu'),('fu','function'),('fun','fun'),('g','g'),('get','get'),('go','goto'),('gr','grep'),('grepa','grepadd'),('gs','gs
'),('gs','gs'),('gui','gui'),('gvim','gvim'),('h','h'),('h','h'),('h','h'),('h','h'),('h','help'),('ha','hardcopy'),('helpf','helpfind'),('helpg','helpgrep'),('helpt','helptags'),('hi','hi'),('hid','hide'),('his','history'),('i','i'),('ia','ia'),('iabc','iabclear'),('if','if'),('ij','ijump'),('il','ilist'),('imapc','imapclear'),('in','in'),('index','index'),('intro','intro'),('is','isearch'),('isp','isplit'),('iuna','iunabbrev'),('j','join'),('ju','jumps'),('k','k'),('kee','keepmarks'),('keepa','keepa'),('keepalt','keepalt'),('keepj','keepjumps'),('l','l'),('l','list'),('lN','lN'),('lN','lNext'),('lNf','lNf'),('lNf','lNfile'),('la','la'),('la','last'),('lad','lad'),('lad','laddexpr'),('laddb','laddbuffer'),('laddf','laddfile'),('lan','lan'),('lan','language'),('lat','lat'),('later','later'),('lb','lbuffer'),('lc','lcd'),('lch','lchdir'),('lcl','lclose'),('lcs','lcs'),('lcscope','lcscope'),('le','left'),('lefta','leftabove'),('let','let'),('lex','lexpr'),('lf','lfile'),('lfir','lfirst'),('lg','lgetfile'),('lgetb','lgetbuffer'),('lgete','lgetexpr'),('lgr','lgrep'),('lgrepa','lgrepadd'),('lh','lhelpgrep'),('ll','ll'),('lla','llast'),('lli','llist'),('lmak','lmake'),('lmapc','lmapclear'),('lne','lne'),('lne','lnext'),('lnew','lnewer'),('lnf','lnf'),('lnf','lnfile'),('lo','lo'),('lo','loadview'),('loadk','loadk'),('loadkeymap','loadkeymap'),('loc','lockmarks'),('locale','locale'),('lockv','lockvar'),('lol','lolder'),('lop','lopen'),('lp','lprevious'),('lpf','lpfile'),('lr','lrewind'),('ls','ls'),('lt','ltag'),('lua','lua'),('luado','luado'),('luafile','luafile'),('lv','lvimgrep'),('lvimgrepa','lvimgrepadd'),('lw','lwindow'),('m','move'),('ma','ma'),('ma','mark'),('main','main'),('main','main'),('mak','make'),('marks','marks'),('mat','match'),('menut','menut'),('menut','menutranslate'),('mes','mes'),('messages','messages'),('mk','mk'),('mk','mkexrc'),('mkdir','mkdir'),('mks','mksession'),('mksp','mkspell'),('mkv','mkv'),('mkv','mkvimrc'),('mkvie','mkview'),('mo','mo'),('mod','mode'),('mv','mv'),('mz','mz'),('mz','mzscheme'),('mzf','mzfile'),('n','n'),('n','n'),('n','next'),('nb','nbkey'),('nbc','nbclose'),('nbs','nbstart'),('ne','ne'),('new','new'),('nkf','nkf'),('nmapc','nmapclear'),('noa','noa'),('noautocmd','noautocmd'),('noh','nohlsearch'),('nu','number'),('o','o'),('o','open'),('ol','oldfiles'),('omapc','omapclear'),('on','only'),('opt','options'),('ownsyntax','ownsyntax'),('p','p'),('p','p'),('p','p'),('p','p'),('p','p'),('p','p'),('p','p'),('p','p'),('p','p'),('p','print'),('pat','pat'),('pat','pat'),('pc','pclose'),('pe','pe'),('pe','perl'),('ped','pedit'),('perld','perldo'),('po','pop'),('popu','popu'),('popu','popup'),('pp','ppop'),('pr','pr'),('pre','preserve'),('prev','previous'),('pro','pro'),('prof','profile'),('profd','profdel'),('promptf','promptfind'),('promptr','promptrepl'),('ps','psearch'),('ptN','ptN'),('ptN','ptNext'),('pta','ptag'),('ptf','ptfirst'),('ptj','ptjump'),('ptl','ptlast'),('ptn','ptn'),('ptn','ptnext'),('ptp','ptprevious'),('ptr','ptrewind'),('pts','ptselect'),('pu','put'),('pw','pwd'),('py','py'),('py','python'),('py3','py3'),('py3','py3'),('py3file','py3file'),('pyf','pyfile'),('python3','python3'),('q','q'),('q','quit'),('qa','qall'),('quita','quitall'),('quote','quote'),('r','r'),('r','r'),('r','r'),('r','r'),('r','r'),('r','r'),('r','r'),('r','r'),('r','r'),('r','r'),('r','r'),('r','read'),('re','re'),('rec','recover'),('red','red'),('red','redo'),('redi','redir'),('redr','redraw'),('redraws','redrawstatus'),('reg','registers'),('res','resize'),('ret','retab
'),('retu','return'),('rew','rewind'),('ri','right'),('rightb','rightbelow'),('ru','ru'),('ru','runtime'),('rub','ruby'),('rubyd','rubydo'),('rubyf','rubyfile'),('rundo','rundo'),('rv','rviminfo'),('s','s'),('s','s'),('s','s'),('s','s'),('sN','sNext'),('sa','sargument'),('sal','sall'),('san','sandbox'),('sav','saveas'),('sb','sbuffer'),('sbN','sbNext'),('sba','sball'),('sbf','sbfirst'),('sbl','sblast'),('sbm','sbmodified'),('sbn','sbnext'),('sbp','sbprevious'),('sbr','sbrewind'),('scrip','scrip'),('scrip','scriptnames'),('scripte','scriptencoding'),('scs','scs'),('scscope','scscope'),('se','set'),('setf','setfiletype'),('setg','setglobal'),('setl','setlocal'),('sf','sfind'),('sfir','sfirst'),('sh','shell'),('si','si'),('sig','sig'),('sign','sign'),('sil','silent'),('sim','simalt'),('sl','sl'),('sl','sleep'),('sla','slast'),('sm','smagic'),('sm','smap'),('sme','sme'),('smenu','smenu'),('sn','snext'),('sni','sniff'),('sno','snomagic'),('snoreme','snoreme'),('snoremenu','snoremenu'),('so','so'),('so','source'),('sor','sort'),('sp','split'),('spe','spe'),('spe','spellgood'),('spelld','spelldump'),('spelli','spellinfo'),('spellr','spellrepall'),('spellu','spellundo'),('spellw','spellwrong'),('spr','sprevious'),('sre','srewind'),('st','st'),('st','stop'),('sta','stag'),('star','star'),('star','startinsert'),('start','start'),('startg','startgreplace'),('startr','startreplace'),('stj','stjump'),('stopi','stopinsert'),('sts','stselect'),('sub','sub'),('sub','sub'),('sun','sunhide'),('sunme','sunme'),('sunmenu','sunmenu'),('sus','suspend'),('sv','sview'),('sw','swapname'),('sy','sy'),('syn','syn'),('sync','sync'),('syncbind','syncbind'),('synlist','synlist'),('t','t'),('t','t'),('t','t'),('tN','tN'),('tN','tNext'),('ta','ta'),('ta','tag'),('tab','tab'),('tabN','tabN'),('tabN','tabNext'),('tabc','tabclose'),('tabd','tabdo'),('tabe','tabedit'),('tabf','tabfind'),('tabfir','tabfirst'),('tabl','tablast'),('tabm','tabmove'),('tabn','tabnext'),('tabnew','tabnew'),('tabo','tabonly'),('tabp','tabprevious'),('tabr','tabrewind'),('tabs','tabs'),('tags','tags'),('tc','tcl'),('tcld','tcldo'),('tclf','tclfile'),('te','tearoff'),('tf','tfirst'),('th','throw'),('tj','tjump'),('tl','tlast'),('tm','tm'),('tm','tmenu'),('tn','tn'),('tn','tnext'),('to','topleft'),('tp','tprevious'),('tr','tr'),('tr','trewind'),('try','try'),('ts','tselect'),('tu','tu'),('tu','tunmenu'),('u','u'),('u','undo'),('un','un'),('una','unabbreviate'),('undoj','undojoin'),('undol','undolist'),('unh','unhide'),('unl','unl'),('unlo','unlockvar'),('uns','unsilent'),('up','update'),('v','v'),('ve','ve'),('ve','version'),('verb','verbose'),('version','version'),('version','version'),('vert','vertical'),('vi','vi'),('vi','visual'),('vie','view'),('vim','vimgrep'),('vimgrepa','vimgrepadd'),('viu','viusage'),('vmapc','vmapclear'),('vne','vnew'),('vs','vsplit'),('w','w'),('w','write'),('wN','wNext'),('wa','wall'),('wh','while'),('win','win'),('win','winsize'),('winc','wincmd'),('windo','windo'),('winp','winpos'),('wn','wnext'),('wp','wprevious'),('wq','wq'),('wqa','wqall'),('ws','wsverb'),('wundo','wundo'),('wv','wviminfo'),('x','x'),('x','xit'),('xa','xall'),('xmapc','xmapclear'),('xme','xme'),('xmenu','xmenu'),('xnoreme','xnoreme'),('xnoremenu','xnoremenu'),('xterm','xterm'),('xunme','xunme'),('xunmenu','xunmenu'),('xwininfo','xwininfo'),('y','yank')] + return 
(('Allargs','Allargs'),('DiffOrig','DiffOrig'),('Error','Error'),('Man','Man'),('MyCommand','MyCommand'),('Mycmd','Mycmd'),('N','N'),('N','Next'),('P','P'),('P','Print'),('Ren','Ren'),('Rena','Rena'),('Renu','Renu'),('TOhtml','TOhtml'),('X','X'),('XMLent','XMLent'),('XMLns','XMLns'),('a','a'),('ab','ab'),('abc','abclear'),('abo','aboveleft'),('al','all'),('ar','ar'),('ar','args'),('arga','argadd'),('argd','argdelete'),('argdo','argdo'),('arge','argedit'),('argg','argglobal'),('argl','arglocal'),('argu','argument'),('as','ascii'),('au','autocmd'),('b','buffer'),('bN','bNext'),('ba','ball'),('bad','badd'),('bar','bar'),('bd','bdelete'),('bel','belowright'),('bf','bfirst'),('bl','blast'),('bm','bmodified'),('bn','bnext'),('bo','botright'),('bp','bprevious'),('br','br'),('br','brewind'),('brea','break'),('breaka','breakadd'),('breakd','breakdel'),('breakl','breaklist'),('bro','browse'),('browseset','browseset'),('bu','bu'),('buf','buf'),('bufdo','bufdo'),('buffers','buffers'),('bun','bunload'),('bw','bwipeout'),('c','c'),('c','change'),('cN','cN'),('cN','cNext'),('cNf','cNf'),('cNf','cNfile'),('cabc','cabclear'),('cad','cad'),('cad','caddexpr'),('caddb','caddbuffer'),('caddf','caddfile'),('cal','call'),('cat','catch'),('cb','cbuffer'),('cc','cc'),('ccl','cclose'),('cd','cd'),('ce','center'),('cex','cexpr'),('cf','cfile'),('cfir','cfirst'),('cg','cgetfile'),('cgetb','cgetbuffer'),('cgete','cgetexpr'),('changes','changes'),('chd','chdir'),('che','checkpath'),('checkt','checktime'),('cl','cl'),('cl','clist'),('cla','clast'),('clo','close'),('cmapc','cmapclear'),('cmdname','cmdname'),('cn','cn'),('cn','cnext'),('cnew','cnewer'),('cnf','cnf'),('cnf','cnfile'),('co','copy'),('col','colder'),('colo','colorscheme'),('com','com'),('comc','comclear'),('comment','comment'),('comp','compiler'),('con','con'),('con','continue'),('conf','confirm'),('cope','copen'),('count','count'),('cp','cprevious'),('cpf','cpfile'),('cq','cquit'),('cr','crewind'),('cs','cs'),('cscope','cscope'),('cstag','cstag'),('cuna','cunabbrev'),('cw','cwindow'),('d','d'),('d','delete'),('de','de'),('debug','debug'),('debugg','debuggreedy'),('del','del'),('delc','delcommand'),('delf','delf'),('delf','delfunction'),('delm','delmarks'),('di','di'),('di','display'),('diffg','diffget'),('diffo','diffo'),('diffoff','diffoff'),('diffp','diffp'),('diffpatch','diffpatch'),('diffpu','diffput'),('diffsplit','diffsplit'),('difft','difft'),('diffthis','diffthis'),('diffu','diffupdate'),('dig','dig'),('dig','digraphs'),('dj','djump'),('dl','dlist'),('do','do'),('doau','doau'),('dr','drop'),('ds','dsearch'),('dsp','dsplit'),('dwim','dwim'),('e','e'),('e','e'),('e','e'),('e','e'),('e','e'),('e','e'),('e','e'),('e','e'),('e','e'),('e','edit'),('ea','ea'),('earlier','earlier'),('ec','ec'),('echoe','echoerr'),('echom','echomsg'),('echon','echon'),('el','else'),('elsei','elseif'),('em','emenu'),('emenu','emenu'),('en','en'),('en','endif'),('endf','endf'),('endf','endfunction'),('endfo','endfor'),('endfun','endfun'),('endt','endtry'),('endw','endwhile'),('ene','enew'),('ex','ex'),('exi','exit'),('exu','exusage'),('f','f'),('f','file'),('filename','filename'),('files','files'),('filet','filet'),('filetype','filetype'),('fin','fin'),('fin','find'),('fina','finally'),('fini','finish'),('fir','first'),('fix','fixdel'),('fo','fold'),('foldc','foldclose'),('foldd','folddoopen'),('folddoc','folddoclosed'),('foldo','foldopen'),('for','for'),('fu','fu'),('fu','function'),('fun','fun'),('g','g'),('get','get'),('go','goto'),('gr','grep'),('grepa','grepadd'),('gs','gs
'),('gs','gs'),('gui','gui'),('gvim','gvim'),('h','h'),('h','h'),('h','h'),('h','h'),('h','help'),('ha','hardcopy'),('helpf','helpfind'),('helpg','helpgrep'),('helpt','helptags'),('hi','hi'),('hid','hide'),('his','history'),('i','i'),('ia','ia'),('iabc','iabclear'),('if','if'),('ij','ijump'),('il','ilist'),('imapc','imapclear'),('in','in'),('index','index'),('intro','intro'),('is','isearch'),('isp','isplit'),('iuna','iunabbrev'),('j','join'),('ju','jumps'),('k','k'),('kee','keepmarks'),('keepa','keepa'),('keepalt','keepalt'),('keepj','keepjumps'),('l','l'),('l','list'),('lN','lN'),('lN','lNext'),('lNf','lNf'),('lNf','lNfile'),('la','la'),('la','last'),('lad','lad'),('lad','laddexpr'),('laddb','laddbuffer'),('laddf','laddfile'),('lan','lan'),('lan','language'),('lat','lat'),('later','later'),('lb','lbuffer'),('lc','lcd'),('lch','lchdir'),('lcl','lclose'),('lcs','lcs'),('lcscope','lcscope'),('le','left'),('lefta','leftabove'),('let','let'),('lex','lexpr'),('lf','lfile'),('lfir','lfirst'),('lg','lgetfile'),('lgetb','lgetbuffer'),('lgete','lgetexpr'),('lgr','lgrep'),('lgrepa','lgrepadd'),('lh','lhelpgrep'),('ll','ll'),('lla','llast'),('lli','llist'),('lmak','lmake'),('lmapc','lmapclear'),('lne','lne'),('lne','lnext'),('lnew','lnewer'),('lnf','lnf'),('lnf','lnfile'),('lo','lo'),('lo','loadview'),('loadk','loadk'),('loadkeymap','loadkeymap'),('loc','lockmarks'),('locale','locale'),('lockv','lockvar'),('lol','lolder'),('lop','lopen'),('lp','lprevious'),('lpf','lpfile'),('lr','lrewind'),('ls','ls'),('lt','ltag'),('lua','lua'),('luado','luado'),('luafile','luafile'),('lv','lvimgrep'),('lvimgrepa','lvimgrepadd'),('lw','lwindow'),('m','move'),('ma','ma'),('ma','mark'),('main','main'),('main','main'),('mak','make'),('marks','marks'),('mat','match'),('menut','menut'),('menut','menutranslate'),('mes','mes'),('messages','messages'),('mk','mk'),('mk','mkexrc'),('mkdir','mkdir'),('mks','mksession'),('mksp','mkspell'),('mkv','mkv'),('mkv','mkvimrc'),('mkvie','mkview'),('mo','mo'),('mod','mode'),('mv','mv'),('mz','mz'),('mz','mzscheme'),('mzf','mzfile'),('n','n'),('n','n'),('n','next'),('nb','nbkey'),('nbc','nbclose'),('nbs','nbstart'),('ne','ne'),('new','new'),('nkf','nkf'),('nmapc','nmapclear'),('noa','noa'),('noautocmd','noautocmd'),('noh','nohlsearch'),('nu','number'),('o','o'),('o','open'),('ol','oldfiles'),('omapc','omapclear'),('on','only'),('opt','options'),('ownsyntax','ownsyntax'),('p','p'),('p','p'),('p','p'),('p','p'),('p','p'),('p','p'),('p','p'),('p','p'),('p','p'),('p','print'),('pat','pat'),('pat','pat'),('pc','pclose'),('pe','pe'),('pe','perl'),('ped','pedit'),('perld','perldo'),('po','pop'),('popu','popu'),('popu','popup'),('pp','ppop'),('pr','pr'),('pre','preserve'),('prev','previous'),('pro','pro'),('prof','profile'),('profd','profdel'),('promptf','promptfind'),('promptr','promptrepl'),('ps','psearch'),('ptN','ptN'),('ptN','ptNext'),('pta','ptag'),('ptf','ptfirst'),('ptj','ptjump'),('ptl','ptlast'),('ptn','ptn'),('ptn','ptnext'),('ptp','ptprevious'),('ptr','ptrewind'),('pts','ptselect'),('pu','put'),('pw','pwd'),('py','py'),('py','python'),('py3','py3'),('py3','py3'),('py3file','py3file'),('pyf','pyfile'),('python3','python3'),('q','q'),('q','quit'),('qa','qall'),('quita','quitall'),('quote','quote'),('r','r'),('r','r'),('r','r'),('r','r'),('r','r'),('r','r'),('r','r'),('r','r'),('r','r'),('r','r'),('r','r'),('r','read'),('re','re'),('rec','recover'),('red','red'),('red','redo'),('redi','redir'),('redr','redraw'),('redraws','redrawstatus'),('reg','registers'),('res','resize'),('ret','retab
'),('retu','return'),('rew','rewind'),('ri','right'),('rightb','rightbelow'),('ru','ru'),('ru','runtime'),('rub','ruby'),('rubyd','rubydo'),('rubyf','rubyfile'),('rundo','rundo'),('rv','rviminfo'),('s','s'),('s','s'),('s','s'),('s','s'),('sN','sNext'),('sa','sargument'),('sal','sall'),('san','sandbox'),('sav','saveas'),('sb','sbuffer'),('sbN','sbNext'),('sba','sball'),('sbf','sbfirst'),('sbl','sblast'),('sbm','sbmodified'),('sbn','sbnext'),('sbp','sbprevious'),('sbr','sbrewind'),('scrip','scrip'),('scrip','scriptnames'),('scripte','scriptencoding'),('scs','scs'),('scscope','scscope'),('se','set'),('setf','setfiletype'),('setg','setglobal'),('setl','setlocal'),('sf','sfind'),('sfir','sfirst'),('sh','shell'),('si','si'),('sig','sig'),('sign','sign'),('sil','silent'),('sim','simalt'),('sl','sl'),('sl','sleep'),('sla','slast'),('sm','smagic'),('sm','smap'),('sme','sme'),('smenu','smenu'),('sn','snext'),('sni','sniff'),('sno','snomagic'),('snoreme','snoreme'),('snoremenu','snoremenu'),('so','so'),('so','source'),('sor','sort'),('sp','split'),('spe','spe'),('spe','spellgood'),('spelld','spelldump'),('spelli','spellinfo'),('spellr','spellrepall'),('spellu','spellundo'),('spellw','spellwrong'),('spr','sprevious'),('sre','srewind'),('st','st'),('st','stop'),('sta','stag'),('star','star'),('star','startinsert'),('start','start'),('startg','startgreplace'),('startr','startreplace'),('stj','stjump'),('stopi','stopinsert'),('sts','stselect'),('sub','sub'),('sub','sub'),('sun','sunhide'),('sunme','sunme'),('sunmenu','sunmenu'),('sus','suspend'),('sv','sview'),('sw','swapname'),('sy','sy'),('syn','syn'),('sync','sync'),('syncbind','syncbind'),('synlist','synlist'),('t','t'),('t','t'),('t','t'),('tN','tN'),('tN','tNext'),('ta','ta'),('ta','tag'),('tab','tab'),('tabN','tabN'),('tabN','tabNext'),('tabc','tabclose'),('tabd','tabdo'),('tabe','tabedit'),('tabf','tabfind'),('tabfir','tabfirst'),('tabl','tablast'),('tabm','tabmove'),('tabn','tabnext'),('tabnew','tabnew'),('tabo','tabonly'),('tabp','tabprevious'),('tabr','tabrewind'),('tabs','tabs'),('tags','tags'),('tc','tcl'),('tcld','tcldo'),('tclf','tclfile'),('te','tearoff'),('tf','tfirst'),('th','throw'),('tj','tjump'),('tl','tlast'),('tm','tm'),('tm','tmenu'),('tn','tn'),('tn','tnext'),('to','topleft'),('tp','tprevious'),('tr','tr'),('tr','trewind'),('try','try'),('ts','tselect'),('tu','tu'),('tu','tunmenu'),('u','u'),('u','undo'),('un','un'),('una','unabbreviate'),('undoj','undojoin'),('undol','undolist'),('unh','unhide'),('unl','unl'),('unlo','unlockvar'),('uns','unsilent'),('up','update'),('v','v'),('ve','ve'),('ve','version'),('verb','verbose'),('version','version'),('version','version'),('vert','vertical'),('vi','vi'),('vi','visual'),('vie','view'),('vim','vimgrep'),('vimgrepa','vimgrepadd'),('viu','viusage'),('vmapc','vmapclear'),('vne','vnew'),('vs','vsplit'),('w','w'),('w','write'),('wN','wNext'),('wa','wall'),('wh','while'),('win','win'),('win','winsize'),('winc','wincmd'),('windo','windo'),('winp','winpos'),('wn','wnext'),('wp','wprevious'),('wq','wq'),('wqa','wqall'),('ws','wsverb'),('wundo','wundo'),('wv','wviminfo'),('x','x'),('x','xit'),('xa','xall'),('xmapc','xmapclear'),('xme','xme'),('xmenu','xmenu'),('xnoreme','xnoreme'),('xnoremenu','xnoremenu'),('xterm','xterm'),('xunme','xunme'),('xunmenu','xunmenu'),('xwininfo','xwininfo'),('y','yank')) def _getoption(): - return 
[('acd','acd'),('ai','ai'),('akm','akm'),('al','al'),('aleph','aleph'),('allowrevins','allowrevins'),('altkeymap','altkeymap'),('ambiwidth','ambiwidth'),('ambw','ambw'),('anti','anti'),('antialias','antialias'),('ar','ar'),('arab','arab'),('arabic','arabic'),('arabicshape','arabicshape'),('ari','ari'),('arshape','arshape'),('autochdir','autochdir'),('autoindent','autoindent'),('autoread','autoread'),('autowrite','autowrite'),('autowriteall','autowriteall'),('aw','aw'),('awa','awa'),('background','background'),('backspace','backspace'),('backup','backup'),('backupcopy','backupcopy'),('backupdir','backupdir'),('backupext','backupext'),('backupskip','backupskip'),('balloondelay','balloondelay'),('ballooneval','ballooneval'),('balloonexpr','balloonexpr'),('bdir','bdir'),('bdlay','bdlay'),('beval','beval'),('bex','bex'),('bexpr','bexpr'),('bg','bg'),('bh','bh'),('bin','bin'),('binary','binary'),('biosk','biosk'),('bioskey','bioskey'),('bk','bk'),('bkc','bkc'),('bl','bl'),('bomb','bomb'),('breakat','breakat'),('brk','brk'),('browsedir','browsedir'),('bs','bs'),('bsdir','bsdir'),('bsk','bsk'),('bt','bt'),('bufhidden','bufhidden'),('buflisted','buflisted'),('buftype','buftype'),('casemap','casemap'),('cb','cb'),('cc','cc'),('ccv','ccv'),('cd','cd'),('cdpath','cdpath'),('cedit','cedit'),('cf','cf'),('cfu','cfu'),('ch','ch'),('charconvert','charconvert'),('ci','ci'),('cin','cin'),('cindent','cindent'),('cink','cink'),('cinkeys','cinkeys'),('cino','cino'),('cinoptions','cinoptions'),('cinw','cinw'),('cinwords','cinwords'),('clipboard','clipboard'),('cmdheight','cmdheight'),('cmdwinheight','cmdwinheight'),('cmp','cmp'),('cms','cms'),('co','co'),('cocu','cocu'),('cole','cole'),('colorcolumn','colorcolumn'),('columns','columns'),('com','com'),('comments','comments'),('commentstring','commentstring'),('compatible','compatible'),('complete','complete'),('completefunc','completefunc'),('completeopt','completeopt'),('concealcursor','concealcursor'),('conceallevel','conceallevel'),('confirm','confirm'),('consk','consk'),('conskey','conskey'),('copyindent','copyindent'),('cot','cot'),('cp','cp'),('cpo','cpo'),('cpoptions','cpoptions'),('cpt','cpt'),('crb','crb'),('cryptmethod','cryptmethod'),('cscopepathcomp','cscopepathcomp'),('cscopeprg','cscopeprg'),('cscopequickfix','cscopequickfix'),('cscoperelative','cscoperelative'),('cscopetag','cscopetag'),('cscopetagorder','cscopetagorder'),('cscopeverbose','cscopeverbose'),('cspc','cspc'),('csprg','csprg'),('csqf','csqf'),('csre','csre'),('cst','cst'),('csto','csto'),('csverb','csverb'),('cuc','cuc'),('cul','cul'),('cursorbind','cursorbind'),('cursorcolumn','cursorcolumn'),('cursorline','cursorline'),('cwh','cwh'),('debug','debug'),('deco','deco'),('def','def'),('define','define'),('delcombine','delcombine'),('dex','dex'),('dg','dg'),('dict','dict'),('dictionary','dictionary'),('diff','diff'),('diffexpr','diffexpr'),('diffopt','diffopt'),('digraph','digraph'),('dip','dip'),('dir','dir'),('directory','directory'),('display','display'),('dy','dy'),('ea','ea'),('ead','ead'),('eadirection','eadirection'),('eb','eb'),('ed','ed'),('edcompatible','edcompatible'),('ef','ef'),('efm','efm'),('ei','ei'),('ek','ek'),('enc','enc'),('encoding','encoding'),('endofline','endofline'),('eol','eol'),('ep','ep'),('equalalways','equalalways'),('equalprg','equalprg'),('errorbells','errorbells'),('errorfile','errorfile'),('errorformat','errorformat'),('esckeys','esckeys'),('et','et'),('eventignore','eventignore'),('ex','ex'),('expandtab','expandtab'),('exrc','exrc'),('fcl','fcl'),('fcs','
fcs'),('fdc','fdc'),('fde','fde'),('fdi','fdi'),('fdl','fdl'),('fdls','fdls'),('fdm','fdm'),('fdn','fdn'),('fdo','fdo'),('fdt','fdt'),('fen','fen'),('fenc','fenc'),('fencs','fencs'),('fex','fex'),('ff','ff'),('ffs','ffs'),('fileencoding','fileencoding'),('fileencodings','fileencodings'),('fileformat','fileformat'),('fileformats','fileformats'),('filetype','filetype'),('fillchars','fillchars'),('fk','fk'),('fkmap','fkmap'),('flp','flp'),('fml','fml'),('fmr','fmr'),('fo','fo'),('foldclose','foldclose'),('foldcolumn','foldcolumn'),('foldenable','foldenable'),('foldexpr','foldexpr'),('foldignore','foldignore'),('foldlevel','foldlevel'),('foldlevelstart','foldlevelstart'),('foldmarker','foldmarker'),('foldmethod','foldmethod'),('foldminlines','foldminlines'),('foldnestmax','foldnestmax'),('foldopen','foldopen'),('foldtext','foldtext'),('formatexpr','formatexpr'),('formatlistpat','formatlistpat'),('formatoptions','formatoptions'),('formatprg','formatprg'),('fp','fp'),('fs','fs'),('fsync','fsync'),('ft','ft'),('gcr','gcr'),('gd','gd'),('gdefault','gdefault'),('gfm','gfm'),('gfn','gfn'),('gfs','gfs'),('gfw','gfw'),('ghr','ghr'),('go','go'),('gp','gp'),('grepformat','grepformat'),('grepprg','grepprg'),('gtl','gtl'),('gtt','gtt'),('guicursor','guicursor'),('guifont','guifont'),('guifontset','guifontset'),('guifontwide','guifontwide'),('guiheadroom','guiheadroom'),('guioptions','guioptions'),('guipty','guipty'),('guitablabel','guitablabel'),('guitabtooltip','guitabtooltip'),('helpfile','helpfile'),('helpheight','helpheight'),('helplang','helplang'),('hf','hf'),('hh','hh'),('hi','hi'),('hid','hid'),('hidden','hidden'),('highlight','highlight'),('history','history'),('hk','hk'),('hkmap','hkmap'),('hkmapp','hkmapp'),('hkp','hkp'),('hl','hl'),('hlg','hlg'),('hls','hls'),('hlsearch','hlsearch'),('ic','ic'),('icon','icon'),('iconstring','iconstring'),('ignorecase','ignorecase'),('im','im'),('imactivatekey','imactivatekey'),('imak','imak'),('imc','imc'),('imcmdline','imcmdline'),('imd','imd'),('imdisable','imdisable'),('imi','imi'),('iminsert','iminsert'),('ims','ims'),('imsearch','imsearch'),('inc','inc'),('include','include'),('includeexpr','includeexpr'),('incsearch','incsearch'),('inde','inde'),('indentexpr','indentexpr'),('indentkeys','indentkeys'),('indk','indk'),('inex','inex'),('inf','inf'),('infercase','infercase'),('inoremap','inoremap'),('insertmode','insertmode'),('invacd','invacd'),('invai','invai'),('invakm','invakm'),('invallowrevins','invallowrevins'),('invaltkeymap','invaltkeymap'),('invanti','invanti'),('invantialias','invantialias'),('invar','invar'),('invarab','invarab'),('invarabic','invarabic'),('invarabicshape','invarabicshape'),('invari','invari'),('invarshape','invarshape'),('invautochdir','invautochdir'),('invautoindent','invautoindent'),('invautoread','invautoread'),('invautowrite','invautowrite'),('invautowriteall','invautowriteall'),('invaw','invaw'),('invawa','invawa'),('invbackup','invbackup'),('invballooneval','invballooneval'),('invbeval','invbeval'),('invbin','invbin'),('invbinary','invbinary'),('invbiosk','invbiosk'),('invbioskey','invbioskey'),('invbk','invbk'),('invbl','invbl'),('invbomb','invbomb'),('invbuflisted','invbuflisted'),('invcf','invcf'),('invci','invci'),('invcin','invcin'),('invcindent','invcindent'),('invcompatible','invcompatible'),('invconfirm','invconfirm'),('invconsk','invconsk'),('invconskey','invconskey'),('invcopyindent','invcopyindent'),('invcp','invcp'),('invcrb','invcrb'),('invcscopetag','invcscopetag'),('invcscopeverbose','invcscopeverbose'),('inv
cst','invcst'),('invcsverb','invcsverb'),('invcuc','invcuc'),('invcul','invcul'),('invcursorbind','invcursorbind'),('invcursorcolumn','invcursorcolumn'),('invcursorline','invcursorline'),('invdeco','invdeco'),('invdelcombine','invdelcombine'),('invdg','invdg'),('invdiff','invdiff'),('invdigraph','invdigraph'),('invea','invea'),('inveb','inveb'),('inved','inved'),('invedcompatible','invedcompatible'),('invek','invek'),('invendofline','invendofline'),('inveol','inveol'),('invequalalways','invequalalways'),('inverrorbells','inverrorbells'),('invesckeys','invesckeys'),('invet','invet'),('invex','invex'),('invexpandtab','invexpandtab'),('invexrc','invexrc'),('invfen','invfen'),('invfk','invfk'),('invfkmap','invfkmap'),('invfoldenable','invfoldenable'),('invgd','invgd'),('invgdefault','invgdefault'),('invguipty','invguipty'),('invhid','invhid'),('invhidden','invhidden'),('invhk','invhk'),('invhkmap','invhkmap'),('invhkmapp','invhkmapp'),('invhkp','invhkp'),('invhls','invhls'),('invhlsearch','invhlsearch'),('invic','invic'),('invicon','invicon'),('invignorecase','invignorecase'),('invim','invim'),('invimc','invimc'),('invimcmdline','invimcmdline'),('invimd','invimd'),('invimdisable','invimdisable'),('invincsearch','invincsearch'),('invinf','invinf'),('invinfercase','invinfercase'),('invinsertmode','invinsertmode'),('invis','invis'),('invjoinspaces','invjoinspaces'),('invjs','invjs'),('invlazyredraw','invlazyredraw'),('invlbr','invlbr'),('invlinebreak','invlinebreak'),('invlisp','invlisp'),('invlist','invlist'),('invloadplugins','invloadplugins'),('invlpl','invlpl'),('invlz','invlz'),('invma','invma'),('invmacatsui','invmacatsui'),('invmagic','invmagic'),('invmh','invmh'),('invml','invml'),('invmod','invmod'),('invmodeline','invmodeline'),('invmodifiable','invmodifiable'),('invmodified','invmodified'),('invmore','invmore'),('invmousef','invmousef'),('invmousefocus','invmousefocus'),('invmousehide','invmousehide'),('invnu','invnu'),('invnumber','invnumber'),('invodev','invodev'),('invopendevice','invopendevice'),('invpaste','invpaste'),('invpi','invpi'),('invpreserveindent','invpreserveindent'),('invpreviewwindow','invpreviewwindow'),('invprompt','invprompt'),('invpvw','invpvw'),('invreadonly','invreadonly'),('invrelativenumber','invrelativenumber'),('invremap','invremap'),('invrestorescreen','invrestorescreen'),('invrevins','invrevins'),('invri','invri'),('invrightleft','invrightleft'),('invrl','invrl'),('invrnu','invrnu'),('invro','invro'),('invrs','invrs'),('invru','invru'),('invruler','invruler'),('invsb','invsb'),('invsc','invsc'),('invscb','invscb'),('invscrollbind','invscrollbind'),('invscs','invscs'),('invsecure','invsecure'),('invsft','invsft'),('invshellslash','invshellslash'),('invshelltemp','invshelltemp'),('invshiftround','invshiftround'),('invshortname','invshortname'),('invshowcmd','invshowcmd'),('invshowfulltag','invshowfulltag'),('invshowmatch','invshowmatch'),('invshowmode','invshowmode'),('invsi','invsi'),('invsm','invsm'),('invsmartcase','invsmartcase'),('invsmartindent','invsmartindent'),('invsmarttab','invsmarttab'),('invsmd','invsmd'),('invsn','invsn'),('invsol','invsol'),('invspell','invspell'),('invsplitbelow','invsplitbelow'),('invsplitright','invsplitright'),('invspr','invspr'),('invsr','invsr'),('invssl','invssl'),('invsta','invsta'),('invstartofline','invstartofline'),('invstmp','invstmp'),('invswapfile','invswapfile'),('invswf','invswf'),('invta','invta'),('invtagbsearch','invtagbsearch'),('invtagrelative','invtagrelative'),('invtagstack','invtagstack'),('invtbi','invtb
i'),('invtbidi','invtbidi'),('invtbs','invtbs'),('invtermbidi','invtermbidi'),('invterse','invterse'),('invtextauto','invtextauto'),('invtextmode','invtextmode'),('invtf','invtf'),('invtgst','invtgst'),('invtildeop','invtildeop'),('invtimeout','invtimeout'),('invtitle','invtitle'),('invto','invto'),('invtop','invtop'),('invtr','invtr'),('invttimeout','invttimeout'),('invttybuiltin','invttybuiltin'),('invttyfast','invttyfast'),('invtx','invtx'),('invvb','invvb'),('invvisualbell','invvisualbell'),('invwa','invwa'),('invwarn','invwarn'),('invwb','invwb'),('invweirdinvert','invweirdinvert'),('invwfh','invwfh'),('invwfw','invwfw'),('invwildignorecase','invwildignorecase'),('invwildmenu','invwildmenu'),('invwinfixheight','invwinfixheight'),('invwinfixwidth','invwinfixwidth'),('invwiv','invwiv'),('invwmnu','invwmnu'),('invwrap','invwrap'),('invwrapscan','invwrapscan'),('invwrite','invwrite'),('invwriteany','invwriteany'),('invwritebackup','invwritebackup'),('invws','invws'),('is','is'),('isf','isf'),('isfname','isfname'),('isi','isi'),('isident','isident'),('isk','isk'),('iskeyword','iskeyword'),('isp','isp'),('isprint','isprint'),('joinspaces','joinspaces'),('js','js'),('key','key'),('keymap','keymap'),('keymodel','keymodel'),('keywordprg','keywordprg'),('km','km'),('kmp','kmp'),('kp','kp'),('langmap','langmap'),('langmenu','langmenu'),('laststatus','laststatus'),('lazyredraw','lazyredraw'),('lbr','lbr'),('lcs','lcs'),('linebreak','linebreak'),('lines','lines'),('linespace','linespace'),('lisp','lisp'),('lispwords','lispwords'),('list','list'),('listchars','listchars'),('lm','lm'),('lmap','lmap'),('loadplugins','loadplugins'),('lpl','lpl'),('ls','ls'),('lsp','lsp'),('lw','lw'),('lz','lz'),('ma','ma'),('macatsui','macatsui'),('magic','magic'),('makeef','makeef'),('makeprg','makeprg'),('mat','mat'),('matchpairs','matchpairs'),('matchtime','matchtime'),('maxcombine','maxcombine'),('maxfuncdepth','maxfuncdepth'),('maxmapdepth','maxmapdepth'),('maxmem','maxmem'),('maxmempattern','maxmempattern'),('maxmemtot','maxmemtot'),('mco','mco'),('mef','mef'),('menuitems','menuitems'),('mfd','mfd'),('mh','mh'),('mis','mis'),('mkspellmem','mkspellmem'),('ml','ml'),('mls','mls'),('mm','mm'),('mmd','mmd'),('mmp','mmp'),('mmt','mmt'),('mod','mod'),('modeline','modeline'),('modelines','modelines'),('modifiable','modifiable'),('modified','modified'),('more','more'),('mouse','mouse'),('mousef','mousef'),('mousefocus','mousefocus'),('mousehide','mousehide'),('mousem','mousem'),('mousemodel','mousemodel'),('mouses','mouses'),('mouseshape','mouseshape'),('mouset','mouset'),('mousetime','mousetime'),('mp','mp'),('mps','mps'),('msm','msm'),('mzq','mzq'),('mzquantum','mzquantum'),('nf','nf'),('nnoremap','nnoremap'),('noacd','noacd'),('noai','noai'),('noakm','noakm'),('noallowrevins','noallowrevins'),('noaltkeymap','noaltkeymap'),('noanti','noanti'),('noantialias','noantialias'),('noar','noar'),('noarab','noarab'),('noarabic','noarabic'),('noarabicshape','noarabicshape'),('noari','noari'),('noarshape','noarshape'),('noautochdir','noautochdir'),('noautoindent','noautoindent'),('noautoread','noautoread'),('noautowrite','noautowrite'),('noautowriteall','noautowriteall'),('noaw','noaw'),('noawa','noawa'),('nobackup','nobackup'),('noballooneval','noballooneval'),('nobeval','nobeval'),('nobin','nobin'),('nobinary','nobinary'),('nobiosk','nobiosk'),('nobioskey','nobioskey'),('nobk','nobk'),('nobl','nobl'),('nobomb','nobomb'),('nobuflisted','nobuflisted'),('nocf','nocf'),('noci','noci'),('nocin','nocin'),('nocindent','nocindent'),('n
ocompatible','nocompatible'),('noconfirm','noconfirm'),('noconsk','noconsk'),('noconskey','noconskey'),('nocopyindent','nocopyindent'),('nocp','nocp'),('nocrb','nocrb'),('nocscopetag','nocscopetag'),('nocscopeverbose','nocscopeverbose'),('nocst','nocst'),('nocsverb','nocsverb'),('nocuc','nocuc'),('nocul','nocul'),('nocursorbind','nocursorbind'),('nocursorcolumn','nocursorcolumn'),('nocursorline','nocursorline'),('nodeco','nodeco'),('nodelcombine','nodelcombine'),('nodg','nodg'),('nodiff','nodiff'),('nodigraph','nodigraph'),('noea','noea'),('noeb','noeb'),('noed','noed'),('noedcompatible','noedcompatible'),('noek','noek'),('noendofline','noendofline'),('noeol','noeol'),('noequalalways','noequalalways'),('noerrorbells','noerrorbells'),('noesckeys','noesckeys'),('noet','noet'),('noex','noex'),('noexpandtab','noexpandtab'),('noexrc','noexrc'),('nofen','nofen'),('nofk','nofk'),('nofkmap','nofkmap'),('nofoldenable','nofoldenable'),('nogd','nogd'),('nogdefault','nogdefault'),('noguipty','noguipty'),('nohid','nohid'),('nohidden','nohidden'),('nohk','nohk'),('nohkmap','nohkmap'),('nohkmapp','nohkmapp'),('nohkp','nohkp'),('nohls','nohls'),('nohlsearch','nohlsearch'),('noic','noic'),('noicon','noicon'),('noignorecase','noignorecase'),('noim','noim'),('noimc','noimc'),('noimcmdline','noimcmdline'),('noimd','noimd'),('noimdisable','noimdisable'),('noincsearch','noincsearch'),('noinf','noinf'),('noinfercase','noinfercase'),('noinsertmode','noinsertmode'),('nois','nois'),('nojoinspaces','nojoinspaces'),('nojs','nojs'),('nolazyredraw','nolazyredraw'),('nolbr','nolbr'),('nolinebreak','nolinebreak'),('nolisp','nolisp'),('nolist','nolist'),('noloadplugins','noloadplugins'),('nolpl','nolpl'),('nolz','nolz'),('noma','noma'),('nomacatsui','nomacatsui'),('nomagic','nomagic'),('nomh','nomh'),('noml','noml'),('nomod','nomod'),('nomodeline','nomodeline'),('nomodifiable','nomodifiable'),('nomodified','nomodified'),('nomore','nomore'),('nomousef','nomousef'),('nomousefocus','nomousefocus'),('nomousehide','nomousehide'),('nonu','nonu'),('nonumber','nonumber'),('noodev','noodev'),('noopendevice','noopendevice'),('nopaste','nopaste'),('nopi','nopi'),('nopreserveindent','nopreserveindent'),('nopreviewwindow','nopreviewwindow'),('noprompt','noprompt'),('nopvw','nopvw'),('noreadonly','noreadonly'),('norelativenumber','norelativenumber'),('noremap','noremap'),('norestorescreen','norestorescreen'),('norevins','norevins'),('nori','nori'),('norightleft','norightleft'),('norl','norl'),('nornu','nornu'),('noro','noro'),('nors','nors'),('noru','noru'),('noruler','noruler'),('nosb','nosb'),('nosc','nosc'),('noscb','noscb'),('noscrollbind','noscrollbind'),('noscs','noscs'),('nosecure','nosecure'),('nosft','nosft'),('noshellslash','noshellslash'),('noshelltemp','noshelltemp'),('noshiftround','noshiftround'),('noshortname','noshortname'),('noshowcmd','noshowcmd'),('noshowfulltag','noshowfulltag'),('noshowmatch','noshowmatch'),('noshowmode','noshowmode'),('nosi','nosi'),('nosm','nosm'),('nosmartcase','nosmartcase'),('nosmartindent','nosmartindent'),('nosmarttab','nosmarttab'),('nosmd','nosmd'),('nosn','nosn'),('nosol','nosol'),('nospell','nospell'),('nosplitbelow','nosplitbelow'),('nosplitright','nosplitright'),('nospr','nospr'),('nosr','nosr'),('nossl','nossl'),('nosta','nosta'),('nostartofline','nostartofline'),('nostmp','nostmp'),('noswapfile','noswapfile'),('noswf','noswf'),('nota','nota'),('notagbsearch','notagbsearch'),('notagrelative','notagrelative'),('notagstack','notagstack'),('notbi','notbi'),('notbidi','notbidi'),('notbs','
notbs'),('notermbidi','notermbidi'),('noterse','noterse'),('notextauto','notextauto'),('notextmode','notextmode'),('notf','notf'),('notgst','notgst'),('notildeop','notildeop'),('notimeout','notimeout'),('notitle','notitle'),('noto','noto'),('notop','notop'),('notr','notr'),('nottimeout','nottimeout'),('nottybuiltin','nottybuiltin'),('nottyfast','nottyfast'),('notx','notx'),('novb','novb'),('novisualbell','novisualbell'),('nowa','nowa'),('nowarn','nowarn'),('nowb','nowb'),('noweirdinvert','noweirdinvert'),('nowfh','nowfh'),('nowfw','nowfw'),('nowildignorecase','nowildignorecase'),('nowildmenu','nowildmenu'),('nowinfixheight','nowinfixheight'),('nowinfixwidth','nowinfixwidth'),('nowiv','nowiv'),('nowmnu','nowmnu'),('nowrap','nowrap'),('nowrapscan','nowrapscan'),('nowrite','nowrite'),('nowriteany','nowriteany'),('nowritebackup','nowritebackup'),('nows','nows'),('nrformats','nrformats'),('nu','nu'),('number','number'),('numberwidth','numberwidth'),('nuw','nuw'),('odev','odev'),('oft','oft'),('ofu','ofu'),('omnifunc','omnifunc'),('opendevice','opendevice'),('operatorfunc','operatorfunc'),('opfunc','opfunc'),('osfiletype','osfiletype'),('pa','pa'),('para','para'),('paragraphs','paragraphs'),('paste','paste'),('pastetoggle','pastetoggle'),('patchexpr','patchexpr'),('patchmode','patchmode'),('path','path'),('pdev','pdev'),('penc','penc'),('pex','pex'),('pexpr','pexpr'),('pfn','pfn'),('ph','ph'),('pheader','pheader'),('pi','pi'),('pm','pm'),('pmbcs','pmbcs'),('pmbfn','pmbfn'),('popt','popt'),('preserveindent','preserveindent'),('previewheight','previewheight'),('previewwindow','previewwindow'),('printdevice','printdevice'),('printencoding','printencoding'),('printexpr','printexpr'),('printfont','printfont'),('printheader','printheader'),('printmbcharset','printmbcharset'),('printmbfont','printmbfont'),('printoptions','printoptions'),('prompt','prompt'),('pt','pt'),('pumheight','pumheight'),('pvh','pvh'),('pvw','pvw'),('qe','qe'),('quoteescape','quoteescape'),('rdt','rdt'),('readonly','readonly'),('redrawtime','redrawtime'),('relativenumber','relativenumber'),('remap','remap'),('report','report'),('restorescreen','restorescreen'),('revins','revins'),('ri','ri'),('rightleft','rightleft'),('rightleftcmd','rightleftcmd'),('rl','rl'),('rlc','rlc'),('rnu','rnu'),('ro','ro'),('rs','rs'),('rtp','rtp'),('ru','ru'),('ruf','ruf'),('ruler','ruler'),('rulerformat','rulerformat'),('runtimepath','runtimepath'),('sb','sb'),('sbo','sbo'),('sbr','sbr'),('sc','sc'),('scb','scb'),('scr','scr'),('scroll','scroll'),('scrollbind','scrollbind'),('scrolljump','scrolljump'),('scrolloff','scrolloff'),('scrollopt','scrollopt'),('scs','scs'),('sect','sect'),('sections','sections'),('secure','secure'),('sel','sel'),('selection','selection'),('selectmode','selectmode'),('sessionoptions','sessionoptions'),('sft','sft'),('sh','sh'),('shcf','shcf'),('shell','shell'),('shellcmdflag','shellcmdflag'),('shellpipe','shellpipe'),('shellquote','shellquote'),('shellredir','shellredir'),('shellslash','shellslash'),('shelltemp','shelltemp'),('shelltype','shelltype'),('shellxquote','shellxquote'),('shiftround','shiftround'),('shiftwidth','shiftwidth'),('shm','shm'),('shortmess','shortmess'),('shortname','shortname'),('showbreak','showbreak'),('showcmd','showcmd'),('showfulltag','showfulltag'),('showmatch','showmatch'),('showmode','showmode'),('showtabline','showtabline'),('shq','shq'),('si','si'),('sidescroll','sidescroll'),('sidescrolloff','sidescrolloff'),('siso','siso'),('sj','sj'),('slm','slm'),('sm','sm'),('smartcase','smartcase'),('smart
indent','smartindent'),('smarttab','smarttab'),('smc','smc'),('smd','smd'),('sn','sn'),('so','so'),('softtabstop','softtabstop'),('sol','sol'),('sp','sp'),('spc','spc'),('spell','spell'),('spellcapcheck','spellcapcheck'),('spellfile','spellfile'),('spelllang','spelllang'),('spellsuggest','spellsuggest'),('spf','spf'),('spl','spl'),('splitbelow','splitbelow'),('splitright','splitright'),('spr','spr'),('sps','sps'),('sr','sr'),('srr','srr'),('ss','ss'),('ssl','ssl'),('ssop','ssop'),('st','st'),('sta','sta'),('stal','stal'),('startofline','startofline'),('statusline','statusline'),('stl','stl'),('stmp','stmp'),('sts','sts'),('su','su'),('sua','sua'),('suffixes','suffixes'),('suffixesadd','suffixesadd'),('sw','sw'),('swapfile','swapfile'),('swapsync','swapsync'),('swb','swb'),('swf','swf'),('switchbuf','switchbuf'),('sws','sws'),('sxq','sxq'),('syn','syn'),('synmaxcol','synmaxcol'),('syntax','syntax'),('t_AB','t_AB'),('t_AF','t_AF'),('t_AL','t_AL'),('t_CS','t_CS'),('t_CV','t_CV'),('t_Ce','t_Ce'),('t_Co','t_Co'),('t_Cs','t_Cs'),('t_DL','t_DL'),('t_EI','t_EI'),('t_F1','t_F1'),('t_F2','t_F2'),('t_F3','t_F3'),('t_F4','t_F4'),('t_F5','t_F5'),('t_F6','t_F6'),('t_F7','t_F7'),('t_F8','t_F8'),('t_F9','t_F9'),('t_IE','t_IE'),('t_IS','t_IS'),('t_K1','t_K1'),('t_K3','t_K3'),('t_K4','t_K4'),('t_K5','t_K5'),('t_K6','t_K6'),('t_K7','t_K7'),('t_K8','t_K8'),('t_K9','t_K9'),('t_KA','t_KA'),('t_KB','t_KB'),('t_KC','t_KC'),('t_KD','t_KD'),('t_KE','t_KE'),('t_KF','t_KF'),('t_KG','t_KG'),('t_KH','t_KH'),('t_KI','t_KI'),('t_KJ','t_KJ'),('t_KK','t_KK'),('t_KL','t_KL'),('t_RI','t_RI'),('t_RV','t_RV'),('t_SI','t_SI'),('t_Sb','t_Sb'),('t_Sf','t_Sf'),('t_WP','t_WP'),('t_WS','t_WS'),('t_ZH','t_ZH'),('t_ZR','t_ZR'),('t_al','t_al'),('t_bc','t_bc'),('t_cd','t_cd'),('t_ce','t_ce'),('t_cl','t_cl'),('t_cm','t_cm'),('t_cs','t_cs'),('t_da','t_da'),('t_db','t_db'),('t_dl','t_dl'),('t_fs','t_fs'),('t_k1','t_k1'),('t_k2','t_k2'),('t_k3','t_k3'),('t_k4','t_k4'),('t_k5','t_k5'),('t_k6','t_k6'),('t_k7','t_k7'),('t_k8','t_k8'),('t_k9','t_k9'),('t_kB','t_kB'),('t_kD','t_kD'),('t_kI','t_kI'),('t_kN','t_kN'),('t_kP','t_kP'),('t_kb','t_kb'),('t_kd','t_kd'),('t_ke','t_ke'),('t_kh','t_kh'),('t_kl','t_kl'),('t_kr','t_kr'),('t_ks','t_ks'),('t_ku','t_ku'),('t_le','t_le'),('t_mb','t_mb'),('t_md','t_md'),('t_me','t_me'),('t_mr','t_mr'),('t_ms','t_ms'),('t_nd','t_nd'),('t_op','t_op'),('t_se','t_se'),('t_so','t_so'),('t_sr','t_sr'),('t_te','t_te'),('t_ti','t_ti'),('t_ts','t_ts'),('t_ue','t_ue'),('t_us','t_us'),('t_ut','t_ut'),('t_vb','t_vb'),('t_ve','t_ve'),('t_vi','t_vi'),('t_vs','t_vs'),('t_xs','t_xs'),('ta','ta'),('tabline','tabline'),('tabpagemax','tabpagemax'),('tabstop','tabstop'),('tag','tag'),('tagbsearch','tagbsearch'),('taglength','taglength'),('tagrelative','tagrelative'),('tags','tags'),('tagstack','tagstack'),('tal','tal'),('tb','tb'),('tbi','tbi'),('tbidi','tbidi'),('tbis','tbis'),('tbs','tbs'),('tenc','tenc'),('term','term'),('termbidi','termbidi'),('termencoding','termencoding'),('terse','terse'),('textauto','textauto'),('textmode','textmode'),('textwidth','textwidth'),('tf','tf'),('tgst','tgst'),('thesaurus','thesaurus'),('tildeop','tildeop'),('timeout','timeout'),('timeoutlen','timeoutlen'),('title','title'),('titlelen','titlelen'),('titleold','titleold'),('titlestring','titlestring'),('tl','tl'),('tm','tm'),('to','to'),('toolbar','toolbar'),('toolbariconsize','toolbariconsize'),('top','top'),('tpm','tpm'),('tr','tr'),('ts','ts'),('tsl','tsl'),('tsr','tsr'),('ttimeout','ttimeout'),('ttimeoutlen','ttimeoutlen'),('ttm','ttm'),('tty','
tty'),('ttybuiltin','ttybuiltin'),('ttyfast','ttyfast'),('ttym','ttym'),('ttymouse','ttymouse'),('ttyscroll','ttyscroll'),('ttytype','ttytype'),('tw','tw'),('tx','tx'),('uc','uc'),('udf','udf'),('udir','udir'),('ul','ul'),('undodir','undodir'),('undofile','undofile'),('undolevels','undolevels'),('undoreload','undoreload'),('updatecount','updatecount'),('updatetime','updatetime'),('ur','ur'),('ut','ut'),('vb','vb'),('vbs','vbs'),('vdir','vdir'),('ve','ve'),('verbose','verbose'),('verbosefile','verbosefile'),('vfile','vfile'),('vi','vi'),('viewdir','viewdir'),('viewoptions','viewoptions'),('viminfo','viminfo'),('virtualedit','virtualedit'),('visualbell','visualbell'),('vnoremap','vnoremap'),('vop','vop'),('wa','wa'),('wak','wak'),('warn','warn'),('wb','wb'),('wc','wc'),('wcm','wcm'),('wd','wd'),('weirdinvert','weirdinvert'),('wfh','wfh'),('wfw','wfw'),('wh','wh'),('whichwrap','whichwrap'),('wi','wi'),('wic','wic'),('wig','wig'),('wildchar','wildchar'),('wildcharm','wildcharm'),('wildignore','wildignore'),('wildignorecase','wildignorecase'),('wildmenu','wildmenu'),('wildmode','wildmode'),('wildoptions','wildoptions'),('wim','wim'),('winaltkeys','winaltkeys'),('window','window'),('winfixheight','winfixheight'),('winfixwidth','winfixwidth'),('winheight','winheight'),('winminheight','winminheight'),('winminwidth','winminwidth'),('winwidth','winwidth'),('wiv','wiv'),('wiw','wiw'),('wm','wm'),('wmh','wmh'),('wmnu','wmnu'),('wmw','wmw'),('wop','wop'),('wrap','wrap'),('wrapmargin','wrapmargin'),('wrapscan','wrapscan'),('write','write'),('writeany','writeany'),('writebackup','writebackup'),('writedelay','writedelay'),('ws','ws'),('ww','ww')] + return (('acd','acd'),('ai','ai'),('akm','akm'),('al','al'),('aleph','aleph'),('allowrevins','allowrevins'),('altkeymap','altkeymap'),('ambiwidth','ambiwidth'),('ambw','ambw'),('anti','anti'),('antialias','antialias'),('ar','ar'),('arab','arab'),('arabic','arabic'),('arabicshape','arabicshape'),('ari','ari'),('arshape','arshape'),('autochdir','autochdir'),('autoindent','autoindent'),('autoread','autoread'),('autowrite','autowrite'),('autowriteall','autowriteall'),('aw','aw'),('awa','awa'),('background','background'),('backspace','backspace'),('backup','backup'),('backupcopy','backupcopy'),('backupdir','backupdir'),('backupext','backupext'),('backupskip','backupskip'),('balloondelay','balloondelay'),('ballooneval','ballooneval'),('balloonexpr','balloonexpr'),('bdir','bdir'),('bdlay','bdlay'),('beval','beval'),('bex','bex'),('bexpr','bexpr'),('bg','bg'),('bh','bh'),('bin','bin'),('binary','binary'),('biosk','biosk'),('bioskey','bioskey'),('bk','bk'),('bkc','bkc'),('bl','bl'),('bomb','bomb'),('breakat','breakat'),('brk','brk'),('browsedir','browsedir'),('bs','bs'),('bsdir','bsdir'),('bsk','bsk'),('bt','bt'),('bufhidden','bufhidden'),('buflisted','buflisted'),('buftype','buftype'),('casemap','casemap'),('cb','cb'),('cc','cc'),('ccv','ccv'),('cd','cd'),('cdpath','cdpath'),('cedit','cedit'),('cf','cf'),('cfu','cfu'),('ch','ch'),('charconvert','charconvert'),('ci','ci'),('cin','cin'),('cindent','cindent'),('cink','cink'),('cinkeys','cinkeys'),('cino','cino'),('cinoptions','cinoptions'),('cinw','cinw'),('cinwords','cinwords'),('clipboard','clipboard'),('cmdheight','cmdheight'),('cmdwinheight','cmdwinheight'),('cmp','cmp'),('cms','cms'),('co','co'),('cocu','cocu'),('cole','cole'),('colorcolumn','colorcolumn'),('columns','columns'),('com','com'),('comments','comments'),('commentstring','commentstring'),('compatible','compatible'),('complete','complete'),('completefunc','c
ompletefunc'),('completeopt','completeopt'),('concealcursor','concealcursor'),('conceallevel','conceallevel'),('confirm','confirm'),('consk','consk'),('conskey','conskey'),('copyindent','copyindent'),('cot','cot'),('cp','cp'),('cpo','cpo'),('cpoptions','cpoptions'),('cpt','cpt'),('crb','crb'),('cryptmethod','cryptmethod'),('cscopepathcomp','cscopepathcomp'),('cscopeprg','cscopeprg'),('cscopequickfix','cscopequickfix'),('cscoperelative','cscoperelative'),('cscopetag','cscopetag'),('cscopetagorder','cscopetagorder'),('cscopeverbose','cscopeverbose'),('cspc','cspc'),('csprg','csprg'),('csqf','csqf'),('csre','csre'),('cst','cst'),('csto','csto'),('csverb','csverb'),('cuc','cuc'),('cul','cul'),('cursorbind','cursorbind'),('cursorcolumn','cursorcolumn'),('cursorline','cursorline'),('cwh','cwh'),('debug','debug'),('deco','deco'),('def','def'),('define','define'),('delcombine','delcombine'),('dex','dex'),('dg','dg'),('dict','dict'),('dictionary','dictionary'),('diff','diff'),('diffexpr','diffexpr'),('diffopt','diffopt'),('digraph','digraph'),('dip','dip'),('dir','dir'),('directory','directory'),('display','display'),('dy','dy'),('ea','ea'),('ead','ead'),('eadirection','eadirection'),('eb','eb'),('ed','ed'),('edcompatible','edcompatible'),('ef','ef'),('efm','efm'),('ei','ei'),('ek','ek'),('enc','enc'),('encoding','encoding'),('endofline','endofline'),('eol','eol'),('ep','ep'),('equalalways','equalalways'),('equalprg','equalprg'),('errorbells','errorbells'),('errorfile','errorfile'),('errorformat','errorformat'),('esckeys','esckeys'),('et','et'),('eventignore','eventignore'),('ex','ex'),('expandtab','expandtab'),('exrc','exrc'),('fcl','fcl'),('fcs','fcs'),('fdc','fdc'),('fde','fde'),('fdi','fdi'),('fdl','fdl'),('fdls','fdls'),('fdm','fdm'),('fdn','fdn'),('fdo','fdo'),('fdt','fdt'),('fen','fen'),('fenc','fenc'),('fencs','fencs'),('fex','fex'),('ff','ff'),('ffs','ffs'),('fileencoding','fileencoding'),('fileencodings','fileencodings'),('fileformat','fileformat'),('fileformats','fileformats'),('filetype','filetype'),('fillchars','fillchars'),('fk','fk'),('fkmap','fkmap'),('flp','flp'),('fml','fml'),('fmr','fmr'),('fo','fo'),('foldclose','foldclose'),('foldcolumn','foldcolumn'),('foldenable','foldenable'),('foldexpr','foldexpr'),('foldignore','foldignore'),('foldlevel','foldlevel'),('foldlevelstart','foldlevelstart'),('foldmarker','foldmarker'),('foldmethod','foldmethod'),('foldminlines','foldminlines'),('foldnestmax','foldnestmax'),('foldopen','foldopen'),('foldtext','foldtext'),('formatexpr','formatexpr'),('formatlistpat','formatlistpat'),('formatoptions','formatoptions'),('formatprg','formatprg'),('fp','fp'),('fs','fs'),('fsync','fsync'),('ft','ft'),('gcr','gcr'),('gd','gd'),('gdefault','gdefault'),('gfm','gfm'),('gfn','gfn'),('gfs','gfs'),('gfw','gfw'),('ghr','ghr'),('go','go'),('gp','gp'),('grepformat','grepformat'),('grepprg','grepprg'),('gtl','gtl'),('gtt','gtt'),('guicursor','guicursor'),('guifont','guifont'),('guifontset','guifontset'),('guifontwide','guifontwide'),('guiheadroom','guiheadroom'),('guioptions','guioptions'),('guipty','guipty'),('guitablabel','guitablabel'),('guitabtooltip','guitabtooltip'),('helpfile','helpfile'),('helpheight','helpheight'),('helplang','helplang'),('hf','hf'),('hh','hh'),('hi','hi'),('hid','hid'),('hidden','hidden'),('highlight','highlight'),('history','history'),('hk','hk'),('hkmap','hkmap'),('hkmapp','hkmapp'),('hkp','hkp'),('hl','hl'),('hlg','hlg'),('hls','hls'),('hlsearch','hlsearch'),('ic','ic'),('icon','icon'),('iconstring','iconstring'),('ignorecase','ignore
case'),('im','im'),('imactivatekey','imactivatekey'),('imak','imak'),('imc','imc'),('imcmdline','imcmdline'),('imd','imd'),('imdisable','imdisable'),('imi','imi'),('iminsert','iminsert'),('ims','ims'),('imsearch','imsearch'),('inc','inc'),('include','include'),('includeexpr','includeexpr'),('incsearch','incsearch'),('inde','inde'),('indentexpr','indentexpr'),('indentkeys','indentkeys'),('indk','indk'),('inex','inex'),('inf','inf'),('infercase','infercase'),('inoremap','inoremap'),('insertmode','insertmode'),('invacd','invacd'),('invai','invai'),('invakm','invakm'),('invallowrevins','invallowrevins'),('invaltkeymap','invaltkeymap'),('invanti','invanti'),('invantialias','invantialias'),('invar','invar'),('invarab','invarab'),('invarabic','invarabic'),('invarabicshape','invarabicshape'),('invari','invari'),('invarshape','invarshape'),('invautochdir','invautochdir'),('invautoindent','invautoindent'),('invautoread','invautoread'),('invautowrite','invautowrite'),('invautowriteall','invautowriteall'),('invaw','invaw'),('invawa','invawa'),('invbackup','invbackup'),('invballooneval','invballooneval'),('invbeval','invbeval'),('invbin','invbin'),('invbinary','invbinary'),('invbiosk','invbiosk'),('invbioskey','invbioskey'),('invbk','invbk'),('invbl','invbl'),('invbomb','invbomb'),('invbuflisted','invbuflisted'),('invcf','invcf'),('invci','invci'),('invcin','invcin'),('invcindent','invcindent'),('invcompatible','invcompatible'),('invconfirm','invconfirm'),('invconsk','invconsk'),('invconskey','invconskey'),('invcopyindent','invcopyindent'),('invcp','invcp'),('invcrb','invcrb'),('invcscopetag','invcscopetag'),('invcscopeverbose','invcscopeverbose'),('invcst','invcst'),('invcsverb','invcsverb'),('invcuc','invcuc'),('invcul','invcul'),('invcursorbind','invcursorbind'),('invcursorcolumn','invcursorcolumn'),('invcursorline','invcursorline'),('invdeco','invdeco'),('invdelcombine','invdelcombine'),('invdg','invdg'),('invdiff','invdiff'),('invdigraph','invdigraph'),('invea','invea'),('inveb','inveb'),('inved','inved'),('invedcompatible','invedcompatible'),('invek','invek'),('invendofline','invendofline'),('inveol','inveol'),('invequalalways','invequalalways'),('inverrorbells','inverrorbells'),('invesckeys','invesckeys'),('invet','invet'),('invex','invex'),('invexpandtab','invexpandtab'),('invexrc','invexrc'),('invfen','invfen'),('invfk','invfk'),('invfkmap','invfkmap'),('invfoldenable','invfoldenable'),('invgd','invgd'),('invgdefault','invgdefault'),('invguipty','invguipty'),('invhid','invhid'),('invhidden','invhidden'),('invhk','invhk'),('invhkmap','invhkmap'),('invhkmapp','invhkmapp'),('invhkp','invhkp'),('invhls','invhls'),('invhlsearch','invhlsearch'),('invic','invic'),('invicon','invicon'),('invignorecase','invignorecase'),('invim','invim'),('invimc','invimc'),('invimcmdline','invimcmdline'),('invimd','invimd'),('invimdisable','invimdisable'),('invincsearch','invincsearch'),('invinf','invinf'),('invinfercase','invinfercase'),('invinsertmode','invinsertmode'),('invis','invis'),('invjoinspaces','invjoinspaces'),('invjs','invjs'),('invlazyredraw','invlazyredraw'),('invlbr','invlbr'),('invlinebreak','invlinebreak'),('invlisp','invlisp'),('invlist','invlist'),('invloadplugins','invloadplugins'),('invlpl','invlpl'),('invlz','invlz'),('invma','invma'),('invmacatsui','invmacatsui'),('invmagic','invmagic'),('invmh','invmh'),('invml','invml'),('invmod','invmod'),('invmodeline','invmodeline'),('invmodifiable','invmodifiable'),('invmodified','invmodified'),('invmore','invmore'),('invmousef','invmousef'),('invmousefocu
s','invmousefocus'),('invmousehide','invmousehide'),('invnu','invnu'),('invnumber','invnumber'),('invodev','invodev'),('invopendevice','invopendevice'),('invpaste','invpaste'),('invpi','invpi'),('invpreserveindent','invpreserveindent'),('invpreviewwindow','invpreviewwindow'),('invprompt','invprompt'),('invpvw','invpvw'),('invreadonly','invreadonly'),('invrelativenumber','invrelativenumber'),('invremap','invremap'),('invrestorescreen','invrestorescreen'),('invrevins','invrevins'),('invri','invri'),('invrightleft','invrightleft'),('invrl','invrl'),('invrnu','invrnu'),('invro','invro'),('invrs','invrs'),('invru','invru'),('invruler','invruler'),('invsb','invsb'),('invsc','invsc'),('invscb','invscb'),('invscrollbind','invscrollbind'),('invscs','invscs'),('invsecure','invsecure'),('invsft','invsft'),('invshellslash','invshellslash'),('invshelltemp','invshelltemp'),('invshiftround','invshiftround'),('invshortname','invshortname'),('invshowcmd','invshowcmd'),('invshowfulltag','invshowfulltag'),('invshowmatch','invshowmatch'),('invshowmode','invshowmode'),('invsi','invsi'),('invsm','invsm'),('invsmartcase','invsmartcase'),('invsmartindent','invsmartindent'),('invsmarttab','invsmarttab'),('invsmd','invsmd'),('invsn','invsn'),('invsol','invsol'),('invspell','invspell'),('invsplitbelow','invsplitbelow'),('invsplitright','invsplitright'),('invspr','invspr'),('invsr','invsr'),('invssl','invssl'),('invsta','invsta'),('invstartofline','invstartofline'),('invstmp','invstmp'),('invswapfile','invswapfile'),('invswf','invswf'),('invta','invta'),('invtagbsearch','invtagbsearch'),('invtagrelative','invtagrelative'),('invtagstack','invtagstack'),('invtbi','invtbi'),('invtbidi','invtbidi'),('invtbs','invtbs'),('invtermbidi','invtermbidi'),('invterse','invterse'),('invtextauto','invtextauto'),('invtextmode','invtextmode'),('invtf','invtf'),('invtgst','invtgst'),('invtildeop','invtildeop'),('invtimeout','invtimeout'),('invtitle','invtitle'),('invto','invto'),('invtop','invtop'),('invtr','invtr'),('invttimeout','invttimeout'),('invttybuiltin','invttybuiltin'),('invttyfast','invttyfast'),('invtx','invtx'),('invvb','invvb'),('invvisualbell','invvisualbell'),('invwa','invwa'),('invwarn','invwarn'),('invwb','invwb'),('invweirdinvert','invweirdinvert'),('invwfh','invwfh'),('invwfw','invwfw'),('invwildignorecase','invwildignorecase'),('invwildmenu','invwildmenu'),('invwinfixheight','invwinfixheight'),('invwinfixwidth','invwinfixwidth'),('invwiv','invwiv'),('invwmnu','invwmnu'),('invwrap','invwrap'),('invwrapscan','invwrapscan'),('invwrite','invwrite'),('invwriteany','invwriteany'),('invwritebackup','invwritebackup'),('invws','invws'),('is','is'),('isf','isf'),('isfname','isfname'),('isi','isi'),('isident','isident'),('isk','isk'),('iskeyword','iskeyword'),('isp','isp'),('isprint','isprint'),('joinspaces','joinspaces'),('js','js'),('key','key'),('keymap','keymap'),('keymodel','keymodel'),('keywordprg','keywordprg'),('km','km'),('kmp','kmp'),('kp','kp'),('langmap','langmap'),('langmenu','langmenu'),('laststatus','laststatus'),('lazyredraw','lazyredraw'),('lbr','lbr'),('lcs','lcs'),('linebreak','linebreak'),('lines','lines'),('linespace','linespace'),('lisp','lisp'),('lispwords','lispwords'),('list','list'),('listchars','listchars'),('lm','lm'),('lmap','lmap'),('loadplugins','loadplugins'),('lpl','lpl'),('ls','ls'),('lsp','lsp'),('lw','lw'),('lz','lz'),('ma','ma'),('macatsui','macatsui'),('magic','magic'),('makeef','makeef'),('makeprg','makeprg'),('mat','mat'),('matchpairs','matchpairs'),('matchtime','matchtime'),('maxcombin
e','maxcombine'),('maxfuncdepth','maxfuncdepth'),('maxmapdepth','maxmapdepth'),('maxmem','maxmem'),('maxmempattern','maxmempattern'),('maxmemtot','maxmemtot'),('mco','mco'),('mef','mef'),('menuitems','menuitems'),('mfd','mfd'),('mh','mh'),('mis','mis'),('mkspellmem','mkspellmem'),('ml','ml'),('mls','mls'),('mm','mm'),('mmd','mmd'),('mmp','mmp'),('mmt','mmt'),('mod','mod'),('modeline','modeline'),('modelines','modelines'),('modifiable','modifiable'),('modified','modified'),('more','more'),('mouse','mouse'),('mousef','mousef'),('mousefocus','mousefocus'),('mousehide','mousehide'),('mousem','mousem'),('mousemodel','mousemodel'),('mouses','mouses'),('mouseshape','mouseshape'),('mouset','mouset'),('mousetime','mousetime'),('mp','mp'),('mps','mps'),('msm','msm'),('mzq','mzq'),('mzquantum','mzquantum'),('nf','nf'),('nnoremap','nnoremap'),('noacd','noacd'),('noai','noai'),('noakm','noakm'),('noallowrevins','noallowrevins'),('noaltkeymap','noaltkeymap'),('noanti','noanti'),('noantialias','noantialias'),('noar','noar'),('noarab','noarab'),('noarabic','noarabic'),('noarabicshape','noarabicshape'),('noari','noari'),('noarshape','noarshape'),('noautochdir','noautochdir'),('noautoindent','noautoindent'),('noautoread','noautoread'),('noautowrite','noautowrite'),('noautowriteall','noautowriteall'),('noaw','noaw'),('noawa','noawa'),('nobackup','nobackup'),('noballooneval','noballooneval'),('nobeval','nobeval'),('nobin','nobin'),('nobinary','nobinary'),('nobiosk','nobiosk'),('nobioskey','nobioskey'),('nobk','nobk'),('nobl','nobl'),('nobomb','nobomb'),('nobuflisted','nobuflisted'),('nocf','nocf'),('noci','noci'),('nocin','nocin'),('nocindent','nocindent'),('nocompatible','nocompatible'),('noconfirm','noconfirm'),('noconsk','noconsk'),('noconskey','noconskey'),('nocopyindent','nocopyindent'),('nocp','nocp'),('nocrb','nocrb'),('nocscopetag','nocscopetag'),('nocscopeverbose','nocscopeverbose'),('nocst','nocst'),('nocsverb','nocsverb'),('nocuc','nocuc'),('nocul','nocul'),('nocursorbind','nocursorbind'),('nocursorcolumn','nocursorcolumn'),('nocursorline','nocursorline'),('nodeco','nodeco'),('nodelcombine','nodelcombine'),('nodg','nodg'),('nodiff','nodiff'),('nodigraph','nodigraph'),('noea','noea'),('noeb','noeb'),('noed','noed'),('noedcompatible','noedcompatible'),('noek','noek'),('noendofline','noendofline'),('noeol','noeol'),('noequalalways','noequalalways'),('noerrorbells','noerrorbells'),('noesckeys','noesckeys'),('noet','noet'),('noex','noex'),('noexpandtab','noexpandtab'),('noexrc','noexrc'),('nofen','nofen'),('nofk','nofk'),('nofkmap','nofkmap'),('nofoldenable','nofoldenable'),('nogd','nogd'),('nogdefault','nogdefault'),('noguipty','noguipty'),('nohid','nohid'),('nohidden','nohidden'),('nohk','nohk'),('nohkmap','nohkmap'),('nohkmapp','nohkmapp'),('nohkp','nohkp'),('nohls','nohls'),('nohlsearch','nohlsearch'),('noic','noic'),('noicon','noicon'),('noignorecase','noignorecase'),('noim','noim'),('noimc','noimc'),('noimcmdline','noimcmdline'),('noimd','noimd'),('noimdisable','noimdisable'),('noincsearch','noincsearch'),('noinf','noinf'),('noinfercase','noinfercase'),('noinsertmode','noinsertmode'),('nois','nois'),('nojoinspaces','nojoinspaces'),('nojs','nojs'),('nolazyredraw','nolazyredraw'),('nolbr','nolbr'),('nolinebreak','nolinebreak'),('nolisp','nolisp'),('nolist','nolist'),('noloadplugins','noloadplugins'),('nolpl','nolpl'),('nolz','nolz'),('noma','noma'),('nomacatsui','nomacatsui'),('nomagic','nomagic'),('nomh','nomh'),('noml','noml'),('nomod','nomod'),('nomodeline','nomodeline'),('nomodifiable','nomodifia
ble'),('nomodified','nomodified'),('nomore','nomore'),('nomousef','nomousef'),('nomousefocus','nomousefocus'),('nomousehide','nomousehide'),('nonu','nonu'),('nonumber','nonumber'),('noodev','noodev'),('noopendevice','noopendevice'),('nopaste','nopaste'),('nopi','nopi'),('nopreserveindent','nopreserveindent'),('nopreviewwindow','nopreviewwindow'),('noprompt','noprompt'),('nopvw','nopvw'),('noreadonly','noreadonly'),('norelativenumber','norelativenumber'),('noremap','noremap'),('norestorescreen','norestorescreen'),('norevins','norevins'),('nori','nori'),('norightleft','norightleft'),('norl','norl'),('nornu','nornu'),('noro','noro'),('nors','nors'),('noru','noru'),('noruler','noruler'),('nosb','nosb'),('nosc','nosc'),('noscb','noscb'),('noscrollbind','noscrollbind'),('noscs','noscs'),('nosecure','nosecure'),('nosft','nosft'),('noshellslash','noshellslash'),('noshelltemp','noshelltemp'),('noshiftround','noshiftround'),('noshortname','noshortname'),('noshowcmd','noshowcmd'),('noshowfulltag','noshowfulltag'),('noshowmatch','noshowmatch'),('noshowmode','noshowmode'),('nosi','nosi'),('nosm','nosm'),('nosmartcase','nosmartcase'),('nosmartindent','nosmartindent'),('nosmarttab','nosmarttab'),('nosmd','nosmd'),('nosn','nosn'),('nosol','nosol'),('nospell','nospell'),('nosplitbelow','nosplitbelow'),('nosplitright','nosplitright'),('nospr','nospr'),('nosr','nosr'),('nossl','nossl'),('nosta','nosta'),('nostartofline','nostartofline'),('nostmp','nostmp'),('noswapfile','noswapfile'),('noswf','noswf'),('nota','nota'),('notagbsearch','notagbsearch'),('notagrelative','notagrelative'),('notagstack','notagstack'),('notbi','notbi'),('notbidi','notbidi'),('notbs','notbs'),('notermbidi','notermbidi'),('noterse','noterse'),('notextauto','notextauto'),('notextmode','notextmode'),('notf','notf'),('notgst','notgst'),('notildeop','notildeop'),('notimeout','notimeout'),('notitle','notitle'),('noto','noto'),('notop','notop'),('notr','notr'),('nottimeout','nottimeout'),('nottybuiltin','nottybuiltin'),('nottyfast','nottyfast'),('notx','notx'),('novb','novb'),('novisualbell','novisualbell'),('nowa','nowa'),('nowarn','nowarn'),('nowb','nowb'),('noweirdinvert','noweirdinvert'),('nowfh','nowfh'),('nowfw','nowfw'),('nowildignorecase','nowildignorecase'),('nowildmenu','nowildmenu'),('nowinfixheight','nowinfixheight'),('nowinfixwidth','nowinfixwidth'),('nowiv','nowiv'),('nowmnu','nowmnu'),('nowrap','nowrap'),('nowrapscan','nowrapscan'),('nowrite','nowrite'),('nowriteany','nowriteany'),('nowritebackup','nowritebackup'),('nows','nows'),('nrformats','nrformats'),('nu','nu'),('number','number'),('numberwidth','numberwidth'),('nuw','nuw'),('odev','odev'),('oft','oft'),('ofu','ofu'),('omnifunc','omnifunc'),('opendevice','opendevice'),('operatorfunc','operatorfunc'),('opfunc','opfunc'),('osfiletype','osfiletype'),('pa','pa'),('para','para'),('paragraphs','paragraphs'),('paste','paste'),('pastetoggle','pastetoggle'),('patchexpr','patchexpr'),('patchmode','patchmode'),('path','path'),('pdev','pdev'),('penc','penc'),('pex','pex'),('pexpr','pexpr'),('pfn','pfn'),('ph','ph'),('pheader','pheader'),('pi','pi'),('pm','pm'),('pmbcs','pmbcs'),('pmbfn','pmbfn'),('popt','popt'),('preserveindent','preserveindent'),('previewheight','previewheight'),('previewwindow','previewwindow'),('printdevice','printdevice'),('printencoding','printencoding'),('printexpr','printexpr'),('printfont','printfont'),('printheader','printheader'),('printmbcharset','printmbcharset'),('printmbfont','printmbfont'),('printoptions','printoptions'),('prompt','prompt'),('pt','pt')
,('pumheight','pumheight'),('pvh','pvh'),('pvw','pvw'),('qe','qe'),('quoteescape','quoteescape'),('rdt','rdt'),('readonly','readonly'),('redrawtime','redrawtime'),('relativenumber','relativenumber'),('remap','remap'),('report','report'),('restorescreen','restorescreen'),('revins','revins'),('ri','ri'),('rightleft','rightleft'),('rightleftcmd','rightleftcmd'),('rl','rl'),('rlc','rlc'),('rnu','rnu'),('ro','ro'),('rs','rs'),('rtp','rtp'),('ru','ru'),('ruf','ruf'),('ruler','ruler'),('rulerformat','rulerformat'),('runtimepath','runtimepath'),('sb','sb'),('sbo','sbo'),('sbr','sbr'),('sc','sc'),('scb','scb'),('scr','scr'),('scroll','scroll'),('scrollbind','scrollbind'),('scrolljump','scrolljump'),('scrolloff','scrolloff'),('scrollopt','scrollopt'),('scs','scs'),('sect','sect'),('sections','sections'),('secure','secure'),('sel','sel'),('selection','selection'),('selectmode','selectmode'),('sessionoptions','sessionoptions'),('sft','sft'),('sh','sh'),('shcf','shcf'),('shell','shell'),('shellcmdflag','shellcmdflag'),('shellpipe','shellpipe'),('shellquote','shellquote'),('shellredir','shellredir'),('shellslash','shellslash'),('shelltemp','shelltemp'),('shelltype','shelltype'),('shellxquote','shellxquote'),('shiftround','shiftround'),('shiftwidth','shiftwidth'),('shm','shm'),('shortmess','shortmess'),('shortname','shortname'),('showbreak','showbreak'),('showcmd','showcmd'),('showfulltag','showfulltag'),('showmatch','showmatch'),('showmode','showmode'),('showtabline','showtabline'),('shq','shq'),('si','si'),('sidescroll','sidescroll'),('sidescrolloff','sidescrolloff'),('siso','siso'),('sj','sj'),('slm','slm'),('sm','sm'),('smartcase','smartcase'),('smartindent','smartindent'),('smarttab','smarttab'),('smc','smc'),('smd','smd'),('sn','sn'),('so','so'),('softtabstop','softtabstop'),('sol','sol'),('sp','sp'),('spc','spc'),('spell','spell'),('spellcapcheck','spellcapcheck'),('spellfile','spellfile'),('spelllang','spelllang'),('spellsuggest','spellsuggest'),('spf','spf'),('spl','spl'),('splitbelow','splitbelow'),('splitright','splitright'),('spr','spr'),('sps','sps'),('sr','sr'),('srr','srr'),('ss','ss'),('ssl','ssl'),('ssop','ssop'),('st','st'),('sta','sta'),('stal','stal'),('startofline','startofline'),('statusline','statusline'),('stl','stl'),('stmp','stmp'),('sts','sts'),('su','su'),('sua','sua'),('suffixes','suffixes'),('suffixesadd','suffixesadd'),('sw','sw'),('swapfile','swapfile'),('swapsync','swapsync'),('swb','swb'),('swf','swf'),('switchbuf','switchbuf'),('sws','sws'),('sxq','sxq'),('syn','syn'),('synmaxcol','synmaxcol'),('syntax','syntax'),('t_AB','t_AB'),('t_AF','t_AF'),('t_AL','t_AL'),('t_CS','t_CS'),('t_CV','t_CV'),('t_Ce','t_Ce'),('t_Co','t_Co'),('t_Cs','t_Cs'),('t_DL','t_DL'),('t_EI','t_EI'),('t_F1','t_F1'),('t_F2','t_F2'),('t_F3','t_F3'),('t_F4','t_F4'),('t_F5','t_F5'),('t_F6','t_F6'),('t_F7','t_F7'),('t_F8','t_F8'),('t_F9','t_F9'),('t_IE','t_IE'),('t_IS','t_IS'),('t_K1','t_K1'),('t_K3','t_K3'),('t_K4','t_K4'),('t_K5','t_K5'),('t_K6','t_K6'),('t_K7','t_K7'),('t_K8','t_K8'),('t_K9','t_K9'),('t_KA','t_KA'),('t_KB','t_KB'),('t_KC','t_KC'),('t_KD','t_KD'),('t_KE','t_KE'),('t_KF','t_KF'),('t_KG','t_KG'),('t_KH','t_KH'),('t_KI','t_KI'),('t_KJ','t_KJ'),('t_KK','t_KK'),('t_KL','t_KL'),('t_RI','t_RI'),('t_RV','t_RV'),('t_SI','t_SI'),('t_Sb','t_Sb'),('t_Sf','t_Sf'),('t_WP','t_WP'),('t_WS','t_WS'),('t_ZH','t_ZH'),('t_ZR','t_ZR'),('t_al','t_al'),('t_bc','t_bc'),('t_cd','t_cd'),('t_ce','t_ce'),('t_cl','t_cl'),('t_cm','t_cm'),('t_cs','t_cs'),('t_da','t_da'),('t_db','t_db'),('t_dl','t_dl'),('t_fs','t_fs'),
('t_k1','t_k1'),('t_k2','t_k2'),('t_k3','t_k3'),('t_k4','t_k4'),('t_k5','t_k5'),('t_k6','t_k6'),('t_k7','t_k7'),('t_k8','t_k8'),('t_k9','t_k9'),('t_kB','t_kB'),('t_kD','t_kD'),('t_kI','t_kI'),('t_kN','t_kN'),('t_kP','t_kP'),('t_kb','t_kb'),('t_kd','t_kd'),('t_ke','t_ke'),('t_kh','t_kh'),('t_kl','t_kl'),('t_kr','t_kr'),('t_ks','t_ks'),('t_ku','t_ku'),('t_le','t_le'),('t_mb','t_mb'),('t_md','t_md'),('t_me','t_me'),('t_mr','t_mr'),('t_ms','t_ms'),('t_nd','t_nd'),('t_op','t_op'),('t_se','t_se'),('t_so','t_so'),('t_sr','t_sr'),('t_te','t_te'),('t_ti','t_ti'),('t_ts','t_ts'),('t_ue','t_ue'),('t_us','t_us'),('t_ut','t_ut'),('t_vb','t_vb'),('t_ve','t_ve'),('t_vi','t_vi'),('t_vs','t_vs'),('t_xs','t_xs'),('ta','ta'),('tabline','tabline'),('tabpagemax','tabpagemax'),('tabstop','tabstop'),('tag','tag'),('tagbsearch','tagbsearch'),('taglength','taglength'),('tagrelative','tagrelative'),('tags','tags'),('tagstack','tagstack'),('tal','tal'),('tb','tb'),('tbi','tbi'),('tbidi','tbidi'),('tbis','tbis'),('tbs','tbs'),('tenc','tenc'),('term','term'),('termbidi','termbidi'),('termencoding','termencoding'),('terse','terse'),('textauto','textauto'),('textmode','textmode'),('textwidth','textwidth'),('tf','tf'),('tgst','tgst'),('thesaurus','thesaurus'),('tildeop','tildeop'),('timeout','timeout'),('timeoutlen','timeoutlen'),('title','title'),('titlelen','titlelen'),('titleold','titleold'),('titlestring','titlestring'),('tl','tl'),('tm','tm'),('to','to'),('toolbar','toolbar'),('toolbariconsize','toolbariconsize'),('top','top'),('tpm','tpm'),('tr','tr'),('ts','ts'),('tsl','tsl'),('tsr','tsr'),('ttimeout','ttimeout'),('ttimeoutlen','ttimeoutlen'),('ttm','ttm'),('tty','tty'),('ttybuiltin','ttybuiltin'),('ttyfast','ttyfast'),('ttym','ttym'),('ttymouse','ttymouse'),('ttyscroll','ttyscroll'),('ttytype','ttytype'),('tw','tw'),('tx','tx'),('uc','uc'),('udf','udf'),('udir','udir'),('ul','ul'),('undodir','undodir'),('undofile','undofile'),('undolevels','undolevels'),('undoreload','undoreload'),('updatecount','updatecount'),('updatetime','updatetime'),('ur','ur'),('ut','ut'),('vb','vb'),('vbs','vbs'),('vdir','vdir'),('ve','ve'),('verbose','verbose'),('verbosefile','verbosefile'),('vfile','vfile'),('vi','vi'),('viewdir','viewdir'),('viewoptions','viewoptions'),('viminfo','viminfo'),('virtualedit','virtualedit'),('visualbell','visualbell'),('vnoremap','vnoremap'),('vop','vop'),('wa','wa'),('wak','wak'),('warn','warn'),('wb','wb'),('wc','wc'),('wcm','wcm'),('wd','wd'),('weirdinvert','weirdinvert'),('wfh','wfh'),('wfw','wfw'),('wh','wh'),('whichwrap','whichwrap'),('wi','wi'),('wic','wic'),('wig','wig'),('wildchar','wildchar'),('wildcharm','wildcharm'),('wildignore','wildignore'),('wildignorecase','wildignorecase'),('wildmenu','wildmenu'),('wildmode','wildmode'),('wildoptions','wildoptions'),('wim','wim'),('winaltkeys','winaltkeys'),('window','window'),('winfixheight','winfixheight'),('winfixwidth','winfixwidth'),('winheight','winheight'),('winminheight','winminheight'),('winminwidth','winminwidth'),('winwidth','winwidth'),('wiv','wiv'),('wiw','wiw'),('wm','wm'),('wmh','wmh'),('wmnu','wmnu'),('wmw','wmw'),('wop','wop'),('wrap','wrap'),('wrapmargin','wrapmargin'),('wrapscan','wrapscan'),('write','write'),('writeany','writeany'),('writebackup','writebackup'),('writedelay','writedelay'),('ws','ws'),('ww','ww')) option = _getoption() command = _getcommand() diff --git a/pygments/lexers/sql.py b/pygments/lexers/sql.py index 5070d487..9c73fcf3 100644 --- a/pygments/lexers/sql.py +++ b/pygments/lexers/sql.py @@ -138,9 +138,9 @@ class 
PostgresLexer(PostgresBase, RegexLexer):
             (r'\s+', Text),
             (r'--.*?\n', Comment.Single),
             (r'/\*', Comment.Multiline, 'multiline-comments'),
-            (r'(' + '|'.join([s.replace(" ", "\s+")
-                              for s in DATATYPES + PSEUDO_TYPES])
-             + r')\b', Name.Builtin),
+            (r'(' + '|'.join(s.replace(" ", "\s+")
+                             for s in DATATYPES + PSEUDO_TYPES)
+             + r')\b', Name.Builtin),
             (r'(' + '|'.join(KEYWORDS) + r')\b', Keyword),
             (r'[+*/<>=~!@#%^&|`?-]+', Operator),
             (r'::', Operator), # cast
diff --git a/tests/test_qbasiclexer.py b/tests/test_qbasiclexer.py
index 1b81b643..1b6f05bf 100644
--- a/tests/test_qbasiclexer.py
+++ b/tests/test_qbasiclexer.py
@@ -12,7 +12,8 @@
 import os
 import unittest
 from pygments.token import Token
-from pygments.lexers.qbasic import QBasicLexer
+from pygments.lexers.misc.basic import QBasicLexer
+
 
 class QBasicTest(unittest.TestCase):
     def setUp(self):
@@ -40,4 +41,3 @@ class QBasicTest(unittest.TestCase):
             (Token.Text, u'\n'),
         ]
         self.assertEqual(expected, list(self.lexer.get_tokens(fragment)))
-
-- cgit v1.2.1


From 9cb037001991ce4ef3444820d695ddd202dd3b26 Mon Sep 17 00:00:00 2001
From: Georg Brandl
Date: Fri, 19 Sep 2014 22:21:14 +0200
Subject: use words() in sql

---
 pygments/lexers/sql.py | 183 +++++++++++++++++++++++++------------------------
 1 file changed, 95 insertions(+), 88 deletions(-)

diff --git a/pygments/lexers/sql.py b/pygments/lexers/sql.py
index 9c73fcf3..3112fc3e 100644
--- a/pygments/lexers/sql.py
+++ b/pygments/lexers/sql.py
@@ -40,14 +40,14 @@
 
 import re
 
-from pygments.lexer import Lexer, RegexLexer, do_insertions, bygroups
+from pygments.lexer import Lexer, RegexLexer, do_insertions, bygroups, words
 from pygments.token import Punctuation, \
     Text, Comment, Operator, Keyword, Name, String, Number, Generic
 from pygments.lexers import get_lexer_by_name, ClassNotFound
 from pygments.util import iteritems
 
 from pygments.lexers._postgres_builtins import KEYWORDS, DATATYPES, \
-     PSEUDO_TYPES, PLPGSQL_KEYWORDS
+    PSEUDO_TYPES, PLPGSQL_KEYWORDS
 
 
 __all__ = ['PostgresLexer', 'PlPgsqlLexer', 'PostgresConsoleLexer',
@@ -57,6 +57,7 @@
 line_re = re.compile('.*?\n')
 
 language_re = re.compile(r"\s+LANGUAGE\s+'?(\w+)'?", re.IGNORECASE)
 
+
 def language_callback(lexer, match):
     """Parse the content of a $-string using a lexer
 
@@ -102,7 +103,7 @@ class PostgresBase(object):
         if lang.lower() == 'sql':
             return get_lexer_by_name('postgresql', **self.options)
 
-        tries = [ lang ]
+        tries = [lang]
         if lang.startswith('pl'):
             tries.append(lang[2:])
         if lang.endswith('u'):
@@ -141,14 +142,14 @@ class PostgresLexer(PostgresBase, RegexLexer):
             (r'(' + '|'.join(s.replace(" ", "\s+")
                              for s in DATATYPES + PSEUDO_TYPES)
              + r')\b', Name.Builtin),
-            (r'(' + '|'.join(KEYWORDS) + r')\b', Keyword),
+            (words(KEYWORDS, suffix=r'\b'), Keyword),
             (r'[+*/<>=~!@#%^&|`?-]+', Operator),
             (r'::', Operator), # cast
             (r'\$\d+', Name.Variable),
             (r'([0-9]*\.[0-9]*|[0-9]+)(e[+-]?[0-9]+)?', Number.Float),
             (r'[0-9]+', Number.Integer),
             (r"(E|U&)?'(''|[^'])*'", String.Single),
-            (r'(U&)?"(""|[^"])*"', String.Name), # quoted identifier
+            (r'(U&)?"(""|[^"])*"', String.Name),  # quoted identifier
             (r'(?s)(\$[^\$]*\$)(.*?)(\1)', language_callback),
             (r'[a-z_]\w*', Name),
 
@@ -183,7 +184,7 @@ class PlPgsqlLexer(PostgresBase, RegexLexer):
     for i, pattern in enumerate(tokens['root']):
         if pattern[1] == Keyword:
             tokens['root'][i] = (
-                r'(' + '|'.join(KEYWORDS + PLPGSQL_KEYWORDS) + r')\b',
+                words(KEYWORDS + PLPGSQL_KEYWORDS, suffix=r'\b'),
                 Keyword)
             del i
             break
@@ -240,11 +241,14 @@ class lookahead(object):
     def __init__(self, x):
         self.iter = iter(x)
         self._nextitem = 
None + def __iter__(self): return self + def send(self, i): self._nextitem = i return i + def __next__(self): if self._nextitem is not None: ni = self._nextitem @@ -305,12 +309,12 @@ class PostgresConsoleLexer(Lexer): # TODO: better handle multiline comments at the end with # a lexer with an external state? if re_psql_command.match(curcode) \ - or re_end_command.search(curcode): + or re_end_command.search(curcode): break # Emit the combined stream of command and prompt(s) for item in do_insertions(insertions, - sql.get_tokens_unprocessed(curcode)): + sql.get_tokens_unprocessed(curcode)): yield item # Emit the output lines @@ -326,7 +330,7 @@ class PostgresConsoleLexer(Lexer): mmsg = re_message.match(line) if mmsg is not None: if mmsg.group(1).startswith("ERROR") \ - or mmsg.group(1).startswith("FATAL"): + or mmsg.group(1).startswith("FATAL"): out_token = Generic.Error yield (mmsg.start(1), Generic.Strong, mmsg.group(1)) yield (mmsg.start(2), out_token, mmsg.group(2)) @@ -351,90 +355,93 @@ class SqlLexer(RegexLexer): (r'\s+', Text), (r'--.*?\n', Comment.Single), (r'/\*', Comment.Multiline, 'multiline-comments'), - (r'(ABORT|ABS|ABSOLUTE|ACCESS|ADA|ADD|ADMIN|AFTER|AGGREGATE|' - r'ALIAS|ALL|ALLOCATE|ALTER|ANALYSE|ANALYZE|AND|ANY|ARE|AS|' - r'ASC|ASENSITIVE|ASSERTION|ASSIGNMENT|ASYMMETRIC|AT|ATOMIC|' - r'AUTHORIZATION|AVG|BACKWARD|BEFORE|BEGIN|BETWEEN|BITVAR|' - r'BIT_LENGTH|BOTH|BREADTH|BY|C|CACHE|CALL|CALLED|CARDINALITY|' - r'CASCADE|CASCADED|CASE|CAST|CATALOG|CATALOG_NAME|CHAIN|' - r'CHARACTERISTICS|CHARACTER_LENGTH|CHARACTER_SET_CATALOG|' - r'CHARACTER_SET_NAME|CHARACTER_SET_SCHEMA|CHAR_LENGTH|CHECK|' - r'CHECKED|CHECKPOINT|CLASS|CLASS_ORIGIN|CLOB|CLOSE|CLUSTER|' - r'COALSECE|COBOL|COLLATE|COLLATION|COLLATION_CATALOG|' - r'COLLATION_NAME|COLLATION_SCHEMA|COLUMN|COLUMN_NAME|' - r'COMMAND_FUNCTION|COMMAND_FUNCTION_CODE|COMMENT|COMMIT|' - r'COMMITTED|COMPLETION|CONDITION_NUMBER|CONNECT|CONNECTION|' - r'CONNECTION_NAME|CONSTRAINT|CONSTRAINTS|CONSTRAINT_CATALOG|' - r'CONSTRAINT_NAME|CONSTRAINT_SCHEMA|CONSTRUCTOR|CONTAINS|' - r'CONTINUE|CONVERSION|CONVERT|COPY|CORRESPONTING|COUNT|' - r'CREATE|CREATEDB|CREATEUSER|CROSS|CUBE|CURRENT|CURRENT_DATE|' - r'CURRENT_PATH|CURRENT_ROLE|CURRENT_TIME|CURRENT_TIMESTAMP|' - r'CURRENT_USER|CURSOR|CURSOR_NAME|CYCLE|DATA|DATABASE|' - r'DATETIME_INTERVAL_CODE|DATETIME_INTERVAL_PRECISION|DAY|' - r'DEALLOCATE|DECLARE|DEFAULT|DEFAULTS|DEFERRABLE|DEFERRED|' - r'DEFINED|DEFINER|DELETE|DELIMITER|DELIMITERS|DEREF|DESC|' - r'DESCRIBE|DESCRIPTOR|DESTROY|DESTRUCTOR|DETERMINISTIC|' - r'DIAGNOSTICS|DICTIONARY|DISCONNECT|DISPATCH|DISTINCT|DO|' - r'DOMAIN|DROP|DYNAMIC|DYNAMIC_FUNCTION|DYNAMIC_FUNCTION_CODE|' - r'EACH|ELSE|ENCODING|ENCRYPTED|END|END-EXEC|EQUALS|ESCAPE|EVERY|' - r'EXCEPTION|EXCEPT|EXCLUDING|EXCLUSIVE|EXEC|EXECUTE|EXISTING|' - r'EXISTS|EXPLAIN|EXTERNAL|EXTRACT|FALSE|FETCH|FINAL|FIRST|FOR|' - r'FORCE|FOREIGN|FORTRAN|FORWARD|FOUND|FREE|FREEZE|FROM|FULL|' - r'FUNCTION|G|GENERAL|GENERATED|GET|GLOBAL|GO|GOTO|GRANT|GRANTED|' - r'GROUP|GROUPING|HANDLER|HAVING|HIERARCHY|HOLD|HOST|IDENTITY|' - r'IGNORE|ILIKE|IMMEDIATE|IMMUTABLE|IMPLEMENTATION|IMPLICIT|IN|' - r'INCLUDING|INCREMENT|INDEX|INDITCATOR|INFIX|INHERITS|INITIALIZE|' - r'INITIALLY|INNER|INOUT|INPUT|INSENSITIVE|INSERT|INSTANTIABLE|' - r'INSTEAD|INTERSECT|INTO|INVOKER|IS|ISNULL|ISOLATION|ITERATE|JOIN|' - r'KEY|KEY_MEMBER|KEY_TYPE|LANCOMPILER|LANGUAGE|LARGE|LAST|' - r'LATERAL|LEADING|LEFT|LENGTH|LESS|LEVEL|LIKE|LIMIT|LISTEN|LOAD|' - r'LOCAL|LOCALTIME|LOCALTIMESTAMP|LOCATION|LOCATOR|LOCK|LOWER|' - 
r'MAP|MATCH|MAX|MAXVALUE|MESSAGE_LENGTH|MESSAGE_OCTET_LENGTH|' - r'MESSAGE_TEXT|METHOD|MIN|MINUTE|MINVALUE|MOD|MODE|MODIFIES|' - r'MODIFY|MONTH|MORE|MOVE|MUMPS|NAMES|NATIONAL|NATURAL|NCHAR|' - r'NCLOB|NEW|NEXT|NO|NOCREATEDB|NOCREATEUSER|NONE|NOT|NOTHING|' - r'NOTIFY|NOTNULL|NULL|NULLABLE|NULLIF|OBJECT|OCTET_LENGTH|OF|OFF|' - r'OFFSET|OIDS|OLD|ON|ONLY|OPEN|OPERATION|OPERATOR|OPTION|OPTIONS|' - r'OR|ORDER|ORDINALITY|OUT|OUTER|OUTPUT|OVERLAPS|OVERLAY|OVERRIDING|' - r'OWNER|PAD|PARAMETER|PARAMETERS|PARAMETER_MODE|PARAMATER_NAME|' - r'PARAMATER_ORDINAL_POSITION|PARAMETER_SPECIFIC_CATALOG|' - r'PARAMETER_SPECIFIC_NAME|PARAMATER_SPECIFIC_SCHEMA|PARTIAL|' - r'PASCAL|PENDANT|PLACING|PLI|POSITION|POSTFIX|PRECISION|PREFIX|' - r'PREORDER|PREPARE|PRESERVE|PRIMARY|PRIOR|PRIVILEGES|PROCEDURAL|' - r'PROCEDURE|PUBLIC|READ|READS|RECHECK|RECURSIVE|REF|REFERENCES|' - r'REFERENCING|REINDEX|RELATIVE|RENAME|REPEATABLE|REPLACE|RESET|' - r'RESTART|RESTRICT|RESULT|RETURN|RETURNED_LENGTH|' - r'RETURNED_OCTET_LENGTH|RETURNED_SQLSTATE|RETURNS|REVOKE|RIGHT|' - r'ROLE|ROLLBACK|ROLLUP|ROUTINE|ROUTINE_CATALOG|ROUTINE_NAME|' - r'ROUTINE_SCHEMA|ROW|ROWS|ROW_COUNT|RULE|SAVE_POINT|SCALE|SCHEMA|' - r'SCHEMA_NAME|SCOPE|SCROLL|SEARCH|SECOND|SECURITY|SELECT|SELF|' - r'SENSITIVE|SERIALIZABLE|SERVER_NAME|SESSION|SESSION_USER|SET|' - r'SETOF|SETS|SHARE|SHOW|SIMILAR|SIMPLE|SIZE|SOME|SOURCE|SPACE|' - r'SPECIFIC|SPECIFICTYPE|SPECIFIC_NAME|SQL|SQLCODE|SQLERROR|' - r'SQLEXCEPTION|SQLSTATE|SQLWARNINIG|STABLE|START|STATE|STATEMENT|' - r'STATIC|STATISTICS|STDIN|STDOUT|STORAGE|STRICT|STRUCTURE|STYPE|' - r'SUBCLASS_ORIGIN|SUBLIST|SUBSTRING|SUM|SYMMETRIC|SYSID|SYSTEM|' - r'SYSTEM_USER|TABLE|TABLE_NAME| TEMP|TEMPLATE|TEMPORARY|TERMINATE|' - r'THAN|THEN|TIMESTAMP|TIMEZONE_HOUR|TIMEZONE_MINUTE|TO|TOAST|' - r'TRAILING|TRANSATION|TRANSACTIONS_COMMITTED|' - r'TRANSACTIONS_ROLLED_BACK|TRANSATION_ACTIVE|TRANSFORM|' - r'TRANSFORMS|TRANSLATE|TRANSLATION|TREAT|TRIGGER|TRIGGER_CATALOG|' - r'TRIGGER_NAME|TRIGGER_SCHEMA|TRIM|TRUE|TRUNCATE|TRUSTED|TYPE|' - r'UNCOMMITTED|UNDER|UNENCRYPTED|UNION|UNIQUE|UNKNOWN|UNLISTEN|' - r'UNNAMED|UNNEST|UNTIL|UPDATE|UPPER|USAGE|USER|' - r'USER_DEFINED_TYPE_CATALOG|USER_DEFINED_TYPE_NAME|' - r'USER_DEFINED_TYPE_SCHEMA|USING|VACUUM|VALID|VALIDATOR|VALUES|' - r'VARIABLE|VERBOSE|VERSION|VIEW|VOLATILE|WHEN|WHENEVER|WHERE|' - r'WITH|WITHOUT|WORK|WRITE|YEAR|ZONE)\b', Keyword), - (r'(ARRAY|BIGINT|BINARY|BIT|BLOB|BOOLEAN|CHAR|CHARACTER|DATE|' - r'DEC|DECIMAL|FLOAT|INT|INTEGER|INTERVAL|NUMBER|NUMERIC|REAL|' - r'SERIAL|SMALLINT|VARCHAR|VARYING|INT8|SERIAL8|TEXT)\b', + (words(( + 'ABORT', 'ABS', 'ABSOLUTE', 'ACCESS', 'ADA', 'ADD', 'ADMIN', 'AFTER', 'AGGREGATE', + 'ALIAS', 'ALL', 'ALLOCATE', 'ALTER', 'ANALYSE', 'ANALYZE', 'AND', 'ANY', 'ARE', 'AS', + 'ASC', 'ASENSITIVE', 'ASSERTION', 'ASSIGNMENT', 'ASYMMETRIC', 'AT', 'ATOMIC', + 'AUTHORIZATION', 'AVG', 'BACKWARD', 'BEFORE', 'BEGIN', 'BETWEEN', 'BITVAR', + 'BIT_LENGTH', 'BOTH', 'BREADTH', 'BY', 'C', 'CACHE', 'CALL', 'CALLED', 'CARDINALITY', + 'CASCADE', 'CASCADED', 'CASE', 'CAST', 'CATALOG', 'CATALOG_NAME', 'CHAIN', + 'CHARACTERISTICS', 'CHARACTER_LENGTH', 'CHARACTER_SET_CATALOG', + 'CHARACTER_SET_NAME', 'CHARACTER_SET_SCHEMA', 'CHAR_LENGTH', 'CHECK', + 'CHECKED', 'CHECKPOINT', 'CLASS', 'CLASS_ORIGIN', 'CLOB', 'CLOSE', 'CLUSTER', + 'COALSECE', 'COBOL', 'COLLATE', 'COLLATION', 'COLLATION_CATALOG', + 'COLLATION_NAME', 'COLLATION_SCHEMA', 'COLUMN', 'COLUMN_NAME', + 'COMMAND_FUNCTION', 'COMMAND_FUNCTION_CODE', 'COMMENT', 'COMMIT', + 'COMMITTED', 'COMPLETION', 'CONDITION_NUMBER', 'CONNECT', 
'CONNECTION', + 'CONNECTION_NAME', 'CONSTRAINT', 'CONSTRAINTS', 'CONSTRAINT_CATALOG', + 'CONSTRAINT_NAME', 'CONSTRAINT_SCHEMA', 'CONSTRUCTOR', 'CONTAINS', + 'CONTINUE', 'CONVERSION', 'CONVERT', 'COPY', 'CORRESPONTING', 'COUNT', + 'CREATE', 'CREATEDB', 'CREATEUSER', 'CROSS', 'CUBE', 'CURRENT', 'CURRENT_DATE', + 'CURRENT_PATH', 'CURRENT_ROLE', 'CURRENT_TIME', 'CURRENT_TIMESTAMP', + 'CURRENT_USER', 'CURSOR', 'CURSOR_NAME', 'CYCLE', 'DATA', 'DATABASE', + 'DATETIME_INTERVAL_CODE', 'DATETIME_INTERVAL_PRECISION', 'DAY', + 'DEALLOCATE', 'DECLARE', 'DEFAULT', 'DEFAULTS', 'DEFERRABLE', 'DEFERRED', + 'DEFINED', 'DEFINER', 'DELETE', 'DELIMITER', 'DELIMITERS', 'DEREF', 'DESC', + 'DESCRIBE', 'DESCRIPTOR', 'DESTROY', 'DESTRUCTOR', 'DETERMINISTIC', + 'DIAGNOSTICS', 'DICTIONARY', 'DISCONNECT', 'DISPATCH', 'DISTINCT', 'DO', + 'DOMAIN', 'DROP', 'DYNAMIC', 'DYNAMIC_FUNCTION', 'DYNAMIC_FUNCTION_CODE', + 'EACH', 'ELSE', 'ENCODING', 'ENCRYPTED', 'END', 'END-EXEC', 'EQUALS', 'ESCAPE', 'EVERY', + 'EXCEPTION', 'EXCEPT', 'EXCLUDING', 'EXCLUSIVE', 'EXEC', 'EXECUTE', 'EXISTING', + 'EXISTS', 'EXPLAIN', 'EXTERNAL', 'EXTRACT', 'FALSE', 'FETCH', 'FINAL', 'FIRST', 'FOR', + 'FORCE', 'FOREIGN', 'FORTRAN', 'FORWARD', 'FOUND', 'FREE', 'FREEZE', 'FROM', 'FULL', + 'FUNCTION', 'G', 'GENERAL', 'GENERATED', 'GET', 'GLOBAL', 'GO', 'GOTO', 'GRANT', 'GRANTED', + 'GROUP', 'GROUPING', 'HANDLER', 'HAVING', 'HIERARCHY', 'HOLD', 'HOST', 'IDENTITY', + 'IGNORE', 'ILIKE', 'IMMEDIATE', 'IMMUTABLE', 'IMPLEMENTATION', 'IMPLICIT', 'IN', + 'INCLUDING', 'INCREMENT', 'INDEX', 'INDITCATOR', 'INFIX', 'INHERITS', 'INITIALIZE', + 'INITIALLY', 'INNER', 'INOUT', 'INPUT', 'INSENSITIVE', 'INSERT', 'INSTANTIABLE', + 'INSTEAD', 'INTERSECT', 'INTO', 'INVOKER', 'IS', 'ISNULL', 'ISOLATION', 'ITERATE', 'JOIN', + 'KEY', 'KEY_MEMBER', 'KEY_TYPE', 'LANCOMPILER', 'LANGUAGE', 'LARGE', 'LAST', + 'LATERAL', 'LEADING', 'LEFT', 'LENGTH', 'LESS', 'LEVEL', 'LIKE', 'LIMIT', 'LISTEN', 'LOAD', + 'LOCAL', 'LOCALTIME', 'LOCALTIMESTAMP', 'LOCATION', 'LOCATOR', 'LOCK', 'LOWER', + 'MAP', 'MATCH', 'MAX', 'MAXVALUE', 'MESSAGE_LENGTH', 'MESSAGE_OCTET_LENGTH', + 'MESSAGE_TEXT', 'METHOD', 'MIN', 'MINUTE', 'MINVALUE', 'MOD', 'MODE', 'MODIFIES', + 'MODIFY', 'MONTH', 'MORE', 'MOVE', 'MUMPS', 'NAMES', 'NATIONAL', 'NATURAL', 'NCHAR', + 'NCLOB', 'NEW', 'NEXT', 'NO', 'NOCREATEDB', 'NOCREATEUSER', 'NONE', 'NOT', 'NOTHING', + 'NOTIFY', 'NOTNULL', 'NULL', 'NULLABLE', 'NULLIF', 'OBJECT', 'OCTET_LENGTH', 'OF', 'OFF', + 'OFFSET', 'OIDS', 'OLD', 'ON', 'ONLY', 'OPEN', 'OPERATION', 'OPERATOR', 'OPTION', 'OPTIONS', + 'OR', 'ORDER', 'ORDINALITY', 'OUT', 'OUTER', 'OUTPUT', 'OVERLAPS', 'OVERLAY', 'OVERRIDING', + 'OWNER', 'PAD', 'PARAMETER', 'PARAMETERS', 'PARAMETER_MODE', 'PARAMATER_NAME', + 'PARAMATER_ORDINAL_POSITION', 'PARAMETER_SPECIFIC_CATALOG', + 'PARAMETER_SPECIFIC_NAME', 'PARAMATER_SPECIFIC_SCHEMA', 'PARTIAL', + 'PASCAL', 'PENDANT', 'PLACING', 'PLI', 'POSITION', 'POSTFIX', 'PRECISION', 'PREFIX', + 'PREORDER', 'PREPARE', 'PRESERVE', 'PRIMARY', 'PRIOR', 'PRIVILEGES', 'PROCEDURAL', + 'PROCEDURE', 'PUBLIC', 'READ', 'READS', 'RECHECK', 'RECURSIVE', 'REF', 'REFERENCES', + 'REFERENCING', 'REINDEX', 'RELATIVE', 'RENAME', 'REPEATABLE', 'REPLACE', 'RESET', + 'RESTART', 'RESTRICT', 'RESULT', 'RETURN', 'RETURNED_LENGTH', + 'RETURNED_OCTET_LENGTH', 'RETURNED_SQLSTATE', 'RETURNS', 'REVOKE', 'RIGHT', + 'ROLE', 'ROLLBACK', 'ROLLUP', 'ROUTINE', 'ROUTINE_CATALOG', 'ROUTINE_NAME', + 'ROUTINE_SCHEMA', 'ROW', 'ROWS', 'ROW_COUNT', 'RULE', 'SAVE_POINT', 'SCALE', 'SCHEMA', + 'SCHEMA_NAME', 'SCOPE', 'SCROLL', 'SEARCH', 
'SECOND', 'SECURITY', 'SELECT', 'SELF', + 'SENSITIVE', 'SERIALIZABLE', 'SERVER_NAME', 'SESSION', 'SESSION_USER', 'SET', + 'SETOF', 'SETS', 'SHARE', 'SHOW', 'SIMILAR', 'SIMPLE', 'SIZE', 'SOME', 'SOURCE', 'SPACE', + 'SPECIFIC', 'SPECIFICTYPE', 'SPECIFIC_NAME', 'SQL', 'SQLCODE', 'SQLERROR', + 'SQLEXCEPTION', 'SQLSTATE', 'SQLWARNINIG', 'STABLE', 'START', 'STATE', 'STATEMENT', + 'STATIC', 'STATISTICS', 'STDIN', 'STDOUT', 'STORAGE', 'STRICT', 'STRUCTURE', 'STYPE', + 'SUBCLASS_ORIGIN', 'SUBLIST', 'SUBSTRING', 'SUM', 'SYMMETRIC', 'SYSID', 'SYSTEM', + 'SYSTEM_USER', 'TABLE', 'TABLE_NAME', ' TEMP', 'TEMPLATE', 'TEMPORARY', 'TERMINATE', + 'THAN', 'THEN', 'TIMESTAMP', 'TIMEZONE_HOUR', 'TIMEZONE_MINUTE', 'TO', 'TOAST', + 'TRAILING', 'TRANSATION', 'TRANSACTIONS_COMMITTED', + 'TRANSACTIONS_ROLLED_BACK', 'TRANSATION_ACTIVE', 'TRANSFORM', + 'TRANSFORMS', 'TRANSLATE', 'TRANSLATION', 'TREAT', 'TRIGGER', 'TRIGGER_CATALOG', + 'TRIGGER_NAME', 'TRIGGER_SCHEMA', 'TRIM', 'TRUE', 'TRUNCATE', 'TRUSTED', 'TYPE', + 'UNCOMMITTED', 'UNDER', 'UNENCRYPTED', 'UNION', 'UNIQUE', 'UNKNOWN', 'UNLISTEN', + 'UNNAMED', 'UNNEST', 'UNTIL', 'UPDATE', 'UPPER', 'USAGE', 'USER', + 'USER_DEFINED_TYPE_CATALOG', 'USER_DEFINED_TYPE_NAME', + 'USER_DEFINED_TYPE_SCHEMA', 'USING', 'VACUUM', 'VALID', 'VALIDATOR', 'VALUES', + 'VARIABLE', 'VERBOSE', 'VERSION', 'VIEW', 'VOLATILE', 'WHEN', 'WHENEVER', 'WHERE', + 'WITH', 'WITHOUT', 'WORK', 'WRITE', 'YEAR', 'ZONE'), suffix=r'\b'), + Keyword), + (words(( + 'ARRAY', 'BIGINT', 'BINARY', 'BIT', 'BLOB', 'BOOLEAN', 'CHAR', 'CHARACTER', 'DATE', + 'DEC', 'DECIMAL', 'FLOAT', 'INT', 'INTEGER', 'INTERVAL', 'NUMBER', 'NUMERIC', 'REAL', + 'SERIAL', 'SMALLINT', 'VARCHAR', 'VARYING', 'INT8', 'SERIAL8', 'TEXT'), suffix=r'\b'), Name.Builtin), (r'[+*/<>=~!@#%^&|`?-]', Operator), (r'[0-9]+', Number.Integer), # TODO: Backslash escapes? 
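The change above swaps the hand-maintained keyword alternations for the new `words()` helper from `pygments.lexer`, which builds one optimized regex from a tuple of literals (the speed-up noted in CHANGES). As a minimal sketch of how such a rule is written; the lexer, alias and keyword set below are invented purely for illustration and are not part of this patch:

    from pygments.lexer import RegexLexer, words
    from pygments.token import Keyword, Name, Text

    class TinySqlLexer(RegexLexer):
        """Toy lexer sketching the words() idiom; not part of the patch."""
        name = 'TinySQL'
        aliases = ['tinysql']

        tokens = {
            'root': [
                # words() expands the tuple into a single optimized regex;
                # suffix=r'\b' keeps whole-word matching, as in the SQL
                # keyword rule being refactored above.
                (words(('SELECT', 'FROM', 'WHERE'), suffix=r'\b'), Keyword),
                (r'[a-z_]\w*', Name),
                (r'\s+', Text),
            ]
        }

The token rules stay tuples of (regex, token), so converting an existing alternation is mostly a matter of splitting it into a tuple of plain strings, as the hunk above does for the SQL keywords and builtin type names.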
(r"'(''|[^'])*'", String.Single), - (r'"(""|[^"])*"', String.Symbol), # not a real string literal in ANSI SQL + (r'"(""|[^"])*"', String.Symbol), # not a real string literal in ANSI SQL (r'[a-z_]\w*', Name), (r'[;:()\[\],\.]', Punctuation) ], -- cgit v1.2.1 From 731527e9183d17d0f9eaf35bc0dd263ba84ae5c1 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Fri, 19 Sep 2014 23:02:18 +0200 Subject: split up text lexers --- CHANGES | 2 + doc/docs/lexerdevelopment.rst | 2 +- pygments/lexers/_mapping.py | 56 +- pygments/lexers/agile.py | 2 +- pygments/lexers/asm.py | 22 +- pygments/lexers/compiled.py | 2 +- pygments/lexers/configs.py | 373 +++++++- pygments/lexers/console.py | 79 +- pygments/lexers/data.py | 427 +++++++++ pygments/lexers/diff.py | 106 +++ pygments/lexers/dsls.py | 45 +- pygments/lexers/haskell.py | 2 +- pygments/lexers/installers.py | 110 ++- pygments/lexers/markup.py | 379 ++++++++ pygments/lexers/math.py | 2 +- pygments/lexers/misc/make.py | 199 ++++ pygments/lexers/parsers.py | 224 +++-- pygments/lexers/templates.py | 19 +- pygments/lexers/text.py | 2059 +---------------------------------------- pygments/lexers/textedit.py | 103 ++- pygments/lexers/textfmts.py | 277 ++++++ pygments/lexers/web.py | 4 +- tests/test_perllexer.py | 2 +- 23 files changed, 2299 insertions(+), 2197 deletions(-) create mode 100644 pygments/lexers/data.py create mode 100644 pygments/lexers/diff.py create mode 100644 pygments/lexers/markup.py create mode 100644 pygments/lexers/misc/make.py create mode 100644 pygments/lexers/textfmts.py diff --git a/CHANGES b/CHANGES index f5196bde..b6b80f1d 100644 --- a/CHANGES +++ b/CHANGES @@ -44,6 +44,8 @@ Version 2.0 * Nit (PR#375) * LSL (PR#296) * Alloy (PR#355) + * Docker config files + * Todo.txt todo lists - Added a helper to "optimize" regular expressions that match one of many literal words; this can save 20% and more lexing time with lexers that diff --git a/doc/docs/lexerdevelopment.rst b/doc/docs/lexerdevelopment.rst index 23f7cdc6..48ede92c 100644 --- a/doc/docs/lexerdevelopment.rst +++ b/doc/docs/lexerdevelopment.rst @@ -584,7 +584,7 @@ the ``get_tokens_unprocessed()`` method. The following lexer subclasses the .. 
sourcecode:: python - from pygments.lexers.agile import PythonLexer + from pygments.lexers.python import PythonLexer from pygments.token import Name, Keyword class MyPythonLexer(PythonLexer): diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index 928f8a3f..f0848dd4 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -33,15 +33,15 @@ LEXERS = { 'AntlrPerlLexer': ('pygments.lexers.parsers', 'ANTLR With Perl Target', ('antlr-perl',), ('*.G', '*.g'), ()), 'AntlrPythonLexer': ('pygments.lexers.parsers', 'ANTLR With Python Target', ('antlr-python',), ('*.G', '*.g'), ()), 'AntlrRubyLexer': ('pygments.lexers.parsers', 'ANTLR With Ruby Target', ('antlr-ruby', 'antlr-rb'), ('*.G', '*.g'), ()), - 'ApacheConfLexer': ('pygments.lexers.text', 'ApacheConf', ('apacheconf', 'aconf', 'apache'), ('.htaccess', 'apache.conf', 'apache2.conf'), ('text/x-apacheconf',)), + 'ApacheConfLexer': ('pygments.lexers.configs', 'ApacheConf', ('apacheconf', 'aconf', 'apache'), ('.htaccess', 'apache.conf', 'apache2.conf'), ('text/x-apacheconf',)), 'AppleScriptLexer': ('pygments.lexers.scripting', 'AppleScript', ('applescript',), ('*.applescript',), ()), 'AspectJLexer': ('pygments.lexers.jvm', 'AspectJ', ('aspectj',), ('*.aj',), ('text/x-aspectj',)), 'AsymptoteLexer': ('pygments.lexers.graphics', 'Asymptote', ('asy', 'asymptote'), ('*.asy',), ('text/x-asymptote',)), 'AutoItLexer': ('pygments.lexers.automation', 'AutoIt', ('autoit',), ('*.au3',), ('text/x-autoit',)), 'AutohotkeyLexer': ('pygments.lexers.automation', 'autohotkey', ('ahk', 'autohotkey'), ('*.ahk', '*.ahkl'), ('text/x-autohotkey',)), 'AwkLexer': ('pygments.lexers.textedit', 'Awk', ('awk', 'gawk', 'mawk', 'nawk'), ('*.awk',), ('application/x-awk',)), - 'BBCodeLexer': ('pygments.lexers.text', 'BBCode', ('bbcode',), (), ('text/x-bbcode',)), - 'BaseMakefileLexer': ('pygments.lexers.text', 'Base Makefile', ('basemake',), (), ()), + 'BBCodeLexer': ('pygments.lexers.markup', 'BBCode', ('bbcode',), (), ('text/x-bbcode',)), + 'BaseMakefileLexer': ('pygments.lexers.misc.make', 'Base Makefile', ('basemake',), (), ()), 'BashLexer': ('pygments.lexers.shell', 'Bash', ('bash', 'sh', 'ksh'), ('*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass', '.bashrc', 'bashrc', '.bash_*', 'bash_*', 'PKGBUILD'), ('application/x-sh', 'application/x-shellscript')), 'BashSessionLexer': ('pygments.lexers.shell', 'Bash Session', ('console',), ('*.sh-session',), ('application/x-shell-session',)), 'BatchLexer': ('pygments.lexers.shell', 'Batchfile', ('bat', 'batch', 'dosbatch', 'winbatch'), ('*.bat', '*.cmd'), ('application/x-dos-batch',)), @@ -53,7 +53,7 @@ LEXERS = { 'BroLexer': ('pygments.lexers.dsls', 'Bro', ('bro',), ('*.bro',), ()), 'BugsLexer': ('pygments.lexers.math', 'BUGS', ('bugs', 'winbugs', 'openbugs'), ('*.bug',), ()), 'CLexer': ('pygments.lexers.c_like.c_cpp', 'C', ('c',), ('*.c', '*.h', '*.idc'), ('text/x-chdr', 'text/x-csrc')), - 'CMakeLexer': ('pygments.lexers.text', 'CMake', ('cmake',), ('*.cmake', 'CMakeLists.txt'), ('text/x-cmake',)), + 'CMakeLexer': ('pygments.lexers.misc.make', 'CMake', ('cmake',), ('*.cmake', 'CMakeLists.txt'), ('text/x-cmake',)), 'CObjdumpLexer': ('pygments.lexers.asm', 'c-objdump', ('c-objdump',), ('*.c-objdump',), ('text/x-c-objdump',)), 'CSharpAspxLexer': ('pygments.lexers.dotnet', 'aspx-cs', ('aspx-cs',), ('*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd'), ()), 'CSharpLexer': ('pygments.lexers.dotnet', 'C#', ('csharp', 'c#'), ('*.cs',), ('text/x-csharp',)), @@ -94,14 +94,13 @@ LEXERS = { 
'CythonLexer': ('pygments.lexers.python', 'Cython', ('cython', 'pyx', 'pyrex'), ('*.pyx', '*.pxd', '*.pxi'), ('text/x-cython', 'application/x-cython')), 'DLexer': ('pygments.lexers.c_like.d', 'D', ('d',), ('*.d', '*.di'), ('text/x-dsrc',)), 'DObjdumpLexer': ('pygments.lexers.asm', 'd-objdump', ('d-objdump',), ('*.d-objdump',), ('text/x-d-objdump',)), - 'DarcsPatchLexer': ('pygments.lexers.text', 'Darcs Patch', ('dpatch',), ('*.dpatch', '*.darcspatch'), ()), + 'DarcsPatchLexer': ('pygments.lexers.diff', 'Darcs Patch', ('dpatch',), ('*.dpatch', '*.darcspatch'), ()), 'DartLexer': ('pygments.lexers.web', 'Dart', ('dart',), ('*.dart',), ('text/x-dart',)), - 'DebianControlLexer': ('pygments.lexers.text', 'Debian Control file', ('control', 'debcontrol'), ('control',), ()), 'DelphiLexer': ('pygments.lexers.pascal', 'Delphi', ('delphi', 'pas', 'pascal', 'objectpascal'), ('*.pas',), ('text/x-pascal',)), 'DgLexer': ('pygments.lexers.python', 'dg', ('dg',), ('*.dg',), ('text/x-dg',)), - 'DiffLexer': ('pygments.lexers.text', 'Diff', ('diff', 'udiff'), ('*.diff', '*.patch'), ('text/x-diff', 'text/x-patch')), + 'DiffLexer': ('pygments.lexers.diff', 'Diff', ('diff', 'udiff'), ('*.diff', '*.patch'), ('text/x-diff', 'text/x-patch')), 'DjangoLexer': ('pygments.lexers.templates', 'Django/Jinja', ('django', 'jinja'), (), ('application/x-django-templating', 'application/x-jinja')), - 'DockerLexer': ('pygments.lexers.text', 'Docker', ('docker', 'dockerfile'), ('Dockerfile', '*.docker'), ('text/x-dockerfile-config',)), + 'DockerLexer': ('pygments.lexers.configs', 'Docker', ('docker', 'dockerfile'), ('Dockerfile', '*.docker'), ('text/x-dockerfile-config',)), 'DtdLexer': ('pygments.lexers.web', 'DTD', ('dtd',), ('*.dtd',), ('application/xml-dtd',)), 'DuelLexer': ('pygments.lexers.web', 'Duel', ('duel', 'jbst', 'jsonml+bst'), ('*.duel', '*.jbst'), ('text/x-duel', 'text/x-jbst')), 'DylanConsoleLexer': ('pygments.lexers.misc.dylan', 'Dylan session', ('dylan-console', 'dylan-repl'), ('*.dylan-console',), ('text/x-dylan-console',)), @@ -109,7 +108,7 @@ LEXERS = { 'DylanLidLexer': ('pygments.lexers.misc.dylan', 'DylanLID', ('dylan-lid', 'lid'), ('*.lid', '*.hdp'), ('text/x-dylan-lid',)), 'ECLLexer': ('pygments.lexers.misc.ecl', 'ECL', ('ecl',), ('*.ecl',), ('application/x-ecl',)), 'ECLexer': ('pygments.lexers.c_like.other', 'eC', ('ec',), ('*.ec', '*.eh'), ('text/x-echdr', 'text/x-ecsrc')), - 'EbnfLexer': ('pygments.lexers.text', 'EBNF', ('ebnf',), ('*.ebnf',), ('text/x-ebnf',)), + 'EbnfLexer': ('pygments.lexers.parsers', 'EBNF', ('ebnf',), ('*.ebnf',), ('text/x-ebnf',)), 'EiffelLexer': ('pygments.lexers.eiffel', 'Eiffel', ('eiffel',), ('*.e',), ('text/x-eiffel',)), 'ElixirConsoleLexer': ('pygments.lexers.misc.erlang', 'Elixir iex session', ('iex',), (), ('text/x-elixir-shellsession',)), 'ElixirLexer': ('pygments.lexers.misc.erlang', 'Elixir', ('elixir', 'ex', 'exs'), ('*.ex', '*.exs'), ('text/x-elixir',)), @@ -131,7 +130,7 @@ LEXERS = { 'GasLexer': ('pygments.lexers.asm', 'GAS', ('gas', 'asm'), ('*.s', '*.S'), ('text/x-gas',)), 'GenshiLexer': ('pygments.lexers.templates', 'Genshi', ('genshi', 'kid', 'xml+genshi', 'xml+kid'), ('*.kid',), ('application/x-genshi', 'application/x-kid')), 'GenshiTextLexer': ('pygments.lexers.templates', 'Genshi Text', ('genshitext',), (), ('application/x-genshi-text', 'text/x-genshi')), - 'GettextLexer': ('pygments.lexers.text', 'Gettext Catalog', ('pot', 'po'), ('*.pot', '*.po'), ('application/x-gettext', 'text/x-gettext', 'text/gettext')), + 'GettextLexer': ('pygments.lexers.textfmts', 
'Gettext Catalog', ('pot', 'po'), ('*.pot', '*.po'), ('application/x-gettext', 'text/x-gettext', 'text/gettext')), 'GherkinLexer': ('pygments.lexers.testing', 'Gherkin', ('cucumber', 'gherkin'), ('*.feature',), ('text/x-gherkin',)), 'GnuplotLexer': ('pygments.lexers.graphics', 'Gnuplot', ('gnuplot',), ('*.plot', '*.plt'), ('text/x-gnuplot',)), 'GoLexer': ('pygments.lexers.c_like.go', 'Go', ('go',), ('*.go',), ('text/x-gosrc',)), @@ -139,7 +138,7 @@ LEXERS = { 'GoodDataCLLexer': ('pygments.lexers.business', 'GoodData-CL', ('gooddata-cl',), ('*.gdc',), ('text/x-gooddata-cl',)), 'GosuLexer': ('pygments.lexers.jvm', 'Gosu', ('gosu',), ('*.gs', '*.gsx', '*.gsp', '*.vark'), ('text/x-gosu',)), 'GosuTemplateLexer': ('pygments.lexers.jvm', 'Gosu Template', ('gst',), ('*.gst',), ('text/x-gosu-template',)), - 'GroffLexer': ('pygments.lexers.text', 'Groff', ('groff', 'nroff', 'man'), ('*.[1234567]', '*.man'), ('application/x-troff', 'text/troff')), + 'GroffLexer': ('pygments.lexers.markup', 'Groff', ('groff', 'nroff', 'man'), ('*.[1234567]', '*.man'), ('application/x-troff', 'text/troff')), 'GroovyLexer': ('pygments.lexers.jvm', 'Groovy', ('groovy',), ('*.groovy',), ('text/x-groovy',)), 'HamlLexer': ('pygments.lexers.web', 'Haml', ('haml',), ('*.haml',), ('text/x-haml',)), 'HandlebarsHtmlLexer': ('pygments.lexers.templates', 'HTML+Handlebars', ('html+handlebars',), ('*.handlebars', '*.hbs'), ('text/html+handlebars', 'text/x-handlebars-template')), @@ -151,8 +150,8 @@ LEXERS = { 'HtmlLexer': ('pygments.lexers.web', 'HTML', ('html',), ('*.html', '*.htm', '*.xhtml', '*.xslt'), ('text/html', 'application/xhtml+xml')), 'HtmlPhpLexer': ('pygments.lexers.templates', 'HTML+PHP', ('html+php',), ('*.phtml',), ('application/x-php', 'application/x-httpd-php', 'application/x-httpd-php3', 'application/x-httpd-php4', 'application/x-httpd-php5')), 'HtmlSmartyLexer': ('pygments.lexers.templates', 'HTML+Smarty', ('html+smarty',), (), ('text/html+smarty',)), - 'HttpLexer': ('pygments.lexers.text', 'HTTP', ('http',), (), ()), - 'HxmlLexer': ('pygments.lexers.text', 'Hxml', ('haxeml', 'hxml'), ('*.hxml',), ()), + 'HttpLexer': ('pygments.lexers.textfmts', 'HTTP', ('http',), (), ()), + 'HxmlLexer': ('pygments.lexers.dsls', 'Hxml', ('haxeml', 'hxml'), ('*.hxml',), ()), 'HyLexer': ('pygments.lexers.lisp', 'Hy', ('hylang',), ('*.hy',), ('text/x-hy', 'application/x-hy')), 'HybrisLexer': ('pygments.lexers.scripting', 'Hybris', ('hybris', 'hy'), ('*.hy', '*.hyb'), ('text/x-hybris', 'application/x-hybris')), 'IDLLexer': ('pygments.lexers.math', 'IDL', ('idl',), ('*.pro',), ('text/idl',)), @@ -161,10 +160,10 @@ LEXERS = { 'Inform6Lexer': ('pygments.lexers.inform', 'Inform 6', ('inform6', 'i6'), ('*.inf',), ()), 'Inform6TemplateLexer': ('pygments.lexers.inform', 'Inform 6 template', ('i6t',), ('*.i6t',), ()), 'Inform7Lexer': ('pygments.lexers.inform', 'Inform 7', ('inform7', 'i7'), ('*.ni', '*.i7x'), ()), - 'IniLexer': ('pygments.lexers.text', 'INI', ('ini', 'cfg', 'dosini'), ('*.ini', '*.cfg'), ('text/x-ini',)), + 'IniLexer': ('pygments.lexers.configs', 'INI', ('ini', 'cfg', 'dosini'), ('*.ini', '*.cfg'), ('text/x-ini',)), 'IoLexer': ('pygments.lexers.misc.iolang', 'Io', ('io',), ('*.io',), ('text/x-iosrc',)), 'IokeLexer': ('pygments.lexers.jvm', 'Ioke', ('ioke', 'ik'), ('*.ik',), ('text/x-iokesrc',)), - 'IrcLogsLexer': ('pygments.lexers.text', 'IRC logs', ('irc',), ('*.weechatlog',), ('text/x-irclog',)), + 'IrcLogsLexer': ('pygments.lexers.textfmts', 'IRC logs', ('irc',), ('*.weechatlog',), ('text/x-irclog',)), 'JadeLexer': 
('pygments.lexers.web', 'Jade', ('jade',), ('*.jade',), ('text/x-jade',)), 'JagsLexer': ('pygments.lexers.math', 'JAGS', ('jags',), ('*.jag', '*.bug'), ()), 'JasminLexer': ('pygments.lexers.jvm', 'Jasmin', ('jasmin', 'jasminxt'), ('*.j',), ()), @@ -189,7 +188,7 @@ LEXERS = { 'LassoJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Lasso', ('js+lasso', 'javascript+lasso'), (), ('application/x-javascript+lasso', 'text/x-javascript+lasso', 'text/javascript+lasso')), 'LassoLexer': ('pygments.lexers.web', 'Lasso', ('lasso', 'lassoscript'), ('*.lasso', '*.lasso[89]'), ('text/x-lasso',)), 'LassoXmlLexer': ('pygments.lexers.templates', 'XML+Lasso', ('xml+lasso',), (), ('application/xml+lasso',)), - 'LighttpdConfLexer': ('pygments.lexers.text', 'Lighttpd configuration file', ('lighty', 'lighttpd'), (), ('text/x-lighttpd-conf',)), + 'LighttpdConfLexer': ('pygments.lexers.configs', 'Lighttpd configuration file', ('lighty', 'lighttpd'), (), ('text/x-lighttpd-conf',)), 'LimboLexer': ('pygments.lexers.inferno', 'Limbo', ('limbo',), ('*.b',), ('text/limbo',)), 'LiquidLexer': ('pygments.lexers.templates', 'liquid', ('liquid',), ('*.liquid',), ()), 'LiterateAgdaLexer': ('pygments.lexers.haskell', 'Literate Agda', ('lagda', 'literate-agda'), ('*.lagda',), ('text/x-literate-agda',)), @@ -202,7 +201,7 @@ LEXERS = { 'LogtalkLexer': ('pygments.lexers.prolog', 'Logtalk', ('logtalk',), ('*.lgt', '*.logtalk'), ('text/x-logtalk',)), 'LuaLexer': ('pygments.lexers.scripting', 'Lua', ('lua',), ('*.lua', '*.wlua'), ('text/x-lua', 'application/x-lua')), 'MOOCodeLexer': ('pygments.lexers.scripting', 'MOOCode', ('moocode', 'moo'), ('*.moo',), ('text/x-moocode',)), - 'MakefileLexer': ('pygments.lexers.text', 'Makefile', ('make', 'makefile', 'mf', 'bsdmake'), ('*.mak', '*.mk', 'Makefile', 'makefile', 'Makefile.*', 'GNUmakefile'), ('text/x-makefile',)), + 'MakefileLexer': ('pygments.lexers.misc.make', 'Makefile', ('make', 'makefile', 'mf', 'bsdmake'), ('*.mak', '*.mk', 'Makefile', 'makefile', 'Makefile.*', 'GNUmakefile'), ('text/x-makefile',)), 'MakoCssLexer': ('pygments.lexers.templates', 'CSS+Mako', ('css+mako',), (), ('text/css+mako',)), 'MakoHtmlLexer': ('pygments.lexers.templates', 'HTML+Mako', ('html+mako',), (), ('text/html+mako',)), 'MakoJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Mako', ('js+mako', 'javascript+mako'), (), ('application/x-javascript+mako', 'text/x-javascript+mako', 'text/javascript+mako')), @@ -217,7 +216,7 @@ LEXERS = { 'MiniDLexer': ('pygments.lexers.c_like.d', 'MiniD', ('minid',), ('*.md',), ('text/x-minidsrc',)), 'ModelicaLexer': ('pygments.lexers.modeling', 'Modelica', ('modelica',), ('*.mo',), ('text/x-modelica',)), 'Modula2Lexer': ('pygments.lexers.pascal', 'Modula-2', ('modula2', 'm2'), ('*.def', '*.mod'), ('text/x-modula2',)), - 'MoinWikiLexer': ('pygments.lexers.text', 'MoinMoin/Trac Wiki markup', ('trac-wiki', 'moin'), (), ('text/x-trac-wiki',)), + 'MoinWikiLexer': ('pygments.lexers.markup', 'MoinMoin/Trac Wiki markup', ('trac-wiki', 'moin'), (), ('text/x-trac-wiki',)), 'MonkeyLexer': ('pygments.lexers.misc.basic', 'Monkey', ('monkey',), ('*.monkey',), ('text/x-monkey',)), 'MoonScriptLexer': ('pygments.lexers.scripting', 'MoonScript', ('moon', 'moonscript'), ('*.moon',), ('text/x-moonscript', 'application/x-moonscript')), 'MqlLexer': ('pygments.lexers.c_like.other', 'MQL', ('mql', 'mq4', 'mq5', 'mql4', 'mql5'), ('*.mq4', '*.mq5', '*.mqh'), ('text/x-mql',)), @@ -237,7 +236,7 @@ LEXERS = { 'NesCLexer': ('pygments.lexers.c_like.other', 'nesC', ('nesc',), ('*.nc',), 
('text/x-nescsrc',)), 'NewLispLexer': ('pygments.lexers.lisp', 'NewLisp', ('newlisp',), ('*.lsp', '*.nl'), ('text/x-newlisp', 'application/x-newlisp')), 'NewspeakLexer': ('pygments.lexers.misc.smalltalk', 'Newspeak', ('newspeak',), ('*.ns2',), ('text/x-newspeak',)), - 'NginxConfLexer': ('pygments.lexers.text', 'Nginx configuration file', ('nginx',), (), ('text/x-nginx-conf',)), + 'NginxConfLexer': ('pygments.lexers.configs', 'Nginx configuration file', ('nginx',), (), ('text/x-nginx-conf',)), 'NimrodLexer': ('pygments.lexers.misc.nimrod', 'Nimrod', ('nimrod', 'nim'), ('*.nim', '*.nimrod'), ('text/x-nimrod',)), 'NitLexer': ('pygments.lexers.misc.nit', 'Nit', ('nit',), ('*.nit',), ()), 'NixLexer': ('pygments.lexers.misc.nix', 'Nix', ('nixos', 'nix'), ('*.nix',), ('text/x-nix',)), @@ -265,10 +264,10 @@ LEXERS = { 'PovrayLexer': ('pygments.lexers.graphics', 'POVRay', ('pov',), ('*.pov', '*.inc'), ('text/x-povray',)), 'PowerShellLexer': ('pygments.lexers.shell', 'PowerShell', ('powershell', 'posh', 'ps1', 'psm1'), ('*.ps1', '*.psm1'), ('text/x-powershell',)), 'PrologLexer': ('pygments.lexers.prolog', 'Prolog', ('prolog',), ('*.prolog', '*.pro', '*.pl'), ('text/x-prolog',)), - 'PropertiesLexer': ('pygments.lexers.text', 'Properties', ('properties', 'jproperties'), ('*.properties',), ('text/x-java-properties',)), + 'PropertiesLexer': ('pygments.lexers.configs', 'Properties', ('properties', 'jproperties'), ('*.properties',), ('text/x-java-properties',)), 'ProtoBufLexer': ('pygments.lexers.dsls', 'Protocol Buffer', ('protobuf', 'proto'), ('*.proto',), ()), 'PuppetLexer': ('pygments.lexers.dsls', 'Puppet', ('puppet',), ('*.pp',), ()), - 'PyPyLogLexer': ('pygments.lexers.text', 'PyPy Log', ('pypylog', 'pypy'), ('*.pypylog',), ('application/x-pypylog',)), + 'PyPyLogLexer': ('pygments.lexers.console', 'PyPy Log', ('pypylog', 'pypy'), ('*.pypylog',), ('application/x-pypylog',)), 'Python3Lexer': ('pygments.lexers.python', 'Python 3', ('python3', 'py3'), (), ('text/x-python3', 'application/x-python3')), 'Python3TracebackLexer': ('pygments.lexers.python', 'Python 3.0 Traceback', ('py3tb',), ('*.py3tb',), ('text/x-python3-traceback',)), 'PythonConsoleLexer': ('pygments.lexers.python', 'Python console session', ('pycon',), (), ('text/x-python-doctest',)), @@ -292,13 +291,13 @@ LEXERS = { 'RebolLexer': ('pygments.lexers.misc.rebol', 'REBOL', ('rebol',), ('*.r', '*.r3', '*.reb'), ('text/x-rebol',)), 'RedLexer': ('pygments.lexers.misc.rebol', 'Red', ('red', 'red/system'), ('*.red', '*.reds'), ('text/x-red', 'text/x-red-system')), 'RedcodeLexer': ('pygments.lexers.esoteric', 'Redcode', ('redcode',), ('*.cw',), ()), - 'RegeditLexer': ('pygments.lexers.text', 'reg', ('registry',), ('*.reg',), ('text/x-windows-registry',)), + 'RegeditLexer': ('pygments.lexers.configs', 'reg', ('registry',), ('*.reg',), ('text/x-windows-registry',)), 'RexxLexer': ('pygments.lexers.scripting', 'Rexx', ('rexx', 'arexx'), ('*.rexx', '*.rex', '*.rx', '*.arexx'), ('text/x-rexx',)), 'RhtmlLexer': ('pygments.lexers.templates', 'RHTML', ('rhtml', 'html+erb', 'html+ruby'), ('*.rhtml',), ('text/html+ruby',)), 'RobotFrameworkLexer': ('pygments.lexers.robotframework', 'RobotFramework', ('robotframework',), ('*.txt', '*.robot'), ('text/x-robotframework',)), 'RqlLexer': ('pygments.lexers.sql', 'RQL', ('rql',), ('*.rql',), ('text/x-rql',)), 'RslLexer': ('pygments.lexers.dsls', 'RSL', ('rsl',), ('*.rsl',), ('text/rsl',)), - 'RstLexer': ('pygments.lexers.text', 'reStructuredText', ('rst', 'rest', 'restructuredtext'), ('*.rst', '*.rest'), 
('text/x-rst', 'text/prs.fallenstein.rst')), + 'RstLexer': ('pygments.lexers.markup', 'reStructuredText', ('rst', 'rest', 'restructuredtext'), ('*.rst', '*.rest'), ('text/x-rst', 'text/prs.fallenstein.rst')), 'RubyConsoleLexer': ('pygments.lexers.ruby', 'Ruby irb session', ('rbcon', 'irb'), (), ('text/x-ruby-shellsession',)), 'RubyLexer': ('pygments.lexers.ruby', 'Ruby', ('rb', 'ruby', 'duby'), ('*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec', '*.rbx', '*.duby'), ('text/x-ruby', 'application/x-ruby')), 'RustLexer': ('pygments.lexers.c_like.rust', 'Rust', ('rust',), ('*.rs',), ('text/x-rustsrc',)), @@ -317,11 +316,10 @@ LEXERS = { 'SmartyLexer': ('pygments.lexers.templates', 'Smarty', ('smarty',), ('*.tpl',), ('application/x-smarty',)), 'SnobolLexer': ('pygments.lexers.misc.snobol', 'Snobol', ('snobol',), ('*.snobol',), ('text/x-snobol',)), 'SourcePawnLexer': ('pygments.lexers.misc.pawn', 'SourcePawn', ('sp',), ('*.sp',), ('text/x-sourcepawn',)), - 'SourcesListLexer': ('pygments.lexers.text', 'Debian Sourcelist', ('sourceslist', 'sources.list', 'debsources'), ('sources.list',), ()), 'SparqlLexer': ('pygments.lexers.rdf', 'SPARQL', ('sparql',), ('*.rq', '*.sparql'), ('application/sparql-query',)), 'SqlLexer': ('pygments.lexers.sql', 'SQL', ('sql',), ('*.sql',), ('text/x-sql',)), 'SqliteConsoleLexer': ('pygments.lexers.sql', 'sqlite3con', ('sqlite3',), ('*.sqlite3-console',), ('text/x-sqlite3-console',)), - 'SquidConfLexer': ('pygments.lexers.text', 'SquidConf', ('squidconf', 'squid.conf', 'squid'), ('squid.conf',), ('text/x-squidconf',)), + 'SquidConfLexer': ('pygments.lexers.configs', 'SquidConf', ('squidconf', 'squid.conf', 'squid'), ('squid.conf',), ('text/x-squidconf',)), 'SspLexer': ('pygments.lexers.templates', 'Scalate Server Page', ('ssp',), ('*.ssp',), ('application/x-ssp',)), 'StanLexer': ('pygments.lexers.math', 'Stan', ('stan',), ('*.stan',), ()), 'SwiftLexer': ('pygments.lexers.c_like.objective', 'Swift', ('swift',), ('*.swift',), ('text/x-swift',)), @@ -330,9 +328,9 @@ LEXERS = { 'TclLexer': ('pygments.lexers.misc.tcl', 'Tcl', ('tcl',), ('*.tcl',), ('text/x-tcl', 'text/x-script.tcl', 'application/x-tcl')), 'TcshLexer': ('pygments.lexers.shell', 'Tcsh', ('tcsh', 'csh'), ('*.tcsh', '*.csh'), ('application/x-csh',)), 'TeaTemplateLexer': ('pygments.lexers.templates', 'Tea', ('tea',), ('*.tea',), ('text/x-tea',)), - 'TexLexer': ('pygments.lexers.text', 'TeX', ('tex', 'latex'), ('*.tex', '*.aux', '*.toc'), ('text/x-tex', 'text/x-latex')), + 'TexLexer': ('pygments.lexers.markup', 'TeX', ('tex', 'latex'), ('*.tex', '*.aux', '*.toc'), ('text/x-tex', 'text/x-latex')), 'TextLexer': ('pygments.lexers.special', 'Text only', ('text',), ('*.txt',), ('text/plain',)), - 'TodotxtLexer': ('pygments.lexers.text', 'Todotxt', ('todotxt',), ('todo.txt', '*.todotxt'), ('text/x-todo',)), + 'TodotxtLexer': ('pygments.lexers.textfmts', 'Todotxt', ('todotxt',), ('todo.txt', '*.todotxt'), ('text/x-todo',)), 'TreetopLexer': ('pygments.lexers.parsers', 'Treetop', ('treetop',), ('*.treetop', '*.tt'), ()), 'TypeScriptLexer': ('pygments.lexers.web', 'TypeScript', ('ts',), ('*.ts',), ('text/x-typescript',)), 'UrbiscriptLexer': ('pygments.lexers.misc.urbi', 'UrbiScript', ('urbiscript',), ('*.u',), ('application/x-urbiscript',)), @@ -346,7 +344,7 @@ LEXERS = { 'VelocityXmlLexer': ('pygments.lexers.templates', 'XML+Velocity', ('xml+velocity',), (), ('application/xml+velocity',)), 'VerilogLexer': ('pygments.lexers.hdl', 'verilog', ('verilog', 'v'), ('*.v',), ('text/x-verilog',)), 'VhdlLexer': 
('pygments.lexers.hdl', 'vhdl', ('vhdl',), ('*.vhdl', '*.vhd'), ('text/x-vhdl',)), - 'VimLexer': ('pygments.lexers.text', 'VimL', ('vim',), ('*.vim', '.vimrc', '.exrc', '.gvimrc', '_vimrc', '_exrc', '_gvimrc', 'vimrc', 'gvimrc'), ('text/x-vim',)), + 'VimLexer': ('pygments.lexers.textedit', 'VimL', ('vim',), ('*.vim', '.vimrc', '.exrc', '.gvimrc', '_vimrc', '_exrc', '_gvimrc', 'vimrc', 'gvimrc'), ('text/x-vim',)), 'XQueryLexer': ('pygments.lexers.web', 'XQuery', ('xquery', 'xqy', 'xq', 'xql', 'xqm'), ('*.xqy', '*.xquery', '*.xq', '*.xql', '*.xqm'), ('text/xquery', 'application/xquery')), 'XmlDjangoLexer': ('pygments.lexers.templates', 'XML+Django/Jinja', ('xml+django', 'xml+jinja'), (), ('application/xml+django', 'application/xml+jinja')), 'XmlErbLexer': ('pygments.lexers.templates', 'XML+Ruby', ('xml+erb', 'xml+ruby'), (), ('application/xml+ruby',)), @@ -356,7 +354,7 @@ LEXERS = { 'XsltLexer': ('pygments.lexers.web', 'XSLT', ('xslt',), ('*.xsl', '*.xslt', '*.xpl'), ('application/xsl+xml', 'application/xslt+xml')), 'XtendLexer': ('pygments.lexers.jvm', 'Xtend', ('xtend',), ('*.xtend',), ('text/x-xtend',)), 'YamlJinjaLexer': ('pygments.lexers.templates', 'YAML+Jinja', ('yaml+jinja', 'salt', 'sls'), ('*.sls',), ('text/x-yaml+jinja', 'text/x-sls')), - 'YamlLexer': ('pygments.lexers.text', 'YAML', ('yaml',), ('*.yaml', '*.yml'), ('text/x-yaml',)), + 'YamlLexer': ('pygments.lexers.data', 'YAML', ('yaml',), ('*.yaml', '*.yml'), ('text/x-yaml',)), 'ZephirLexer': ('pygments.lexers.web', 'Zephir', ('zephir',), ('*.zep',), ()), } diff --git a/pygments/lexers/agile.py b/pygments/lexers/agile.py index c90d3847..c467548c 100644 --- a/pygments/lexers/agile.py +++ b/pygments/lexers/agile.py @@ -9,7 +9,7 @@ :license: BSD, see LICENSE for details. """ -from pygments.lexers.functional import SchemeLexer +from pygments.lexers.lisp import SchemeLexer from pygments.lexers.jvm import IokeLexer, ClojureLexer from pygments.lexers.python import PythonLexer, PythonConsoleLexer, \ PythonTracebackLexer, Python3Lexer, Python3TracebackLexer, DgLexer diff --git a/pygments/lexers/asm.py b/pygments/lexers/asm.py index f995264c..0f7673f9 100644 --- a/pygments/lexers/asm.py +++ b/pygments/lexers/asm.py @@ -12,12 +12,14 @@ import re from pygments.lexer import RegexLexer, include, bygroups, using, DelegatingLexer -from pygments.lexers.compiled import DLexer, CppLexer, CLexer +from pygments.lexers.c_like.c_cpp import CppLexer, CLexer +from pygments.lexers.c_like.d import DLexer from pygments.token import Text, Name, Number, String, Comment, Punctuation, \ - Other, Keyword, Operator + Other, Keyword, Operator -__all__ = ['GasLexer', 'ObjdumpLexer','DObjdumpLexer', 'CppObjdumpLexer', - 'CObjdumpLexer', 'LlvmLexer', 'NasmLexer', 'NasmObjdumpLexer', 'Ca65Lexer'] +__all__ = ['GasLexer', 'ObjdumpLexer', 'DObjdumpLexer', 'CppObjdumpLexer', + 'CObjdumpLexer', 'LlvmLexer', 'NasmLexer', 'NasmObjdumpLexer', + 'Ca65Lexer'] class GasLexer(RegexLexer): @@ -154,11 +156,9 @@ class ObjdumpLexer(RegexLexer): filenames = ['*.objdump'] mimetypes = ['text/x-objdump'] - tokens = _objdump_lexer_tokens(GasLexer) - class DObjdumpLexer(DelegatingLexer): """ For the output of 'objdump -Sr on compiled D files' @@ -220,11 +220,11 @@ class LlvmLexer(RegexLexer): include('keyword'), - (r'%' + identifier, Name.Variable),#Name.Identifier.Local), - (r'@' + identifier, Name.Variable.Global),#Name.Identifier.Global), - (r'%\d+', Name.Variable.Anonymous),#Name.Identifier.Anonymous), - (r'@\d+', Name.Variable.Global),#Name.Identifier.Anonymous), - (r'#\d+', 
Name.Variable.Global),#Name.Identifier.Global), + (r'%' + identifier, Name.Variable), + (r'@' + identifier, Name.Variable.Global), + (r'%\d+', Name.Variable.Anonymous), + (r'@\d+', Name.Variable.Global), + (r'#\d+', Name.Variable.Global), (r'!' + identifier, Name.Variable), (r'!\d+', Name.Variable.Anonymous), (r'c?' + string, String), diff --git a/pygments/lexers/compiled.py b/pygments/lexers/compiled.py index 8edd6cb5..755a0155 100644 --- a/pygments/lexers/compiled.py +++ b/pygments/lexers/compiled.py @@ -9,7 +9,6 @@ :license: BSD, see LICENSE for details. """ -from pygments.lexers.functional import OcamlLexer from pygments.lexers.jvm import JavaLexer, ScalaLexer from pygments.lexers.c_like.c_cpp import CLexer, CppLexer from pygments.lexers.c_like.d import DLexer @@ -24,6 +23,7 @@ from pygments.lexers.fortran import FortranLexer from pygments.lexers.prolog import PrologLexer from pygments.lexers.python import CythonLexer from pygments.lexers.graphics import GLShaderLexer +from pygments.lexers.misc.ml import OcamlLexer from pygments.lexers.misc.basic import BlitzBasicLexer, BlitzMaxLexer, \ MonkeyLexer from pygments.lexers.misc.dylan import DylanLexer, DylanLidLexer, \ diff --git a/pygments/lexers/configs.py b/pygments/lexers/configs.py index 44408199..6540615a 100644 --- a/pygments/lexers/configs.py +++ b/pygments/lexers/configs.py @@ -9,11 +9,108 @@ :license: BSD, see LICENSE for details. """ -from pygments.lexer import RegexLexer, default, words, bygroups +import re + +from pygments.lexer import RegexLexer, default, words, bygroups, include, using from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ - Number, Punctuation + Number, Punctuation, Whitespace +from pygments.lexers.shell import BashLexer + +__all__ = ['IniLexer', 'RegeditLexer', 'PropertiesLexer', 'KconfigLexer', + 'Cfengine3Lexer', 'ApacheConfLexer', 'SquidConfLexer', + 'NginxConfLexer', 'LighttpdConfLexer', 'DockerLexer'] + + +class IniLexer(RegexLexer): + """ + Lexer for configuration files in INI style. + """ + + name = 'INI' + aliases = ['ini', 'cfg', 'dosini'] + filenames = ['*.ini', '*.cfg'] + mimetypes = ['text/x-ini'] + + tokens = { + 'root': [ + (r'\s+', Text), + (r'[;#].*', Comment.Single), + (r'\[.*?\]$', Keyword), + (r'(.*?)([ \t]*)(=)([ \t]*)(.*(?:\n[ \t].+)*)', + bygroups(Name.Attribute, Text, Operator, Text, String)) + ] + } + + def analyse_text(text): + npos = text.find('\n') + if npos < 3: + return False + return text[0] == '[' and text[npos-1] == ']' + + +class RegeditLexer(RegexLexer): + """ + Lexer for `Windows Registry + `_ files produced + by regedit. + + .. versionadded:: 1.6 + """ + + name = 'reg' + aliases = ['registry'] + filenames = ['*.reg'] + mimetypes = ['text/x-windows-registry'] + + tokens = { + 'root': [ + (r'Windows Registry Editor.*', Text), + (r'\s+', Text), + (r'[;#].*', Comment.Single), + (r'(\[)(-?)(HKEY_[A-Z_]+)(.*?\])$', + bygroups(Keyword, Operator, Name.Builtin, Keyword)), + # String keys, which obey somewhat normal escaping + (r'("(?:\\"|\\\\|[^"])+")([ \t]*)(=)([ \t]*)', + bygroups(Name.Attribute, Text, Operator, Text), + 'value'), + # Bare keys (includes @) + (r'(.*?)([ \t]*)(=)([ \t]*)', + bygroups(Name.Attribute, Text, Operator, Text), + 'value'), + ], + 'value': [ + (r'-', Operator, '#pop'), # delete value + (r'(dword|hex(?:\([0-9a-fA-F]\))?)(:)([0-9a-fA-F,]+)', + bygroups(Name.Variable, Punctuation, Number), '#pop'), + # As far as I know, .reg files do not support line continuation. 
+ (r'.*', String, '#pop'), + ] + } -__all__ = ['KconfigLexer', 'Cfengine3Lexer'] + def analyse_text(text): + return text.startswith('Windows Registry Editor') + + +class PropertiesLexer(RegexLexer): + """ + Lexer for configuration files in Java's properties format. + + .. versionadded:: 1.4 + """ + + name = 'Properties' + aliases = ['properties', 'jproperties'] + filenames = ['*.properties'] + mimetypes = ['text/x-java-properties'] + + tokens = { + 'root': [ + (r'\s+', Text), + (r'(?:[;#]|//).*$', Comment), + (r'(.*?)([ \t]*)([=:])([ \t]*)(.*(?:(?<=\\)\n.*)*)', + bygroups(Name.Attribute, Text, Operator, Text, String)), + ], + } def _rx_indent(level): @@ -164,3 +261,273 @@ class Cfengine3Lexer(RegexLexer): (r'\s+', Text), ], } + + +class ApacheConfLexer(RegexLexer): + """ + Lexer for configuration files following the Apache config file + format. + + .. versionadded:: 0.6 + """ + + name = 'ApacheConf' + aliases = ['apacheconf', 'aconf', 'apache'] + filenames = ['.htaccess', 'apache.conf', 'apache2.conf'] + mimetypes = ['text/x-apacheconf'] + flags = re.MULTILINE | re.IGNORECASE + + tokens = { + 'root': [ + (r'\s+', Text), + (r'(#.*?)$', Comment), + (r'(<[^\s>]+)(?:(\s+)(.*?))?(>)', + bygroups(Name.Tag, Text, String, Name.Tag)), + (r'([a-z]\w*)(\s+)', + bygroups(Name.Builtin, Text), 'value'), + (r'\.+', Text), + ], + 'value': [ + (r'$', Text, '#pop'), + (r'[^\S\n]+', Text), + (r'\d+\.\d+\.\d+\.\d+(?:/\d+)?', Number), + (r'\d+', Number), + (r'/([a-z0-9][\w./-]+)', String.Other), + (r'(on|off|none|any|all|double|email|dns|min|minimal|' + r'os|productonly|full|emerg|alert|crit|error|warn|' + r'notice|info|debug|registry|script|inetd|standalone|' + r'user|group)\b', Keyword), + (r'"([^"\\]*(?:\\.[^"\\]*)*)"', String.Double), + (r'[^\s"]+', Text) + ] + } + + +class SquidConfLexer(RegexLexer): + """ + Lexer for `squid `_ configuration files. + + .. 
versionadded:: 0.9 + """ + + name = 'SquidConf' + aliases = ['squidconf', 'squid.conf', 'squid'] + filenames = ['squid.conf'] + mimetypes = ['text/x-squidconf'] + flags = re.IGNORECASE + + keywords = ( + "access_log", "acl", "always_direct", "announce_host", + "announce_period", "announce_port", "announce_to", "anonymize_headers", + "append_domain", "as_whois_server", "auth_param_basic", + "authenticate_children", "authenticate_program", "authenticate_ttl", + "broken_posts", "buffered_logs", "cache_access_log", "cache_announce", + "cache_dir", "cache_dns_program", "cache_effective_group", + "cache_effective_user", "cache_host", "cache_host_acl", + "cache_host_domain", "cache_log", "cache_mem", "cache_mem_high", + "cache_mem_low", "cache_mgr", "cachemgr_passwd", "cache_peer", + "cache_peer_access", "cahce_replacement_policy", "cache_stoplist", + "cache_stoplist_pattern", "cache_store_log", "cache_swap", + "cache_swap_high", "cache_swap_log", "cache_swap_low", "client_db", + "client_lifetime", "client_netmask", "connect_timeout", "coredump_dir", + "dead_peer_timeout", "debug_options", "delay_access", "delay_class", + "delay_initial_bucket_level", "delay_parameters", "delay_pools", + "deny_info", "dns_children", "dns_defnames", "dns_nameservers", + "dns_testnames", "emulate_httpd_log", "err_html_text", + "fake_user_agent", "firewall_ip", "forwarded_for", "forward_snmpd_port", + "fqdncache_size", "ftpget_options", "ftpget_program", "ftp_list_width", + "ftp_passive", "ftp_user", "half_closed_clients", "header_access", + "header_replace", "hierarchy_stoplist", "high_response_time_warning", + "high_page_fault_warning", "hosts_file", "htcp_port", "http_access", + "http_anonymizer", "httpd_accel", "httpd_accel_host", + "httpd_accel_port", "httpd_accel_uses_host_header", + "httpd_accel_with_proxy", "http_port", "http_reply_access", + "icp_access", "icp_hit_stale", "icp_port", "icp_query_timeout", + "ident_lookup", "ident_lookup_access", "ident_timeout", + "incoming_http_average", "incoming_icp_average", "inside_firewall", + "ipcache_high", "ipcache_low", "ipcache_size", "local_domain", + "local_ip", "logfile_rotate", "log_fqdn", "log_icp_queries", + "log_mime_hdrs", "maximum_object_size", "maximum_single_addr_tries", + "mcast_groups", "mcast_icp_query_timeout", "mcast_miss_addr", + "mcast_miss_encode_key", "mcast_miss_port", "memory_pools", + "memory_pools_limit", "memory_replacement_policy", "mime_table", + "min_http_poll_cnt", "min_icp_poll_cnt", "minimum_direct_hops", + "minimum_object_size", "minimum_retry_timeout", "miss_access", + "negative_dns_ttl", "negative_ttl", "neighbor_timeout", + "neighbor_type_domain", "netdb_high", "netdb_low", "netdb_ping_period", + "netdb_ping_rate", "never_direct", "no_cache", "passthrough_proxy", + "pconn_timeout", "pid_filename", "pinger_program", "positive_dns_ttl", + "prefer_direct", "proxy_auth", "proxy_auth_realm", "query_icmp", + "quick_abort", "quick_abort", "quick_abort_max", "quick_abort_min", + "quick_abort_pct", "range_offset_limit", "read_timeout", + "redirect_children", "redirect_program", + "redirect_rewrites_host_header", "reference_age", "reference_age", + "refresh_pattern", "reload_into_ims", "request_body_max_size", + "request_size", "request_timeout", "shutdown_lifetime", + "single_parent_bypass", "siteselect_timeout", "snmp_access", + "snmp_incoming_address", "snmp_port", "source_ping", "ssl_proxy", + "store_avg_object_size", "store_objects_per_bucket", + "strip_query_terms", "swap_level1_dirs", "swap_level2_dirs", + "tcp_incoming_address", 
"tcp_outgoing_address", "tcp_recv_bufsize", + "test_reachability", "udp_hit_obj", "udp_hit_obj_size", + "udp_incoming_address", "udp_outgoing_address", "unique_hostname", + "unlinkd_program", "uri_whitespace", "useragent_log", + "visible_hostname", "wais_relay", "wais_relay_host", "wais_relay_port", + ) + + opts = ( + "proxy-only", "weight", "ttl", "no-query", "default", "round-robin", + "multicast-responder", "on", "off", "all", "deny", "allow", "via", + "parent", "no-digest", "heap", "lru", "realm", "children", "q1", "q2", + "credentialsttl", "none", "disable", "offline_toggle", "diskd", + ) + + actions = ( + "shutdown", "info", "parameter", "server_list", "client_list", + r'squid.conf', + ) + + actions_stats = ( + "objects", "vm_objects", "utilization", "ipcache", "fqdncache", "dns", + "redirector", "io", "reply_headers", "filedescriptors", "netdb", + ) + + actions_log = ("status", "enable", "disable", "clear") + + acls = ( + "url_regex", "urlpath_regex", "referer_regex", "port", "proto", + "req_mime_type", "rep_mime_type", "method", "browser", "user", "src", + "dst", "time", "dstdomain", "ident", "snmp_community", + ) + + ip_re = ( + r'(?:(?:(?:[3-9]\d?|2(?:5[0-5]|[0-4]?\d)?|1\d{0,2}|0x0*[0-9a-f]{1,2}|' + r'0+[1-3]?[0-7]{0,2})(?:\.(?:[3-9]\d?|2(?:5[0-5]|[0-4]?\d)?|1\d{0,2}|' + r'0x0*[0-9a-f]{1,2}|0+[1-3]?[0-7]{0,2})){3})|(?!.*::.*::)(?:(?!:)|' + r':(?=:))(?:[0-9a-f]{0,4}(?:(?<=::)|(?`_ configuration files. + + .. versionadded:: 0.11 + """ + name = 'Nginx configuration file' + aliases = ['nginx'] + filenames = [] + mimetypes = ['text/x-nginx-conf'] + + tokens = { + 'root': [ + (r'(include)(\s+)([^\s;]+)', bygroups(Keyword, Text, Name)), + (r'[^\s;#]+', Keyword, 'stmt'), + include('base'), + ], + 'block': [ + (r'}', Punctuation, '#pop:2'), + (r'[^\s;#]+', Keyword.Namespace, 'stmt'), + include('base'), + ], + 'stmt': [ + (r'{', Punctuation, 'block'), + (r';', Punctuation, '#pop'), + include('base'), + ], + 'base': [ + (r'#.*\n', Comment.Single), + (r'on|off', Name.Constant), + (r'\$[^\s;#()]+', Name.Variable), + (r'([a-z0-9.-]+)(:)([0-9]+)', + bygroups(Name, Punctuation, Number.Integer)), + (r'[a-z-]+/[a-z-+]+', String), # mimetype + # (r'[a-zA-Z._-]+', Keyword), + (r'[0-9]+[km]?\b', Number.Integer), + (r'(~)(\s*)([^\s{]+)', bygroups(Punctuation, Text, String.Regex)), + (r'[:=~]', Punctuation), + (r'[^\s;#{}$]+', String), # catch all + (r'/[^\s;#]*', Name), # pathname + (r'\s+', Text), + (r'[$;]', Text), # leftover characters + ], + } + + +class LighttpdConfLexer(RegexLexer): + """ + Lexer for `Lighttpd `_ configuration files. + + .. versionadded:: 0.11 + """ + name = 'Lighttpd configuration file' + aliases = ['lighty', 'lighttpd'] + filenames = [] + mimetypes = ['text/x-lighttpd-conf'] + + tokens = { + 'root': [ + (r'#.*\n', Comment.Single), + (r'/\S*', Name), # pathname + (r'[a-zA-Z._-]+', Keyword), + (r'\d+\.\d+\.\d+\.\d+(?:/\d+)?', Number), + (r'[0-9]+', Number), + (r'=>|=~|\+=|==|=|\+', Operator), + (r'\$[A-Z]+', Name.Builtin), + (r'[(){}\[\],]', Punctuation), + (r'"([^"\\]*(?:\\.[^"\\]*)*)"', String.Double), + (r'\s+', Text), + ], + + } + + +class DockerLexer(RegexLexer): + """ + Lexer for `Docker `_ configuration files. + + .. 
versionadded:: 2.0 + """ + name = 'Docker' + aliases = ['docker', 'dockerfile'] + filenames = ['Dockerfile', '*.docker'] + mimetypes = ['text/x-dockerfile-config'] + + _keywords = (r'(?:FROM|MAINTAINER|RUN|CMD|EXPOSE|ENV|ADD|ENTRYPOINT|' + r'VOLUME|WORKDIR)') + + flags = re.IGNORECASE | re.MULTILINE + + tokens = { + 'root': [ + (r'^(ONBUILD)(\s+)(%s)\b' % (_keywords,), + bygroups(Name.Keyword, Whitespace, Keyword)), + (_keywords + r'\b', Keyword), + (r'#.*', Comment), + (r'.+', using(BashLexer)), + ], + } diff --git a/pygments/lexers/console.py b/pygments/lexers/console.py index f259ab9f..334e7195 100644 --- a/pygments/lexers/console.py +++ b/pygments/lexers/console.py @@ -9,10 +9,11 @@ :license: BSD, see LICENSE for details. """ -from pygments.lexer import RegexLexer -from pygments.token import Generic, Comment, String, Text +from pygments.lexer import RegexLexer, include, bygroups +from pygments.token import Generic, Comment, String, Text, Keyword, Name, \ + Punctuation, Number -__all__ = ['VCTreeStatusLexer'] +__all__ = ['VCTreeStatusLexer', 'PyPyLogLexer'] class VCTreeStatusLexer(RegexLexer): @@ -39,3 +40,75 @@ class VCTreeStatusLexer(RegexLexer): (r'.*\n', Text) ] } + + +class PyPyLogLexer(RegexLexer): + """ + Lexer for PyPy log files. + + .. versionadded:: 1.5 + """ + name = "PyPy Log" + aliases = ["pypylog", "pypy"] + filenames = ["*.pypylog"] + mimetypes = ['application/x-pypylog'] + + tokens = { + "root": [ + (r"\[\w+\] {jit-log-.*?$", Keyword, "jit-log"), + (r"\[\w+\] {jit-backend-counts$", Keyword, "jit-backend-counts"), + include("extra-stuff"), + ], + "jit-log": [ + (r"\[\w+\] jit-log-.*?}$", Keyword, "#pop"), + (r"^\+\d+: ", Comment), + (r"--end of the loop--", Comment), + (r"[ifp]\d+", Name), + (r"ptr\d+", Name), + (r"(\()(\w+(?:\.\w+)?)(\))", + bygroups(Punctuation, Name.Builtin, Punctuation)), + (r"[\[\]=,()]", Punctuation), + (r"(\d+\.\d+|inf|-inf)", Number.Float), + (r"-?\d+", Number.Integer), + (r"'.*'", String), + (r"(None|descr|ConstClass|ConstPtr|TargetToken)", Name), + (r"<.*?>+", Name.Builtin), + (r"(label|debug_merge_point|jump|finish)", Name.Class), + (r"(int_add_ovf|int_add|int_sub_ovf|int_sub|int_mul_ovf|int_mul|" + r"int_floordiv|int_mod|int_lshift|int_rshift|int_and|int_or|" + r"int_xor|int_eq|int_ne|int_ge|int_gt|int_le|int_lt|int_is_zero|" + r"int_is_true|" + r"uint_floordiv|uint_ge|uint_lt|" + r"float_add|float_sub|float_mul|float_truediv|float_neg|" + r"float_eq|float_ne|float_ge|float_gt|float_le|float_lt|float_abs|" + r"ptr_eq|ptr_ne|instance_ptr_eq|instance_ptr_ne|" + r"cast_int_to_float|cast_float_to_int|" + r"force_token|quasiimmut_field|same_as|virtual_ref_finish|" + r"virtual_ref|mark_opaque_ptr|" + r"call_may_force|call_assembler|call_loopinvariant|" + r"call_release_gil|call_pure|call|" + r"new_with_vtable|new_array|newstr|newunicode|new|" + r"arraylen_gc|" + r"getarrayitem_gc_pure|getarrayitem_gc|setarrayitem_gc|" + r"getarrayitem_raw|setarrayitem_raw|getfield_gc_pure|" + r"getfield_gc|getinteriorfield_gc|setinteriorfield_gc|" + r"getfield_raw|setfield_gc|setfield_raw|" + r"strgetitem|strsetitem|strlen|copystrcontent|" + r"unicodegetitem|unicodesetitem|unicodelen|" + r"guard_true|guard_false|guard_value|guard_isnull|" + r"guard_nonnull_class|guard_nonnull|guard_class|guard_no_overflow|" + r"guard_not_forced|guard_no_exception|guard_not_invalidated)", + Name.Builtin), + include("extra-stuff"), + ], + "jit-backend-counts": [ + (r"\[\w+\] jit-backend-counts}$", Keyword, "#pop"), + (r":", Punctuation), + (r"\d+", Number), + include("extra-stuff"), + 
], + "extra-stuff": [ + (r"\s+", Text), + (r"#.*?$", Comment), + ], + } diff --git a/pygments/lexers/data.py b/pygments/lexers/data.py new file mode 100644 index 00000000..d1d33a1e --- /dev/null +++ b/pygments/lexers/data.py @@ -0,0 +1,427 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.data + ~~~~~~~~~~~~~~~~~~~~ + + Lexers for data file format. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +from pygments.lexer import ExtendedRegexLexer, LexerContext, include, bygroups +from pygments.token import Text, Comment, Keyword, Name, String, Number, \ + Punctuation, Literal + +__all__ = ['YamlLexer'] + + +class YamlLexerContext(LexerContext): + """Indentation context for the YAML lexer.""" + + def __init__(self, *args, **kwds): + super(YamlLexerContext, self).__init__(*args, **kwds) + self.indent_stack = [] + self.indent = -1 + self.next_indent = 0 + self.block_scalar_indent = None + + +class YamlLexer(ExtendedRegexLexer): + """ + Lexer for `YAML `_, a human-friendly data serialization + language. + + .. versionadded:: 0.11 + """ + + name = 'YAML' + aliases = ['yaml'] + filenames = ['*.yaml', '*.yml'] + mimetypes = ['text/x-yaml'] + + def something(token_class): + """Do not produce empty tokens.""" + def callback(lexer, match, context): + text = match.group() + if not text: + return + yield match.start(), token_class, text + context.pos = match.end() + return callback + + def reset_indent(token_class): + """Reset the indentation levels.""" + def callback(lexer, match, context): + text = match.group() + context.indent_stack = [] + context.indent = -1 + context.next_indent = 0 + context.block_scalar_indent = None + yield match.start(), token_class, text + context.pos = match.end() + return callback + + def save_indent(token_class, start=False): + """Save a possible indentation level.""" + def callback(lexer, match, context): + text = match.group() + extra = '' + if start: + context.next_indent = len(text) + if context.next_indent < context.indent: + while context.next_indent < context.indent: + context.indent = context.indent_stack.pop() + if context.next_indent > context.indent: + extra = text[context.indent:] + text = text[:context.indent] + else: + context.next_indent += len(text) + if text: + yield match.start(), token_class, text + if extra: + yield match.start()+len(text), token_class.Error, extra + context.pos = match.end() + return callback + + def set_indent(token_class, implicit=False): + """Set the previously saved indentation level.""" + def callback(lexer, match, context): + text = match.group() + if context.indent < context.next_indent: + context.indent_stack.append(context.indent) + context.indent = context.next_indent + if not implicit: + context.next_indent += len(text) + yield match.start(), token_class, text + context.pos = match.end() + return callback + + def set_block_scalar_indent(token_class): + """Set an explicit indentation level for a block scalar.""" + def callback(lexer, match, context): + text = match.group() + context.block_scalar_indent = None + if not text: + return + increment = match.group(1) + if increment: + current_indent = max(context.indent, 0) + increment = int(increment) + context.block_scalar_indent = current_indent + increment + if text: + yield match.start(), token_class, text + context.pos = match.end() + return callback + + def parse_block_scalar_empty_line(indent_token_class, content_token_class): + """Process an empty line in a block scalar.""" + def callback(lexer, match, 
context): + text = match.group() + if (context.block_scalar_indent is None or + len(text) <= context.block_scalar_indent): + if text: + yield match.start(), indent_token_class, text + else: + indentation = text[:context.block_scalar_indent] + content = text[context.block_scalar_indent:] + yield match.start(), indent_token_class, indentation + yield (match.start()+context.block_scalar_indent, + content_token_class, content) + context.pos = match.end() + return callback + + def parse_block_scalar_indent(token_class): + """Process indentation spaces in a block scalar.""" + def callback(lexer, match, context): + text = match.group() + if context.block_scalar_indent is None: + if len(text) <= max(context.indent, 0): + context.stack.pop() + context.stack.pop() + return + context.block_scalar_indent = len(text) + else: + if len(text) < context.block_scalar_indent: + context.stack.pop() + context.stack.pop() + return + if text: + yield match.start(), token_class, text + context.pos = match.end() + return callback + + def parse_plain_scalar_indent(token_class): + """Process indentation spaces in a plain scalar.""" + def callback(lexer, match, context): + text = match.group() + if len(text) <= context.indent: + context.stack.pop() + context.stack.pop() + return + if text: + yield match.start(), token_class, text + context.pos = match.end() + return callback + + tokens = { + # the root rules + 'root': [ + # ignored whitespaces + (r'[ ]+(?=#|$)', Text), + # line breaks + (r'\n+', Text), + # a comment + (r'#[^\n]*', Comment.Single), + # the '%YAML' directive + (r'^%YAML(?=[ ]|$)', reset_indent(Name.Tag), 'yaml-directive'), + # the %TAG directive + (r'^%TAG(?=[ ]|$)', reset_indent(Name.Tag), 'tag-directive'), + # document start and document end indicators + (r'^(?:---|\.\.\.)(?=[ ]|$)', reset_indent(Name.Namespace), + 'block-line'), + # indentation spaces + (r'[ ]*(?![ \t\n\r\f\v]|$)', save_indent(Text, start=True), + ('block-line', 'indentation')), + ], + + # trailing whitespaces after directives or a block scalar indicator + 'ignored-line': [ + # ignored whitespaces + (r'[ ]+(?=#|$)', Text), + # a comment + (r'#[^\n]*', Comment.Single), + # line break + (r'\n', Text, '#pop:2'), + ], + + # the %YAML directive + 'yaml-directive': [ + # the version number + (r'([ ]+)([0-9]+\.[0-9]+)', + bygroups(Text, Number), 'ignored-line'), + ], + + # the %YAG directive + 'tag-directive': [ + # a tag handle and the corresponding prefix + (r'([ ]+)(!|![0-9A-Za-z_-]*!)' + r'([ ]+)(!|!?[0-9A-Za-z;/?:@&=+$,_.!~*\'()\[\]%-]+)', + bygroups(Text, Keyword.Type, Text, Keyword.Type), + 'ignored-line'), + ], + + # block scalar indicators and indentation spaces + 'indentation': [ + # trailing whitespaces are ignored + (r'[ ]*$', something(Text), '#pop:2'), + # whitespaces preceeding block collection indicators + (r'[ ]+(?=[?:-](?:[ ]|$))', save_indent(Text)), + # block collection indicators + (r'[?:-](?=[ ]|$)', set_indent(Punctuation.Indicator)), + # the beginning a block line + (r'[ ]*', save_indent(Text), '#pop'), + ], + + # an indented line in the block context + 'block-line': [ + # the line end + (r'[ ]*(?=#|$)', something(Text), '#pop'), + # whitespaces separating tokens + (r'[ ]+', Text), + # tags, anchors and aliases, + include('descriptors'), + # block collections and scalars + include('block-nodes'), + # flow collections and quoted scalars + include('flow-nodes'), + # a plain scalar + (r'(?=[^ \t\n\r\f\v?:,\[\]{}#&*!|>\'"%@`-]|[?:-][^ \t\n\r\f\v])', + something(Name.Variable), + 'plain-scalar-in-block-context'), + ], + 
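The YAML lexer built in this hunk is an `ExtendedRegexLexer`: many of its rules use callbacks such as `save_indent` and `set_indent` above, which receive a `LexerContext`, yield `(position, token, text)` tuples themselves, and advance `context.pos` by hand; that is what makes the indentation tracking possible. A small self-contained sketch of that callback shape, with an invented lexer that is not part of the patch:

    from pygments.lexer import ExtendedRegexLexer
    from pygments.token import Name, Text

    def emit_nonempty(token_class):
        # Same shape as the YamlLexer helpers: skip empty matches,
        # yield the token explicitly and move context.pos forward.
        def callback(lexer, match, context):
            text = match.group()
            if text:
                yield match.start(), token_class, text
            context.pos = match.end()
        return callback

    class IndentDemoLexer(ExtendedRegexLexer):
        """Toy lexer sketching context callbacks; not part of the patch."""
        name = 'IndentDemo'
        aliases = ['indent-demo']

        tokens = {
            'root': [
                (r'[ ]+', emit_nonempty(Text)),  # leading spaces via a callback
                (r'[^\n]+', Name.Variable),      # rest of the line as a plain rule
                (r'\n', Text),
            ]
        }

Plain rules and callback rules can be mixed freely, which is exactly how the states above combine simple (regex, token) pairs with the indentation helpers.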
+ # tags, anchors, aliases + 'descriptors': [ + # a full-form tag + (r'!<[0-9A-Za-z;/?:@&=+$,_.!~*\'()\[\]%-]+>', Keyword.Type), + # a tag in the form '!', '!suffix' or '!handle!suffix' + (r'!(?:[0-9A-Za-z_-]+)?' + r'(?:![0-9A-Za-z;/?:@&=+$,_.!~*\'()\[\]%-]+)?', Keyword.Type), + # an anchor + (r'&[0-9A-Za-z_-]+', Name.Label), + # an alias + (r'\*[0-9A-Za-z_-]+', Name.Variable), + ], + + # block collections and scalars + 'block-nodes': [ + # implicit key + (r':(?=[ ]|$)', set_indent(Punctuation.Indicator, implicit=True)), + # literal and folded scalars + (r'[|>]', Punctuation.Indicator, + ('block-scalar-content', 'block-scalar-header')), + ], + + # flow collections and quoted scalars + 'flow-nodes': [ + # a flow sequence + (r'\[', Punctuation.Indicator, 'flow-sequence'), + # a flow mapping + (r'\{', Punctuation.Indicator, 'flow-mapping'), + # a single-quoted scalar + (r'\'', String, 'single-quoted-scalar'), + # a double-quoted scalar + (r'\"', String, 'double-quoted-scalar'), + ], + + # the content of a flow collection + 'flow-collection': [ + # whitespaces + (r'[ ]+', Text), + # line breaks + (r'\n+', Text), + # a comment + (r'#[^\n]*', Comment.Single), + # simple indicators + (r'[?:,]', Punctuation.Indicator), + # tags, anchors and aliases + include('descriptors'), + # nested collections and quoted scalars + include('flow-nodes'), + # a plain scalar + (r'(?=[^ \t\n\r\f\v?:,\[\]{}#&*!|>\'"%@`])', + something(Name.Variable), + 'plain-scalar-in-flow-context'), + ], + + # a flow sequence indicated by '[' and ']' + 'flow-sequence': [ + # include flow collection rules + include('flow-collection'), + # the closing indicator + (r'\]', Punctuation.Indicator, '#pop'), + ], + + # a flow mapping indicated by '{' and '}' + 'flow-mapping': [ + # include flow collection rules + include('flow-collection'), + # the closing indicator + (r'\}', Punctuation.Indicator, '#pop'), + ], + + # block scalar lines + 'block-scalar-content': [ + # line break + (r'\n', Text), + # empty line + (r'^[ ]+$', + parse_block_scalar_empty_line(Text, Name.Constant)), + # indentation spaces (we may leave the state here) + (r'^[ ]*', parse_block_scalar_indent(Text)), + # line content + (r'[^\n\r\f\v]+', Name.Constant), + ], + + # the content of a literal or folded scalar + 'block-scalar-header': [ + # indentation indicator followed by chomping flag + (r'([1-9])?[+-]?(?=[ ]|$)', + set_block_scalar_indent(Punctuation.Indicator), + 'ignored-line'), + # chomping flag followed by indentation indicator + (r'[+-]?([1-9])?(?=[ ]|$)', + set_block_scalar_indent(Punctuation.Indicator), + 'ignored-line'), + ], + + # ignored and regular whitespaces in quoted scalars + 'quoted-scalar-whitespaces': [ + # leading and trailing whitespaces are ignored + (r'^[ ]+', Text), + (r'[ ]+$', Text), + # line breaks are ignored + (r'\n+', Text), + # other whitespaces are a part of the value + (r'[ ]+', Name.Variable), + ], + + # single-quoted scalars + 'single-quoted-scalar': [ + # include whitespace and line break rules + include('quoted-scalar-whitespaces'), + # escaping of the quote character + (r'\'\'', String.Escape), + # regular non-whitespace characters + (r'[^ \t\n\r\f\v\']+', String), + # the closing quote + (r'\'', String, '#pop'), + ], + + # double-quoted scalars + 'double-quoted-scalar': [ + # include whitespace and line break rules + include('quoted-scalar-whitespaces'), + # escaping of special characters + (r'\\[0abt\tn\nvfre "\\N_LP]', String), + # escape codes + (r'\\(?:x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4}|U[0-9A-Fa-f]{8})', + String.Escape), + # 
regular non-whitespace characters + (r'[^ \t\n\r\f\v\"\\]+', String), + # the closing quote + (r'"', String, '#pop'), + ], + + # the beginning of a new line while scanning a plain scalar + 'plain-scalar-in-block-context-new-line': [ + # empty lines + (r'^[ ]+$', Text), + # line breaks + (r'\n+', Text), + # document start and document end indicators + (r'^(?=---|\.\.\.)', something(Name.Namespace), '#pop:3'), + # indentation spaces (we may leave the block line state here) + (r'^[ ]*', parse_plain_scalar_indent(Text), '#pop'), + ], + + # a plain scalar in the block context + 'plain-scalar-in-block-context': [ + # the scalar ends with the ':' indicator + (r'[ ]*(?=:[ ]|:$)', something(Text), '#pop'), + # the scalar ends with whitespaces followed by a comment + (r'[ ]+(?=#)', Text, '#pop'), + # trailing whitespaces are ignored + (r'[ ]+$', Text), + # line breaks are ignored + (r'\n+', Text, 'plain-scalar-in-block-context-new-line'), + # other whitespaces are a part of the value + (r'[ ]+', Literal.Scalar.Plain), + # regular non-whitespace characters + (r'(?::(?![ \t\n\r\f\v])|[^ \t\n\r\f\v:])+', Literal.Scalar.Plain), + ], + + # a plain scalar is the flow context + 'plain-scalar-in-flow-context': [ + # the scalar ends with an indicator character + (r'[ ]*(?=[,:?\[\]{}])', something(Text), '#pop'), + # the scalar ends with a comment + (r'[ ]+(?=#)', Text, '#pop'), + # leading and trailing whitespaces are ignored + (r'^[ ]+', Text), + (r'[ ]+$', Text), + # line breaks are ignored + (r'\n+', Text), + # other whitespaces are a part of the value + (r'[ ]+', Name.Variable), + # regular non-whitespace characters + (r'[^ \t\n\r\f\v,:?\[\]{}]+', Name.Variable), + ], + + } + + def get_tokens_unprocessed(self, text=None, context=None): + if context is None: + context = YamlLexerContext(text, 0) + return super(YamlLexer, self).get_tokens_unprocessed(text, context) diff --git a/pygments/lexers/diff.py b/pygments/lexers/diff.py new file mode 100644 index 00000000..fe6435c5 --- /dev/null +++ b/pygments/lexers/diff.py @@ -0,0 +1,106 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.diff + ~~~~~~~~~~~~~~~~~~~~ + + Lexers for diff/patch formats. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +from pygments.lexer import RegexLexer, include, bygroups +from pygments.token import Text, Comment, Operator, Keyword, Name, Generic, \ + Literal + +__all__ = ['DiffLexer', 'DarcsPatchLexer'] + + +class DiffLexer(RegexLexer): + """ + Lexer for unified or context-style diffs or patches. + """ + + name = 'Diff' + aliases = ['diff', 'udiff'] + filenames = ['*.diff', '*.patch'] + mimetypes = ['text/x-diff', 'text/x-patch'] + + tokens = { + 'root': [ + (r' .*\n', Text), + (r'\+.*\n', Generic.Inserted), + (r'-.*\n', Generic.Deleted), + (r'!.*\n', Generic.Strong), + (r'@.*\n', Generic.Subheading), + (r'([Ii]ndex|diff).*\n', Generic.Heading), + (r'=.*\n', Generic.Heading), + (r'.*\n', Text), + ] + } + + def analyse_text(text): + if text[:7] == 'Index: ': + return True + if text[:5] == 'diff ': + return True + if text[:4] == '--- ': + return 0.9 + + +class DarcsPatchLexer(RegexLexer): + """ + DarcsPatchLexer is a lexer for the various versions of the darcs patch + format. Examples of this format are derived by commands such as + ``darcs annotate --patch`` and ``darcs send``. + + .. 
versionadded:: 0.10 + """ + + name = 'Darcs Patch' + aliases = ['dpatch'] + filenames = ['*.dpatch', '*.darcspatch'] + + DPATCH_KEYWORDS = ('hunk', 'addfile', 'adddir', 'rmfile', 'rmdir', 'move', + 'replace') + + tokens = { + 'root': [ + (r'<', Operator), + (r'>', Operator), + (r'{', Operator), + (r'}', Operator), + (r'(\[)((?:TAG )?)(.*)(\n)(.*)(\*\*)(\d+)(\s?)(\])', + bygroups(Operator, Keyword, Name, Text, Name, Operator, + Literal.Date, Text, Operator)), + (r'(\[)((?:TAG )?)(.*)(\n)(.*)(\*\*)(\d+)(\s?)', + bygroups(Operator, Keyword, Name, Text, Name, Operator, + Literal.Date, Text), 'comment'), + (r'New patches:', Generic.Heading), + (r'Context:', Generic.Heading), + (r'Patch bundle hash:', Generic.Heading), + (r'(\s*)(%s)(.*\n)' % '|'.join(DPATCH_KEYWORDS), + bygroups(Text, Keyword, Text)), + (r'\+', Generic.Inserted, "insert"), + (r'-', Generic.Deleted, "delete"), + (r'.*\n', Text), + ], + 'comment': [ + (r'[^\]].*\n', Comment), + (r'\]', Operator, "#pop"), + ], + 'specialText': [ # darcs add [_CODE_] special operators for clarity + (r'\n', Text, "#pop"), # line-based + (r'\[_[^_]*_]', Operator), + ], + 'insert': [ + include('specialText'), + (r'\[', Generic.Inserted), + (r'[^\n\[]+', Generic.Inserted), + ], + 'delete': [ + include('specialText'), + (r'\[', Generic.Deleted), + (r'[^\n\[]+', Generic.Deleted), + ], + } diff --git a/pygments/lexers/dsls.py b/pygments/lexers/dsls.py index bfbc860e..db3badea 100644 --- a/pygments/lexers/dsls.py +++ b/pygments/lexers/dsls.py @@ -13,10 +13,10 @@ import re from pygments.lexer import RegexLexer, bygroups, words, include from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ - Number, Punctuation, Literal + Number, Punctuation, Literal, Generic, Whitespace __all__ = ['ProtoBufLexer', 'BroLexer', 'PuppetLexer', 'RslLexer', - 'MscgenLexer', 'VGLLexer', 'AlloyLexer', 'PanLexer'] + 'MscgenLexer', 'VGLLexer', 'AlloyLexer', 'PanLexer', 'HxmlLexer'] class ProtoBufLexer(RegexLexer): @@ -504,3 +504,44 @@ class PanLexer(RegexLexer): include('root'), ], } + + +class HxmlLexer(RegexLexer): + """ + Lexer for `haXe build `_ files. + + .. versionadded:: 1.6 + """ + name = 'Hxml' + aliases = ['haxeml', 'hxml'] + filenames = ['*.hxml'] + + tokens = { + 'root': [ + # Seperator + (r'(--)(next)', bygroups(Punctuation, Generic.Heading)), + # Compiler switches with one dash + (r'(-)(prompt|debug|v)', bygroups(Punctuation, Keyword.Keyword)), + # Compilerswitches with two dashes + (r'(--)(neko-source|flash-strict|flash-use-stage|no-opt|no-traces|' + r'no-inline|times|no-output)', bygroups(Punctuation, Keyword)), + # Targets and other options that take an argument + (r'(-)(cpp|js|neko|x|as3|swf9?|swf-lib|php|xml|main|lib|D|resource|' + r'cp|cmd)( +)(.+)', + bygroups(Punctuation, Keyword, Whitespace, String)), + # Options that take only numerical arguments + (r'(-)(swf-version)( +)(\d+)', + bygroups(Punctuation, Keyword, Number.Integer)), + # An Option that defines the size, the fps and the background + # color of an flash movie + (r'(-)(swf-header)( +)(\d+)(:)(\d+)(:)(\d+)(:)([A-Fa-f0-9]{6})', + bygroups(Punctuation, Keyword, Whitespace, Number.Integer, + Punctuation, Number.Integer, Punctuation, Number.Integer, + Punctuation, Number.Hex)), + # options with two dashes that takes arguments + (r'(--)(js-namespace|php-front|php-lib|remap|gen-hx-classes)( +)' + r'(.+)', bygroups(Punctuation, Keyword, Whitespace, String)), + # Single line comment, multiline ones are not allowed. 
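For readers following the lexer additions in this series, here is a minimal, self-contained sketch (not part of the patch) of how rule tuples like the hxml ones above are consumed: RegexLexer tries the patterns of the current state in order and emits the paired token for each match. MiniHxmlLexer, its alias and the sample input are invented for illustration and only cover comments and simple '-flag value' switches.

from pygments.lexer import RegexLexer, bygroups
from pygments.token import Comment, Keyword, Punctuation, String, Whitespace

class MiniHxmlLexer(RegexLexer):
    """Toy subset of the hxml rules above: comments and '-flag value' lines."""
    name = 'MiniHxml'
    aliases = ['mini-hxml']
    tokens = {
        'root': [
            (r'#.*', Comment.Single),            # single-line comment
            (r'(-{1,2})([\w-]+)( +)(.+)',        # switch followed by an argument
             bygroups(Punctuation, Keyword, Whitespace, String)),
            (r'\s+', Whitespace),
        ],
    }

if __name__ == '__main__':
    sample = "-main Test\n# build the flash target\n-swf9 out.swf\n"
    for index, token, value in MiniHxmlLexer().get_tokens_unprocessed(sample):
        print(index, token, repr(value))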
+ (r'#.*', Comment.Single) + ] + } diff --git a/pygments/lexers/haskell.py b/pygments/lexers/haskell.py index 27593986..a6aa55f6 100644 --- a/pygments/lexers/haskell.py +++ b/pygments/lexers/haskell.py @@ -503,7 +503,7 @@ class LiterateLexer(Lexer): insertions.append((len(code), [(0, Text, line)])) else: # latex-style - from pygments.lexers.text import TexLexer + from pygments.lexers.markup import TexLexer lxlexer = TexLexer(**self.options) codelines = 0 latex = '' diff --git a/pygments/lexers/installers.py b/pygments/lexers/installers.py index 758e8fbc..b91613cd 100644 --- a/pygments/lexers/installers.py +++ b/pygments/lexers/installers.py @@ -13,7 +13,7 @@ import re from pygments.lexer import RegexLexer, include, bygroups, using, this from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ - Punctuation, Generic + Punctuation, Generic, Number, Whitespace __all__ = ['NSISLexer', 'RPMSpecLexer'] @@ -213,3 +213,111 @@ class RPMSpecLexer(RegexLexer): (r'%\{[a-zA-Z]\w+\}', Keyword.Constant), ] } + + +class SourcesListLexer(RegexLexer): + """ + Lexer that highlights debian sources.list files. + + .. versionadded:: 0.7 + """ + + name = 'Debian Sourcelist' + aliases = ['sourceslist', 'sources.list', 'debsources'] + filenames = ['sources.list'] + mimetype = ['application/x-debian-sourceslist'] + + tokens = { + 'root': [ + (r'\s+', Text), + (r'#.*?$', Comment), + (r'^(deb(?:-src)?)(\s+)', + bygroups(Keyword, Text), 'distribution') + ], + 'distribution': [ + (r'#.*?$', Comment, '#pop'), + (r'\$\(ARCH\)', Name.Variable), + (r'[^\s$[]+', String), + (r'\[', String.Other, 'escaped-distribution'), + (r'\$', String), + (r'\s+', Text, 'components') + ], + 'escaped-distribution': [ + (r'\]', String.Other, '#pop'), + (r'\$\(ARCH\)', Name.Variable), + (r'[^\]$]+', String.Other), + (r'\$', String.Other) + ], + 'components': [ + (r'#.*?$', Comment, '#pop:2'), + (r'$', Text, '#pop:2'), + (r'\s+', Text), + (r'\S+', Keyword.Pseudo), + ] + } + + def analyse_text(text): + for line in text.split('\n'): + line = line.strip() + if not (line.startswith('#') or line.startswith('deb ') or + line.startswith('deb-src ') or not line): + return False + return True + + +class DebianControlLexer(RegexLexer): + """ + Lexer for Debian ``control`` files and ``apt-cache show `` outputs. + + .. 
versionadded:: 0.9 + """ + name = 'Debian Control file' + aliases = ['control', 'debcontrol'] + filenames = ['control'] + + tokens = { + 'root': [ + (r'^(Description)', Keyword, 'description'), + (r'^(Maintainer)(:\s*)', bygroups(Keyword, Text), 'maintainer'), + (r'^((Build-)?Depends)', Keyword, 'depends'), + (r'^((?:Python-)?Version)(:\s*)(\S+)$', + bygroups(Keyword, Text, Number)), + (r'^((?:Installed-)?Size)(:\s*)(\S+)$', + bygroups(Keyword, Text, Number)), + (r'^(MD5Sum|SHA1|SHA256)(:\s*)(\S+)$', + bygroups(Keyword, Text, Number)), + (r'^([a-zA-Z\-0-9\.]*?)(:\s*)(.*?)$', + bygroups(Keyword, Whitespace, String)), + ], + 'maintainer': [ + (r'<[^>]+>', Generic.Strong), + (r'<[^>]+>$', Generic.Strong, '#pop'), + (r',\n?', Text), + (r'.', Text), + ], + 'description': [ + (r'(.*)(Homepage)(: )(\S+)', + bygroups(Text, String, Name, Name.Class)), + (r':.*\n', Generic.Strong), + (r' .*\n', Text), + ('', Text, '#pop'), + ], + 'depends': [ + (r':\s*', Text), + (r'(\$)(\{)(\w+\s*:\s*\w+)', bygroups(Operator, Text, Name.Entity)), + (r'\(', Text, 'depend_vers'), + (r',', Text), + (r'\|', Operator), + (r'[\s]+', Text), + (r'[}\)]\s*$', Text, '#pop'), + (r'}', Text), + (r'[^,]$', Name.Function, '#pop'), + (r'([\+\.a-zA-Z0-9-])(\s*)', bygroups(Name.Function, Text)), + (r'\[.*?\]', Name.Entity), + ], + 'depend_vers': [ + (r'\),', Text, '#pop'), + (r'\)[^,]', Text, '#pop:2'), + (r'([><=]+)(\s*)([^\)]+)', bygroups(Operator, Text, Number)) + ] + } diff --git a/pygments/lexers/markup.py b/pygments/lexers/markup.py new file mode 100644 index 00000000..df269790 --- /dev/null +++ b/pygments/lexers/markup.py @@ -0,0 +1,379 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.markup + ~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for markup languages. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import re + +from pygments.lexer import RegexLexer, include, bygroups, using, this, \ + do_insertions, default +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation, Generic +from pygments.util import get_bool_opt, ClassNotFound + +__all__ = ['BBCodeLexer', 'MoinWikiLexer', 'RstLexer', 'TexLexer', 'GroffLexer'] + + +class BBCodeLexer(RegexLexer): + """ + A lexer that highlights BBCode(-like) syntax. + + .. versionadded:: 0.6 + """ + + name = 'BBCode' + aliases = ['bbcode'] + mimetypes = ['text/x-bbcode'] + + tokens = { + 'root': [ + (r'[^[]+', Text), + # tag/end tag begin + (r'\[/?\w+', Keyword, 'tag'), + # stray bracket + (r'\[', Text), + ], + 'tag': [ + (r'\s+', Text), + # attribute with value + (r'(\w+)(=)("?[^\s"\]]+"?)', + bygroups(Name.Attribute, Operator, String)), + # tag argument (a la [color=green]) + (r'(=)("?[^\s"\]]+"?)', + bygroups(Operator, String)), + # tag end + (r'\]', Keyword, '#pop'), + ], + } + + +class MoinWikiLexer(RegexLexer): + """ + For MoinMoin (and Trac) Wiki markup. + + .. 
versionadded:: 0.7 + """ + + name = 'MoinMoin/Trac Wiki markup' + aliases = ['trac-wiki', 'moin'] + filenames = [] + mimetypes = ['text/x-trac-wiki'] + flags = re.MULTILINE | re.IGNORECASE + + tokens = { + 'root': [ + (r'^#.*$', Comment), + (r'(!)(\S+)', bygroups(Keyword, Text)), # Ignore-next + # Titles + (r'^(=+)([^=]+)(=+)(\s*#.+)?$', + bygroups(Generic.Heading, using(this), Generic.Heading, String)), + # Literal code blocks, with optional shebang + (r'({{{)(\n#!.+)?', bygroups(Name.Builtin, Name.Namespace), 'codeblock'), + (r'(\'\'\'?|\|\||`|__|~~|\^|,,|::)', Comment), # Formatting + # Lists + (r'^( +)([.*-])( )', bygroups(Text, Name.Builtin, Text)), + (r'^( +)([a-z]{1,5}\.)( )', bygroups(Text, Name.Builtin, Text)), + # Other Formatting + (r'\[\[\w+.*?\]\]', Keyword), # Macro + (r'(\[[^\s\]]+)(\s+[^\]]+?)?(\])', + bygroups(Keyword, String, Keyword)), # Link + (r'^----+$', Keyword), # Horizontal rules + (r'[^\n\'\[{!_~^,|]+', Text), + (r'\n', Text), + (r'.', Text), + ], + 'codeblock': [ + (r'}}}', Name.Builtin, '#pop'), + # these blocks are allowed to be nested in Trac, but not MoinMoin + (r'{{{', Text, '#push'), + (r'[^{}]+', Comment.Preproc), # slurp boring text + (r'.', Comment.Preproc), # allow loose { or } + ], + } + + +class RstLexer(RegexLexer): + """ + For `reStructuredText `_ markup. + + .. versionadded:: 0.7 + + Additional options accepted: + + `handlecodeblocks` + Highlight the contents of ``.. sourcecode:: language``, + ``.. code:: language`` and ``.. code-block:: language`` + directives with a lexer for the given language (default: + ``True``). + + .. versionadded:: 0.8 + """ + name = 'reStructuredText' + aliases = ['rst', 'rest', 'restructuredtext'] + filenames = ['*.rst', '*.rest'] + mimetypes = ["text/x-rst", "text/prs.fallenstein.rst"] + flags = re.MULTILINE + + def _handle_sourcecode(self, match): + from pygments.lexers import get_lexer_by_name + + # section header + yield match.start(1), Punctuation, match.group(1) + yield match.start(2), Text, match.group(2) + yield match.start(3), Operator.Word, match.group(3) + yield match.start(4), Punctuation, match.group(4) + yield match.start(5), Text, match.group(5) + yield match.start(6), Keyword, match.group(6) + yield match.start(7), Text, match.group(7) + + # lookup lexer if wanted and existing + lexer = None + if self.handlecodeblocks: + try: + lexer = get_lexer_by_name(match.group(6).strip()) + except ClassNotFound: + pass + indention = match.group(8) + indention_size = len(indention) + code = (indention + match.group(9) + match.group(10) + match.group(11)) + + # no lexer for this language. handle it like it was a code block + if lexer is None: + yield match.start(8), String, code + return + + # highlight the lines with the lexer. + ins = [] + codelines = code.splitlines(True) + code = '' + for line in codelines: + if len(line) > indention_size: + ins.append((len(code), [(0, Text, line[:indention_size])])) + code += line[indention_size:] + else: + code += line + for item in do_insertions(ins, lexer.get_tokens_unprocessed(code)): + yield item + + # from docutils.parsers.rst.states + closers = u'\'")]}>\u2019\u201d\xbb!?' 
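The _handle_sourcecode callback above relies on pygments.lexer.do_insertions() to weave the chunks it keeps for itself (directive markers, indentation) back into the token stream of the sub-lexer it picked with get_lexer_by_name(). A minimal, self-contained sketch of that helper, not part of the patch; the prompt string is an invented stand-in for those kept chunks:

from pygments.lexer import do_insertions
from pygments.lexers.python import PythonLexer
from pygments.token import Generic

# Queue one Generic.Prompt token at offset 0, then merge it with the tokens
# PythonLexer produces for the code itself; do_insertions keeps the offsets
# of both streams consistent.
code = "x = 1\n"
insertions = [(0, [(0, Generic.Prompt, ">>> ")])]
for index, token, value in do_insertions(
        insertions, PythonLexer().get_tokens_unprocessed(code)):
    print(index, token, repr(value))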
+ unicode_delimiters = u'\u2010\u2011\u2012\u2013\u2014\u00a0' + end_string_suffix = (r'((?=$)|(?=[-/:.,; \n\x00%s%s]))' + % (re.escape(unicode_delimiters), + re.escape(closers))) + + tokens = { + 'root': [ + # Heading with overline + (r'^(=+|-+|`+|:+|\.+|\'+|"+|~+|\^+|_+|\*+|\++|#+)([ \t]*\n)' + r'(.+)(\n)(\1)(\n)', + bygroups(Generic.Heading, Text, Generic.Heading, + Text, Generic.Heading, Text)), + # Plain heading + (r'^(\S.*)(\n)(={3,}|-{3,}|`{3,}|:{3,}|\.{3,}|\'{3,}|"{3,}|' + r'~{3,}|\^{3,}|_{3,}|\*{3,}|\+{3,}|#{3,})(\n)', + bygroups(Generic.Heading, Text, Generic.Heading, Text)), + # Bulleted lists + (r'^(\s*)([-*+])( .+\n(?:\1 .+\n)*)', + bygroups(Text, Number, using(this, state='inline'))), + # Numbered lists + (r'^(\s*)([0-9#ivxlcmIVXLCM]+\.)( .+\n(?:\1 .+\n)*)', + bygroups(Text, Number, using(this, state='inline'))), + (r'^(\s*)(\(?[0-9#ivxlcmIVXLCM]+\))( .+\n(?:\1 .+\n)*)', + bygroups(Text, Number, using(this, state='inline'))), + # Numbered, but keep words at BOL from becoming lists + (r'^(\s*)([A-Z]+\.)( .+\n(?:\1 .+\n)+)', + bygroups(Text, Number, using(this, state='inline'))), + (r'^(\s*)(\(?[A-Za-z]+\))( .+\n(?:\1 .+\n)+)', + bygroups(Text, Number, using(this, state='inline'))), + # Line blocks + (r'^(\s*)(\|)( .+\n(?:\| .+\n)*)', + bygroups(Text, Operator, using(this, state='inline'))), + # Sourcecode directives + (r'^( *\.\.)(\s*)((?:source)?code(?:-block)?)(::)([ \t]*)([^\n]+)' + r'(\n[ \t]*\n)([ \t]+)(.*)(\n)((?:(?:\8.*|)\n)+)', + _handle_sourcecode), + # A directive + (r'^( *\.\.)(\s*)([\w:-]+?)(::)(?:([ \t]*)(.*))', + bygroups(Punctuation, Text, Operator.Word, Punctuation, Text, + using(this, state='inline'))), + # A reference target + (r'^( *\.\.)(\s*)(_(?:[^:\\]|\\.)+:)(.*?)$', + bygroups(Punctuation, Text, Name.Tag, using(this, state='inline'))), + # A footnote/citation target + (r'^( *\.\.)(\s*)(\[.+\])(.*?)$', + bygroups(Punctuation, Text, Name.Tag, using(this, state='inline'))), + # A substitution def + (r'^( *\.\.)(\s*)(\|.+\|)(\s*)([\w:-]+?)(::)(?:([ \t]*)(.*))', + bygroups(Punctuation, Text, Name.Tag, Text, Operator.Word, + Punctuation, Text, using(this, state='inline'))), + # Comments + (r'^ *\.\..*(\n( +.*\n|\n)+)?', Comment.Preproc), + # Field list + (r'^( *)(:[a-zA-Z-]+:)(\s*)$', bygroups(Text, Name.Class, Text)), + (r'^( *)(:.*?:)([ \t]+)(.*?)$', + bygroups(Text, Name.Class, Text, Name.Function)), + # Definition list + (r'^(\S.*(?)(`__?)', # reference with inline target + bygroups(String, String.Interpol, String)), + (r'`.+?`__?', String), # reference + (r'(`.+?`)(:[a-zA-Z0-9:-]+?:)?', + bygroups(Name.Variable, Name.Attribute)), # role + (r'(:[a-zA-Z0-9:-]+?:)(`.+?`)', + bygroups(Name.Attribute, Name.Variable)), # role (content first) + (r'\*\*.+?\*\*', Generic.Strong), # Strong emphasis + (r'\*.+?\*', Generic.Emph), # Emphasis + (r'\[.*?\]_', String), # Footnote or citation + (r'<.+?>', Name.Tag), # Hyperlink + (r'[^\\\n\[*`:]+', Text), + (r'.', Text), + ], + 'literal': [ + (r'[^`]+', String), + (r'``' + end_string_suffix, String, '#pop'), + (r'`', String), + ] + } + + def __init__(self, **options): + self.handlecodeblocks = get_bool_opt(options, 'handlecodeblocks', True) + RegexLexer.__init__(self, **options) + + def analyse_text(text): + if text[:2] == '..' 
and text[2:3] != '.': + return 0.3 + p1 = text.find("\n") + p2 = text.find("\n", p1 + 1) + if (p2 > -1 and # has two lines + p1 * 2 + 1 == p2 and # they are the same length + text[p1+1] in '-=' and # the next line both starts and ends with + text[p1+1] == text[p2-1]): # ...a sufficiently high header + return 0.5 + + +class TexLexer(RegexLexer): + """ + Lexer for the TeX and LaTeX typesetting languages. + """ + + name = 'TeX' + aliases = ['tex', 'latex'] + filenames = ['*.tex', '*.aux', '*.toc'] + mimetypes = ['text/x-tex', 'text/x-latex'] + + tokens = { + 'general': [ + (r'%.*?\n', Comment), + (r'[{}]', Name.Builtin), + (r'[&_^]', Name.Builtin), + ], + 'root': [ + (r'\\\[', String.Backtick, 'displaymath'), + (r'\\\(', String, 'inlinemath'), + (r'\$\$', String.Backtick, 'displaymath'), + (r'\$', String, 'inlinemath'), + (r'\\([a-zA-Z]+|.)', Keyword, 'command'), + include('general'), + (r'[^\\$%&_^{}]+', Text), + ], + 'math': [ + (r'\\([a-zA-Z]+|.)', Name.Variable), + include('general'), + (r'[0-9]+', Number), + (r'[-=!+*/()\[\]]', Operator), + (r'[^=!+*/()\[\]\\$%&_^{}0-9-]+', Name.Builtin), + ], + 'inlinemath': [ + (r'\\\)', String, '#pop'), + (r'\$', String, '#pop'), + include('math'), + ], + 'displaymath': [ + (r'\\\]', String, '#pop'), + (r'\$\$', String, '#pop'), + (r'\$', Name.Builtin), + include('math'), + ], + 'command': [ + (r'\[.*?\]', Name.Attribute), + (r'\*', Keyword), + default('#pop'), + ], + } + + def analyse_text(text): + for start in ("\\documentclass", "\\input", "\\documentstyle", + "\\relax"): + if text[:len(start)] == start: + return True + + +class GroffLexer(RegexLexer): + """ + Lexer for the (g)roff typesetting language, supporting groff + extensions. Mainly useful for highlighting manpage sources. + + .. versionadded:: 0.6 + """ + + name = 'Groff' + aliases = ['groff', 'nroff', 'man'] + filenames = ['*.[1234567]', '*.man'] + mimetypes = ['application/x-troff', 'text/troff'] + + tokens = { + 'root': [ + (r'(\.)(\w+)', bygroups(Text, Keyword), 'request'), + (r'\.', Punctuation, 'request'), + # Regular characters, slurp till we find a backslash or newline + (r'[^\\\n]*', Text, 'textline'), + ], + 'textline': [ + include('escapes'), + (r'[^\\\n]+', Text), + (r'\n', Text, '#pop'), + ], + 'escapes': [ + # groff has many ways to write escapes. 
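The analyse_text() hooks defined for DiffLexer, RstLexer, TexLexer and GroffLexer in this series feed pygments.lexers.guess_lexer(), which scores every registered lexer against the text and keeps the best match. A short sketch of that round trip, not part of the patch; the sample snippets are invented:

from pygments.lexers import guess_lexer

# A leading 'diff ' makes DiffLexer.analyse_text() return True (score 1.0),
# so content-based guessing should settle on the Diff lexer here.
print(guess_lexer('diff --git a/x b/x\n--- a/x\n+++ b/x\n').name)

# '.TH ' at the start is one of GroffLexer's high-confidence prefixes.
print(guess_lexer('.TH FOO 1\n.SH NAME\nfoo \\- does things\n').name)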
+ (r'\\"[^\n]*', Comment), + (r'\\[fn]\w', String.Escape), + (r'\\\(.{2}', String.Escape), + (r'\\.\[.*\]', String.Escape), + (r'\\.', String.Escape), + (r'\\\n', Text, 'request'), + ], + 'request': [ + (r'\n', Text, '#pop'), + include('escapes'), + (r'"[^\n"]+"', String.Double), + (r'\d+', Number), + (r'\S+', String), + (r'\s+', Text), + ], + } + + def analyse_text(text): + if text[:1] != '.': + return False + if text[:3] == '.\\"': + return True + if text[:4] == '.TH ': + return True + if text[1:3].isalnum() and text[3].isspace(): + return 0.9 diff --git a/pygments/lexers/math.py b/pygments/lexers/math.py index c51403e2..27e4ad3a 100644 --- a/pygments/lexers/math.py +++ b/pygments/lexers/math.py @@ -19,7 +19,7 @@ from pygments.lexer import Lexer, RegexLexer, bygroups, include, \ from pygments.token import Comment, String, Punctuation, Keyword, Name, \ Operator, Number, Text, Generic -from pygments.lexers.agile import PythonLexer +from pygments.lexers.python import PythonLexer from pygments.lexers import _scilab_builtins from pygments.lexers import _stan_builtins diff --git a/pygments/lexers/misc/make.py b/pygments/lexers/misc/make.py new file mode 100644 index 00000000..c585640f --- /dev/null +++ b/pygments/lexers/misc/make.py @@ -0,0 +1,199 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.misc.make + ~~~~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for Makefiles and similar. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import re + +from pygments.lexer import Lexer, RegexLexer, include, bygroups, \ + do_insertions, using +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Punctuation +from pygments.lexers.shell import BashLexer + +__all__ = ['MakefileLexer', 'BaseMakefileLexer', 'CMakeLexer'] + + +class MakefileLexer(Lexer): + """ + Lexer for BSD and GNU make extensions (lenient enough to handle both in + the same file even). + + *Rewritten in Pygments 0.10.* + """ + + name = 'Makefile' + aliases = ['make', 'makefile', 'mf', 'bsdmake'] + filenames = ['*.mak', '*.mk', 'Makefile', 'makefile', 'Makefile.*', 'GNUmakefile'] + mimetypes = ['text/x-makefile'] + + r_special = re.compile( + r'^(?:' + # BSD Make + r'\.\s*(include|undef|error|warning|if|else|elif|endif|for|endfor)|' + # GNU Make + r'\s*(ifeq|ifneq|ifdef|ifndef|else|endif|-?include|define|endef|:))(?=\s)') + r_comment = re.compile(r'^\s*@?#') + + def get_tokens_unprocessed(self, text): + ins = [] + lines = text.splitlines(True) + done = '' + lex = BaseMakefileLexer(**self.options) + backslashflag = False + for line in lines: + if self.r_special.match(line) or backslashflag: + ins.append((len(done), [(0, Comment.Preproc, line)])) + backslashflag = line.strip().endswith('\\') + elif self.r_comment.match(line): + ins.append((len(done), [(0, Comment, line)])) + else: + done += line + for item in do_insertions(ins, lex.get_tokens_unprocessed(done)): + yield item + + def analyse_text(text): + # Many makefiles have $(BIG_CAPS) style variables + if re.search(r'\$\([A-Z_]+\)', text): + return 0.1 + + +class BaseMakefileLexer(RegexLexer): + """ + Lexer for simple Makefiles (no preprocessing). + + .. 
versionadded:: 0.10 + """ + + name = 'Base Makefile' + aliases = ['basemake'] + filenames = [] + mimetypes = [] + + tokens = { + 'root': [ + # recipes (need to allow spaces because of expandtabs) + (r'^(?:[\t ]+.*\n|\n)+', using(BashLexer)), + # special variables + (r'\$[<@$+%?|*]', Keyword), + (r'\s+', Text), + (r'#.*?\n', Comment), + (r'(export)(\s+)(?=[\w${}\t -]+\n)', + bygroups(Keyword, Text), 'export'), + (r'export\s+', Keyword), + # assignment + (r'([\w${}.-]+)(\s*)([!?:+]?=)([ \t]*)((?:.*\\\n)+|.*\n)', + bygroups(Name.Variable, Text, Operator, Text, using(BashLexer))), + # strings + (r'(?s)"(\\\\|\\.|[^"\\])*"', String.Double), + (r"(?s)'(\\\\|\\.|[^'\\])*'", String.Single), + # targets + (r'([^\n:]+)(:+)([ \t]*)', bygroups(Name.Function, Operator, Text), + 'block-header'), + # expansions + (r'\$\(', Keyword, 'expansion'), + ], + 'expansion': [ + (r'[^$a-zA-Z_)]+', Text), + (r'[a-zA-Z_]+', Name.Variable), + (r'\$', Keyword), + (r'\(', Keyword, '#push'), + (r'\)', Keyword, '#pop'), + ], + 'export': [ + (r'[\w${}-]+', Name.Variable), + (r'\n', Text, '#pop'), + (r'\s+', Text), + ], + 'block-header': [ + (r'[,|]', Punctuation), + (r'#.*?\n', Comment, '#pop'), + (r'\\\n', Text), # line continuation + (r'\$\(', Keyword, 'expansion'), + (r'[a-zA-Z_]+', Name), + (r'\n', Text, '#pop'), + (r'.', Text), + ], + } + + +class CMakeLexer(RegexLexer): + """ + Lexer for `CMake `_ files. + + .. versionadded:: 1.2 + """ + name = 'CMake' + aliases = ['cmake'] + filenames = ['*.cmake', 'CMakeLists.txt'] + mimetypes = ['text/x-cmake'] + + tokens = { + 'root': [ + # (r'(ADD_CUSTOM_COMMAND|ADD_CUSTOM_TARGET|ADD_DEFINITIONS|' + # r'ADD_DEPENDENCIES|ADD_EXECUTABLE|ADD_LIBRARY|ADD_SUBDIRECTORY|' + # r'ADD_TEST|AUX_SOURCE_DIRECTORY|BUILD_COMMAND|BUILD_NAME|' + # r'CMAKE_MINIMUM_REQUIRED|CONFIGURE_FILE|CREATE_TEST_SOURCELIST|' + # r'ELSE|ELSEIF|ENABLE_LANGUAGE|ENABLE_TESTING|ENDFOREACH|' + # r'ENDFUNCTION|ENDIF|ENDMACRO|ENDWHILE|EXEC_PROGRAM|' + # r'EXECUTE_PROCESS|EXPORT_LIBRARY_DEPENDENCIES|FILE|FIND_FILE|' + # r'FIND_LIBRARY|FIND_PACKAGE|FIND_PATH|FIND_PROGRAM|FLTK_WRAP_UI|' + # r'FOREACH|FUNCTION|GET_CMAKE_PROPERTY|GET_DIRECTORY_PROPERTY|' + # r'GET_FILENAME_COMPONENT|GET_SOURCE_FILE_PROPERTY|' + # r'GET_TARGET_PROPERTY|GET_TEST_PROPERTY|IF|INCLUDE|' + # r'INCLUDE_DIRECTORIES|INCLUDE_EXTERNAL_MSPROJECT|' + # r'INCLUDE_REGULAR_EXPRESSION|INSTALL|INSTALL_FILES|' + # r'INSTALL_PROGRAMS|INSTALL_TARGETS|LINK_DIRECTORIES|' + # r'LINK_LIBRARIES|LIST|LOAD_CACHE|LOAD_COMMAND|MACRO|' + # r'MAKE_DIRECTORY|MARK_AS_ADVANCED|MATH|MESSAGE|OPTION|' + # r'OUTPUT_REQUIRED_FILES|PROJECT|QT_WRAP_CPP|QT_WRAP_UI|REMOVE|' + # r'REMOVE_DEFINITIONS|SEPARATE_ARGUMENTS|SET|' + # r'SET_DIRECTORY_PROPERTIES|SET_SOURCE_FILES_PROPERTIES|' + # r'SET_TARGET_PROPERTIES|SET_TESTS_PROPERTIES|SITE_NAME|' + # r'SOURCE_GROUP|STRING|SUBDIR_DEPENDS|SUBDIRS|' + # r'TARGET_LINK_LIBRARIES|TRY_COMPILE|TRY_RUN|UNSET|' + # r'USE_MANGLED_MESA|UTILITY_SOURCE|VARIABLE_REQUIRES|' + # r'VTK_MAKE_INSTANTIATOR|VTK_WRAP_JAVA|VTK_WRAP_PYTHON|' + # r'VTK_WRAP_TCL|WHILE|WRITE_FILE|' + # r'COUNTARGS)\b', Name.Builtin, 'args'), + (r'\b(\w+)([ \t]*)(\()', bygroups(Name.Builtin, Text, + Punctuation), 'args'), + include('keywords'), + include('ws') + ], + 'args': [ + (r'\(', Punctuation, '#push'), + (r'\)', Punctuation, '#pop'), + (r'(\${)(.+?)(})', bygroups(Operator, Name.Variable, Operator)), + (r'(\$<)(.+?)(>)', bygroups(Operator, Name.Variable, Operator)), + (r'(?s)".*?"', String.Double), + (r'\\\S+', String), + (r'[^\)$"# \t\n]+', String), + (r'\n', Text), # 
explicitly legal + include('keywords'), + include('ws') + ], + 'string': [ + + ], + 'keywords': [ + (r'\b(WIN32|UNIX|APPLE|CYGWIN|BORLAND|MINGW|MSVC|MSVC_IDE|MSVC60|' + r'MSVC70|MSVC71|MSVC80|MSVC90)\b', Keyword), + ], + 'ws': [ + (r'[ \t]+', Text), + (r'#.*\n', Comment), + ] + } + + def analyse_text(text): + exp = r'^ *CMAKE_MINIMUM_REQUIRED *\( *VERSION *\d(\.\d)* *( FATAL_ERROR)? *\) *$' + if re.search(exp, text, flags=re.MULTILINE | re.IGNORECASE): + return 0.8 + return 0.0 diff --git a/pygments/lexers/parsers.py b/pygments/lexers/parsers.py index 4c23c760..992f204a 100644 --- a/pygments/lexers/parsers.py +++ b/pygments/lexers/parsers.py @@ -14,22 +14,24 @@ import re from pygments.lexer import RegexLexer, DelegatingLexer, \ include, bygroups, using from pygments.token import Punctuation, Other, Text, Comment, Operator, \ - Keyword, Name, String, Number, Whitespace -from pygments.lexers.compiled import JavaLexer, CLexer, CppLexer, \ - ObjectiveCLexer, DLexer + Keyword, Name, String, Number, Whitespace +from pygments.lexers.jvm import JavaLexer +from pygments.lexers.c_like.c_cpp import CLexer, CppLexer +from pygments.lexers.c_like.objective import ObjectiveCLexer +from pygments.lexers.c_like.d import DLexer from pygments.lexers.dotnet import CSharpLexer -from pygments.lexers.agile import RubyLexer, PythonLexer, PerlLexer -from pygments.lexers.web import ActionScriptLexer - +from pygments.lexers.ruby import RubyLexer +from pygments.lexers.python import PythonLexer +from pygments.lexers.perl import PerlLexer __all__ = ['RagelLexer', 'RagelEmbeddedLexer', 'RagelCLexer', 'RagelDLexer', 'RagelCppLexer', 'RagelObjectiveCLexer', 'RagelRubyLexer', 'RagelJavaLexer', 'AntlrLexer', 'AntlrPythonLexer', 'AntlrPerlLexer', 'AntlrRubyLexer', 'AntlrCppLexer', - #'AntlrCLexer', + # 'AntlrCLexer', 'AntlrCSharpLexer', 'AntlrObjectiveCLexer', - 'AntlrJavaLexer', "AntlrActionScriptLexer", - 'TreetopLexer'] + 'AntlrJavaLexer', 'AntlrActionScriptLexer', + 'TreetopLexer', 'EbnfLexer'] class RagelLexer(RegexLexer): @@ -63,29 +65,29 @@ class RagelLexer(RegexLexer): (r'[+-]?[0-9]+', Number.Integer), ], 'literals': [ - (r'"(\\\\|\\"|[^"])*"', String), # double quote string - (r"'(\\\\|\\'|[^'])*'", String), # single quote string - (r'\[(\\\\|\\\]|[^\]])*\]', String), # square bracket literals - (r'/(?!\*)(\\\\|\\/|[^/])*/', String.Regex), # regular expressions + (r'"(\\\\|\\"|[^"])*"', String), # double quote string + (r"'(\\\\|\\'|[^'])*'", String), # single quote string + (r'\[(\\\\|\\\]|[^\]])*\]', String), # square bracket literals + (r'/(?!\*)(\\\\|\\/|[^/])*/', String.Regex), # regular expressions ], 'identifiers': [ (r'[a-zA-Z_][a-zA-Z_0-9]*', Name.Variable), ], 'operators': [ - (r',', Operator), # Join - (r'\||&|--?', Operator), # Union, Intersection and Subtraction - (r'\.|<:|:>>?', Operator), # Concatention - (r':', Operator), # Label - (r'->', Operator), # Epsilon Transition - (r'(>|\$|%|<|@|<>)(/|eof\b)', Operator), # EOF Actions - (r'(>|\$|%|<|@|<>)(!|err\b)', Operator), # Global Error Actions - (r'(>|\$|%|<|@|<>)(\^|lerr\b)', Operator), # Local Error Actions - (r'(>|\$|%|<|@|<>)(~|to\b)', Operator), # To-State Actions - (r'(>|\$|%|<|@|<>)(\*|from\b)', Operator), # From-State Actions - (r'>|@|\$|%', Operator), # Transition Actions and Priorities - (r'\*|\?|\+|{[0-9]*,[0-9]*}', Operator), # Repetition - (r'!|\^', Operator), # Negation - (r'\(|\)', Operator), # Grouping + (r',', Operator), # Join + (r'\||&|--?', Operator), # Union, Intersection and Subtraction + (r'\.|<:|:>>?', Operator), # Concatention + 
(r':', Operator), # Label + (r'->', Operator), # Epsilon Transition + (r'(>|\$|%|<|@|<>)(/|eof\b)', Operator), # EOF Actions + (r'(>|\$|%|<|@|<>)(!|err\b)', Operator), # Global Error Actions + (r'(>|\$|%|<|@|<>)(\^|lerr\b)', Operator), # Local Error Actions + (r'(>|\$|%|<|@|<>)(~|to\b)', Operator), # To-State Actions + (r'(>|\$|%|<|@|<>)(\*|from\b)', Operator), # From-State Actions + (r'>|@|\$|%', Operator), # Transition Actions and Priorities + (r'\*|\?|\+|{[0-9]*,[0-9]*}', Operator), # Repetition + (r'!|\^', Operator), # Negation + (r'\(|\)', Operator), # Grouping ], 'root': [ include('literals'), @@ -100,16 +102,16 @@ class RagelLexer(RegexLexer): (r';', Punctuation), ], 'host': [ - (r'(' + r'|'.join(( # keep host code in largest possible chunks - r'[^{}\'"/#]+', # exclude unsafe characters - r'[^\\]\\[{}]', # allow escaped { or } + (r'(' + r'|'.join(( # keep host code in largest possible chunks + r'[^{}\'"/#]+', # exclude unsafe characters + r'[^\\]\\[{}]', # allow escaped { or } # strings and comments may safely contain unsafe characters - r'"(\\\\|\\"|[^"])*"', # double quote string - r"'(\\\\|\\'|[^'])*'", # single quote string - r'//.*$\n?', # single line comment - r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment - r'\#.*$\n?', # ruby comment + r'"(\\\\|\\"|[^"])*"', # double quote string + r"'(\\\\|\\'|[^'])*'", # single quote string + r'//.*$\n?', # single line comment + r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment + r'\#.*$\n?', # ruby comment # regular expression: There's no reason for it to start # with a * and this stops confusion with comments. @@ -141,17 +143,17 @@ class RagelEmbeddedLexer(RegexLexer): tokens = { 'root': [ - (r'(' + r'|'.join(( # keep host code in largest possible chunks - r'[^%\'"/#]+', # exclude unsafe characters - r'%(?=[^%]|$)', # a single % sign is okay, just not 2 of them + (r'(' + r'|'.join(( # keep host code in largest possible chunks + r'[^%\'"/#]+', # exclude unsafe characters + r'%(?=[^%]|$)', # a single % sign is okay, just not 2 of them # strings and comments may safely contain unsafe characters - r'"(\\\\|\\"|[^"])*"', # double quote string - r"'(\\\\|\\'|[^'])*'", # single quote string - r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment - r'//.*$\n?', # single line comment - r'\#.*$\n?', # ruby/ragel comment - r'/(?!\*)(\\\\|\\/|[^/])*/', # regular expression + r'"(\\\\|\\"|[^"])*"', # double quote string + r"'(\\\\|\\'|[^'])*'", # single quote string + r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment + r'//.*$\n?', # single line comment + r'\#.*$\n?', # ruby/ragel comment + r'/(?!\*)(\\\\|\\/|[^/])*/', # regular expression # / is safe now that we've handled regex and javadoc comments r'/', @@ -168,12 +170,12 @@ class RagelEmbeddedLexer(RegexLexer): (r'(%%%%|%%){', Punctuation, 'multi-line-fsm'), ], 'multi-line-fsm': [ - (r'(' + r'|'.join(( # keep ragel code in largest possible chunks. + (r'(' + r'|'.join(( # keep ragel code in largest possible chunks. r'(' + r'|'.join(( - r'[^}\'"\[/#]', # exclude unsafe characters - r'}(?=[^%]|$)', # } is okay as long as it's not followed by % - r'}%(?=[^%]|$)', # ...well, one %'s okay, just not two... - r'[^\\]\\[{}]', # ...and } is okay if it's escaped + r'[^}\'"\[/#]', # exclude unsafe characters + r'}(?=[^%]|$)', # } is okay as long as it's not followed by % + r'}%(?=[^%]|$)', # ...well, one %'s okay, just not two... 
+ r'[^\\]\\[{}]', # ...and } is okay if it's escaped # allow / if it's preceded with one of these symbols # (ragel EOF actions) @@ -189,15 +191,15 @@ class RagelEmbeddedLexer(RegexLexer): # We want to match as many of these as we can in one block. # Not sure if we need the + sign here, # does it help performance? - )) + r')+', + )) + r')+', # strings and comments may safely contain unsafe characters - r'"(\\\\|\\"|[^"])*"', # double quote string - r"'(\\\\|\\'|[^'])*'", # single quote string - r"\[(\\\\|\\\]|[^\]])*\]", # square bracket literal - r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment - r'//.*$\n?', # single line comment - r'\#.*$\n?', # ruby/ragel comment + r'"(\\\\|\\"|[^"])*"', # double quote string + r"'(\\\\|\\'|[^'])*'", # single quote string + r"\[(\\\\|\\\]|[^\]])*\]", # square bracket literal + r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment + r'//.*$\n?', # single line comment + r'\#.*$\n?', # ruby/ragel comment )) + r')+', using(RagelLexer)), (r'}%%', Punctuation, '#pop'), @@ -221,7 +223,7 @@ class RagelRubyLexer(DelegatingLexer): def __init__(self, **options): super(RagelRubyLexer, self).__init__(RubyLexer, RagelEmbeddedLexer, - **options) + **options) def analyse_text(text): return '@LANG: ruby' in text @@ -336,9 +338,9 @@ class AntlrLexer(RegexLexer): aliases = ['antlr'] filenames = [] - _id = r'[A-Za-z][A-Za-z_0-9]*' - _TOKEN_REF = r'[A-Z][A-Za-z_0-9]*' - _RULE_REF = r'[a-z][A-Za-z_0-9]*' + _id = r'[A-Za-z][A-Za-z_0-9]*' + _TOKEN_REF = r'[A-Z][A-Za-z_0-9]*' + _RULE_REF = r'[a-z][A-Za-z_0-9]*' _STRING_LITERAL = r'\'(?:\\\\|\\\'|[^\']*)\'' _INT = r'[0-9]+' @@ -372,7 +374,7 @@ class AntlrLexer(RegexLexer): bygroups(Name.Label, Whitespace, Punctuation, Whitespace, Name.Label, Whitespace, Punctuation), 'action'), # rule - (r'((?:protected|private|public|fragment)\b)?(\s*)(' + _id + ')(!)?', \ + (r'((?:protected|private|public|fragment)\b)?(\s*)(' + _id + ')(!)?', bygroups(Keyword, Whitespace, Name.Label, Punctuation), ('rule-alts', 'rule-prelims')), ], @@ -395,14 +397,14 @@ class AntlrLexer(RegexLexer): (r'(throws)(\s+)(' + _id + ')', bygroups(Keyword, Whitespace, Name.Label)), (r'(,)(\s*)(' + _id + ')', - bygroups(Punctuation, Whitespace, Name.Label)), # Additional throws + bygroups(Punctuation, Whitespace, Name.Label)), # Additional throws # optionsSpec (r'options\b', Keyword, 'options'), # ruleScopeSpec - scope followed by target language code or name of action # TODO finish implementing other possibilities for scope # L173 ANTLRv3.g from ANTLR book (r'(scope)(\s+)({)', bygroups(Keyword, Whitespace, Punctuation), - 'action'), + 'action'), (r'(scope)(\s+)(' + _id + ')(\s*)(;)', bygroups(Keyword, Whitespace, Name.Label, Whitespace, Punctuation)), # ruleAction @@ -450,20 +452,20 @@ class AntlrLexer(RegexLexer): include('comments'), (r'{', Punctuation), (r'(' + _id + r')(\s*)(=)(\s*)(' + - '|'.join((_id, _STRING_LITERAL, _INT, '\*'))+ ')(\s*)(;)', + '|'.join((_id, _STRING_LITERAL, _INT, '\*')) + ')(\s*)(;)', bygroups(Name.Variable, Whitespace, Punctuation, Whitespace, Text, Whitespace, Punctuation)), (r'}', Punctuation, '#pop'), ], 'action': [ - (r'(' + r'|'.join(( # keep host code in largest possible chunks - r'[^\${}\'"/\\]+', # exclude unsafe characters + (r'(' + r'|'.join(( # keep host code in largest possible chunks + r'[^\${}\'"/\\]+', # exclude unsafe characters # strings and comments may safely contain unsafe characters - r'"(\\\\|\\"|[^"])*"', # double quote string - r"'(\\\\|\\'|[^'])*'", # single quote string - r'//.*$\n?', # single line comment - 
r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment + r'"(\\\\|\\"|[^"])*"', # double quote string + r"'(\\\\|\\'|[^'])*'", # single quote string + r'//.*$\n?', # single line comment + r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment # regular expression: There's no reason for it to start # with a * and this stops confusion with comments. @@ -483,14 +485,14 @@ class AntlrLexer(RegexLexer): (r'}', Punctuation, '#pop'), ], 'nested-arg-action': [ - (r'(' + r'|'.join(( # keep host code in largest possible chunks. - r'[^\$\[\]\'"/]+', # exclude unsafe characters + (r'(' + r'|'.join(( # keep host code in largest possible chunks. + r'[^\$\[\]\'"/]+', # exclude unsafe characters # strings and comments may safely contain unsafe characters - r'"(\\\\|\\"|[^"])*"', # double quote string - r"'(\\\\|\\'|[^'])*'", # single quote string - r'//.*$\n?', # single line comment - r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment + r'"(\\\\|\\"|[^"])*"', # double quote string + r"'(\\\\|\\'|[^'])*'", # single quote string + r'//.*$\n?', # single line comment + r'/\*(.|\n)*?\*/', # multi-line javadoc-style comment # regular expression: There's no reason for it to start # with a * and this stops confusion with comments. @@ -520,7 +522,7 @@ class AntlrLexer(RegexLexer): # so just assume they're C++. No idea how to make Objective C work in the # future. -#class AntlrCLexer(DelegatingLexer): +# class AntlrCLexer(DelegatingLexer): # """ # ANTLR with C Target # @@ -537,6 +539,7 @@ class AntlrLexer(RegexLexer): # def analyse_text(text): # return re.match(r'^\s*language\s*=\s*C\s*;', text) + class AntlrCppLexer(DelegatingLexer): """ `ANTLR`_ with CPP Target @@ -553,7 +556,7 @@ class AntlrCppLexer(DelegatingLexer): def analyse_text(text): return AntlrLexer.analyse_text(text) and \ - re.search(r'^\s*language\s*=\s*C\s*;', text, re.M) + re.search(r'^\s*language\s*=\s*C\s*;', text, re.M) class AntlrObjectiveCLexer(DelegatingLexer): @@ -573,7 +576,7 @@ class AntlrObjectiveCLexer(DelegatingLexer): def analyse_text(text): return AntlrLexer.analyse_text(text) and \ - re.search(r'^\s*language\s*=\s*ObjC\s*;', text) + re.search(r'^\s*language\s*=\s*ObjC\s*;', text) class AntlrCSharpLexer(DelegatingLexer): @@ -593,7 +596,7 @@ class AntlrCSharpLexer(DelegatingLexer): def analyse_text(text): return AntlrLexer.analyse_text(text) and \ - re.search(r'^\s*language\s*=\s*CSharp2\s*;', text, re.M) + re.search(r'^\s*language\s*=\s*CSharp2\s*;', text, re.M) class AntlrPythonLexer(DelegatingLexer): @@ -613,7 +616,7 @@ class AntlrPythonLexer(DelegatingLexer): def analyse_text(text): return AntlrLexer.analyse_text(text) and \ - re.search(r'^\s*language\s*=\s*Python\s*;', text, re.M) + re.search(r'^\s*language\s*=\s*Python\s*;', text, re.M) class AntlrJavaLexer(DelegatingLexer): @@ -653,7 +656,7 @@ class AntlrRubyLexer(DelegatingLexer): def analyse_text(text): return AntlrLexer.analyse_text(text) and \ - re.search(r'^\s*language\s*=\s*Ruby\s*;', text, re.M) + re.search(r'^\s*language\s*=\s*Ruby\s*;', text, re.M) class AntlrPerlLexer(DelegatingLexer): @@ -673,7 +676,7 @@ class AntlrPerlLexer(DelegatingLexer): def analyse_text(text): return AntlrLexer.analyse_text(text) and \ - re.search(r'^\s*language\s*=\s*Perl5\s*;', text, re.M) + re.search(r'^\s*language\s*=\s*Perl5\s*;', text, re.M) class AntlrActionScriptLexer(DelegatingLexer): @@ -688,12 +691,14 @@ class AntlrActionScriptLexer(DelegatingLexer): filenames = ['*.G', '*.g'] def __init__(self, **options): + from pygments.lexers.web import ActionScriptLexer 
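All of the Ragel*, Antlr* and Treetop wrappers in this file are thin DelegatingLexer subclasses: the grammar lexer marks host-language spans as Other, and the root lexer then re-lexes exactly those spans. A self-contained sketch of that pattern, not part of the patch; ToyTemplateLexer and ToyPythonTemplateLexer are invented names standing in for pairs like RagelEmbeddedLexer and RagelRubyLexer:

from pygments.lexer import RegexLexer, DelegatingLexer
from pygments.lexers.python import PythonLexer
from pygments.token import Comment, Other

class ToyTemplateLexer(RegexLexer):
    # Keeps '## ...' lines for itself and hands everything else to the
    # root lexer by tagging it Other.
    name = 'ToyTemplate'
    tokens = {
        'root': [
            (r'##.*\n', Comment.Single),
            (r'[^#]+', Other),
            (r'#', Other),
        ],
    }

class ToyPythonTemplateLexer(DelegatingLexer):
    def __init__(self, **options):
        super(ToyPythonTemplateLexer, self).__init__(
            PythonLexer, ToyTemplateLexer, **options)

if __name__ == '__main__':
    text = "## generated file\nx = 1\n"
    for token, value in ToyPythonTemplateLexer().get_tokens(text):
        print(token, repr(value))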
super(AntlrActionScriptLexer, self).__init__(ActionScriptLexer, AntlrLexer, **options) def analyse_text(text): return AntlrLexer.analyse_text(text) and \ - re.search(r'^\s*language\s*=\s*ActionScript\s*;', text, re.M) + re.search(r'^\s*language\s*=\s*ActionScript\s*;', text, re.M) + class TreetopBaseLexer(RegexLexer): """ @@ -763,6 +768,7 @@ class TreetopBaseLexer(RegexLexer): ], } + class TreetopLexer(DelegatingLexer): """ A lexer for `Treetop `_ grammars. @@ -776,3 +782,53 @@ class TreetopLexer(DelegatingLexer): def __init__(self, **options): super(TreetopLexer, self).__init__(RubyLexer, TreetopBaseLexer, **options) + + +class EbnfLexer(RegexLexer): + """ + Lexer for `ISO/IEC 14977 EBNF + `_ + grammars. + + .. versionadded:: 2.0 + """ + + name = 'EBNF' + aliases = ['ebnf'] + filenames = ['*.ebnf'] + mimetypes = ['text/x-ebnf'] + + tokens = { + 'root': [ + include('whitespace'), + include('comment_start'), + include('identifier'), + (r'=', Operator, 'production'), + ], + 'production': [ + include('whitespace'), + include('comment_start'), + include('identifier'), + (r'"[^"]*"', String.Double), + (r"'[^']*'", String.Single), + (r'(\?[^?]*\?)', Name.Entity), + (r'[\[\]{}(),|]', Punctuation), + (r'-', Operator), + (r';', Punctuation, '#pop'), + ], + 'whitespace': [ + (r'\s+', Text), + ], + 'comment_start': [ + (r'\(\*', Comment.Multiline, 'comment'), + ], + 'comment': [ + (r'[^*)]', Comment.Multiline), + include('comment_start'), + (r'\*\)', Comment.Multiline, '#pop'), + (r'[*)]', Comment.Multiline), + ], + 'identifier': [ + (r'([a-zA-Z][a-zA-Z0-9 \-]*)', Keyword), + ], + } diff --git a/pygments/lexers/templates.py b/pygments/lexers/templates.py index 8d91d9d0..d8b7a503 100644 --- a/pygments/lexers/templates.py +++ b/pygments/lexers/templates.py @@ -12,16 +12,15 @@ import re from pygments.lexers.web import \ - PhpLexer, HtmlLexer, XmlLexer, JavascriptLexer, CssLexer, LassoLexer -from pygments.lexers.agile import PythonLexer, PerlLexer -from pygments.lexers.compiled import JavaLexer -from pygments.lexers.jvm import TeaLangLexer -from pygments.lexers.text import YamlLexer + PhpLexer, HtmlLexer, XmlLexer, JavascriptLexer, CssLexer, LassoLexer +from pygments.lexers.python import PythonLexer +from pygments.lexers.perl import PerlLexer +from pygments.lexers.jvm import JavaLexer, TeaLangLexer +from pygments.lexers.data import YamlLexer from pygments.lexer import Lexer, DelegatingLexer, RegexLexer, bygroups, \ - include, using, this, default, combined -from pygments.token import Error, Punctuation, \ - Text, Comment, Operator, Keyword, Name, String, Number, Other, Token, \ - Whitespace + include, using, this, default, combined +from pygments.token import Error, Punctuation, Whitespace, \ + Text, Comment, Operator, Keyword, Name, String, Number, Other, Token from pygments.util import html_doctype_matches, looks_like_xml __all__ = ['HtmlPhpLexer', 'XmlPhpLexer', 'CssPhpLexer', @@ -63,7 +62,7 @@ class ErbLexer(Lexer): _block_re = re.compile(r'(<%%|%%>|<%=|<%#|<%-|<%|-%>|%>|^%[^%].*?$)', re.M) def __init__(self, **options): - from pygments.lexers.agile import RubyLexer + from pygments.lexers.ruby import RubyLexer self.ruby_lexer = RubyLexer(**options) Lexer.__init__(self, **options) diff --git a/pygments/lexers/text.py b/pygments/lexers/text.py index 8de3ded7..d4aeaeeb 100644 --- a/pygments/lexers/text.py +++ b/pygments/lexers/text.py @@ -9,2047 +9,18 @@ :license: BSD, see LICENSE for details. 
""" -import re -from bisect import bisect - -from pygments.lexer import Lexer, LexerContext, RegexLexer, ExtendedRegexLexer, \ - bygroups, include, using, this, do_insertions, default -from pygments.token import Punctuation, Text, Comment, Keyword, Name, String, \ - Generic, Operator, Number, Whitespace, Literal -from pygments.util import get_bool_opt, ClassNotFound -from pygments.lexers.agile import PythonLexer -from pygments.lexers.other import BashLexer - -__all__ = ['IniLexer', 'PropertiesLexer', 'SourcesListLexer', 'BaseMakefileLexer', - 'MakefileLexer', 'DiffLexer', 'IrcLogsLexer', 'TexLexer', - 'GroffLexer', 'ApacheConfLexer', 'BBCodeLexer', 'MoinWikiLexer', - 'RstLexer', 'VimLexer', 'GettextLexer', 'SquidConfLexer', - 'DebianControlLexer', 'DarcsPatchLexer', 'YamlLexer', - 'LighttpdConfLexer', 'NginxConfLexer', 'CMakeLexer', 'HttpLexer', - 'PyPyLogLexer', 'RegeditLexer', 'HxmlLexer', 'EbnfLexer', - 'TodotxtLexer', 'DockerLexer'] - - -class IniLexer(RegexLexer): - """ - Lexer for configuration files in INI style. - """ - - name = 'INI' - aliases = ['ini', 'cfg', 'dosini'] - filenames = ['*.ini', '*.cfg'] - mimetypes = ['text/x-ini'] - - tokens = { - 'root': [ - (r'\s+', Text), - (r'[;#].*', Comment.Single), - (r'\[.*?\]$', Keyword), - (r'(.*?)([ \t]*)(=)([ \t]*)(.*(?:\n[ \t].+)*)', - bygroups(Name.Attribute, Text, Operator, Text, String)) - ] - } - - def analyse_text(text): - npos = text.find('\n') - if npos < 3: - return False - return text[0] == '[' and text[npos-1] == ']' - - -class RegeditLexer(RegexLexer): - """ - Lexer for `Windows Registry - `_ files produced - by regedit. - - .. versionadded:: 1.6 - """ - - name = 'reg' - aliases = ['registry'] - filenames = ['*.reg'] - mimetypes = ['text/x-windows-registry'] - - tokens = { - 'root': [ - (r'Windows Registry Editor.*', Text), - (r'\s+', Text), - (r'[;#].*', Comment.Single), - (r'(\[)(-?)(HKEY_[A-Z_]+)(.*?\])$', - bygroups(Keyword, Operator, Name.Builtin, Keyword)), - # String keys, which obey somewhat normal escaping - (r'("(?:\\"|\\\\|[^"])+")([ \t]*)(=)([ \t]*)', - bygroups(Name.Attribute, Text, Operator, Text), - 'value'), - # Bare keys (includes @) - (r'(.*?)([ \t]*)(=)([ \t]*)', - bygroups(Name.Attribute, Text, Operator, Text), - 'value'), - ], - 'value': [ - (r'-', Operator, '#pop'), # delete value - (r'(dword|hex(?:\([0-9a-fA-F]\))?)(:)([0-9a-fA-F,]+)', - bygroups(Name.Variable, Punctuation, Number), '#pop'), - # As far as I know, .reg files do not support line continuation. - (r'.*', String, '#pop'), - ] - } - - def analyse_text(text): - return text.startswith('Windows Registry Editor') - - -class PropertiesLexer(RegexLexer): - """ - Lexer for configuration files in Java's properties format. - - .. versionadded:: 1.4 - """ - - name = 'Properties' - aliases = ['properties', 'jproperties'] - filenames = ['*.properties'] - mimetypes = ['text/x-java-properties'] - - tokens = { - 'root': [ - (r'\s+', Text), - (r'(?:[;#]|//).*$', Comment), - (r'(.*?)([ \t]*)([=:])([ \t]*)(.*(?:(?<=\\)\n.*)*)', - bygroups(Name.Attribute, Text, Operator, Text, String)), - ], - } - - -class SourcesListLexer(RegexLexer): - """ - Lexer that highlights debian sources.list files. - - .. 
versionadded:: 0.7 - """ - - name = 'Debian Sourcelist' - aliases = ['sourceslist', 'sources.list', 'debsources'] - filenames = ['sources.list'] - mimetype = ['application/x-debian-sourceslist'] - - tokens = { - 'root': [ - (r'\s+', Text), - (r'#.*?$', Comment), - (r'^(deb(?:-src)?)(\s+)', - bygroups(Keyword, Text), 'distribution') - ], - 'distribution': [ - (r'#.*?$', Comment, '#pop'), - (r'\$\(ARCH\)', Name.Variable), - (r'[^\s$[]+', String), - (r'\[', String.Other, 'escaped-distribution'), - (r'\$', String), - (r'\s+', Text, 'components') - ], - 'escaped-distribution': [ - (r'\]', String.Other, '#pop'), - (r'\$\(ARCH\)', Name.Variable), - (r'[^\]$]+', String.Other), - (r'\$', String.Other) - ], - 'components': [ - (r'#.*?$', Comment, '#pop:2'), - (r'$', Text, '#pop:2'), - (r'\s+', Text), - (r'\S+', Keyword.Pseudo), - ] - } - - def analyse_text(text): - for line in text.split('\n'): - line = line.strip() - if not (line.startswith('#') or line.startswith('deb ') or - line.startswith('deb-src ') or not line): - return False - return True - - -class MakefileLexer(Lexer): - """ - Lexer for BSD and GNU make extensions (lenient enough to handle both in - the same file even). - - *Rewritten in Pygments 0.10.* - """ - - name = 'Makefile' - aliases = ['make', 'makefile', 'mf', 'bsdmake'] - filenames = ['*.mak', '*.mk', 'Makefile', 'makefile', 'Makefile.*', 'GNUmakefile'] - mimetypes = ['text/x-makefile'] - - r_special = re.compile(r'^(?:' - # BSD Make - r'\.\s*(include|undef|error|warning|if|else|elif|endif|for|endfor)|' - # GNU Make - r'\s*(ifeq|ifneq|ifdef|ifndef|else|endif|-?include|define|endef|:))(?=\s)') - r_comment = re.compile(r'^\s*@?#') - - def get_tokens_unprocessed(self, text): - ins = [] - lines = text.splitlines(True) - done = '' - lex = BaseMakefileLexer(**self.options) - backslashflag = False - for line in lines: - if self.r_special.match(line) or backslashflag: - ins.append((len(done), [(0, Comment.Preproc, line)])) - backslashflag = line.strip().endswith('\\') - elif self.r_comment.match(line): - ins.append((len(done), [(0, Comment, line)])) - else: - done += line - for item in do_insertions(ins, lex.get_tokens_unprocessed(done)): - yield item - - def analyse_text(text): - # Many makefiles have $(BIG_CAPS) style variables - if re.search(r'\$\([A-Z_]+\)', text): - return 0.1 - - -class BaseMakefileLexer(RegexLexer): - """ - Lexer for simple Makefiles (no preprocessing). - - .. 
versionadded:: 0.10 - """ - - name = 'Base Makefile' - aliases = ['basemake'] - filenames = [] - mimetypes = [] - - tokens = { - 'root': [ - # recipes (need to allow spaces because of expandtabs) - (r'^(?:[\t ]+.*\n|\n)+', using(BashLexer)), - # special variables - (r'\$[<@$+%?|*]', Keyword), - (r'\s+', Text), - (r'#.*?\n', Comment), - (r'(export)(\s+)(?=[\w${}\t -]+\n)', - bygroups(Keyword, Text), 'export'), - (r'export\s+', Keyword), - # assignment - (r'([\w${}.-]+)(\s*)([!?:+]?=)([ \t]*)((?:.*\\\n)+|.*\n)', - bygroups(Name.Variable, Text, Operator, Text, using(BashLexer))), - # strings - (r'(?s)"(\\\\|\\.|[^"\\])*"', String.Double), - (r"(?s)'(\\\\|\\.|[^'\\])*'", String.Single), - # targets - (r'([^\n:]+)(:+)([ \t]*)', bygroups(Name.Function, Operator, Text), - 'block-header'), - # expansions - (r'\$\(', Keyword, 'expansion'), - ], - 'expansion': [ - (r'[^$a-zA-Z_)]+', Text), - (r'[a-zA-Z_]+', Name.Variable), - (r'\$', Keyword), - (r'\(', Keyword, '#push'), - (r'\)', Keyword, '#pop'), - ], - 'export': [ - (r'[\w${}-]+', Name.Variable), - (r'\n', Text, '#pop'), - (r'\s+', Text), - ], - 'block-header': [ - (r'[,|]', Punctuation), - (r'#.*?\n', Comment, '#pop'), - (r'\\\n', Text), # line continuation - (r'\$\(', Keyword, 'expansion'), - (r'[a-zA-Z_]+', Name), - (r'\n', Text, '#pop'), - (r'.', Text), - ], - } - - -class DiffLexer(RegexLexer): - """ - Lexer for unified or context-style diffs or patches. - """ - - name = 'Diff' - aliases = ['diff', 'udiff'] - filenames = ['*.diff', '*.patch'] - mimetypes = ['text/x-diff', 'text/x-patch'] - - tokens = { - 'root': [ - (r' .*\n', Text), - (r'\+.*\n', Generic.Inserted), - (r'-.*\n', Generic.Deleted), - (r'!.*\n', Generic.Strong), - (r'@.*\n', Generic.Subheading), - (r'([Ii]ndex|diff).*\n', Generic.Heading), - (r'=.*\n', Generic.Heading), - (r'.*\n', Text), - ] - } - - def analyse_text(text): - if text[:7] == 'Index: ': - return True - if text[:5] == 'diff ': - return True - if text[:4] == '--- ': - return 0.9 - - -DPATCH_KEYWORDS = ['hunk', 'addfile', 'adddir', 'rmfile', 'rmdir', 'move', - 'replace'] - -class DarcsPatchLexer(RegexLexer): - """ - DarcsPatchLexer is a lexer for the various versions of the darcs patch - format. Examples of this format are derived by commands such as - ``darcs annotate --patch`` and ``darcs send``. - - .. 
versionadded:: 0.10 - """ - name = 'Darcs Patch' - aliases = ['dpatch'] - filenames = ['*.dpatch', '*.darcspatch'] - - tokens = { - 'root': [ - (r'<', Operator), - (r'>', Operator), - (r'{', Operator), - (r'}', Operator), - (r'(\[)((?:TAG )?)(.*)(\n)(.*)(\*\*)(\d+)(\s?)(\])', - bygroups(Operator, Keyword, Name, Text, Name, Operator, - Literal.Date, Text, Operator)), - (r'(\[)((?:TAG )?)(.*)(\n)(.*)(\*\*)(\d+)(\s?)', - bygroups(Operator, Keyword, Name, Text, Name, Operator, - Literal.Date, Text), 'comment'), - (r'New patches:', Generic.Heading), - (r'Context:', Generic.Heading), - (r'Patch bundle hash:', Generic.Heading), - (r'(\s*)(%s)(.*\n)' % '|'.join(DPATCH_KEYWORDS), - bygroups(Text, Keyword, Text)), - (r'\+', Generic.Inserted, "insert"), - (r'-', Generic.Deleted, "delete"), - (r'.*\n', Text), - ], - 'comment': [ - (r'[^\]].*\n', Comment), - (r'\]', Operator, "#pop"), - ], - 'specialText': [ # darcs add [_CODE_] special operators for clarity - (r'\n', Text, "#pop"), # line-based - (r'\[_[^_]*_]', Operator), - ], - 'insert': [ - include('specialText'), - (r'\[', Generic.Inserted), - (r'[^\n\[]+', Generic.Inserted), - ], - 'delete': [ - include('specialText'), - (r'\[', Generic.Deleted), - (r'[^\n\[]+', Generic.Deleted), - ], - } - - -class IrcLogsLexer(RegexLexer): - """ - Lexer for IRC logs in *irssi*, *xchat* or *weechat* style. - """ - - name = 'IRC logs' - aliases = ['irc'] - filenames = ['*.weechatlog'] - mimetypes = ['text/x-irclog'] - - flags = re.VERBOSE | re.MULTILINE - timestamp = r""" - ( - # irssi / xchat and others - (?: \[|\()? # Opening bracket or paren for the timestamp - (?: # Timestamp - (?: (?:\d{1,4} [-/]?)+ # Date as - or /-separated groups of digits - [T ])? # Date/time separator: T or space - (?: \d?\d [:.]?)+ # Time as :/.-separated groups of 1 or 2 digits - ) - (?: \]|\))?\s+ # Closing bracket or paren for the timestamp - | - # weechat - \d{4}\s\w{3}\s\d{2}\s # Date - \d{2}:\d{2}:\d{2}\s+ # Time + Whitespace - | - # xchat - \w{3}\s\d{2}\s # Date - \d{2}:\d{2}:\d{2}\s+ # Time + Whitespace - )? - """ - tokens = { - 'root': [ - # log start/end - (r'^\*\*\*\*(.*)\*\*\*\*$', Comment), - # hack - ("^" + timestamp + r'(\s*<[^>]*>\s*)$', bygroups(Comment.Preproc, Name.Tag)), - # normal msgs - ("^" + timestamp + r""" - (\s*<.*?>\s*) # Nick """, - bygroups(Comment.Preproc, Name.Tag), 'msg'), - # /me msgs - ("^" + timestamp + r""" - (\s*[*]\s+) # Star - (\S+\s+.*?\n) # Nick + rest of message """, - bygroups(Comment.Preproc, Keyword, Generic.Inserted)), - # join/part msgs - ("^" + timestamp + r""" - (\s*(?:\*{3}|?)\s*) # Star(s) or symbols - (\S+\s+) # Nick + Space - (.*?\n) # Rest of message """, - bygroups(Comment.Preproc, Keyword, String, Comment)), - (r"^.*?\n", Text), - ], - 'msg': [ - (r"\S+:(?!//)", Name.Attribute), # Prefix - (r".*\n", Text, '#pop'), - ], - } - - -class BBCodeLexer(RegexLexer): - """ - A lexer that highlights BBCode(-like) syntax. - - .. versionadded:: 0.6 - """ - - name = 'BBCode' - aliases = ['bbcode'] - mimetypes = ['text/x-bbcode'] - - tokens = { - 'root': [ - (r'[^[]+', Text), - # tag/end tag begin - (r'\[/?\w+', Keyword, 'tag'), - # stray bracket - (r'\[', Text), - ], - 'tag': [ - (r'\s+', Text), - # attribute with value - (r'(\w+)(=)("?[^\s"\]]+"?)', - bygroups(Name.Attribute, Operator, String)), - # tag argument (a la [color=green]) - (r'(=)("?[^\s"\]]+"?)', - bygroups(Operator, String)), - # tag end - (r'\]', Keyword, '#pop'), - ], - } - - -class TexLexer(RegexLexer): - """ - Lexer for the TeX and LaTeX typesetting languages. 
- """ - - name = 'TeX' - aliases = ['tex', 'latex'] - filenames = ['*.tex', '*.aux', '*.toc'] - mimetypes = ['text/x-tex', 'text/x-latex'] - - tokens = { - 'general': [ - (r'%.*?\n', Comment), - (r'[{}]', Name.Builtin), - (r'[&_^]', Name.Builtin), - ], - 'root': [ - (r'\\\[', String.Backtick, 'displaymath'), - (r'\\\(', String, 'inlinemath'), - (r'\$\$', String.Backtick, 'displaymath'), - (r'\$', String, 'inlinemath'), - (r'\\([a-zA-Z]+|.)', Keyword, 'command'), - include('general'), - (r'[^\\$%&_^{}]+', Text), - ], - 'math': [ - (r'\\([a-zA-Z]+|.)', Name.Variable), - include('general'), - (r'[0-9]+', Number), - (r'[-=!+*/()\[\]]', Operator), - (r'[^=!+*/()\[\]\\$%&_^{}0-9-]+', Name.Builtin), - ], - 'inlinemath': [ - (r'\\\)', String, '#pop'), - (r'\$', String, '#pop'), - include('math'), - ], - 'displaymath': [ - (r'\\\]', String, '#pop'), - (r'\$\$', String, '#pop'), - (r'\$', Name.Builtin), - include('math'), - ], - 'command': [ - (r'\[.*?\]', Name.Attribute), - (r'\*', Keyword), - default('#pop'), - ], - } - - def analyse_text(text): - for start in ("\\documentclass", "\\input", "\\documentstyle", - "\\relax"): - if text[:len(start)] == start: - return True - - -class GroffLexer(RegexLexer): - """ - Lexer for the (g)roff typesetting language, supporting groff - extensions. Mainly useful for highlighting manpage sources. - - .. versionadded:: 0.6 - """ - - name = 'Groff' - aliases = ['groff', 'nroff', 'man'] - filenames = ['*.[1234567]', '*.man'] - mimetypes = ['application/x-troff', 'text/troff'] - - tokens = { - 'root': [ - (r'(\.)(\w+)', bygroups(Text, Keyword), 'request'), - (r'\.', Punctuation, 'request'), - # Regular characters, slurp till we find a backslash or newline - (r'[^\\\n]*', Text, 'textline'), - ], - 'textline': [ - include('escapes'), - (r'[^\\\n]+', Text), - (r'\n', Text, '#pop'), - ], - 'escapes': [ - # groff has many ways to write escapes. - (r'\\"[^\n]*', Comment), - (r'\\[fn]\w', String.Escape), - (r'\\\(.{2}', String.Escape), - (r'\\.\[.*\]', String.Escape), - (r'\\.', String.Escape), - (r'\\\n', Text, 'request'), - ], - 'request': [ - (r'\n', Text, '#pop'), - include('escapes'), - (r'"[^\n"]+"', String.Double), - (r'\d+', Number), - (r'\S+', String), - (r'\s+', Text), - ], - } - - def analyse_text(text): - if text[:1] != '.': - return False - if text[:3] == '.\\"': - return True - if text[:4] == '.TH ': - return True - if text[1:3].isalnum() and text[3].isspace(): - return 0.9 - - -class ApacheConfLexer(RegexLexer): - """ - Lexer for configuration files following the Apache config file - format. - - .. 
versionadded:: 0.6 - """ - - name = 'ApacheConf' - aliases = ['apacheconf', 'aconf', 'apache'] - filenames = ['.htaccess', 'apache.conf', 'apache2.conf'] - mimetypes = ['text/x-apacheconf'] - flags = re.MULTILINE | re.IGNORECASE - - tokens = { - 'root': [ - (r'\s+', Text), - (r'(#.*?)$', Comment), - (r'(<[^\s>]+)(?:(\s+)(.*?))?(>)', - bygroups(Name.Tag, Text, String, Name.Tag)), - (r'([a-z]\w*)(\s+)', - bygroups(Name.Builtin, Text), 'value'), - (r'\.+', Text), - ], - 'value': [ - (r'$', Text, '#pop'), - (r'[^\S\n]+', Text), - (r'\d+\.\d+\.\d+\.\d+(?:/\d+)?', Number), - (r'\d+', Number), - (r'/([a-z0-9][\w./-]+)', String.Other), - (r'(on|off|none|any|all|double|email|dns|min|minimal|' - r'os|productonly|full|emerg|alert|crit|error|warn|' - r'notice|info|debug|registry|script|inetd|standalone|' - r'user|group)\b', Keyword), - (r'"([^"\\]*(?:\\.[^"\\]*)*)"', String.Double), - (r'[^\s"]+', Text) - ] - } - - -class MoinWikiLexer(RegexLexer): - """ - For MoinMoin (and Trac) Wiki markup. - - .. versionadded:: 0.7 - """ - - name = 'MoinMoin/Trac Wiki markup' - aliases = ['trac-wiki', 'moin'] - filenames = [] - mimetypes = ['text/x-trac-wiki'] - flags = re.MULTILINE | re.IGNORECASE - - tokens = { - 'root': [ - (r'^#.*$', Comment), - (r'(!)(\S+)', bygroups(Keyword, Text)), # Ignore-next - # Titles - (r'^(=+)([^=]+)(=+)(\s*#.+)?$', - bygroups(Generic.Heading, using(this), Generic.Heading, String)), - # Literal code blocks, with optional shebang - (r'({{{)(\n#!.+)?', bygroups(Name.Builtin, Name.Namespace), 'codeblock'), - (r'(\'\'\'?|\|\||`|__|~~|\^|,,|::)', Comment), # Formatting - # Lists - (r'^( +)([.*-])( )', bygroups(Text, Name.Builtin, Text)), - (r'^( +)([a-z]{1,5}\.)( )', bygroups(Text, Name.Builtin, Text)), - # Other Formatting - (r'\[\[\w+.*?\]\]', Keyword), # Macro - (r'(\[[^\s\]]+)(\s+[^\]]+?)?(\])', - bygroups(Keyword, String, Keyword)), # Link - (r'^----+$', Keyword), # Horizontal rules - (r'[^\n\'\[{!_~^,|]+', Text), - (r'\n', Text), - (r'.', Text), - ], - 'codeblock': [ - (r'}}}', Name.Builtin, '#pop'), - # these blocks are allowed to be nested in Trac, but not MoinMoin - (r'{{{', Text, '#push'), - (r'[^{}]+', Comment.Preproc), # slurp boring text - (r'.', Comment.Preproc), # allow loose { or } - ], - } - - -class RstLexer(RegexLexer): - """ - For `reStructuredText `_ markup. - - .. versionadded:: 0.7 - - Additional options accepted: - - `handlecodeblocks` - Highlight the contents of ``.. sourcecode:: language``, - ``.. code:: language`` and ``.. code-block:: language`` - directives with a lexer for the given language (default: - ``True``). - - .. 
versionadded:: 0.8 - """ - name = 'reStructuredText' - aliases = ['rst', 'rest', 'restructuredtext'] - filenames = ['*.rst', '*.rest'] - mimetypes = ["text/x-rst", "text/prs.fallenstein.rst"] - flags = re.MULTILINE - - def _handle_sourcecode(self, match): - from pygments.lexers import get_lexer_by_name - - # section header - yield match.start(1), Punctuation, match.group(1) - yield match.start(2), Text, match.group(2) - yield match.start(3), Operator.Word, match.group(3) - yield match.start(4), Punctuation, match.group(4) - yield match.start(5), Text, match.group(5) - yield match.start(6), Keyword, match.group(6) - yield match.start(7), Text, match.group(7) - - # lookup lexer if wanted and existing - lexer = None - if self.handlecodeblocks: - try: - lexer = get_lexer_by_name(match.group(6).strip()) - except ClassNotFound: - pass - indention = match.group(8) - indention_size = len(indention) - code = (indention + match.group(9) + match.group(10) + match.group(11)) - - # no lexer for this language. handle it like it was a code block - if lexer is None: - yield match.start(8), String, code - return - - # highlight the lines with the lexer. - ins = [] - codelines = code.splitlines(True) - code = '' - for line in codelines: - if len(line) > indention_size: - ins.append((len(code), [(0, Text, line[:indention_size])])) - code += line[indention_size:] - else: - code += line - for item in do_insertions(ins, lexer.get_tokens_unprocessed(code)): - yield item - - # from docutils.parsers.rst.states - closers = u'\'")]}>\u2019\u201d\xbb!?' - unicode_delimiters = u'\u2010\u2011\u2012\u2013\u2014\u00a0' - end_string_suffix = (r'((?=$)|(?=[-/:.,; \n\x00%s%s]))' - % (re.escape(unicode_delimiters), - re.escape(closers))) - - tokens = { - 'root': [ - # Heading with overline - (r'^(=+|-+|`+|:+|\.+|\'+|"+|~+|\^+|_+|\*+|\++|#+)([ \t]*\n)' - r'(.+)(\n)(\1)(\n)', - bygroups(Generic.Heading, Text, Generic.Heading, - Text, Generic.Heading, Text)), - # Plain heading - (r'^(\S.*)(\n)(={3,}|-{3,}|`{3,}|:{3,}|\.{3,}|\'{3,}|"{3,}|' - r'~{3,}|\^{3,}|_{3,}|\*{3,}|\+{3,}|#{3,})(\n)', - bygroups(Generic.Heading, Text, Generic.Heading, Text)), - # Bulleted lists - (r'^(\s*)([-*+])( .+\n(?:\1 .+\n)*)', - bygroups(Text, Number, using(this, state='inline'))), - # Numbered lists - (r'^(\s*)([0-9#ivxlcmIVXLCM]+\.)( .+\n(?:\1 .+\n)*)', - bygroups(Text, Number, using(this, state='inline'))), - (r'^(\s*)(\(?[0-9#ivxlcmIVXLCM]+\))( .+\n(?:\1 .+\n)*)', - bygroups(Text, Number, using(this, state='inline'))), - # Numbered, but keep words at BOL from becoming lists - (r'^(\s*)([A-Z]+\.)( .+\n(?:\1 .+\n)+)', - bygroups(Text, Number, using(this, state='inline'))), - (r'^(\s*)(\(?[A-Za-z]+\))( .+\n(?:\1 .+\n)+)', - bygroups(Text, Number, using(this, state='inline'))), - # Line blocks - (r'^(\s*)(\|)( .+\n(?:\| .+\n)*)', - bygroups(Text, Operator, using(this, state='inline'))), - # Sourcecode directives - (r'^( *\.\.)(\s*)((?:source)?code(?:-block)?)(::)([ \t]*)([^\n]+)' - r'(\n[ \t]*\n)([ \t]+)(.*)(\n)((?:(?:\8.*|)\n)+)', - _handle_sourcecode), - # A directive - (r'^( *\.\.)(\s*)([\w:-]+?)(::)(?:([ \t]*)(.*))', - bygroups(Punctuation, Text, Operator.Word, Punctuation, Text, - using(this, state='inline'))), - # A reference target - (r'^( *\.\.)(\s*)(_(?:[^:\\]|\\.)+:)(.*?)$', - bygroups(Punctuation, Text, Name.Tag, using(this, state='inline'))), - # A footnote/citation target - (r'^( *\.\.)(\s*)(\[.+\])(.*?)$', - bygroups(Punctuation, Text, Name.Tag, using(this, state='inline'))), - # A substitution def - (r'^( 
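The `_handle_sourcecode` callback above hands the body of ``.. sourcecode::`` / ``.. code-block::`` directives to the lexer registered for the named language, and falls back to a single String token when `handlecodeblocks` is off or no such lexer exists. A minimal usage sketch, not part of this patch; the sample text is invented for illustration::

    from pygments.lexers import get_lexer_by_name

    rst = get_lexer_by_name('rst')      # handlecodeblocks=True is the default

    text = (
        'Example\n'
        '=======\n'
        '\n'
        '.. code-block:: python\n'
        '\n'
        '   print("highlighted by the Python lexer")\n'
        '\n'
    )

    # With handlecodeblocks enabled the indented directive body is
    # re-tokenized by the lexer looked up for "python"; with
    # get_lexer_by_name('rst', handlecodeblocks=False) the same body would
    # come out as one String token.
    for token, value in rst.get_tokens(text):
        print('%s %r' % (token, value))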
*\.\.)(\s*)(\|.+\|)(\s*)([\w:-]+?)(::)(?:([ \t]*)(.*))', - bygroups(Punctuation, Text, Name.Tag, Text, Operator.Word, - Punctuation, Text, using(this, state='inline'))), - # Comments - (r'^ *\.\..*(\n( +.*\n|\n)+)?', Comment.Preproc), - # Field list - (r'^( *)(:[a-zA-Z-]+:)(\s*)$', bygroups(Text, Name.Class, Text)), - (r'^( *)(:.*?:)([ \t]+)(.*?)$', - bygroups(Text, Name.Class, Text, Name.Function)), - # Definition list - (r'^(\S.*(?)(`__?)', # reference with inline target - bygroups(String, String.Interpol, String)), - (r'`.+?`__?', String), # reference - (r'(`.+?`)(:[a-zA-Z0-9:-]+?:)?', - bygroups(Name.Variable, Name.Attribute)), # role - (r'(:[a-zA-Z0-9:-]+?:)(`.+?`)', - bygroups(Name.Attribute, Name.Variable)), # role (content first) - (r'\*\*.+?\*\*', Generic.Strong), # Strong emphasis - (r'\*.+?\*', Generic.Emph), # Emphasis - (r'\[.*?\]_', String), # Footnote or citation - (r'<.+?>', Name.Tag), # Hyperlink - (r'[^\\\n\[*`:]+', Text), - (r'.', Text), - ], - 'literal': [ - (r'[^`]+', String), - (r'``' + end_string_suffix, String, '#pop'), - (r'`', String), - ] - } - - def __init__(self, **options): - self.handlecodeblocks = get_bool_opt(options, 'handlecodeblocks', True) - RegexLexer.__init__(self, **options) - - def analyse_text(text): - if text[:2] == '..' and text[2:3] != '.': - return 0.3 - p1 = text.find("\n") - p2 = text.find("\n", p1 + 1) - if (p2 > -1 and # has two lines - p1 * 2 + 1 == p2 and # they are the same length - text[p1+1] in '-=' and # the next line both starts and ends with - text[p1+1] == text[p2-1]): # ...a sufficiently high header - return 0.5 - - -class VimLexer(RegexLexer): - """ - Lexer for VimL script files. - - .. versionadded:: 0.8 - """ - name = 'VimL' - aliases = ['vim'] - filenames = ['*.vim', '.vimrc', '.exrc', '.gvimrc', - '_vimrc', '_exrc', '_gvimrc', 'vimrc', 'gvimrc'] - mimetypes = ['text/x-vim'] - flags = re.MULTILINE - - _python = r'py(?:t(?:h(?:o(?:n)?)?)?)?' - - tokens = { - 'root': [ - (r'^([ \t:]*)(' + _python + r')([ \t]*)(<<)([ \t]*)(.*)((?:\n|.)*)(\6)', - bygroups(using(this), Keyword, Text, Operator, Text, Text, - using(PythonLexer), Text)), - (r'^([ \t:]*)(' + _python + r')([ \t])(.*)', - bygroups(using(this), Keyword, Text, using(PythonLexer))), - - (r'^\s*".*', Comment), - - (r'[ \t]+', Text), - # TODO: regexes can have other delims - (r'/(\\\\|\\/|[^\n/])*/', String.Regex), - (r'"(\\\\|\\"|[^\n"])*"', String.Double), - (r"'(''|[^\n'])*'", String.Single), - - # Who decided that doublequote was a good comment character?? - (r'(?<=\s)"[^\-:.%#=*].*', Comment), - (r'-?\d+', Number), - (r'#[0-9a-f]{6}', Number.Hex), - (r'^:', Punctuation), - (r'[()<>+=!|,~-]', Punctuation), # Inexact list. Looks decent. - (r'\b(let|if|else|endif|elseif|fun|function|endfunction)\b', - Keyword), - (r'\b(NONE|bold|italic|underline|dark|light)\b', Name.Builtin), - (r'\b\w+\b', Name.Other), # These are postprocessed below - (r'.', Text), - ], - } - def __init__(self, **options): - from pygments.lexers._vimbuiltins import command, option, auto - self._cmd = command - self._opt = option - self._aut = auto - - RegexLexer.__init__(self, **options) - - def is_in(self, w, mapping): - r""" - It's kind of difficult to decide if something might be a keyword - in VimL because it allows you to abbreviate them. In fact, - 'ab[breviate]' is a good example. 
:ab, :abbre, or :abbreviate are - valid ways to call it so rather than making really awful regexps - like:: - - \bab(?:b(?:r(?:e(?:v(?:i(?:a(?:t(?:e)?)?)?)?)?)?)?)?\b - - we match `\b\w+\b` and then call is_in() on those tokens. See - `scripts/get_vimkw.py` for how the lists are extracted. - """ - p = bisect(mapping, (w,)) - if p > 0: - if mapping[p-1][0] == w[:len(mapping[p-1][0])] and \ - mapping[p-1][1][:len(w)] == w: return True - if p < len(mapping): - return mapping[p][0] == w[:len(mapping[p][0])] and \ - mapping[p][1][:len(w)] == w - return False - - def get_tokens_unprocessed(self, text): - # TODO: builtins are only subsequent tokens on lines - # and 'keywords' only happen at the beginning except - # for :au ones - for index, token, value in \ - RegexLexer.get_tokens_unprocessed(self, text): - if token is Name.Other: - if self.is_in(value, self._cmd): - yield index, Keyword, value - elif self.is_in(value, self._opt) or \ - self.is_in(value, self._aut): - yield index, Name.Builtin, value - else: - yield index, Text, value - else: - yield index, token, value - - -class GettextLexer(RegexLexer): - """ - Lexer for Gettext catalog files. - - .. versionadded:: 0.9 - """ - name = 'Gettext Catalog' - aliases = ['pot', 'po'] - filenames = ['*.pot', '*.po'] - mimetypes = ['application/x-gettext', 'text/x-gettext', 'text/gettext'] - - tokens = { - 'root': [ - (r'^#,\s.*?$', Keyword.Type), - (r'^#:\s.*?$', Keyword.Declaration), - #(r'^#$', Comment), - (r'^(#|#\.\s|#\|\s|#~\s|#\s).*$', Comment.Single), - (r'^(")([A-Za-z-]+:)(.*")$', - bygroups(String, Name.Property, String)), - (r'^".*"$', String), - (r'^(msgid|msgid_plural|msgstr)(\s+)(".*")$', - bygroups(Name.Variable, Text, String)), - (r'^(msgstr\[)(\d)(\])(\s+)(".*")$', - bygroups(Name.Variable, Number.Integer, Name.Variable, Text, String)), - ] - } - - -class SquidConfLexer(RegexLexer): - """ - Lexer for `squid `_ configuration files. - - .. 
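The bisect-based prefix test described in that docstring can be exercised on its own. A standalone sketch of the same lookup; the three (abbreviation, full name) pairs are invented and far shorter than the real tables generated by `scripts/get_vimkw.py`::

    from bisect import bisect

    # Sorted (abbreviation, full_name) pairs, the shape get_vimkw.py emits.
    mapping = sorted([('ab', 'abbreviate'), ('fu', 'function'), ('se', 'set')])

    def is_in(w, mapping):
        p = bisect(mapping, (w,))
        if p > 0:
            if mapping[p - 1][0] == w[:len(mapping[p - 1][0])] and \
               mapping[p - 1][1][:len(w)] == w:
                return True
        if p < len(mapping):
            return mapping[p][0] == w[:len(mapping[p][0])] and \
                   mapping[p][1][:len(w)] == w
        return False

    # Any prefix of 'abbreviate' at least as long as the minimal
    # abbreviation 'ab' is accepted; unrelated words are rejected.
    assert is_in('ab', mapping)
    assert is_in('abbre', mapping)
    assert is_in('abbreviate', mapping)
    assert not is_in('abz', mapping)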
versionadded:: 0.9 - """ - - name = 'SquidConf' - aliases = ['squidconf', 'squid.conf', 'squid'] - filenames = ['squid.conf'] - mimetypes = ['text/x-squidconf'] - flags = re.IGNORECASE - - keywords = [ - "access_log", "acl", "always_direct", "announce_host", - "announce_period", "announce_port", "announce_to", "anonymize_headers", - "append_domain", "as_whois_server", "auth_param_basic", - "authenticate_children", "authenticate_program", "authenticate_ttl", - "broken_posts", "buffered_logs", "cache_access_log", "cache_announce", - "cache_dir", "cache_dns_program", "cache_effective_group", - "cache_effective_user", "cache_host", "cache_host_acl", - "cache_host_domain", "cache_log", "cache_mem", "cache_mem_high", - "cache_mem_low", "cache_mgr", "cachemgr_passwd", "cache_peer", - "cache_peer_access", "cahce_replacement_policy", "cache_stoplist", - "cache_stoplist_pattern", "cache_store_log", "cache_swap", - "cache_swap_high", "cache_swap_log", "cache_swap_low", "client_db", - "client_lifetime", "client_netmask", "connect_timeout", "coredump_dir", - "dead_peer_timeout", "debug_options", "delay_access", "delay_class", - "delay_initial_bucket_level", "delay_parameters", "delay_pools", - "deny_info", "dns_children", "dns_defnames", "dns_nameservers", - "dns_testnames", "emulate_httpd_log", "err_html_text", - "fake_user_agent", "firewall_ip", "forwarded_for", "forward_snmpd_port", - "fqdncache_size", "ftpget_options", "ftpget_program", "ftp_list_width", - "ftp_passive", "ftp_user", "half_closed_clients", "header_access", - "header_replace", "hierarchy_stoplist", "high_response_time_warning", - "high_page_fault_warning", "hosts_file", "htcp_port", "http_access", - "http_anonymizer", "httpd_accel", "httpd_accel_host", - "httpd_accel_port", "httpd_accel_uses_host_header", - "httpd_accel_with_proxy", "http_port", "http_reply_access", - "icp_access", "icp_hit_stale", "icp_port", "icp_query_timeout", - "ident_lookup", "ident_lookup_access", "ident_timeout", - "incoming_http_average", "incoming_icp_average", "inside_firewall", - "ipcache_high", "ipcache_low", "ipcache_size", "local_domain", - "local_ip", "logfile_rotate", "log_fqdn", "log_icp_queries", - "log_mime_hdrs", "maximum_object_size", "maximum_single_addr_tries", - "mcast_groups", "mcast_icp_query_timeout", "mcast_miss_addr", - "mcast_miss_encode_key", "mcast_miss_port", "memory_pools", - "memory_pools_limit", "memory_replacement_policy", "mime_table", - "min_http_poll_cnt", "min_icp_poll_cnt", "minimum_direct_hops", - "minimum_object_size", "minimum_retry_timeout", "miss_access", - "negative_dns_ttl", "negative_ttl", "neighbor_timeout", - "neighbor_type_domain", "netdb_high", "netdb_low", "netdb_ping_period", - "netdb_ping_rate", "never_direct", "no_cache", "passthrough_proxy", - "pconn_timeout", "pid_filename", "pinger_program", "positive_dns_ttl", - "prefer_direct", "proxy_auth", "proxy_auth_realm", "query_icmp", - "quick_abort", "quick_abort", "quick_abort_max", "quick_abort_min", - "quick_abort_pct", "range_offset_limit", "read_timeout", - "redirect_children", "redirect_program", - "redirect_rewrites_host_header", "reference_age", "reference_age", - "refresh_pattern", "reload_into_ims", "request_body_max_size", - "request_size", "request_timeout", "shutdown_lifetime", - "single_parent_bypass", "siteselect_timeout", "snmp_access", - "snmp_incoming_address", "snmp_port", "source_ping", "ssl_proxy", - "store_avg_object_size", "store_objects_per_bucket", - "strip_query_terms", "swap_level1_dirs", "swap_level2_dirs", - "tcp_incoming_address", 
"tcp_outgoing_address", "tcp_recv_bufsize", - "test_reachability", "udp_hit_obj", "udp_hit_obj_size", - "udp_incoming_address", "udp_outgoing_address", "unique_hostname", - "unlinkd_program", "uri_whitespace", "useragent_log", - "visible_hostname", "wais_relay", "wais_relay_host", "wais_relay_port", - ] - - opts = [ - "proxy-only", "weight", "ttl", "no-query", "default", "round-robin", - "multicast-responder", "on", "off", "all", "deny", "allow", "via", - "parent", "no-digest", "heap", "lru", "realm", "children", "q1", "q2", - "credentialsttl", "none", "disable", "offline_toggle", "diskd", - ] - - actions = [ - "shutdown", "info", "parameter", "server_list", "client_list", - r'squid\.conf', - ] - - actions_stats = [ - "objects", "vm_objects", "utilization", "ipcache", "fqdncache", "dns", - "redirector", "io", "reply_headers", "filedescriptors", "netdb", - ] - - actions_log = ["status", "enable", "disable", "clear"] - - acls = [ - "url_regex", "urlpath_regex", "referer_regex", "port", "proto", - "req_mime_type", "rep_mime_type", "method", "browser", "user", "src", - "dst", "time", "dstdomain", "ident", "snmp_community", - ] - - ip_re = ( - r'(?:(?:(?:[3-9]\d?|2(?:5[0-5]|[0-4]?\d)?|1\d{0,2}|0x0*[0-9a-f]{1,2}|' - r'0+[1-3]?[0-7]{0,2})(?:\.(?:[3-9]\d?|2(?:5[0-5]|[0-4]?\d)?|1\d{0,2}|' - r'0x0*[0-9a-f]{1,2}|0+[1-3]?[0-7]{0,2})){3})|(?!.*::.*::)(?:(?!:)|' - r':(?=:))(?:[0-9a-f]{0,4}(?:(?<=::)|(?`` outputs. - - .. versionadded:: 0.9 - """ - name = 'Debian Control file' - aliases = ['control', 'debcontrol'] - filenames = ['control'] - - tokens = { - 'root': [ - (r'^(Description)', Keyword, 'description'), - (r'^(Maintainer)(:\s*)', bygroups(Keyword, Text), 'maintainer'), - (r'^((Build-)?Depends)', Keyword, 'depends'), - (r'^((?:Python-)?Version)(:\s*)(\S+)$', - bygroups(Keyword, Text, Number)), - (r'^((?:Installed-)?Size)(:\s*)(\S+)$', - bygroups(Keyword, Text, Number)), - (r'^(MD5Sum|SHA1|SHA256)(:\s*)(\S+)$', - bygroups(Keyword, Text, Number)), - (r'^([a-zA-Z\-0-9\.]*?)(:\s*)(.*?)$', - bygroups(Keyword, Whitespace, String)), - ], - 'maintainer': [ - (r'<[^>]+>', Generic.Strong), - (r'<[^>]+>$', Generic.Strong, '#pop'), - (r',\n?', Text), - (r'.', Text), - ], - 'description': [ - (r'(.*)(Homepage)(: )(\S+)', - bygroups(Text, String, Name, Name.Class)), - (r':.*\n', Generic.Strong), - (r' .*\n', Text), - ('', Text, '#pop'), - ], - 'depends': [ - (r':\s*', Text), - (r'(\$)(\{)(\w+\s*:\s*\w+)', bygroups(Operator, Text, Name.Entity)), - (r'\(', Text, 'depend_vers'), - (r',', Text), - (r'\|', Operator), - (r'[\s]+', Text), - (r'[}\)]\s*$', Text, '#pop'), - (r'}', Text), - (r'[^,]$', Name.Function, '#pop'), - (r'([\+\.a-zA-Z0-9-])(\s*)', bygroups(Name.Function, Text)), - (r'\[.*?\]', Name.Entity), - ], - 'depend_vers': [ - (r'\),', Text, '#pop'), - (r'\)[^,]', Text, '#pop:2'), - (r'([><=]+)(\s*)([^\)]+)', bygroups(Operator, Text, Number)) - ] - } - - -class YamlLexerContext(LexerContext): - """Indentation context for the YAML lexer.""" - - def __init__(self, *args, **kwds): - super(YamlLexerContext, self).__init__(*args, **kwds) - self.indent_stack = [] - self.indent = -1 - self.next_indent = 0 - self.block_scalar_indent = None - - -class YamlLexer(ExtendedRegexLexer): - """ - Lexer for `YAML `_, a human-friendly data serialization - language. - - .. 
versionadded:: 0.11 - """ - - name = 'YAML' - aliases = ['yaml'] - filenames = ['*.yaml', '*.yml'] - mimetypes = ['text/x-yaml'] - - - def something(token_class): - """Do not produce empty tokens.""" - def callback(lexer, match, context): - text = match.group() - if not text: - return - yield match.start(), token_class, text - context.pos = match.end() - return callback - - def reset_indent(token_class): - """Reset the indentation levels.""" - def callback(lexer, match, context): - text = match.group() - context.indent_stack = [] - context.indent = -1 - context.next_indent = 0 - context.block_scalar_indent = None - yield match.start(), token_class, text - context.pos = match.end() - return callback - - def save_indent(token_class, start=False): - """Save a possible indentation level.""" - def callback(lexer, match, context): - text = match.group() - extra = '' - if start: - context.next_indent = len(text) - if context.next_indent < context.indent: - while context.next_indent < context.indent: - context.indent = context.indent_stack.pop() - if context.next_indent > context.indent: - extra = text[context.indent:] - text = text[:context.indent] - else: - context.next_indent += len(text) - if text: - yield match.start(), token_class, text - if extra: - yield match.start()+len(text), token_class.Error, extra - context.pos = match.end() - return callback - - def set_indent(token_class, implicit=False): - """Set the previously saved indentation level.""" - def callback(lexer, match, context): - text = match.group() - if context.indent < context.next_indent: - context.indent_stack.append(context.indent) - context.indent = context.next_indent - if not implicit: - context.next_indent += len(text) - yield match.start(), token_class, text - context.pos = match.end() - return callback - - def set_block_scalar_indent(token_class): - """Set an explicit indentation level for a block scalar.""" - def callback(lexer, match, context): - text = match.group() - context.block_scalar_indent = None - if not text: - return - increment = match.group(1) - if increment: - current_indent = max(context.indent, 0) - increment = int(increment) - context.block_scalar_indent = current_indent + increment - if text: - yield match.start(), token_class, text - context.pos = match.end() - return callback - - def parse_block_scalar_empty_line(indent_token_class, content_token_class): - """Process an empty line in a block scalar.""" - def callback(lexer, match, context): - text = match.group() - if (context.block_scalar_indent is None or - len(text) <= context.block_scalar_indent): - if text: - yield match.start(), indent_token_class, text - else: - indentation = text[:context.block_scalar_indent] - content = text[context.block_scalar_indent:] - yield match.start(), indent_token_class, indentation - yield (match.start()+context.block_scalar_indent, - content_token_class, content) - context.pos = match.end() - return callback - - def parse_block_scalar_indent(token_class): - """Process indentation spaces in a block scalar.""" - def callback(lexer, match, context): - text = match.group() - if context.block_scalar_indent is None: - if len(text) <= max(context.indent, 0): - context.stack.pop() - context.stack.pop() - return - context.block_scalar_indent = len(text) - else: - if len(text) < context.block_scalar_indent: - context.stack.pop() - context.stack.pop() - return - if text: - yield match.start(), token_class, text - context.pos = match.end() - return callback - - def parse_plain_scalar_indent(token_class): - """Process 
indentation spaces in a plain scalar.""" - def callback(lexer, match, context): - text = match.group() - if len(text) <= context.indent: - context.stack.pop() - context.stack.pop() - return - if text: - yield match.start(), token_class, text - context.pos = match.end() - return callback - - - - tokens = { - # the root rules - 'root': [ - # ignored whitespaces - (r'[ ]+(?=#|$)', Text), - # line breaks - (r'\n+', Text), - # a comment - (r'#[^\n]*', Comment.Single), - # the '%YAML' directive - (r'^%YAML(?=[ ]|$)', reset_indent(Name.Tag), 'yaml-directive'), - # the %TAG directive - (r'^%TAG(?=[ ]|$)', reset_indent(Name.Tag), 'tag-directive'), - # document start and document end indicators - (r'^(?:---|\.\.\.)(?=[ ]|$)', reset_indent(Name.Namespace), - 'block-line'), - # indentation spaces - (r'[ ]*(?![ \t\n\r\f\v]|$)', save_indent(Text, start=True), - ('block-line', 'indentation')), - ], - - # trailing whitespaces after directives or a block scalar indicator - 'ignored-line': [ - # ignored whitespaces - (r'[ ]+(?=#|$)', Text), - # a comment - (r'#[^\n]*', Comment.Single), - # line break - (r'\n', Text, '#pop:2'), - ], - - # the %YAML directive - 'yaml-directive': [ - # the version number - (r'([ ]+)([0-9]+\.[0-9]+)', - bygroups(Text, Number), 'ignored-line'), - ], - - # the %YAG directive - 'tag-directive': [ - # a tag handle and the corresponding prefix - (r'([ ]+)(!|![0-9A-Za-z_-]*!)' - r'([ ]+)(!|!?[0-9A-Za-z;/?:@&=+$,_.!~*\'()\[\]%-]+)', - bygroups(Text, Keyword.Type, Text, Keyword.Type), - 'ignored-line'), - ], - - # block scalar indicators and indentation spaces - 'indentation': [ - # trailing whitespaces are ignored - (r'[ ]*$', something(Text), '#pop:2'), - # whitespaces preceeding block collection indicators - (r'[ ]+(?=[?:-](?:[ ]|$))', save_indent(Text)), - # block collection indicators - (r'[?:-](?=[ ]|$)', set_indent(Punctuation.Indicator)), - # the beginning a block line - (r'[ ]*', save_indent(Text), '#pop'), - ], - - # an indented line in the block context - 'block-line': [ - # the line end - (r'[ ]*(?=#|$)', something(Text), '#pop'), - # whitespaces separating tokens - (r'[ ]+', Text), - # tags, anchors and aliases, - include('descriptors'), - # block collections and scalars - include('block-nodes'), - # flow collections and quoted scalars - include('flow-nodes'), - # a plain scalar - (r'(?=[^ \t\n\r\f\v?:,\[\]{}#&*!|>\'"%@`-]|[?:-][^ \t\n\r\f\v])', - something(Name.Variable), - 'plain-scalar-in-block-context'), - ], - - # tags, anchors, aliases - 'descriptors' : [ - # a full-form tag - (r'!<[0-9A-Za-z;/?:@&=+$,_.!~*\'()\[\]%-]+>', Keyword.Type), - # a tag in the form '!', '!suffix' or '!handle!suffix' - (r'!(?:[0-9A-Za-z_-]+)?' 
- r'(?:![0-9A-Za-z;/?:@&=+$,_.!~*\'()\[\]%-]+)?', Keyword.Type), - # an anchor - (r'&[0-9A-Za-z_-]+', Name.Label), - # an alias - (r'\*[0-9A-Za-z_-]+', Name.Variable), - ], - - # block collections and scalars - 'block-nodes': [ - # implicit key - (r':(?=[ ]|$)', set_indent(Punctuation.Indicator, implicit=True)), - # literal and folded scalars - (r'[|>]', Punctuation.Indicator, - ('block-scalar-content', 'block-scalar-header')), - ], - - # flow collections and quoted scalars - 'flow-nodes': [ - # a flow sequence - (r'\[', Punctuation.Indicator, 'flow-sequence'), - # a flow mapping - (r'\{', Punctuation.Indicator, 'flow-mapping'), - # a single-quoted scalar - (r'\'', String, 'single-quoted-scalar'), - # a double-quoted scalar - (r'\"', String, 'double-quoted-scalar'), - ], - - # the content of a flow collection - 'flow-collection': [ - # whitespaces - (r'[ ]+', Text), - # line breaks - (r'\n+', Text), - # a comment - (r'#[^\n]*', Comment.Single), - # simple indicators - (r'[?:,]', Punctuation.Indicator), - # tags, anchors and aliases - include('descriptors'), - # nested collections and quoted scalars - include('flow-nodes'), - # a plain scalar - (r'(?=[^ \t\n\r\f\v?:,\[\]{}#&*!|>\'"%@`])', - something(Name.Variable), - 'plain-scalar-in-flow-context'), - ], - - # a flow sequence indicated by '[' and ']' - 'flow-sequence': [ - # include flow collection rules - include('flow-collection'), - # the closing indicator - (r'\]', Punctuation.Indicator, '#pop'), - ], - - # a flow mapping indicated by '{' and '}' - 'flow-mapping': [ - # include flow collection rules - include('flow-collection'), - # the closing indicator - (r'\}', Punctuation.Indicator, '#pop'), - ], - - # block scalar lines - 'block-scalar-content': [ - # line break - (r'\n', Text), - # empty line - (r'^[ ]+$', - parse_block_scalar_empty_line(Text, Name.Constant)), - # indentation spaces (we may leave the state here) - (r'^[ ]*', parse_block_scalar_indent(Text)), - # line content - (r'[^\n\r\f\v]+', Name.Constant), - ], - - # the content of a literal or folded scalar - 'block-scalar-header': [ - # indentation indicator followed by chomping flag - (r'([1-9])?[+-]?(?=[ ]|$)', - set_block_scalar_indent(Punctuation.Indicator), - 'ignored-line'), - # chomping flag followed by indentation indicator - (r'[+-]?([1-9])?(?=[ ]|$)', - set_block_scalar_indent(Punctuation.Indicator), - 'ignored-line'), - ], - - # ignored and regular whitespaces in quoted scalars - 'quoted-scalar-whitespaces': [ - # leading and trailing whitespaces are ignored - (r'^[ ]+', Text), - (r'[ ]+$', Text), - # line breaks are ignored - (r'\n+', Text), - # other whitespaces are a part of the value - (r'[ ]+', Name.Variable), - ], - - # single-quoted scalars - 'single-quoted-scalar': [ - # include whitespace and line break rules - include('quoted-scalar-whitespaces'), - # escaping of the quote character - (r'\'\'', String.Escape), - # regular non-whitespace characters - (r'[^ \t\n\r\f\v\']+', String), - # the closing quote - (r'\'', String, '#pop'), - ], - - # double-quoted scalars - 'double-quoted-scalar': [ - # include whitespace and line break rules - include('quoted-scalar-whitespaces'), - # escaping of special characters - (r'\\[0abt\tn\nvfre "\\N_LP]', String), - # escape codes - (r'\\(?:x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4}|U[0-9A-Fa-f]{8})', - String.Escape), - # regular non-whitespace characters - (r'[^ \t\n\r\f\v\"\\]+', String), - # the closing quote - (r'"', String, '#pop'), - ], - - # the beginning of a new line while scanning a plain scalar - 
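The context callbacks wired into these states (`save_indent`, `set_indent`, `set_block_scalar_indent` and friends) are what let the lexer follow YAML's indentation rules, for example in block scalars. A small, invented snippet run through the finished lexer, not part of this patch::

    from pygments.lexers import get_lexer_by_name

    yaml = get_lexer_by_name('yaml')

    snippet = (
        'script: |2\n'
        '    echo hello\n'
    )

    for token, value in yaml.get_tokens(snippet):
        print('%s %r' % (token, value))
    # 'script' comes out as a plain scalar (Literal.Scalar.Plain), ':' and
    # '|' as Punctuation.Indicator, the explicit indentation indicator '2'
    # goes through set_block_scalar_indent(), and the scalar body
    # 'echo hello' is emitted as Name.Constant.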
'plain-scalar-in-block-context-new-line': [ - # empty lines - (r'^[ ]+$', Text), - # line breaks - (r'\n+', Text), - # document start and document end indicators - (r'^(?=---|\.\.\.)', something(Name.Namespace), '#pop:3'), - # indentation spaces (we may leave the block line state here) - (r'^[ ]*', parse_plain_scalar_indent(Text), '#pop'), - ], - - # a plain scalar in the block context - 'plain-scalar-in-block-context': [ - # the scalar ends with the ':' indicator - (r'[ ]*(?=:[ ]|:$)', something(Text), '#pop'), - # the scalar ends with whitespaces followed by a comment - (r'[ ]+(?=#)', Text, '#pop'), - # trailing whitespaces are ignored - (r'[ ]+$', Text), - # line breaks are ignored - (r'\n+', Text, 'plain-scalar-in-block-context-new-line'), - # other whitespaces are a part of the value - (r'[ ]+', Literal.Scalar.Plain), - # regular non-whitespace characters - (r'(?::(?![ \t\n\r\f\v])|[^ \t\n\r\f\v:])+', Literal.Scalar.Plain), - ], - - # a plain scalar is the flow context - 'plain-scalar-in-flow-context': [ - # the scalar ends with an indicator character - (r'[ ]*(?=[,:?\[\]{}])', something(Text), '#pop'), - # the scalar ends with a comment - (r'[ ]+(?=#)', Text, '#pop'), - # leading and trailing whitespaces are ignored - (r'^[ ]+', Text), - (r'[ ]+$', Text), - # line breaks are ignored - (r'\n+', Text), - # other whitespaces are a part of the value - (r'[ ]+', Name.Variable), - # regular non-whitespace characters - (r'[^ \t\n\r\f\v,:?\[\]{}]+', Name.Variable), - ], - - } - - def get_tokens_unprocessed(self, text=None, context=None): - if context is None: - context = YamlLexerContext(text, 0) - return super(YamlLexer, self).get_tokens_unprocessed(text, context) - - -class LighttpdConfLexer(RegexLexer): - """ - Lexer for `Lighttpd `_ configuration files. - - .. versionadded:: 0.11 - """ - name = 'Lighttpd configuration file' - aliases = ['lighty', 'lighttpd'] - filenames = [] - mimetypes = ['text/x-lighttpd-conf'] - - tokens = { - 'root': [ - (r'#.*\n', Comment.Single), - (r'/\S*', Name), # pathname - (r'[a-zA-Z._-]+', Keyword), - (r'\d+\.\d+\.\d+\.\d+(?:/\d+)?', Number), - (r'[0-9]+', Number), - (r'=>|=~|\+=|==|=|\+', Operator), - (r'\$[A-Z]+', Name.Builtin), - (r'[(){}\[\],]', Punctuation), - (r'"([^"\\]*(?:\\.[^"\\]*)*)"', String.Double), - (r'\s+', Text), - ], - - } - - -class NginxConfLexer(RegexLexer): - """ - Lexer for `Nginx `_ configuration files. - - .. versionadded:: 0.11 - """ - name = 'Nginx configuration file' - aliases = ['nginx'] - filenames = [] - mimetypes = ['text/x-nginx-conf'] - - tokens = { - 'root': [ - (r'(include)(\s+)([^\s;]+)', bygroups(Keyword, Text, Name)), - (r'[^\s;#]+', Keyword, 'stmt'), - include('base'), - ], - 'block': [ - (r'}', Punctuation, '#pop:2'), - (r'[^\s;#]+', Keyword.Namespace, 'stmt'), - include('base'), - ], - 'stmt': [ - (r'{', Punctuation, 'block'), - (r';', Punctuation, '#pop'), - include('base'), - ], - 'base': [ - (r'#.*\n', Comment.Single), - (r'on|off', Name.Constant), - (r'\$[^\s;#()]+', Name.Variable), - (r'([a-z0-9.-]+)(:)([0-9]+)', - bygroups(Name, Punctuation, Number.Integer)), - (r'[a-z-]+/[a-z-+]+', String), # mimetype - #(r'[a-zA-Z._-]+', Keyword), - (r'[0-9]+[km]?\b', Number.Integer), - (r'(~)(\s*)([^\s{]+)', bygroups(Punctuation, Text, String.Regex)), - (r'[:=~]', Punctuation), - (r'[^\s;#{}$]+', String), # catch all - (r'/[^\s;#]*', Name), # pathname - (r'\s+', Text), - (r'[$;]', Text), # leftover characters - ], - } - - -class CMakeLexer(RegexLexer): - """ - Lexer for `CMake `_ files. - - .. 
versionadded:: 1.2 - """ - name = 'CMake' - aliases = ['cmake'] - filenames = ['*.cmake', 'CMakeLists.txt'] - mimetypes = ['text/x-cmake'] - - tokens = { - 'root': [ - #(r'(ADD_CUSTOM_COMMAND|ADD_CUSTOM_TARGET|ADD_DEFINITIONS|' - # r'ADD_DEPENDENCIES|ADD_EXECUTABLE|ADD_LIBRARY|ADD_SUBDIRECTORY|' - # r'ADD_TEST|AUX_SOURCE_DIRECTORY|BUILD_COMMAND|BUILD_NAME|' - # r'CMAKE_MINIMUM_REQUIRED|CONFIGURE_FILE|CREATE_TEST_SOURCELIST|' - # r'ELSE|ELSEIF|ENABLE_LANGUAGE|ENABLE_TESTING|ENDFOREACH|' - # r'ENDFUNCTION|ENDIF|ENDMACRO|ENDWHILE|EXEC_PROGRAM|' - # r'EXECUTE_PROCESS|EXPORT_LIBRARY_DEPENDENCIES|FILE|FIND_FILE|' - # r'FIND_LIBRARY|FIND_PACKAGE|FIND_PATH|FIND_PROGRAM|FLTK_WRAP_UI|' - # r'FOREACH|FUNCTION|GET_CMAKE_PROPERTY|GET_DIRECTORY_PROPERTY|' - # r'GET_FILENAME_COMPONENT|GET_SOURCE_FILE_PROPERTY|' - # r'GET_TARGET_PROPERTY|GET_TEST_PROPERTY|IF|INCLUDE|' - # r'INCLUDE_DIRECTORIES|INCLUDE_EXTERNAL_MSPROJECT|' - # r'INCLUDE_REGULAR_EXPRESSION|INSTALL|INSTALL_FILES|' - # r'INSTALL_PROGRAMS|INSTALL_TARGETS|LINK_DIRECTORIES|' - # r'LINK_LIBRARIES|LIST|LOAD_CACHE|LOAD_COMMAND|MACRO|' - # r'MAKE_DIRECTORY|MARK_AS_ADVANCED|MATH|MESSAGE|OPTION|' - # r'OUTPUT_REQUIRED_FILES|PROJECT|QT_WRAP_CPP|QT_WRAP_UI|REMOVE|' - # r'REMOVE_DEFINITIONS|SEPARATE_ARGUMENTS|SET|' - # r'SET_DIRECTORY_PROPERTIES|SET_SOURCE_FILES_PROPERTIES|' - # r'SET_TARGET_PROPERTIES|SET_TESTS_PROPERTIES|SITE_NAME|' - # r'SOURCE_GROUP|STRING|SUBDIR_DEPENDS|SUBDIRS|' - # r'TARGET_LINK_LIBRARIES|TRY_COMPILE|TRY_RUN|UNSET|' - # r'USE_MANGLED_MESA|UTILITY_SOURCE|VARIABLE_REQUIRES|' - # r'VTK_MAKE_INSTANTIATOR|VTK_WRAP_JAVA|VTK_WRAP_PYTHON|' - # r'VTK_WRAP_TCL|WHILE|WRITE_FILE|' - # r'COUNTARGS)\b', Name.Builtin, 'args'), - (r'\b(\w+)([ \t]*)(\()', bygroups(Name.Builtin, Text, - Punctuation), 'args'), - include('keywords'), - include('ws') - ], - 'args': [ - (r'\(', Punctuation, '#push'), - (r'\)', Punctuation, '#pop'), - (r'(\${)(.+?)(})', bygroups(Operator, Name.Variable, Operator)), - (r'(\$<)(.+?)(>)', bygroups(Operator, Name.Variable, Operator)), - (r'(?s)".*?"', String.Double), - (r'\\\S+', String), - (r'[^\)$"# \t\n]+', String), - (r'\n', Text), # explicitly legal - include('keywords'), - include('ws') - ], - 'string': [ - - ], - 'keywords': [ - (r'\b(WIN32|UNIX|APPLE|CYGWIN|BORLAND|MINGW|MSVC|MSVC_IDE|MSVC60|' - r'MSVC70|MSVC71|MSVC80|MSVC90)\b', Keyword), - ], - 'ws': [ - (r'[ \t]+', Text), - (r'#.*\n', Comment), - ] - } - - def analyse_text(text): - exp = r'^ *CMAKE_MINIMUM_REQUIRED *\( *VERSION *\d(\.\d)* *( FATAL_ERROR)? *\) *$' - if re.search(exp, text, flags=re.MULTILINE | re.IGNORECASE): - return 0.8 - return 0.0 - - -class HttpLexer(RegexLexer): - """ - Lexer for HTTP sessions. - - .. 
versionadded:: 1.5 - """ - - name = 'HTTP' - aliases = ['http'] - - flags = re.DOTALL - - def header_callback(self, match): - if match.group(1).lower() == 'content-type': - content_type = match.group(5).strip() - if ';' in content_type: - content_type = content_type[:content_type.find(';')].strip() - self.content_type = content_type - yield match.start(1), Name.Attribute, match.group(1) - yield match.start(2), Text, match.group(2) - yield match.start(3), Operator, match.group(3) - yield match.start(4), Text, match.group(4) - yield match.start(5), Literal, match.group(5) - yield match.start(6), Text, match.group(6) - - def continuous_header_callback(self, match): - yield match.start(1), Text, match.group(1) - yield match.start(2), Literal, match.group(2) - yield match.start(3), Text, match.group(3) - - def content_callback(self, match): - content_type = getattr(self, 'content_type', None) - content = match.group() - offset = match.start() - if content_type: - from pygments.lexers import get_lexer_for_mimetype - try: - lexer = get_lexer_for_mimetype(content_type) - except ClassNotFound: - pass - else: - for idx, token, value in lexer.get_tokens_unprocessed(content): - yield offset + idx, token, value - return - yield offset, Text, content - - tokens = { - 'root': [ - (r'(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE|PATCH)( +)([^ ]+)( +)' - r'(HTTP)(/)(1\.[01])(\r?\n|$)', - bygroups(Name.Function, Text, Name.Namespace, Text, - Keyword.Reserved, Operator, Number, Text), - 'headers'), - (r'(HTTP)(/)(1\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|$)', - bygroups(Keyword.Reserved, Operator, Number, Text, Number, - Text, Name.Exception, Text), - 'headers'), - ], - 'headers': [ - (r'([^\s:]+)( *)(:)( *)([^\r\n]+)(\r?\n|$)', header_callback), - (r'([\t ]+)([^\r\n]+)(\r?\n|$)', continuous_header_callback), - (r'\r?\n', Text, 'content') - ], - 'content': [ - (r'.+', content_callback) - ] - } - - -class PyPyLogLexer(RegexLexer): - """ - Lexer for PyPy log files. - - .. 
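`content_callback` above re-tokenizes the message body with whatever lexer is registered for the Content-Type remembered by `header_callback`. A usage sketch with a made-up response; the JSON body ends up highlighted by the JSON lexer::

    from pygments.lexers import get_lexer_by_name

    http = get_lexer_by_name('http')

    response = (
        'HTTP/1.1 200 OK\r\n'
        'Content-Type: application/json; charset=utf-8\r\n'
        '\r\n'
        '{"status": "ok", "count": 2}\n'
    )

    # The headers are lexed by the 'headers' state; the blank line switches
    # to 'content', where content_callback looks up a lexer for
    # 'application/json' (parameters after ';' are stripped) and yields its
    # tokens at the matching offsets.
    for token, value in http.get_tokens(response):
        print('%s %r' % (token, value))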
versionadded:: 1.5 - """ - name = "PyPy Log" - aliases = ["pypylog", "pypy"] - filenames = ["*.pypylog"] - mimetypes = ['application/x-pypylog'] - - tokens = { - "root": [ - (r"\[\w+\] {jit-log-.*?$", Keyword, "jit-log"), - (r"\[\w+\] {jit-backend-counts$", Keyword, "jit-backend-counts"), - include("extra-stuff"), - ], - "jit-log": [ - (r"\[\w+\] jit-log-.*?}$", Keyword, "#pop"), - (r"^\+\d+: ", Comment), - (r"--end of the loop--", Comment), - (r"[ifp]\d+", Name), - (r"ptr\d+", Name), - (r"(\()(\w+(?:\.\w+)?)(\))", - bygroups(Punctuation, Name.Builtin, Punctuation)), - (r"[\[\]=,()]", Punctuation), - (r"(\d+\.\d+|inf|-inf)", Number.Float), - (r"-?\d+", Number.Integer), - (r"'.*'", String), - (r"(None|descr|ConstClass|ConstPtr|TargetToken)", Name), - (r"<.*?>+", Name.Builtin), - (r"(label|debug_merge_point|jump|finish)", Name.Class), - (r"(int_add_ovf|int_add|int_sub_ovf|int_sub|int_mul_ovf|int_mul|" - r"int_floordiv|int_mod|int_lshift|int_rshift|int_and|int_or|" - r"int_xor|int_eq|int_ne|int_ge|int_gt|int_le|int_lt|int_is_zero|" - r"int_is_true|" - r"uint_floordiv|uint_ge|uint_lt|" - r"float_add|float_sub|float_mul|float_truediv|float_neg|" - r"float_eq|float_ne|float_ge|float_gt|float_le|float_lt|float_abs|" - r"ptr_eq|ptr_ne|instance_ptr_eq|instance_ptr_ne|" - r"cast_int_to_float|cast_float_to_int|" - r"force_token|quasiimmut_field|same_as|virtual_ref_finish|" - r"virtual_ref|mark_opaque_ptr|" - r"call_may_force|call_assembler|call_loopinvariant|" - r"call_release_gil|call_pure|call|" - r"new_with_vtable|new_array|newstr|newunicode|new|" - r"arraylen_gc|" - r"getarrayitem_gc_pure|getarrayitem_gc|setarrayitem_gc|" - r"getarrayitem_raw|setarrayitem_raw|getfield_gc_pure|" - r"getfield_gc|getinteriorfield_gc|setinteriorfield_gc|" - r"getfield_raw|setfield_gc|setfield_raw|" - r"strgetitem|strsetitem|strlen|copystrcontent|" - r"unicodegetitem|unicodesetitem|unicodelen|" - r"guard_true|guard_false|guard_value|guard_isnull|" - r"guard_nonnull_class|guard_nonnull|guard_class|guard_no_overflow|" - r"guard_not_forced|guard_no_exception|guard_not_invalidated)", - Name.Builtin), - include("extra-stuff"), - ], - "jit-backend-counts": [ - (r"\[\w+\] jit-backend-counts}$", Keyword, "#pop"), - (r":", Punctuation), - (r"\d+", Number), - include("extra-stuff"), - ], - "extra-stuff": [ - (r"\s+", Text), - (r"#.*?$", Comment), - ], - } - - -class HxmlLexer(RegexLexer): - """ - Lexer for `haXe build `_ files. - - .. 
versionadded:: 1.6 - """ - name = 'Hxml' - aliases = ['haxeml', 'hxml'] - filenames = ['*.hxml'] - - tokens = { - 'root': [ - # Seperator - (r'(--)(next)', bygroups(Punctuation, Generic.Heading)), - # Compiler switches with one dash - (r'(-)(prompt|debug|v)', bygroups(Punctuation, Keyword.Keyword)), - # Compilerswitches with two dashes - (r'(--)(neko-source|flash-strict|flash-use-stage|no-opt|no-traces|' - r'no-inline|times|no-output)', bygroups(Punctuation, Keyword)), - # Targets and other options that take an argument - (r'(-)(cpp|js|neko|x|as3|swf9?|swf-lib|php|xml|main|lib|D|resource|' - r'cp|cmd)( +)(.+)', - bygroups(Punctuation, Keyword, Whitespace, String)), - # Options that take only numerical arguments - (r'(-)(swf-version)( +)(\d+)', - bygroups(Punctuation, Keyword, Number.Integer)), - # An Option that defines the size, the fps and the background - # color of an flash movie - (r'(-)(swf-header)( +)(\d+)(:)(\d+)(:)(\d+)(:)([A-Fa-f0-9]{6})', - bygroups(Punctuation, Keyword, Whitespace, Number.Integer, - Punctuation, Number.Integer, Punctuation, Number.Integer, - Punctuation, Number.Hex)), - # options with two dashes that takes arguments - (r'(--)(js-namespace|php-front|php-lib|remap|gen-hx-classes)( +)' - r'(.+)', bygroups(Punctuation, Keyword, Whitespace, String)), - # Single line comment, multiline ones are not allowed. - (r'#.*', Comment.Single) - ] - } - - -class EbnfLexer(RegexLexer): - """ - Lexer for `ISO/IEC 14977 EBNF - `_ - grammars. - - .. versionadded:: 2.0 - """ - - name = 'EBNF' - aliases = ['ebnf'] - filenames = ['*.ebnf'] - mimetypes = ['text/x-ebnf'] - - tokens = { - 'root': [ - include('whitespace'), - include('comment_start'), - include('identifier'), - (r'=', Operator, 'production'), - ], - 'production': [ - include('whitespace'), - include('comment_start'), - include('identifier'), - (r'"[^"]*"', String.Double), - (r"'[^']*'", String.Single), - (r'(\?[^?]*\?)', Name.Entity), - (r'[\[\]{}(),|]', Punctuation), - (r'-', Operator), - (r';', Punctuation, '#pop'), - ], - 'whitespace': [ - (r'\s+', Text), - ], - 'comment_start': [ - (r'\(\*', Comment.Multiline, 'comment'), - ], - 'comment': [ - (r'[^*)]', Comment.Multiline), - include('comment_start'), - (r'\*\)', Comment.Multiline, '#pop'), - (r'[*)]', Comment.Multiline), - ], - 'identifier': [ - (r'([a-zA-Z][a-zA-Z0-9 \-]*)', Keyword), - ], - } - -class TodotxtLexer(RegexLexer): - """ - Lexer for `Todo.txt `_ todo list format. - - .. versionadded:: 2.0 - """ - - name = 'Todotxt' - aliases = ['todotxt'] - # *.todotxt is not a standard extension for Todo.txt files; including it - # makes testing easier, and also makes autodetecting file type easier. - filenames = ['todo.txt', '*.todotxt'] - mimetypes = ['text/x-todo'] - - ## Aliases mapping standard token types of Todo.txt format concepts - CompleteTaskText = Operator # Chosen to de-emphasize complete tasks - IncompleteTaskText = Text # Incomplete tasks should look like plain text - - # Priority should have most emphasis to indicate importance of tasks - Priority = Generic.Heading - # Dates should have next most emphasis because time is important - Date = Generic.Subheading - - # Project and context should have equal weight, and be in different colors - Project = Generic.Error - Context = String - - # If tag functionality is added, it should have the same weight as Project - # and Context, and a different color. Generic.Traceback would work well. 
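The token aliases above decide how much emphasis each part of a task receives. A short usage sketch; the two sample tasks are invented::

    from pygments.lexers import get_lexer_by_name

    todo = get_lexer_by_name('todotxt')

    sample = (
        'x 2014-06-27 2014-06-25 post the patch +pygments @email\n'
        '(A) 2014-06-26 review the Nit lexer +pygments @code\n'
    )

    for token, value in todo.get_tokens(sample):
        print('%s %r' % (token, value))
    # The completed task is de-emphasized as CompleteTaskText (Operator)
    # with its two dates as Date (Generic.Subheading); '(A)' becomes
    # Priority (Generic.Heading); '+pygments' and '@email'/'@code' come out
    # as Project (Generic.Error) and Context (String).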
- - # Regex patterns for building up rules; dates, priorities, projects, and - # contexts are all atomic - # TODO: Make date regex more ISO 8601 compliant - date_regex = r'\d{4,}-\d{2}-\d{2}' - priority_regex = r'\([A-Z]\)' - project_regex = r'\+\S+' - context_regex = r'@\S+' - - # Compound regex expressions - complete_one_date_regex = r'(x )(' + date_regex + r')' - complete_two_date_regex = (complete_one_date_regex + r'( )(' + - date_regex + r')') - priority_date_regex = r'(' + priority_regex + r')( )(' + date_regex + r')' - - tokens = { - # Should parse starting at beginning of line; each line is a task - 'root': [ - ## Complete task entry points: two total: - # 1. Complete task with two dates - (complete_two_date_regex, bygroups(CompleteTaskText, Date, - CompleteTaskText, Date), - 'complete'), - # 2. Complete task with one date - (complete_one_date_regex, bygroups(CompleteTaskText, Date), - 'complete'), - - ## Incomplete task entry points: six total: - # 1. Priority plus date - (priority_date_regex, bygroups(Priority, IncompleteTaskText, Date), - 'incomplete'), - # 2. Priority only - (priority_regex, Priority, 'incomplete'), - # 3. Leading date - (date_regex, Date, 'incomplete'), - # 4. Leading context - (context_regex, Context, 'incomplete'), - # 5. Leading project - (project_regex, Project, 'incomplete'), - # 6. Non-whitespace catch-all - ('\S+', IncompleteTaskText, 'incomplete'), - ], - - # Parse a complete task - 'complete': [ - # Newline indicates end of task, should return to root - (r'\s*\n', CompleteTaskText, '#pop'), - # Tokenize contexts and projects - (context_regex, Context), - (project_regex, Project), - # Tokenize non-whitespace text - ('\S+', CompleteTaskText), - # Tokenize whitespace not containing a newline - ('\s+', CompleteTaskText), - ], - - # Parse an incomplete task - 'incomplete': [ - # Newline indicates end of task, should return to root - (r'\s*\n', IncompleteTaskText, '#pop'), - # Tokenize contexts and projects - (context_regex, Context), - (project_regex, Project), - # Tokenize non-whitespace text - ('\S+', IncompleteTaskText), - # Tokenize whitespace not containing a newline - ('\s+', IncompleteTaskText), - ], - } - - -class DockerLexer(RegexLexer): - """ - Lexer for `Docker `_ configuration files. - - .. 
versionadded:: 2.0 - """ - name = 'Docker' - aliases = ['docker', 'dockerfile'] - filenames = ['Dockerfile', '*.docker'] - mimetypes = ['text/x-dockerfile-config'] - - _keywords = (r'(?:FROM|MAINTAINER|RUN|CMD|EXPOSE|ENV|ADD|ENTRYPOINT|' - r'VOLUME|WORKDIR)') - - flags = re.IGNORECASE | re.MULTILINE - - tokens = { - 'root': [ - (r'^(ONBUILD)(\s+)(%s)\b' % (_keywords,), - bygroups(Name.Keyword, Whitespace, Keyword)), - (_keywords + r'\b', Keyword), - (r'#.*', Comment), - (r'.+', using(BashLexer)), - ], - } +from pygments.lexers.configs import ApacheConfLexer, NginxConfLexer, \ + SquidConfLexer, LighttpdConfLexer, IniLexer, RegeditLexer, PropertiesLexer +from pygments.lexers.console import PyPyLogLexer +from pygments.lexers.textedit import VimLexer +from pygments.lexers.markup import BBCodeLexer, MoinWikiLexer, RstLexer, \ + TexLexer, GroffLexer +from pygments.lexers.installers import DebianControlLexer, SourcesListLexer +from pygments.lexers.misc.make import MakefileLexer, BaseMakefileLexer, \ + CMakeLexer +from pygments.lexers.dsls import HxmlLexer +from pygments.lexers.diff import DiffLexer, DarcsPatchLexer +from pygments.lexers.data import YamlLexer +from pygments.lexers.textfmts import IrcLogsLexer, GettextLexer, HttpLexer + +__all__ = [] diff --git a/pygments/lexers/textedit.py b/pygments/lexers/textedit.py index 66255fae..1f6d3fee 100644 --- a/pygments/lexers/textedit.py +++ b/pygments/lexers/textedit.py @@ -9,11 +9,16 @@ :license: BSD, see LICENSE for details. """ -from pygments.lexer import RegexLexer, include, default +import re +from bisect import bisect + +from pygments.lexer import RegexLexer, include, default, bygroups, using, this from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ Number, Punctuation -__all__ = ['AwkLexer'] +from pygments.lexers.python import PythonLexer + +__all__ = ['AwkLexer', 'VimLexer'] class AwkLexer(RegexLexer): @@ -68,3 +73,97 @@ class AwkLexer(RegexLexer): (r"'(\\\\|\\'|[^'])*'", String.Single), ] } + + +class VimLexer(RegexLexer): + """ + Lexer for VimL script files. + + .. versionadded:: 0.8 + """ + name = 'VimL' + aliases = ['vim'] + filenames = ['*.vim', '.vimrc', '.exrc', '.gvimrc', + '_vimrc', '_exrc', '_gvimrc', 'vimrc', 'gvimrc'] + mimetypes = ['text/x-vim'] + flags = re.MULTILINE + + _python = r'py(?:t(?:h(?:o(?:n)?)?)?)?' + + tokens = { + 'root': [ + (r'^([ \t:]*)(' + _python + r')([ \t]*)(<<)([ \t]*)(.*)((?:\n|.)*)(\6)', + bygroups(using(this), Keyword, Text, Operator, Text, Text, + using(PythonLexer), Text)), + (r'^([ \t:]*)(' + _python + r')([ \t])(.*)', + bygroups(using(this), Keyword, Text, using(PythonLexer))), + + (r'^\s*".*', Comment), + + (r'[ \t]+', Text), + # TODO: regexes can have other delims + (r'/(\\\\|\\/|[^\n/])*/', String.Regex), + (r'"(\\\\|\\"|[^\n"])*"', String.Double), + (r"'(''|[^\n'])*'", String.Single), + + # Who decided that doublequote was a good comment character?? + (r'(?<=\s)"[^\-:.%#=*].*', Comment), + (r'-?\d+', Number), + (r'#[0-9a-f]{6}', Number.Hex), + (r'^:', Punctuation), + (r'[()<>+=!|,~-]', Punctuation), # Inexact list. Looks decent. 
+ (r'\b(let|if|else|endif|elseif|fun|function|endfunction)\b', + Keyword), + (r'\b(NONE|bold|italic|underline|dark|light)\b', Name.Builtin), + (r'\b\w+\b', Name.Other), # These are postprocessed below + (r'.', Text), + ], + } + + def __init__(self, **options): + from pygments.lexers._vimbuiltins import command, option, auto + self._cmd = command + self._opt = option + self._aut = auto + + RegexLexer.__init__(self, **options) + + def is_in(self, w, mapping): + r""" + It's kind of difficult to decide if something might be a keyword + in VimL because it allows you to abbreviate them. In fact, + 'ab[breviate]' is a good example. :ab, :abbre, or :abbreviate are + valid ways to call it so rather than making really awful regexps + like:: + + \bab(?:b(?:r(?:e(?:v(?:i(?:a(?:t(?:e)?)?)?)?)?)?)?)?\b + + we match `\b\w+\b` and then call is_in() on those tokens. See + `scripts/get_vimkw.py` for how the lists are extracted. + """ + p = bisect(mapping, (w,)) + if p > 0: + if mapping[p-1][0] == w[:len(mapping[p-1][0])] and \ + mapping[p-1][1][:len(w)] == w: + return True + if p < len(mapping): + return mapping[p][0] == w[:len(mapping[p][0])] and \ + mapping[p][1][:len(w)] == w + return False + + def get_tokens_unprocessed(self, text): + # TODO: builtins are only subsequent tokens on lines + # and 'keywords' only happen at the beginning except + # for :au ones + for index, token, value in \ + RegexLexer.get_tokens_unprocessed(self, text): + if token is Name.Other: + if self.is_in(value, self._cmd): + yield index, Keyword, value + elif self.is_in(value, self._opt) or \ + self.is_in(value, self._aut): + yield index, Name.Builtin, value + else: + yield index, Text, value + else: + yield index, token, value diff --git a/pygments/lexers/textfmts.py b/pygments/lexers/textfmts.py new file mode 100644 index 00000000..63e67182 --- /dev/null +++ b/pygments/lexers/textfmts.py @@ -0,0 +1,277 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.textfmts + ~~~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for various text formats. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import re + +from pygments.lexer import RegexLexer, bygroups +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Generic, Literal +from pygments.util import ClassNotFound + +__all__ = ['IrcLogsLexer', 'TodotxtLexer', 'HttpLexer', 'GettextLexer'] + + +class IrcLogsLexer(RegexLexer): + """ + Lexer for IRC logs in *irssi*, *xchat* or *weechat* style. + """ + + name = 'IRC logs' + aliases = ['irc'] + filenames = ['*.weechatlog'] + mimetypes = ['text/x-irclog'] + + flags = re.VERBOSE | re.MULTILINE + timestamp = r""" + ( + # irssi / xchat and others + (?: \[|\()? # Opening bracket or paren for the timestamp + (?: # Timestamp + (?: (?:\d{1,4} [-/]?)+ # Date as - or /-separated groups of digits + [T ])? # Date/time separator: T or space + (?: \d?\d [:.]?)+ # Time as :/.-separated groups of 1 or 2 digits + ) + (?: \]|\))?\s+ # Closing bracket or paren for the timestamp + | + # weechat + \d{4}\s\w{3}\s\d{2}\s # Date + \d{2}:\d{2}:\d{2}\s+ # Time + Whitespace + | + # xchat + \w{3}\s\d{2}\s # Date + \d{2}:\d{2}:\d{2}\s+ # Time + Whitespace + )? 
+ """ + tokens = { + 'root': [ + # log start/end + (r'^\*\*\*\*(.*)\*\*\*\*$', Comment), + # hack + ("^" + timestamp + r'(\s*<[^>]*>\s*)$', bygroups(Comment.Preproc, Name.Tag)), + # normal msgs + ("^" + timestamp + r""" + (\s*<.*?>\s*) # Nick """, + bygroups(Comment.Preproc, Name.Tag), 'msg'), + # /me msgs + ("^" + timestamp + r""" + (\s*[*]\s+) # Star + (\S+\s+.*?\n) # Nick + rest of message """, + bygroups(Comment.Preproc, Keyword, Generic.Inserted)), + # join/part msgs + ("^" + timestamp + r""" + (\s*(?:\*{3}|?)\s*) # Star(s) or symbols + (\S+\s+) # Nick + Space + (.*?\n) # Rest of message """, + bygroups(Comment.Preproc, Keyword, String, Comment)), + (r"^.*?\n", Text), + ], + 'msg': [ + (r"\S+:(?!//)", Name.Attribute), # Prefix + (r".*\n", Text, '#pop'), + ], + } + + +class GettextLexer(RegexLexer): + """ + Lexer for Gettext catalog files. + + .. versionadded:: 0.9 + """ + name = 'Gettext Catalog' + aliases = ['pot', 'po'] + filenames = ['*.pot', '*.po'] + mimetypes = ['application/x-gettext', 'text/x-gettext', 'text/gettext'] + + tokens = { + 'root': [ + (r'^#,\s.*?$', Keyword.Type), + (r'^#:\s.*?$', Keyword.Declaration), + # (r'^#$', Comment), + (r'^(#|#\.\s|#\|\s|#~\s|#\s).*$', Comment.Single), + (r'^(")([A-Za-z-]+:)(.*")$', + bygroups(String, Name.Property, String)), + (r'^".*"$', String), + (r'^(msgid|msgid_plural|msgstr)(\s+)(".*")$', + bygroups(Name.Variable, Text, String)), + (r'^(msgstr\[)(\d)(\])(\s+)(".*")$', + bygroups(Name.Variable, Number.Integer, Name.Variable, Text, String)), + ] + } + + +class HttpLexer(RegexLexer): + """ + Lexer for HTTP sessions. + + .. versionadded:: 1.5 + """ + + name = 'HTTP' + aliases = ['http'] + + flags = re.DOTALL + + def header_callback(self, match): + if match.group(1).lower() == 'content-type': + content_type = match.group(5).strip() + if ';' in content_type: + content_type = content_type[:content_type.find(';')].strip() + self.content_type = content_type + yield match.start(1), Name.Attribute, match.group(1) + yield match.start(2), Text, match.group(2) + yield match.start(3), Operator, match.group(3) + yield match.start(4), Text, match.group(4) + yield match.start(5), Literal, match.group(5) + yield match.start(6), Text, match.group(6) + + def continuous_header_callback(self, match): + yield match.start(1), Text, match.group(1) + yield match.start(2), Literal, match.group(2) + yield match.start(3), Text, match.group(3) + + def content_callback(self, match): + content_type = getattr(self, 'content_type', None) + content = match.group() + offset = match.start() + if content_type: + from pygments.lexers import get_lexer_for_mimetype + try: + lexer = get_lexer_for_mimetype(content_type) + except ClassNotFound: + pass + else: + for idx, token, value in lexer.get_tokens_unprocessed(content): + yield offset + idx, token, value + return + yield offset, Text, content + + tokens = { + 'root': [ + (r'(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE|PATCH)( +)([^ ]+)( +)' + r'(HTTP)(/)(1\.[01])(\r?\n|$)', + bygroups(Name.Function, Text, Name.Namespace, Text, + Keyword.Reserved, Operator, Number, Text), + 'headers'), + (r'(HTTP)(/)(1\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|$)', + bygroups(Keyword.Reserved, Operator, Number, Text, Number, + Text, Name.Exception, Text), + 'headers'), + ], + 'headers': [ + (r'([^\s:]+)( *)(:)( *)([^\r\n]+)(\r?\n|$)', header_callback), + (r'([\t ]+)([^\r\n]+)(\r?\n|$)', continuous_header_callback), + (r'\r?\n', Text, 'content') + ], + 'content': [ + (r'.+', content_callback) + ] + } + + +class TodotxtLexer(RegexLexer): + """ + Lexer 
for `Todo.txt `_ todo list format. + + .. versionadded:: 2.0 + """ + + name = 'Todotxt' + aliases = ['todotxt'] + # *.todotxt is not a standard extension for Todo.txt files; including it + # makes testing easier, and also makes autodetecting file type easier. + filenames = ['todo.txt', '*.todotxt'] + mimetypes = ['text/x-todo'] + + # Aliases mapping standard token types of Todo.txt format concepts + CompleteTaskText = Operator # Chosen to de-emphasize complete tasks + IncompleteTaskText = Text # Incomplete tasks should look like plain text + + # Priority should have most emphasis to indicate importance of tasks + Priority = Generic.Heading + # Dates should have next most emphasis because time is important + Date = Generic.Subheading + + # Project and context should have equal weight, and be in different colors + Project = Generic.Error + Context = String + + # If tag functionality is added, it should have the same weight as Project + # and Context, and a different color. Generic.Traceback would work well. + + # Regex patterns for building up rules; dates, priorities, projects, and + # contexts are all atomic + # TODO: Make date regex more ISO 8601 compliant + date_regex = r'\d{4,}-\d{2}-\d{2}' + priority_regex = r'\([A-Z]\)' + project_regex = r'\+\S+' + context_regex = r'@\S+' + + # Compound regex expressions + complete_one_date_regex = r'(x )(' + date_regex + r')' + complete_two_date_regex = (complete_one_date_regex + r'( )(' + + date_regex + r')') + priority_date_regex = r'(' + priority_regex + r')( )(' + date_regex + r')' + + tokens = { + # Should parse starting at beginning of line; each line is a task + 'root': [ + # Complete task entry points: two total: + # 1. Complete task with two dates + (complete_two_date_regex, bygroups(CompleteTaskText, Date, + CompleteTaskText, Date), + 'complete'), + # 2. Complete task with one date + (complete_one_date_regex, bygroups(CompleteTaskText, Date), + 'complete'), + + # Incomplete task entry points: six total: + # 1. Priority plus date + (priority_date_regex, bygroups(Priority, IncompleteTaskText, Date), + 'incomplete'), + # 2. Priority only + (priority_regex, Priority, 'incomplete'), + # 3. Leading date + (date_regex, Date, 'incomplete'), + # 4. Leading context + (context_regex, Context, 'incomplete'), + # 5. Leading project + (project_regex, Project, 'incomplete'), + # 6. 
Non-whitespace catch-all + ('\S+', IncompleteTaskText, 'incomplete'), + ], + + # Parse a complete task + 'complete': [ + # Newline indicates end of task, should return to root + (r'\s*\n', CompleteTaskText, '#pop'), + # Tokenize contexts and projects + (context_regex, Context), + (project_regex, Project), + # Tokenize non-whitespace text + ('\S+', CompleteTaskText), + # Tokenize whitespace not containing a newline + ('\s+', CompleteTaskText), + ], + + # Parse an incomplete task + 'incomplete': [ + # Newline indicates end of task, should return to root + (r'\s*\n', IncompleteTaskText, '#pop'), + # Tokenize contexts and projects + (context_regex, Context), + (project_regex, Project), + # Tokenize non-whitespace text + ('\S+', IncompleteTaskText), + # Tokenize whitespace not containing a newline + ('\s+', IncompleteTaskText), + ], + } diff --git a/pygments/lexers/web.py b/pygments/lexers/web.py index 0188508f..b28d3df7 100644 --- a/pygments/lexers/web.py +++ b/pygments/lexers/web.py @@ -18,8 +18,8 @@ from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ Number, Other, Punctuation, Literal from pygments.util import get_bool_opt, get_list_opt, looks_like_xml, \ html_doctype_matches, unirange, iteritems -from pygments.lexers.agile import RubyLexer -from pygments.lexers.compiled import ScalaLexer +from pygments.lexers.ruby import RubyLexer +from pygments.lexers.jvm import ScalaLexer __all__ = ['HtmlLexer', 'XmlLexer', 'JavascriptLexer', 'JsonLexer', 'CssLexer', diff --git a/tests/test_perllexer.py b/tests/test_perllexer.py index bfa3aeb8..e37539f2 100644 --- a/tests/test_perllexer.py +++ b/tests/test_perllexer.py @@ -11,7 +11,7 @@ import time import unittest from pygments.token import String -from pygments.lexers.agile import PerlLexer +from pygments.lexers.perl import PerlLexer class RunawayRegexTest(unittest.TestCase): -- cgit v1.2.1 From fc8d1304d47bf289b28c9aade4f7b222ede5205c Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Fri, 19 Sep 2014 23:49:41 +0200 Subject: Fixup and word()ify math --- pygments/lexers/math.py | 1983 ++++++++++++++++++++++++----------------------- 1 file changed, 999 insertions(+), 984 deletions(-) diff --git a/pygments/lexers/math.py b/pygments/lexers/math.py index 27e4ad3a..01ffc84d 100644 --- a/pygments/lexers/math.py +++ b/pygments/lexers/math.py @@ -15,7 +15,7 @@ import re from pygments.util import shebang_matches from pygments.lexer import Lexer, RegexLexer, bygroups, include, \ - combined, do_insertions + combined, do_insertions, words from pygments.token import Comment, String, Punctuation, Keyword, Name, \ Operator, Number, Text, Generic @@ -36,16 +36,16 @@ class JuliaLexer(RegexLexer): .. 
versionadded:: 1.6 """ name = 'Julia' - aliases = ['julia','jl'] + aliases = ['julia', 'jl'] filenames = ['*.jl'] - mimetypes = ['text/x-julia','application/x-julia'] + mimetypes = ['text/x-julia', 'application/x-julia'] builtins = [ - 'exit','whos','edit','load','is','isa','isequal','typeof','tuple', - 'ntuple','uid','hash','finalizer','convert','promote','subtype', - 'typemin','typemax','realmin','realmax','sizeof','eps','promote_type', - 'method_exists','applicable','invoke','dlopen','dlsym','system', - 'error','throw','assert','new','Inf','Nan','pi','im', + 'exit', 'whos', 'edit', 'load', 'is', 'isa', 'isequal', 'typeof', 'tuple', + 'ntuple', 'uid', 'hash', 'finalizer', 'convert', 'promote', 'subtype', + 'typemin', 'typemax', 'realmin', 'realmax', 'sizeof', 'eps', 'promote_type', + 'method_exists', 'applicable', 'invoke', 'dlopen', 'dlsym', 'system', + 'error', 'throw', 'assert', 'new', 'Inf', 'Nan', 'pi', 'im', ] tokens = { @@ -69,11 +69,11 @@ class JuliaLexer(RegexLexer): # functions (r'(function)((?:\s|\\\s)+)', - bygroups(Keyword,Name.Function), 'funcname'), + bygroups(Keyword, Name.Function), 'funcname'), # types (r'(type|typealias|abstract)((?:\s|\\\s)+)', - bygroups(Keyword,Name.Class), 'typename'), + bygroups(Keyword, Name.Class), 'typename'), # operators (r'==|!=|<=|>=|->|&&|\|\||::|<:|[-~+/*%=<>&^|.?!$]', Operator), @@ -132,7 +132,7 @@ class JuliaLexer(RegexLexer): 'string': [ (r'"', String, '#pop'), - (r'\\\\|\\"|\\\n', String.Escape), # included here for raw strings + (r'\\\\|\\"|\\\n', String.Escape), # included here for raw strings (r'\$(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?', String.Interpol), (r'[^\\"$]+', String), @@ -149,6 +149,7 @@ class JuliaLexer(RegexLexer): line_re = re.compile('.*?\n') + class JuliaConsoleLexer(Lexer): """ For Julia console sessions. Modeled after MatlabSessionLexer. 
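Console lexers built on the MatlabSessionLexer pattern collect prompt and output tokens as "insertions" and splice them into the stream produced by the real language lexer via `do_insertions()`. A condensed, hypothetical sketch of that pattern (ToyConsoleLexer is invented here, it is not the Julia code below)::

    import re
    from pygments.lexer import Lexer, do_insertions
    from pygments.lexers import get_lexer_by_name
    from pygments.token import Generic

    line_re = re.compile('.*?\n')

    class ToyConsoleLexer(Lexer):
        """Highlight '>>> '-prompted lines with the Python lexer."""
        name = 'Toy console'
        aliases = ['toy-console']

        def get_tokens_unprocessed(self, text):
            pylexer = get_lexer_by_name('python')
            curcode = ''
            insertions = []
            for match in line_re.finditer(text):
                line = match.group()
                if line.startswith('>>> '):
                    # remember where the prompt goes, keep the code for later
                    insertions.append((len(curcode),
                                       [(0, Generic.Prompt, line[:4])]))
                    curcode += line[4:]
                else:
                    # flush the accumulated code, then emit the output line
                    if curcode:
                        for item in do_insertions(
                                insertions,
                                pylexer.get_tokens_unprocessed(curcode)):
                            yield item
                        curcode = ''
                        insertions = []
                    yield match.start(), Generic.Output, line
            if curcode:
                for item in do_insertions(
                        insertions, pylexer.get_tokens_unprocessed(curcode)):
                    yield item

    session = '>>> 1 + 1\n2\n'
    print(list(ToyConsoleLexer().get_tokens(session)))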
@@ -184,16 +185,16 @@ class JuliaConsoleLexer(Lexer): else: if curcode: for item in do_insertions( - insertions, jllexer.get_tokens_unprocessed(curcode)): + insertions, jllexer.get_tokens_unprocessed(curcode)): yield item curcode = '' insertions = [] yield match.start(), Generic.Output, line - if curcode: # or item: + if curcode: # or item: for item in do_insertions( - insertions, jllexer.get_tokens_unprocessed(curcode)): + insertions, jllexer.get_tokens_unprocessed(curcode)): yield item @@ -209,62 +210,62 @@ class MuPADLexer(RegexLexer): filenames = ['*.mu'] tokens = { - 'root' : [ - (r'//.*?$', Comment.Single), - (r'/\*', Comment.Multiline, 'comment'), - (r'"(?:[^"\\]|\\.)*"', String), - (r'\(|\)|\[|\]|\{|\}', Punctuation), - (r'''(?x)\b(?: - next|break|end| - axiom|end_axiom|category|end_category|domain|end_domain|inherits| - if|%if|then|elif|else|end_if| - case|of|do|otherwise|end_case| - while|end_while| - repeat|until|end_repeat| - for|from|to|downto|step|end_for| - proc|local|option|save|begin|end_proc| - delete|frame - )\b''', Keyword), - (r'''(?x)\b(?: - DOM_ARRAY|DOM_BOOL|DOM_COMPLEX|DOM_DOMAIN|DOM_EXEC|DOM_EXPR| - DOM_FAIL|DOM_FLOAT|DOM_FRAME|DOM_FUNC_ENV|DOM_HFARRAY|DOM_IDENT| - DOM_INT|DOM_INTERVAL|DOM_LIST|DOM_NIL|DOM_NULL|DOM_POLY|DOM_PROC| - DOM_PROC_ENV|DOM_RAT|DOM_SET|DOM_STRING|DOM_TABLE|DOM_VAR - )\b''', Name.Class), - (r'''(?x)\b(?: - PI|EULER|E|CATALAN| - NIL|FAIL|undefined|infinity| - TRUE|FALSE|UNKNOWN - )\b''', - Name.Constant), - (r'\b(?:dom|procname)\b', Name.Builtin.Pseudo), - (r'\.|,|:|;|=|\+|-|\*|/|\^|@|>|<|\$|\||!|\'|%|~=', Operator), - (r'''(?x)\b(?: - and|or|not|xor| - assuming| - div|mod| - union|minus|intersect|in|subset - )\b''', - Operator.Word), - (r'\b(?:I|RDN_INF|RD_NINF|RD_NAN)\b', Number), - #(r'\b(?:adt|linalg|newDomain|hold)\b', Name.Builtin), - (r'''(?x) - ((?:[a-zA-Z_#][a-zA-Z_#0-9]*|`[^`]*`) - (?:::[a-zA-Z_#][a-zA-Z_#0-9]*|`[^`]*`)*)(\s*)([(])''', - bygroups(Name.Function, Text, Punctuation)), - (r'''(?x) - (?:[a-zA-Z_#][a-zA-Z_#0-9]*|`[^`]*`) - (?:::[a-zA-Z_#][a-zA-Z_#0-9]*|`[^`]*`)*''', Name.Variable), - (r'[0-9]+(?:\.[0-9]*)?(?:e[0-9]+)?', Number), - (r'\.[0-9]+(?:e[0-9]+)?', Number), - (r'.', Text) - ], - 'comment' : [ - (r'[^*/]', Comment.Multiline), - (r'/\*', Comment.Multiline, '#push'), - (r'\*/', Comment.Multiline, '#pop'), - (r'[*/]', Comment.Multiline) - ] + 'root': [ + (r'//.*?$', Comment.Single), + (r'/\*', Comment.Multiline, 'comment'), + (r'"(?:[^"\\]|\\.)*"', String), + (r'\(|\)|\[|\]|\{|\}', Punctuation), + (r'''(?x)\b(?: + next|break|end| + axiom|end_axiom|category|end_category|domain|end_domain|inherits| + if|%if|then|elif|else|end_if| + case|of|do|otherwise|end_case| + while|end_while| + repeat|until|end_repeat| + for|from|to|downto|step|end_for| + proc|local|option|save|begin|end_proc| + delete|frame + )\b''', Keyword), + (r'''(?x)\b(?: + DOM_ARRAY|DOM_BOOL|DOM_COMPLEX|DOM_DOMAIN|DOM_EXEC|DOM_EXPR| + DOM_FAIL|DOM_FLOAT|DOM_FRAME|DOM_FUNC_ENV|DOM_HFARRAY|DOM_IDENT| + DOM_INT|DOM_INTERVAL|DOM_LIST|DOM_NIL|DOM_NULL|DOM_POLY|DOM_PROC| + DOM_PROC_ENV|DOM_RAT|DOM_SET|DOM_STRING|DOM_TABLE|DOM_VAR + )\b''', Name.Class), + (r'''(?x)\b(?: + PI|EULER|E|CATALAN| + NIL|FAIL|undefined|infinity| + TRUE|FALSE|UNKNOWN + )\b''', + Name.Constant), + (r'\b(?:dom|procname)\b', Name.Builtin.Pseudo), + (r'\.|,|:|;|=|\+|-|\*|/|\^|@|>|<|\$|\||!|\'|%|~=', Operator), + (r'''(?x)\b(?: + and|or|not|xor| + assuming| + div|mod| + union|minus|intersect|in|subset + )\b''', + Operator.Word), + (r'\b(?:I|RDN_INF|RD_NINF|RD_NAN)\b', Number), + # 
(r'\b(?:adt|linalg|newDomain|hold)\b', Name.Builtin), + (r'''(?x) + ((?:[a-zA-Z_#][a-zA-Z_#0-9]*|`[^`]*`) + (?:::[a-zA-Z_#][a-zA-Z_#0-9]*|`[^`]*`)*)(\s*)([(])''', + bygroups(Name.Function, Text, Punctuation)), + (r'''(?x) + (?:[a-zA-Z_#][a-zA-Z_#0-9]*|`[^`]*`) + (?:::[a-zA-Z_#][a-zA-Z_#0-9]*|`[^`]*`)*''', Name.Variable), + (r'[0-9]+(?:\.[0-9]*)?(?:e[0-9]+)?', Number), + (r'\.[0-9]+(?:e[0-9]+)?', Number), + (r'.', Text) + ], + 'comment': [ + (r'[^*/]', Comment.Multiline), + (r'/\*', Comment.Multiline, '#push'), + (r'\*/', Comment.Multiline, '#pop'), + (r'[*/]', Comment.Multiline) + ] } @@ -295,30 +296,30 @@ class MatlabLexer(RegexLexer): # # taken from Matlab version 7.4.0.336 (R2007a) # - elfun = ["sin","sind","sinh","asin","asind","asinh","cos","cosd","cosh", - "acos","acosd","acosh","tan","tand","tanh","atan","atand","atan2", - "atanh","sec","secd","sech","asec","asecd","asech","csc","cscd", - "csch","acsc","acscd","acsch","cot","cotd","coth","acot","acotd", - "acoth","hypot","exp","expm1","log","log1p","log10","log2","pow2", - "realpow","reallog","realsqrt","sqrt","nthroot","nextpow2","abs", - "angle","complex","conj","imag","real","unwrap","isreal","cplxpair", - "fix","floor","ceil","round","mod","rem","sign"] - specfun = ["airy","besselj","bessely","besselh","besseli","besselk","beta", - "betainc","betaln","ellipj","ellipke","erf","erfc","erfcx", - "erfinv","expint","gamma","gammainc","gammaln","psi","legendre", - "cross","dot","factor","isprime","primes","gcd","lcm","rat", - "rats","perms","nchoosek","factorial","cart2sph","cart2pol", - "pol2cart","sph2cart","hsv2rgb","rgb2hsv"] - elmat = ["zeros","ones","eye","repmat","rand","randn","linspace","logspace", - "freqspace","meshgrid","accumarray","size","length","ndims","numel", - "disp","isempty","isequal","isequalwithequalnans","cat","reshape", - "diag","blkdiag","tril","triu","fliplr","flipud","flipdim","rot90", - "find","end","sub2ind","ind2sub","bsxfun","ndgrid","permute", - "ipermute","shiftdim","circshift","squeeze","isscalar","isvector", - "ans","eps","realmax","realmin","pi","i","inf","nan","isnan", - "isinf","isfinite","j","why","compan","gallery","hadamard","hankel", - "hilb","invhilb","magic","pascal","rosser","toeplitz","vander", - "wilkinson"] + elfun = ("sin", "sind", "sinh", "asin", "asind", "asinh", "cos", "cosd", "cosh", + "acos", "acosd", "acosh", "tan", "tand", "tanh", "atan", "atand", "atan2", + "atanh", "sec", "secd", "sech", "asec", "asecd", "asech", "csc", "cscd", + "csch", "acsc", "acscd", "acsch", "cot", "cotd", "coth", "acot", "acotd", + "acoth", "hypot", "exp", "expm1", "log", "log1p", "log10", "log2", "pow2", + "realpow", "reallog", "realsqrt", "sqrt", "nthroot", "nextpow2", "abs", + "angle", "complex", "conj", "imag", "real", "unwrap", "isreal", "cplxpair", + "fix", "floor", "ceil", "round", "mod", "rem", "sign") + specfun = ("airy", "besselj", "bessely", "besselh", "besseli", "besselk", "beta", + "betainc", "betaln", "ellipj", "ellipke", "erf", "erfc", "erfcx", + "erfinv", "expint", "gamma", "gammainc", "gammaln", "psi", "legendre", + "cross", "dot", "factor", "isprime", "primes", "gcd", "lcm", "rat", + "rats", "perms", "nchoosek", "factorial", "cart2sph", "cart2pol", + "pol2cart", "sph2cart", "hsv2rgb", "rgb2hsv") + elmat = ("zeros", "ones", "eye", "repmat", "rand", "randn", "linspace", "logspace", + "freqspace", "meshgrid", "accumarray", "size", "length", "ndims", "numel", + "disp", "isempty", "isequal", "isequalwithequalnans", "cat", "reshape", + "diag", "blkdiag", "tril", "triu", "fliplr", "flipud", 
"flipdim", "rot90", + "find", "end", "sub2ind", "ind2sub", "bsxfun", "ndgrid", "permute", + "ipermute", "shiftdim", "circshift", "squeeze", "isscalar", "isvector", + "ans", "eps", "realmax", "realmin", "pi", "i", "inf", "nan", "isnan", + "isinf", "isfinite", "j", "why", "compan", "gallery", "hadamard", "hankel", + "hilb", "invhilb", "magic", "pascal", "rosser", "toeplitz", "vander", + "wilkinson") tokens = { 'root': [ @@ -330,9 +331,12 @@ class MatlabLexer(RegexLexer): (r'^\s*function', Keyword, 'deffunc'), # from 'iskeyword' on version 7.11 (R2010): - (r'(break|case|catch|classdef|continue|else|elseif|end|enumerated|' - r'events|for|function|global|if|methods|otherwise|parfor|' - r'persistent|properties|return|spmd|switch|try|while)\b', Keyword), + (words(( + 'break', 'case', 'catch', 'classdef', 'continue', 'else', 'elseif', + 'end', 'enumerated', 'events', 'for', 'function', 'global', 'if', + 'methods', 'otherwise', 'parfor', 'persistent', 'properties', + 'return', 'spmd', 'switch', 'try', 'while'), suffix=r'\b'), + Keyword), ("(" + "|".join(elfun+specfun+elmat) + r')\b', Name.Builtin), @@ -377,14 +381,15 @@ class MatlabLexer(RegexLexer): } def analyse_text(text): - if re.match('^\s*%', text, re.M): # comment + if re.match('^\s*%', text, re.M): # comment return 0.2 - elif re.match('^!\w+', text, re.M): # system cmd + elif re.match('^!\w+', text, re.M): # system cmd return 0.2 line_re = re.compile('.*?\n') + class MatlabSessionLexer(Lexer): """ For Matlab sessions. Modeled after PythonConsoleLexer. @@ -419,14 +424,14 @@ class MatlabSessionLexer(Lexer): idx = len(curcode) # without is showing error on same line as before...? - #line = "\n" + line + # line = "\n" + line token = (0, Generic.Traceback, line) insertions.append((idx, [token])) else: if curcode: for item in do_insertions( - insertions, mlexer.get_tokens_unprocessed(curcode)): + insertions, mlexer.get_tokens_unprocessed(curcode)): yield item curcode = '' insertions = [] @@ -434,9 +439,9 @@ class MatlabSessionLexer(Lexer): yield match.start(), Generic.Output, line print(insertions) - if curcode: # or item: + if curcode: # or item: for item in do_insertions( - insertions, mlexer.get_tokens_unprocessed(curcode)): + insertions, mlexer.get_tokens_unprocessed(curcode)): yield item @@ -475,328 +480,333 @@ class OctaveLexer(RegexLexer): # taken from Octave Mercurial changeset 8cc154f45e37 (30-jan-2011) - builtin_kw = [ "addlistener", "addpath", "addproperty", "all", - "and", "any", "argnames", "argv", "assignin", - "atexit", "autoload", - "available_graphics_toolkits", "beep_on_error", - "bitand", "bitmax", "bitor", "bitshift", "bitxor", - "cat", "cell", "cellstr", "char", "class", "clc", - "columns", "command_line_path", - "completion_append_char", "completion_matches", - "complex", "confirm_recursive_rmdir", "cputime", - "crash_dumps_octave_core", "ctranspose", "cumprod", - "cumsum", "debug_on_error", "debug_on_interrupt", - "debug_on_warning", "default_save_options", - "dellistener", "diag", "diff", "disp", - "doc_cache_file", "do_string_escapes", "double", - "drawnow", "e", "echo_executing_commands", "eps", - "eq", "errno", "errno_list", "error", "eval", - "evalin", "exec", "exist", "exit", "eye", "false", - "fclear", "fclose", "fcntl", "fdisp", "feof", - "ferror", "feval", "fflush", "fgetl", "fgets", - "fieldnames", "file_in_loadpath", "file_in_path", - "filemarker", "filesep", "find_dir_in_path", - "fixed_point_format", "fnmatch", "fopen", "fork", - "formula", "fprintf", "fputs", "fread", "freport", - "frewind", "fscanf", "fseek", 
"fskipl", "ftell", - "functions", "fwrite", "ge", "genpath", "get", - "getegid", "getenv", "geteuid", "getgid", - "getpgrp", "getpid", "getppid", "getuid", "glob", - "gt", "gui_mode", "history_control", - "history_file", "history_size", - "history_timestamp_format_string", "home", - "horzcat", "hypot", "ifelse", - "ignore_function_time_stamp", "inferiorto", - "info_file", "info_program", "inline", "input", - "intmax", "intmin", "ipermute", - "is_absolute_filename", "isargout", "isbool", - "iscell", "iscellstr", "ischar", "iscomplex", - "isempty", "isfield", "isfloat", "isglobal", - "ishandle", "isieee", "isindex", "isinteger", - "islogical", "ismatrix", "ismethod", "isnull", - "isnumeric", "isobject", "isreal", - "is_rooted_relative_filename", "issorted", - "isstruct", "isvarname", "kbhit", "keyboard", - "kill", "lasterr", "lasterror", "lastwarn", - "ldivide", "le", "length", "link", "linspace", - "logical", "lstat", "lt", "make_absolute_filename", - "makeinfo_program", "max_recursion_depth", "merge", - "methods", "mfilename", "minus", "mislocked", - "mkdir", "mkfifo", "mkstemp", "mldivide", "mlock", - "mouse_wheel_zoom", "mpower", "mrdivide", "mtimes", - "munlock", "nargin", "nargout", - "native_float_format", "ndims", "ne", "nfields", - "nnz", "norm", "not", "numel", "nzmax", - "octave_config_info", "octave_core_file_limit", - "octave_core_file_name", - "octave_core_file_options", "ones", "or", - "output_max_field_width", "output_precision", - "page_output_immediately", "page_screen_output", - "path", "pathsep", "pause", "pclose", "permute", - "pi", "pipe", "plus", "popen", "power", - "print_empty_dimensions", "printf", - "print_struct_array_contents", "prod", - "program_invocation_name", "program_name", - "putenv", "puts", "pwd", "quit", "rats", "rdivide", - "readdir", "readlink", "read_readline_init_file", - "realmax", "realmin", "rehash", "rename", - "repelems", "re_read_readline_init_file", "reset", - "reshape", "resize", "restoredefaultpath", - "rethrow", "rmdir", "rmfield", "rmpath", "rows", - "save_header_format_string", "save_precision", - "saving_history", "scanf", "set", "setenv", - "shell_cmd", "sighup_dumps_octave_core", - "sigterm_dumps_octave_core", "silent_functions", - "single", "size", "size_equal", "sizemax", - "sizeof", "sleep", "source", "sparse_auto_mutate", - "split_long_rows", "sprintf", "squeeze", "sscanf", - "stat", "stderr", "stdin", "stdout", "strcmp", - "strcmpi", "string_fill_char", "strncmp", - "strncmpi", "struct", "struct_levels_to_print", - "strvcat", "subsasgn", "subsref", "sum", "sumsq", - "superiorto", "suppress_verbose_help_message", - "symlink", "system", "tic", "tilde_expand", - "times", "tmpfile", "tmpnam", "toc", "toupper", - "transpose", "true", "typeinfo", "umask", "uminus", - "uname", "undo_string_escapes", "unlink", "uplus", - "upper", "usage", "usleep", "vec", "vectorize", - "vertcat", "waitpid", "warning", "warranty", - "whos_line_format", "yes_or_no", "zeros", - "inf", "Inf", "nan", "NaN"] - - command_kw = [ "close", "load", "who", "whos", ] - - function_kw = [ "accumarray", "accumdim", "acosd", "acotd", - "acscd", "addtodate", "allchild", "ancestor", - "anova", "arch_fit", "arch_rnd", "arch_test", - "area", "arma_rnd", "arrayfun", "ascii", "asctime", - "asecd", "asind", "assert", "atand", - "autoreg_matrix", "autumn", "axes", "axis", "bar", - "barh", "bartlett", "bartlett_test", "beep", - "betacdf", "betainv", "betapdf", "betarnd", - "bicgstab", "bicubic", "binary", "binocdf", - "binoinv", "binopdf", "binornd", "bitcmp", - "bitget", 
"bitset", "blackman", "blanks", - "blkdiag", "bone", "box", "brighten", "calendar", - "cast", "cauchy_cdf", "cauchy_inv", "cauchy_pdf", - "cauchy_rnd", "caxis", "celldisp", "center", "cgs", - "chisquare_test_homogeneity", - "chisquare_test_independence", "circshift", "cla", - "clabel", "clf", "clock", "cloglog", "closereq", - "colon", "colorbar", "colormap", "colperm", - "comet", "common_size", "commutation_matrix", - "compan", "compare_versions", "compass", - "computer", "cond", "condest", "contour", - "contourc", "contourf", "contrast", "conv", - "convhull", "cool", "copper", "copyfile", "cor", - "corrcoef", "cor_test", "cosd", "cotd", "cov", - "cplxpair", "cross", "cscd", "cstrcat", "csvread", - "csvwrite", "ctime", "cumtrapz", "curl", "cut", - "cylinder", "date", "datenum", "datestr", - "datetick", "datevec", "dblquad", "deal", - "deblank", "deconv", "delaunay", "delaunayn", - "delete", "demo", "detrend", "diffpara", "diffuse", - "dir", "discrete_cdf", "discrete_inv", - "discrete_pdf", "discrete_rnd", "display", - "divergence", "dlmwrite", "dos", "dsearch", - "dsearchn", "duplication_matrix", "durbinlevinson", - "ellipsoid", "empirical_cdf", "empirical_inv", - "empirical_pdf", "empirical_rnd", "eomday", - "errorbar", "etime", "etreeplot", "example", - "expcdf", "expinv", "expm", "exppdf", "exprnd", - "ezcontour", "ezcontourf", "ezmesh", "ezmeshc", - "ezplot", "ezpolar", "ezsurf", "ezsurfc", "factor", - "factorial", "fail", "fcdf", "feather", "fftconv", - "fftfilt", "fftshift", "figure", "fileattrib", - "fileparts", "fill", "findall", "findobj", - "findstr", "finv", "flag", "flipdim", "fliplr", - "flipud", "fpdf", "fplot", "fractdiff", "freqz", - "freqz_plot", "frnd", "fsolve", - "f_test_regression", "ftp", "fullfile", "fzero", - "gamcdf", "gaminv", "gampdf", "gamrnd", "gca", - "gcbf", "gcbo", "gcf", "genvarname", "geocdf", - "geoinv", "geopdf", "geornd", "getfield", "ginput", - "glpk", "gls", "gplot", "gradient", - "graphics_toolkit", "gray", "grid", "griddata", - "griddatan", "gtext", "gunzip", "gzip", "hadamard", - "hamming", "hankel", "hanning", "hggroup", - "hidden", "hilb", "hist", "histc", "hold", "hot", - "hotelling_test", "housh", "hsv", "hurst", - "hygecdf", "hygeinv", "hygepdf", "hygernd", - "idivide", "ifftshift", "image", "imagesc", - "imfinfo", "imread", "imshow", "imwrite", "index", - "info", "inpolygon", "inputname", "interpft", - "interpn", "intersect", "invhilb", "iqr", "isa", - "isdefinite", "isdir", "is_duplicate_entry", - "isequal", "isequalwithequalnans", "isfigure", - "ishermitian", "ishghandle", "is_leap_year", - "isletter", "ismac", "ismember", "ispc", "isprime", - "isprop", "isscalar", "issquare", "isstrprop", - "issymmetric", "isunix", "is_valid_file_id", - "isvector", "jet", "kendall", - "kolmogorov_smirnov_cdf", - "kolmogorov_smirnov_test", "kruskal_wallis_test", - "krylov", "kurtosis", "laplace_cdf", "laplace_inv", - "laplace_pdf", "laplace_rnd", "legend", "legendre", - "license", "line", "linkprop", "list_primes", - "loadaudio", "loadobj", "logistic_cdf", - "logistic_inv", "logistic_pdf", "logistic_rnd", - "logit", "loglog", "loglogerr", "logm", "logncdf", - "logninv", "lognpdf", "lognrnd", "logspace", - "lookfor", "ls_command", "lsqnonneg", "magic", - "mahalanobis", "manova", "matlabroot", - "mcnemar_test", "mean", "meansq", "median", "menu", - "mesh", "meshc", "meshgrid", "meshz", "mexext", - "mget", "mkpp", "mode", "moment", "movefile", - "mpoles", "mput", "namelengthmax", "nargchk", - "nargoutchk", "nbincdf", "nbininv", "nbinpdf", - "nbinrnd", 
"nchoosek", "ndgrid", "newplot", "news", - "nonzeros", "normcdf", "normest", "norminv", - "normpdf", "normrnd", "now", "nthroot", "null", - "ocean", "ols", "onenormest", "optimget", - "optimset", "orderfields", "orient", "orth", - "pack", "pareto", "parseparams", "pascal", "patch", - "pathdef", "pcg", "pchip", "pcolor", "pcr", - "peaks", "periodogram", "perl", "perms", "pie", - "pink", "planerot", "playaudio", "plot", - "plotmatrix", "plotyy", "poisscdf", "poissinv", - "poisspdf", "poissrnd", "polar", "poly", - "polyaffine", "polyarea", "polyderiv", "polyfit", - "polygcd", "polyint", "polyout", "polyreduce", - "polyval", "polyvalm", "postpad", "powerset", - "ppder", "ppint", "ppjumps", "ppplot", "ppval", - "pqpnonneg", "prepad", "primes", "print", - "print_usage", "prism", "probit", "qp", "qqplot", - "quadcc", "quadgk", "quadl", "quadv", "quiver", - "qzhess", "rainbow", "randi", "range", "rank", - "ranks", "rat", "reallog", "realpow", "realsqrt", - "record", "rectangle_lw", "rectangle_sw", - "rectint", "refresh", "refreshdata", - "regexptranslate", "repmat", "residue", "ribbon", - "rindex", "roots", "rose", "rosser", "rotdim", - "rref", "run", "run_count", "rundemos", "run_test", - "runtests", "saveas", "saveaudio", "saveobj", - "savepath", "scatter", "secd", "semilogx", - "semilogxerr", "semilogy", "semilogyerr", - "setaudio", "setdiff", "setfield", "setxor", - "shading", "shift", "shiftdim", "sign_test", - "sinc", "sind", "sinetone", "sinewave", "skewness", - "slice", "sombrero", "sortrows", "spaugment", - "spconvert", "spdiags", "spearman", "spectral_adf", - "spectral_xdf", "specular", "speed", "spencer", - "speye", "spfun", "sphere", "spinmap", "spline", - "spones", "sprand", "sprandn", "sprandsym", - "spring", "spstats", "spy", "sqp", "stairs", - "statistics", "std", "stdnormal_cdf", - "stdnormal_inv", "stdnormal_pdf", "stdnormal_rnd", - "stem", "stft", "strcat", "strchr", "strjust", - "strmatch", "strread", "strsplit", "strtok", - "strtrim", "strtrunc", "structfun", "studentize", - "subplot", "subsindex", "subspace", "substr", - "substruct", "summer", "surf", "surface", "surfc", - "surfl", "surfnorm", "svds", "swapbytes", - "sylvester_matrix", "symvar", "synthesis", "table", - "tand", "tar", "tcdf", "tempdir", "tempname", - "test", "text", "textread", "textscan", "tinv", - "title", "toeplitz", "tpdf", "trace", "trapz", - "treelayout", "treeplot", "triangle_lw", - "triangle_sw", "tril", "trimesh", "triplequad", - "triplot", "trisurf", "triu", "trnd", "tsearchn", - "t_test", "t_test_regression", "type", "unidcdf", - "unidinv", "unidpdf", "unidrnd", "unifcdf", - "unifinv", "unifpdf", "unifrnd", "union", "unique", - "unix", "unmkpp", "unpack", "untabify", "untar", - "unwrap", "unzip", "u_test", "validatestring", - "vander", "var", "var_test", "vech", "ver", - "version", "view", "voronoi", "voronoin", - "waitforbuttonpress", "wavread", "wavwrite", - "wblcdf", "wblinv", "wblpdf", "wblrnd", "weekday", - "welch_test", "what", "white", "whitebg", - "wienrnd", "wilcoxon_test", "wilkinson", "winter", - "xlabel", "xlim", "ylabel", "yulewalker", "zip", - "zlabel", "z_test", ] - - loadable_kw = [ "airy", "amd", "balance", "besselh", "besseli", - "besselj", "besselk", "bessely", "bitpack", - "bsxfun", "builtin", "ccolamd", "cellfun", - "cellslices", "chol", "choldelete", "cholinsert", - "cholinv", "cholshift", "cholupdate", "colamd", - "colloc", "convhulln", "convn", "csymamd", - "cummax", "cummin", "daspk", "daspk_options", - "dasrt", "dasrt_options", "dassl", "dassl_options", - "dbclear", 
"dbdown", "dbstack", "dbstatus", - "dbstop", "dbtype", "dbup", "dbwhere", "det", - "dlmread", "dmperm", "dot", "eig", "eigs", - "endgrent", "endpwent", "etree", "fft", "fftn", - "fftw", "filter", "find", "full", "gcd", - "getgrent", "getgrgid", "getgrnam", "getpwent", - "getpwnam", "getpwuid", "getrusage", "givens", - "gmtime", "gnuplot_binary", "hess", "ifft", - "ifftn", "inv", "isdebugmode", "issparse", "kron", - "localtime", "lookup", "lsode", "lsode_options", - "lu", "luinc", "luupdate", "matrix_type", "max", - "min", "mktime", "pinv", "qr", "qrdelete", - "qrinsert", "qrshift", "qrupdate", "quad", - "quad_options", "qz", "rand", "rande", "randg", - "randn", "randp", "randperm", "rcond", "regexp", - "regexpi", "regexprep", "schur", "setgrent", - "setpwent", "sort", "spalloc", "sparse", "spparms", - "sprank", "sqrtm", "strfind", "strftime", - "strptime", "strrep", "svd", "svd_driver", "syl", - "symamd", "symbfact", "symrcm", "time", "tsearch", - "typecast", "urlread", "urlwrite", ] - - mapping_kw = [ "abs", "acos", "acosh", "acot", "acoth", "acsc", - "acsch", "angle", "arg", "asec", "asech", "asin", - "asinh", "atan", "atanh", "beta", "betainc", - "betaln", "bincoeff", "cbrt", "ceil", "conj", "cos", - "cosh", "cot", "coth", "csc", "csch", "erf", "erfc", - "erfcx", "erfinv", "exp", "finite", "fix", "floor", - "fmod", "gamma", "gammainc", "gammaln", "imag", - "isalnum", "isalpha", "isascii", "iscntrl", - "isdigit", "isfinite", "isgraph", "isinf", - "islower", "isna", "isnan", "isprint", "ispunct", - "isspace", "isupper", "isxdigit", "lcm", "lgamma", - "log", "lower", "mod", "real", "rem", "round", - "roundb", "sec", "sech", "sign", "sin", "sinh", - "sqrt", "tan", "tanh", "toascii", "tolower", "xor", - ] - - builtin_consts = [ "EDITOR", "EXEC_PATH", "I", "IMAGE_PATH", "NA", - "OCTAVE_HOME", "OCTAVE_VERSION", "PAGER", - "PAGER_FLAGS", "SEEK_CUR", "SEEK_END", "SEEK_SET", - "SIG", "S_ISBLK", "S_ISCHR", "S_ISDIR", "S_ISFIFO", - "S_ISLNK", "S_ISREG", "S_ISSOCK", "WCONTINUE", - "WCOREDUMP", "WEXITSTATUS", "WIFCONTINUED", - "WIFEXITED", "WIFSIGNALED", "WIFSTOPPED", "WNOHANG", - "WSTOPSIG", "WTERMSIG", "WUNTRACED", ] + builtin_kw = ( + "addlistener", "addpath", "addproperty", "all", + "and", "any", "argnames", "argv", "assignin", + "atexit", "autoload", + "available_graphics_toolkits", "beep_on_error", + "bitand", "bitmax", "bitor", "bitshift", "bitxor", + "cat", "cell", "cellstr", "char", "class", "clc", + "columns", "command_line_path", + "completion_append_char", "completion_matches", + "complex", "confirm_recursive_rmdir", "cputime", + "crash_dumps_octave_core", "ctranspose", "cumprod", + "cumsum", "debug_on_error", "debug_on_interrupt", + "debug_on_warning", "default_save_options", + "dellistener", "diag", "diff", "disp", + "doc_cache_file", "do_string_escapes", "double", + "drawnow", "e", "echo_executing_commands", "eps", + "eq", "errno", "errno_list", "error", "eval", + "evalin", "exec", "exist", "exit", "eye", "false", + "fclear", "fclose", "fcntl", "fdisp", "feof", + "ferror", "feval", "fflush", "fgetl", "fgets", + "fieldnames", "file_in_loadpath", "file_in_path", + "filemarker", "filesep", "find_dir_in_path", + "fixed_point_format", "fnmatch", "fopen", "fork", + "formula", "fprintf", "fputs", "fread", "freport", + "frewind", "fscanf", "fseek", "fskipl", "ftell", + "functions", "fwrite", "ge", "genpath", "get", + "getegid", "getenv", "geteuid", "getgid", + "getpgrp", "getpid", "getppid", "getuid", "glob", + "gt", "gui_mode", "history_control", + "history_file", "history_size", + 
"history_timestamp_format_string", "home", + "horzcat", "hypot", "ifelse", + "ignore_function_time_stamp", "inferiorto", + "info_file", "info_program", "inline", "input", + "intmax", "intmin", "ipermute", + "is_absolute_filename", "isargout", "isbool", + "iscell", "iscellstr", "ischar", "iscomplex", + "isempty", "isfield", "isfloat", "isglobal", + "ishandle", "isieee", "isindex", "isinteger", + "islogical", "ismatrix", "ismethod", "isnull", + "isnumeric", "isobject", "isreal", + "is_rooted_relative_filename", "issorted", + "isstruct", "isvarname", "kbhit", "keyboard", + "kill", "lasterr", "lasterror", "lastwarn", + "ldivide", "le", "length", "link", "linspace", + "logical", "lstat", "lt", "make_absolute_filename", + "makeinfo_program", "max_recursion_depth", "merge", + "methods", "mfilename", "minus", "mislocked", + "mkdir", "mkfifo", "mkstemp", "mldivide", "mlock", + "mouse_wheel_zoom", "mpower", "mrdivide", "mtimes", + "munlock", "nargin", "nargout", + "native_float_format", "ndims", "ne", "nfields", + "nnz", "norm", "not", "numel", "nzmax", + "octave_config_info", "octave_core_file_limit", + "octave_core_file_name", + "octave_core_file_options", "ones", "or", + "output_max_field_width", "output_precision", + "page_output_immediately", "page_screen_output", + "path", "pathsep", "pause", "pclose", "permute", + "pi", "pipe", "plus", "popen", "power", + "print_empty_dimensions", "printf", + "print_struct_array_contents", "prod", + "program_invocation_name", "program_name", + "putenv", "puts", "pwd", "quit", "rats", "rdivide", + "readdir", "readlink", "read_readline_init_file", + "realmax", "realmin", "rehash", "rename", + "repelems", "re_read_readline_init_file", "reset", + "reshape", "resize", "restoredefaultpath", + "rethrow", "rmdir", "rmfield", "rmpath", "rows", + "save_header_format_string", "save_precision", + "saving_history", "scanf", "set", "setenv", + "shell_cmd", "sighup_dumps_octave_core", + "sigterm_dumps_octave_core", "silent_functions", + "single", "size", "size_equal", "sizemax", + "sizeof", "sleep", "source", "sparse_auto_mutate", + "split_long_rows", "sprintf", "squeeze", "sscanf", + "stat", "stderr", "stdin", "stdout", "strcmp", + "strcmpi", "string_fill_char", "strncmp", + "strncmpi", "struct", "struct_levels_to_print", + "strvcat", "subsasgn", "subsref", "sum", "sumsq", + "superiorto", "suppress_verbose_help_message", + "symlink", "system", "tic", "tilde_expand", + "times", "tmpfile", "tmpnam", "toc", "toupper", + "transpose", "true", "typeinfo", "umask", "uminus", + "uname", "undo_string_escapes", "unlink", "uplus", + "upper", "usage", "usleep", "vec", "vectorize", + "vertcat", "waitpid", "warning", "warranty", + "whos_line_format", "yes_or_no", "zeros", + "inf", "Inf", "nan", "NaN") + + command_kw = ("close", "load", "who", "whos") + + function_kw = ( + "accumarray", "accumdim", "acosd", "acotd", + "acscd", "addtodate", "allchild", "ancestor", + "anova", "arch_fit", "arch_rnd", "arch_test", + "area", "arma_rnd", "arrayfun", "ascii", "asctime", + "asecd", "asind", "assert", "atand", + "autoreg_matrix", "autumn", "axes", "axis", "bar", + "barh", "bartlett", "bartlett_test", "beep", + "betacdf", "betainv", "betapdf", "betarnd", + "bicgstab", "bicubic", "binary", "binocdf", + "binoinv", "binopdf", "binornd", "bitcmp", + "bitget", "bitset", "blackman", "blanks", + "blkdiag", "bone", "box", "brighten", "calendar", + "cast", "cauchy_cdf", "cauchy_inv", "cauchy_pdf", + "cauchy_rnd", "caxis", "celldisp", "center", "cgs", + "chisquare_test_homogeneity", + 
"chisquare_test_independence", "circshift", "cla", + "clabel", "clf", "clock", "cloglog", "closereq", + "colon", "colorbar", "colormap", "colperm", + "comet", "common_size", "commutation_matrix", + "compan", "compare_versions", "compass", + "computer", "cond", "condest", "contour", + "contourc", "contourf", "contrast", "conv", + "convhull", "cool", "copper", "copyfile", "cor", + "corrcoef", "cor_test", "cosd", "cotd", "cov", + "cplxpair", "cross", "cscd", "cstrcat", "csvread", + "csvwrite", "ctime", "cumtrapz", "curl", "cut", + "cylinder", "date", "datenum", "datestr", + "datetick", "datevec", "dblquad", "deal", + "deblank", "deconv", "delaunay", "delaunayn", + "delete", "demo", "detrend", "diffpara", "diffuse", + "dir", "discrete_cdf", "discrete_inv", + "discrete_pdf", "discrete_rnd", "display", + "divergence", "dlmwrite", "dos", "dsearch", + "dsearchn", "duplication_matrix", "durbinlevinson", + "ellipsoid", "empirical_cdf", "empirical_inv", + "empirical_pdf", "empirical_rnd", "eomday", + "errorbar", "etime", "etreeplot", "example", + "expcdf", "expinv", "expm", "exppdf", "exprnd", + "ezcontour", "ezcontourf", "ezmesh", "ezmeshc", + "ezplot", "ezpolar", "ezsurf", "ezsurfc", "factor", + "factorial", "fail", "fcdf", "feather", "fftconv", + "fftfilt", "fftshift", "figure", "fileattrib", + "fileparts", "fill", "findall", "findobj", + "findstr", "finv", "flag", "flipdim", "fliplr", + "flipud", "fpdf", "fplot", "fractdiff", "freqz", + "freqz_plot", "frnd", "fsolve", + "f_test_regression", "ftp", "fullfile", "fzero", + "gamcdf", "gaminv", "gampdf", "gamrnd", "gca", + "gcbf", "gcbo", "gcf", "genvarname", "geocdf", + "geoinv", "geopdf", "geornd", "getfield", "ginput", + "glpk", "gls", "gplot", "gradient", + "graphics_toolkit", "gray", "grid", "griddata", + "griddatan", "gtext", "gunzip", "gzip", "hadamard", + "hamming", "hankel", "hanning", "hggroup", + "hidden", "hilb", "hist", "histc", "hold", "hot", + "hotelling_test", "housh", "hsv", "hurst", + "hygecdf", "hygeinv", "hygepdf", "hygernd", + "idivide", "ifftshift", "image", "imagesc", + "imfinfo", "imread", "imshow", "imwrite", "index", + "info", "inpolygon", "inputname", "interpft", + "interpn", "intersect", "invhilb", "iqr", "isa", + "isdefinite", "isdir", "is_duplicate_entry", + "isequal", "isequalwithequalnans", "isfigure", + "ishermitian", "ishghandle", "is_leap_year", + "isletter", "ismac", "ismember", "ispc", "isprime", + "isprop", "isscalar", "issquare", "isstrprop", + "issymmetric", "isunix", "is_valid_file_id", + "isvector", "jet", "kendall", + "kolmogorov_smirnov_cdf", + "kolmogorov_smirnov_test", "kruskal_wallis_test", + "krylov", "kurtosis", "laplace_cdf", "laplace_inv", + "laplace_pdf", "laplace_rnd", "legend", "legendre", + "license", "line", "linkprop", "list_primes", + "loadaudio", "loadobj", "logistic_cdf", + "logistic_inv", "logistic_pdf", "logistic_rnd", + "logit", "loglog", "loglogerr", "logm", "logncdf", + "logninv", "lognpdf", "lognrnd", "logspace", + "lookfor", "ls_command", "lsqnonneg", "magic", + "mahalanobis", "manova", "matlabroot", + "mcnemar_test", "mean", "meansq", "median", "menu", + "mesh", "meshc", "meshgrid", "meshz", "mexext", + "mget", "mkpp", "mode", "moment", "movefile", + "mpoles", "mput", "namelengthmax", "nargchk", + "nargoutchk", "nbincdf", "nbininv", "nbinpdf", + "nbinrnd", "nchoosek", "ndgrid", "newplot", "news", + "nonzeros", "normcdf", "normest", "norminv", + "normpdf", "normrnd", "now", "nthroot", "null", + "ocean", "ols", "onenormest", "optimget", + "optimset", "orderfields", "orient", "orth", + 
"pack", "pareto", "parseparams", "pascal", "patch", + "pathdef", "pcg", "pchip", "pcolor", "pcr", + "peaks", "periodogram", "perl", "perms", "pie", + "pink", "planerot", "playaudio", "plot", + "plotmatrix", "plotyy", "poisscdf", "poissinv", + "poisspdf", "poissrnd", "polar", "poly", + "polyaffine", "polyarea", "polyderiv", "polyfit", + "polygcd", "polyint", "polyout", "polyreduce", + "polyval", "polyvalm", "postpad", "powerset", + "ppder", "ppint", "ppjumps", "ppplot", "ppval", + "pqpnonneg", "prepad", "primes", "print", + "print_usage", "prism", "probit", "qp", "qqplot", + "quadcc", "quadgk", "quadl", "quadv", "quiver", + "qzhess", "rainbow", "randi", "range", "rank", + "ranks", "rat", "reallog", "realpow", "realsqrt", + "record", "rectangle_lw", "rectangle_sw", + "rectint", "refresh", "refreshdata", + "regexptranslate", "repmat", "residue", "ribbon", + "rindex", "roots", "rose", "rosser", "rotdim", + "rref", "run", "run_count", "rundemos", "run_test", + "runtests", "saveas", "saveaudio", "saveobj", + "savepath", "scatter", "secd", "semilogx", + "semilogxerr", "semilogy", "semilogyerr", + "setaudio", "setdiff", "setfield", "setxor", + "shading", "shift", "shiftdim", "sign_test", + "sinc", "sind", "sinetone", "sinewave", "skewness", + "slice", "sombrero", "sortrows", "spaugment", + "spconvert", "spdiags", "spearman", "spectral_adf", + "spectral_xdf", "specular", "speed", "spencer", + "speye", "spfun", "sphere", "spinmap", "spline", + "spones", "sprand", "sprandn", "sprandsym", + "spring", "spstats", "spy", "sqp", "stairs", + "statistics", "std", "stdnormal_cdf", + "stdnormal_inv", "stdnormal_pdf", "stdnormal_rnd", + "stem", "stft", "strcat", "strchr", "strjust", + "strmatch", "strread", "strsplit", "strtok", + "strtrim", "strtrunc", "structfun", "studentize", + "subplot", "subsindex", "subspace", "substr", + "substruct", "summer", "surf", "surface", "surfc", + "surfl", "surfnorm", "svds", "swapbytes", + "sylvester_matrix", "symvar", "synthesis", "table", + "tand", "tar", "tcdf", "tempdir", "tempname", + "test", "text", "textread", "textscan", "tinv", + "title", "toeplitz", "tpdf", "trace", "trapz", + "treelayout", "treeplot", "triangle_lw", + "triangle_sw", "tril", "trimesh", "triplequad", + "triplot", "trisurf", "triu", "trnd", "tsearchn", + "t_test", "t_test_regression", "type", "unidcdf", + "unidinv", "unidpdf", "unidrnd", "unifcdf", + "unifinv", "unifpdf", "unifrnd", "union", "unique", + "unix", "unmkpp", "unpack", "untabify", "untar", + "unwrap", "unzip", "u_test", "validatestring", + "vander", "var", "var_test", "vech", "ver", + "version", "view", "voronoi", "voronoin", + "waitforbuttonpress", "wavread", "wavwrite", + "wblcdf", "wblinv", "wblpdf", "wblrnd", "weekday", + "welch_test", "what", "white", "whitebg", + "wienrnd", "wilcoxon_test", "wilkinson", "winter", + "xlabel", "xlim", "ylabel", "yulewalker", "zip", + "zlabel", "z_test") + + loadable_kw = ( + "airy", "amd", "balance", "besselh", "besseli", + "besselj", "besselk", "bessely", "bitpack", + "bsxfun", "builtin", "ccolamd", "cellfun", + "cellslices", "chol", "choldelete", "cholinsert", + "cholinv", "cholshift", "cholupdate", "colamd", + "colloc", "convhulln", "convn", "csymamd", + "cummax", "cummin", "daspk", "daspk_options", + "dasrt", "dasrt_options", "dassl", "dassl_options", + "dbclear", "dbdown", "dbstack", "dbstatus", + "dbstop", "dbtype", "dbup", "dbwhere", "det", + "dlmread", "dmperm", "dot", "eig", "eigs", + "endgrent", "endpwent", "etree", "fft", "fftn", + "fftw", "filter", "find", "full", "gcd", + "getgrent", 
"getgrgid", "getgrnam", "getpwent", + "getpwnam", "getpwuid", "getrusage", "givens", + "gmtime", "gnuplot_binary", "hess", "ifft", + "ifftn", "inv", "isdebugmode", "issparse", "kron", + "localtime", "lookup", "lsode", "lsode_options", + "lu", "luinc", "luupdate", "matrix_type", "max", + "min", "mktime", "pinv", "qr", "qrdelete", + "qrinsert", "qrshift", "qrupdate", "quad", + "quad_options", "qz", "rand", "rande", "randg", + "randn", "randp", "randperm", "rcond", "regexp", + "regexpi", "regexprep", "schur", "setgrent", + "setpwent", "sort", "spalloc", "sparse", "spparms", + "sprank", "sqrtm", "strfind", "strftime", + "strptime", "strrep", "svd", "svd_driver", "syl", + "symamd", "symbfact", "symrcm", "time", "tsearch", + "typecast", "urlread", "urlwrite") + + mapping_kw = ( + "abs", "acos", "acosh", "acot", "acoth", "acsc", + "acsch", "angle", "arg", "asec", "asech", "asin", + "asinh", "atan", "atanh", "beta", "betainc", + "betaln", "bincoeff", "cbrt", "ceil", "conj", "cos", + "cosh", "cot", "coth", "csc", "csch", "erf", "erfc", + "erfcx", "erfinv", "exp", "finite", "fix", "floor", + "fmod", "gamma", "gammainc", "gammaln", "imag", + "isalnum", "isalpha", "isascii", "iscntrl", + "isdigit", "isfinite", "isgraph", "isinf", + "islower", "isna", "isnan", "isprint", "ispunct", + "isspace", "isupper", "isxdigit", "lcm", "lgamma", + "log", "lower", "mod", "real", "rem", "round", + "roundb", "sec", "sech", "sign", "sin", "sinh", + "sqrt", "tan", "tanh", "toascii", "tolower", "xor") + + builtin_consts = ( + "EDITOR", "EXEC_PATH", "I", "IMAGE_PATH", "NA", + "OCTAVE_HOME", "OCTAVE_VERSION", "PAGER", + "PAGER_FLAGS", "SEEK_CUR", "SEEK_END", "SEEK_SET", + "SIG", "S_ISBLK", "S_ISCHR", "S_ISDIR", "S_ISFIFO", + "S_ISLNK", "S_ISREG", "S_ISSOCK", "WCONTINUE", + "WCOREDUMP", "WEXITSTATUS", "WIFCONTINUED", + "WIFEXITED", "WIFSIGNALED", "WIFSTOPPED", "WNOHANG", + "WSTOPSIG", "WTERMSIG", "WUNTRACED") tokens = { 'root': [ - #We should look into multiline comments + # We should look into multiline comments (r'[%#].*$', Comment), (r'^\s*function', Keyword, 'deffunc'), # from 'iskeyword' on hg changeset 8cc154f45e37 - (r'(__FILE__|__LINE__|break|case|catch|classdef|continue|do|else|' - r'elseif|end|end_try_catch|end_unwind_protect|endclassdef|' - r'endevents|endfor|endfunction|endif|endmethods|endproperties|' - r'endswitch|endwhile|events|for|function|get|global|if|methods|' - r'otherwise|persistent|properties|return|set|static|switch|try|' - r'until|unwind_protect|unwind_protect_cleanup|while)\b', Keyword), + (words(( + '__FILE__', '__LINE__', 'break', 'case', 'catch', 'classdef', 'continue', 'do', 'else', + 'elseif', 'end', 'end_try_catch', 'end_unwind_protect', 'endclassdef', + 'endevents', 'endfor', 'endfunction', 'endif', 'endmethods', 'endproperties', + 'endswitch', 'endwhile', 'events', 'for', 'function', 'get', 'global', 'if', 'methods', + 'otherwise', 'persistent', 'properties', 'return', 'set', 'static', 'switch', 'try', + 'until', 'unwind_protect', 'unwind_protect_cleanup', 'while'), suffix=r'\b'), + Keyword), - ("(" + "|".join( builtin_kw + command_kw - + function_kw + loadable_kw - + mapping_kw) + r')\b', Name.Builtin), + (words(builtin_kw + command_kw + function_kw + loadable_kw + mapping_kw, + suffix=r'\b'), Name.Builtin), - ("(" + "|".join(builtin_consts) + r')\b', Name.Constant), + (words(builtin_consts, suffix=r'\b'), Name.Constant), # operators in Octave but not Matlab: (r'-=|!=|!|/=|--', Operator), # operators: (r'-|==|~=|<|>|<=|>=|&&|&|~|\|\|?', Operator), # operators in Octave but not Matlab 
requiring escape for re: - (r'\*=|\+=|\^=|\/=|\\=|\*\*|\+\+|\.\*\*',Operator), + (r'\*=|\+=|\^=|\/=|\\=|\*\*|\+\+|\.\*\*', Operator), # operators requiring escape for re: (r'\.\*|\*|\+|\.\^|\.\\|\.\/|\/|\\', Operator), @@ -847,12 +857,14 @@ class ScilabLexer(RegexLexer): (r'//.*?$', Comment.Single), (r'^\s*function', Keyword, 'deffunc'), - (r'(__FILE__|__LINE__|break|case|catch|classdef|continue|do|else|' - r'elseif|end|end_try_catch|end_unwind_protect|endclassdef|' - r'endevents|endfor|endfunction|endif|endmethods|endproperties|' - r'endswitch|endwhile|events|for|function|get|global|if|methods|' - r'otherwise|persistent|properties|return|set|static|switch|try|' - r'until|unwind_protect|unwind_protect_cleanup|while)\b', Keyword), + (words(( + '__FILE__', '__LINE__', 'break', 'case', 'catch', 'classdef', 'continue', 'do', 'else', + 'elseif', 'end', 'end_try_catch', 'end_unwind_protect', 'endclassdef', + 'endevents', 'endfor', 'endfunction', 'endif', 'endmethods', 'endproperties', + 'endswitch', 'endwhile', 'events', 'for', 'function', 'get', 'global', 'if', 'methods', + 'otherwise', 'persistent', 'properties', 'return', 'set', 'static', 'switch', 'try', + 'until', 'unwind_protect', 'unwind_protect_cleanup', 'while'), suffix=r'\b'), + Keyword), ("(" + "|".join(_scilab_builtins.functions_kw + _scilab_builtins.commands_kw + @@ -911,7 +923,7 @@ class NumPyLexer(PythonLexer): mimetypes = [] filenames = [] - EXTRA_KEYWORDS = set([ + EXTRA_KEYWORDS = set(( 'abs', 'absolute', 'accumulate', 'add', 'alen', 'all', 'allclose', 'alltrue', 'alterdot', 'amax', 'amin', 'angle', 'any', 'append', 'apply_along_axis', 'apply_over_axes', 'arange', 'arccos', 'arccosh', @@ -976,7 +988,7 @@ class NumPyLexer(PythonLexer): 'typename', 'uniform', 'union1d', 'unique', 'unique1d', 'unravel_index', 'unwrap', 'vander', 'var', 'vdot', 'vectorize', 'view', 'vonmises', 'vsplit', 'vstack', 'weibull', 'where', 'who', 'zeros', 'zeros_like' - ]) + )) def get_tokens_unprocessed(self, text): for index, token, value in \ @@ -1020,8 +1032,8 @@ class RConsoleLexer(Lexer): # If we have stored prompt lines, need to process them first. if current_code_block: # Weave together the prompts and highlight code. - for item in do_insertions(insertions, - slexer.get_tokens_unprocessed(current_code_block)): + for item in do_insertions( + insertions, slexer.get_tokens_unprocessed(current_code_block)): yield item # Reset vars for next code block. current_code_block = '' @@ -1033,8 +1045,8 @@ class RConsoleLexer(Lexer): # process the last code block. This is neither elegant nor DRY so # should be changed. 
if current_code_block: - for item in do_insertions(insertions, - slexer.get_tokens_unprocessed(current_code_block)): + for item in do_insertions( + insertions, slexer.get_tokens_unprocessed(current_code_block)): yield item @@ -1051,256 +1063,256 @@ class SLexer(RegexLexer): mimetypes = ['text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r', 'text/x-R', 'text/x-r-history', 'text/x-r-profile'] - builtins_base = [ - 'Arg', 'Conj', 'Cstack_info', 'Encoding', 'FALSE', - 'Filter', 'Find', 'I', 'ISOdate', 'ISOdatetime', 'Im', 'Inf', - 'La\.svd', 'Map', 'Math\.Date', 'Math\.POSIXt', 'Math\.data\.frame', - 'Math\.difftime', 'Math\.factor', 'Mod', 'NA_character_', - 'NA_complex_', 'NA_real_', 'NCOL', 'NROW', 'NULLNA_integer_', 'NaN', - 'Negate', 'NextMethod', 'Ops\.Date', 'Ops\.POSIXt', 'Ops\.data\.frame', - 'Ops\.difftime', 'Ops\.factor', 'Ops\.numeric_version', 'Ops\.ordered', - 'Position', 'R\.Version', 'R\.home', 'R\.version', 'R\.version\.string', - 'RNGkind', 'RNGversion', 'R_system_version', 'Re', 'Recall', - 'Reduce', 'Summary\.Date', 'Summary\.POSIXct', 'Summary\.POSIXlt', - 'Summary\.data\.frame', 'Summary\.difftime', 'Summary\.factor', - 'Summary\.numeric_version', 'Summary\.ordered', 'Sys\.Date', - 'Sys\.chmod', 'Sys\.getenv', 'Sys\.getlocale', 'Sys\.getpid', - 'Sys\.glob', 'Sys\.info', 'Sys\.localeconv', 'Sys\.readlink', - 'Sys\.setFileTime', 'Sys\.setenv', 'Sys\.setlocale', 'Sys\.sleep', - 'Sys\.time', 'Sys\.timezone', 'Sys\.umask', 'Sys\.unsetenv', - 'Sys\.which', 'TRUE', 'UseMethod', 'Vectorize', 'abbreviate', 'abs', - 'acos', 'acosh', 'addNA', 'addTaskCallback', 'agrep', 'alist', - 'all', 'all\.equal', 'all\.equal\.POSIXct', 'all\.equal\.character', - 'all\.equal\.default', 'all\.equal\.factor', 'all\.equal\.formula', - 'all\.equal\.language', 'all\.equal\.list', 'all\.equal\.numeric', - 'all\.equal\.raw', 'all\.names', 'all\.vars', 'any', 'anyDuplicated', - 'anyDuplicated\.array', 'anyDuplicated\.data\.frame', - 'anyDuplicated\.default', 'anyDuplicated\.matrix', 'aperm', - 'aperm\.default', 'aperm\.table', 'append', 'apply', 'args', - 'arrayInd', 'as\.Date', 'as\.Date\.POSIXct', 'as\.Date\.POSIXlt', - 'as\.Date\.character', 'as\.Date\.date', 'as\.Date\.dates', - 'as\.Date\.default', 'as\.Date\.factor', 'as\.Date\.numeric', - 'as\.POSIXct', 'as\.POSIXct\.Date', 'as\.POSIXct\.POSIXlt', - 'as\.POSIXct\.date', 'as\.POSIXct\.dates', 'as\.POSIXct\.default', - 'as\.POSIXct\.numeric', 'as\.POSIXlt', 'as\.POSIXlt\.Date', - 'as\.POSIXlt\.POSIXct', 'as\.POSIXlt\.character', 'as\.POSIXlt\.date', - 'as\.POSIXlt\.dates', 'as\.POSIXlt\.default', 'as\.POSIXlt\.factor', - 'as\.POSIXlt\.numeric', 'as\.array', 'as\.array\.default', 'as\.call', - 'as\.character', 'as\.character\.Date', 'as\.character\.POSIXt', - 'as\.character\.condition', 'as\.character\.default', - 'as\.character\.error', 'as\.character\.factor', 'as\.character\.hexmode', - 'as\.character\.numeric_version', 'as\.character\.octmode', - 'as\.character\.srcref', 'as\.complex', 'as\.data\.frame', - 'as\.data\.frame\.AsIs', 'as\.data\.frame\.Date', 'as\.data\.frame\.POSIXct', - 'as\.data\.frame\.POSIXlt', 'as\.data\.frame\.array', - 'as\.data\.frame\.character', 'as\.data\.frame\.complex', - 'as\.data\.frame\.data\.frame', 'as\.data\.frame\.default', - 'as\.data\.frame\.difftime', 'as\.data\.frame\.factor', - 'as\.data\.frame\.integer', 'as\.data\.frame\.list', - 'as\.data\.frame\.logical', 'as\.data\.frame\.matrix', - 'as\.data\.frame\.model\.matrix', 'as\.data\.frame\.numeric', - 'as\.data\.frame\.numeric_version', 
'as\.data\.frame\.ordered', - 'as\.data\.frame\.raw', 'as\.data\.frame\.table', 'as\.data\.frame\.ts', - 'as\.data\.frame\.vector', 'as\.difftime', 'as\.double', - 'as\.double\.POSIXlt', 'as\.double\.difftime', 'as\.environment', - 'as\.expression', 'as\.expression\.default', 'as\.factor', - 'as\.function', 'as\.function\.default', 'as\.hexmode', 'as\.integer', - 'as\.list', 'as\.list\.Date', 'as\.list\.POSIXct', 'as\.list\.data\.frame', - 'as\.list\.default', 'as\.list\.environment', 'as\.list\.factor', - 'as\.list\.function', 'as\.list\.numeric_version', 'as\.logical', - 'as\.logical\.factor', 'as\.matrix', 'as\.matrix\.POSIXlt', - 'as\.matrix\.data\.frame', 'as\.matrix\.default', 'as\.matrix\.noquote', - 'as\.name', 'as\.null', 'as\.null\.default', 'as\.numeric', - 'as\.numeric_version', 'as\.octmode', 'as\.ordered', - 'as\.package_version', 'as\.pairlist', 'as\.qr', 'as\.raw', 'as\.single', - 'as\.single\.default', 'as\.symbol', 'as\.table', 'as\.table\.default', - 'as\.vector', 'as\.vector\.factor', 'asNamespace', 'asS3', 'asS4', - 'asin', 'asinh', 'assign', 'atan', 'atan2', 'atanh', - 'attachNamespace', 'attr', 'attr\.all\.equal', 'attributes', - 'autoload', 'autoloader', 'backsolve', 'baseenv', 'basename', - 'besselI', 'besselJ', 'besselK', 'besselY', 'beta', - 'bindingIsActive', 'bindingIsLocked', 'bindtextdomain', 'bitwAnd', - 'bitwNot', 'bitwOr', 'bitwShiftL', 'bitwShiftR', 'bitwXor', 'body', - 'bquote', 'browser', 'browserCondition', 'browserSetDebug', - 'browserText', 'builtins', 'by', 'by\.data\.frame', 'by\.default', - 'bzfile', 'c\.Date', 'c\.POSIXct', 'c\.POSIXlt', 'c\.noquote', - 'c\.numeric_version', 'call', 'callCC', 'capabilities', 'casefold', - 'cat', 'category', 'cbind', 'cbind\.data\.frame', 'ceiling', - 'char\.expand', 'charToRaw', 'charmatch', 'chartr', 'check_tzones', - 'chol', 'chol\.default', 'chol2inv', 'choose', 'class', - 'clearPushBack', 'close', 'close\.connection', 'close\.srcfile', - 'close\.srcfilealias', 'closeAllConnections', 'col', 'colMeans', - 'colSums', 'colnames', 'commandArgs', 'comment', 'computeRestarts', - 'conditionCall', 'conditionCall\.condition', 'conditionMessage', - 'conditionMessage\.condition', 'conflicts', 'contributors', 'cos', - 'cosh', 'crossprod', 'cummax', 'cummin', 'cumprod', 'cumsum', 'cut', - 'cut\.Date', 'cut\.POSIXt', 'cut\.default', 'dQuote', 'data\.class', - 'data\.matrix', 'date', 'debug', 'debugonce', - 'default\.stringsAsFactors', 'delayedAssign', 'deparse', 'det', - 'determinant', 'determinant\.matrix', 'dget', 'diag', 'diff', - 'diff\.Date', 'diff\.POSIXt', 'diff\.default', 'difftime', 'digamma', - 'dim', 'dim\.data\.frame', 'dimnames', 'dimnames\.data\.frame', 'dir', - 'dir\.create', 'dirname', 'do\.call', 'dput', 'drop', 'droplevels', - 'droplevels\.data\.frame', 'droplevels\.factor', 'dump', 'duplicated', - 'duplicated\.POSIXlt', 'duplicated\.array', 'duplicated\.data\.frame', - 'duplicated\.default', 'duplicated\.matrix', - 'duplicated\.numeric_version', 'dyn\.load', 'dyn\.unload', 'eapply', - 'eigen', 'else', 'emptyenv', 'enc2native', 'enc2utf8', - 'encodeString', 'enquote', 'env\.profile', 'environment', - 'environmentIsLocked', 'environmentName', 'eval', 'eval\.parent', - 'evalq', 'exists', 'exp', 'expand\.grid', 'expm1', 'expression', - 'factor', 'factorial', 'fifo', 'file', 'file\.access', 'file\.append', - 'file\.choose', 'file\.copy', 'file\.create', 'file\.exists', - 'file\.info', 'file\.link', 'file\.path', 'file\.remove', 'file\.rename', - 'file\.show', 'file\.symlink', 'find\.package', 'findInterval', - 
'findPackageEnv', 'findRestart', 'floor', 'flush', - 'flush\.connection', 'force', 'formals', 'format', - 'format\.AsIs', 'format\.Date', 'format\.POSIXct', 'format\.POSIXlt', - 'format\.data\.frame', 'format\.default', 'format\.difftime', - 'format\.factor', 'format\.hexmode', 'format\.info', - 'format\.libraryIQR', 'format\.numeric_version', 'format\.octmode', - 'format\.packageInfo', 'format\.pval', 'format\.summaryDefault', - 'formatC', 'formatDL', 'forwardsolve', 'gamma', 'gc', 'gc\.time', - 'gcinfo', 'gctorture', 'gctorture2', 'get', 'getAllConnections', - 'getCallingDLL', 'getCallingDLLe', 'getConnection', - 'getDLLRegisteredRoutines', 'getDLLRegisteredRoutines\.DLLInfo', - 'getDLLRegisteredRoutines\.character', 'getElement', - 'getExportedValue', 'getHook', 'getLoadedDLLs', 'getNamespace', - 'getNamespaceExports', 'getNamespaceImports', 'getNamespaceInfo', - 'getNamespaceName', 'getNamespaceUsers', 'getNamespaceVersion', - 'getNativeSymbolInfo', 'getOption', 'getRversion', 'getSrcLines', - 'getTaskCallbackNames', 'geterrmessage', 'gettext', 'gettextf', - 'getwd', 'gl', 'globalenv', 'gregexpr', 'grep', 'grepRaw', 'grepl', - 'gsub', 'gzcon', 'gzfile', 'head', 'iconv', 'iconvlist', - 'icuSetCollate', 'identical', 'identity', 'ifelse', 'importIntoEnv', - 'in', 'inherits', 'intToBits', 'intToUtf8', 'interaction', 'interactive', - 'intersect', 'inverse\.rle', 'invisible', 'invokeRestart', - 'invokeRestartInteractively', 'is\.R', 'is\.array', 'is\.atomic', - 'is\.call', 'is\.character', 'is\.complex', 'is\.data\.frame', - 'is\.double', 'is\.element', 'is\.environment', 'is\.expression', - 'is\.factor', 'is\.finite', 'is\.function', 'is\.infinite', - 'is\.integer', 'is\.language', 'is\.list', 'is\.loaded', 'is\.logical', - 'is\.matrix', 'is\.na', 'is\.na\.POSIXlt', 'is\.na\.data\.frame', - 'is\.na\.numeric_version', 'is\.name', 'is\.nan', 'is\.null', - 'is\.numeric', 'is\.numeric\.Date', 'is\.numeric\.POSIXt', - 'is\.numeric\.difftime', 'is\.numeric_version', 'is\.object', - 'is\.ordered', 'is\.package_version', 'is\.pairlist', 'is\.primitive', - 'is\.qr', 'is\.raw', 'is\.recursive', 'is\.single', 'is\.symbol', - 'is\.table', 'is\.unsorted', 'is\.vector', 'isBaseNamespace', - 'isIncomplete', 'isNamespace', 'isOpen', 'isRestart', 'isS4', - 'isSeekable', 'isSymmetric', 'isSymmetric\.matrix', 'isTRUE', - 'isatty', 'isdebugged', 'jitter', 'julian', 'julian\.Date', - 'julian\.POSIXt', 'kappa', 'kappa\.default', 'kappa\.lm', 'kappa\.qr', - 'kronecker', 'l10n_info', 'labels', 'labels\.default', 'lapply', - 'lazyLoad', 'lazyLoadDBexec', 'lazyLoadDBfetch', 'lbeta', 'lchoose', - 'length', 'length\.POSIXlt', 'letters', 'levels', 'levels\.default', - 'lfactorial', 'lgamma', 'library\.dynam', 'library\.dynam\.unload', - 'licence', 'license', 'list\.dirs', 'list\.files', 'list2env', 'load', - 'loadNamespace', 'loadedNamespaces', 'loadingNamespaceInfo', - 'local', 'lockBinding', 'lockEnvironment', 'log', 'log10', 'log1p', - 'log2', 'logb', 'lower\.tri', 'ls', 'make\.names', 'make\.unique', - 'makeActiveBinding', 'mapply', 'margin\.table', 'mat\.or\.vec', - 'match', 'match\.arg', 'match\.call', 'match\.fun', 'max', 'max\.col', - 'mean', 'mean\.Date', 'mean\.POSIXct', 'mean\.POSIXlt', 'mean\.default', - 'mean\.difftime', 'mem\.limits', 'memCompress', 'memDecompress', - 'memory\.profile', 'merge', 'merge\.data\.frame', 'merge\.default', - 'message', 'mget', 'min', 'missing', 'mode', 'month\.abb', - 'month\.name', 'months', 'months\.Date', 'months\.POSIXt', - 'months\.abb', 'months\.nameletters', 'names', 
'names\.POSIXlt', - 'namespaceExport', 'namespaceImport', 'namespaceImportClasses', - 'namespaceImportFrom', 'namespaceImportMethods', 'nargs', 'nchar', - 'ncol', 'new\.env', 'ngettext', 'nlevels', 'noquote', 'norm', - 'normalizePath', 'nrow', 'numeric_version', 'nzchar', 'objects', - 'oldClass', 'on\.exit', 'open', 'open\.connection', 'open\.srcfile', - 'open\.srcfilealias', 'open\.srcfilecopy', 'options', 'order', - 'ordered', 'outer', 'packBits', 'packageEvent', - 'packageHasNamespace', 'packageStartupMessage', 'package_version', - 'pairlist', 'parent\.env', 'parent\.frame', 'parse', - 'parseNamespaceFile', 'paste', 'paste0', 'path\.expand', - 'path\.package', 'pipe', 'pmatch', 'pmax', 'pmax\.int', 'pmin', - 'pmin\.int', 'polyroot', 'pos\.to\.env', 'pretty', 'pretty\.default', - 'prettyNum', 'print', 'print\.AsIs', 'print\.DLLInfo', - 'print\.DLLInfoList', 'print\.DLLRegisteredRoutines', 'print\.Date', - 'print\.NativeRoutineList', 'print\.POSIXct', 'print\.POSIXlt', - 'print\.by', 'print\.condition', 'print\.connection', - 'print\.data\.frame', 'print\.default', 'print\.difftime', - 'print\.factor', 'print\.function', 'print\.hexmode', - 'print\.libraryIQR', 'print\.listof', 'print\.noquote', - 'print\.numeric_version', 'print\.octmode', 'print\.packageInfo', - 'print\.proc_time', 'print\.restart', 'print\.rle', - 'print\.simple\.list', 'print\.srcfile', 'print\.srcref', - 'print\.summary\.table', 'print\.summaryDefault', 'print\.table', - 'print\.warnings', 'prmatrix', 'proc\.time', 'prod', 'prop\.table', - 'provideDimnames', 'psigamma', 'pushBack', 'pushBackLength', 'q', - 'qr', 'qr\.Q', 'qr\.R', 'qr\.X', 'qr\.coef', 'qr\.default', 'qr\.fitted', - 'qr\.qty', 'qr\.qy', 'qr\.resid', 'qr\.solve', 'quarters', - 'quarters\.Date', 'quarters\.POSIXt', 'quit', 'quote', 'range', - 'range\.default', 'rank', 'rapply', 'raw', 'rawConnection', - 'rawConnectionValue', 'rawShift', 'rawToBits', 'rawToChar', 'rbind', - 'rbind\.data\.frame', 'rcond', 'read\.dcf', 'readBin', 'readChar', - 'readLines', 'readRDS', 'readRenviron', 'readline', 'reg\.finalizer', - 'regexec', 'regexpr', 'registerS3method', 'registerS3methods', - 'regmatches', 'remove', 'removeTaskCallback', 'rep', 'rep\.Date', - 'rep\.POSIXct', 'rep\.POSIXlt', 'rep\.factor', 'rep\.int', - 'rep\.numeric_version', 'rep_len', 'replace', 'replicate', - 'requireNamespace', 'restartDescription', 'restartFormals', - 'retracemem', 'rev', 'rev\.default', 'rle', 'rm', 'round', - 'round\.Date', 'round\.POSIXt', 'row', 'row\.names', - 'row\.names\.data\.frame', 'row\.names\.default', 'rowMeans', 'rowSums', - 'rownames', 'rowsum', 'rowsum\.data\.frame', 'rowsum\.default', - 'sQuote', 'sample', 'sample\.int', 'sapply', 'save', 'save\.image', - 'saveRDS', 'scale', 'scale\.default', 'scan', 'search', - 'searchpaths', 'seek', 'seek\.connection', 'seq', 'seq\.Date', - 'seq\.POSIXt', 'seq\.default', 'seq\.int', 'seq_along', 'seq_len', - 'sequence', 'serialize', 'set\.seed', 'setHook', 'setNamespaceInfo', - 'setSessionTimeLimit', 'setTimeLimit', 'setdiff', 'setequal', - 'setwd', 'shQuote', 'showConnections', 'sign', 'signalCondition', - 'signif', 'simpleCondition', 'simpleError', 'simpleMessage', - 'simpleWarning', 'simplify2array', 'sin', 'single', - 'sinh', 'sink', 'sink\.number', 'slice\.index', 'socketConnection', - 'socketSelect', 'solve', 'solve\.default', 'solve\.qr', 'sort', - 'sort\.POSIXlt', 'sort\.default', 'sort\.int', 'sort\.list', 'split', - 'split\.Date', 'split\.POSIXct', 'split\.data\.frame', 'split\.default', - 'sprintf', 'sqrt', 'srcfile', 
'srcfilealias', 'srcfilecopy', - 'srcref', 'standardGeneric', 'stderr', 'stdin', 'stdout', 'stop', - 'stopifnot', 'storage\.mode', 'strftime', 'strptime', 'strsplit', - 'strtoi', 'strtrim', 'structure', 'strwrap', 'sub', 'subset', - 'subset\.data\.frame', 'subset\.default', 'subset\.matrix', - 'substitute', 'substr', 'substring', 'sum', 'summary', - 'summary\.Date', 'summary\.POSIXct', 'summary\.POSIXlt', - 'summary\.connection', 'summary\.data\.frame', 'summary\.default', - 'summary\.factor', 'summary\.matrix', 'summary\.proc_time', - 'summary\.srcfile', 'summary\.srcref', 'summary\.table', - 'suppressMessages', 'suppressPackageStartupMessages', - 'suppressWarnings', 'svd', 'sweep', 'sys\.call', 'sys\.calls', - 'sys\.frame', 'sys\.frames', 'sys\.function', 'sys\.load\.image', - 'sys\.nframe', 'sys\.on\.exit', 'sys\.parent', 'sys\.parents', - 'sys\.save\.image', 'sys\.source', 'sys\.status', 'system', - 'system\.file', 'system\.time', 'system2', 't', 't\.data\.frame', - 't\.default', 'table', 'tabulate', 'tail', 'tan', 'tanh', 'tapply', - 'taskCallbackManager', 'tcrossprod', 'tempdir', 'tempfile', - 'testPlatformEquivalence', 'textConnection', 'textConnectionValue', - 'toString', 'toString\.default', 'tolower', 'topenv', 'toupper', - 'trace', 'traceback', 'tracemem', 'tracingState', 'transform', - 'transform\.data\.frame', 'transform\.default', 'trigamma', 'trunc', - 'trunc\.Date', 'trunc\.POSIXt', 'truncate', 'truncate\.connection', - 'try', 'tryCatch', 'typeof', 'unclass', 'undebug', 'union', - 'unique', 'unique\.POSIXlt', 'unique\.array', 'unique\.data\.frame', - 'unique\.default', 'unique\.matrix', 'unique\.numeric_version', - 'units', 'units\.difftime', 'unix\.time', 'unlink', 'unlist', - 'unloadNamespace', 'unlockBinding', 'unname', 'unserialize', - 'unsplit', 'untrace', 'untracemem', 'unz', 'upper\.tri', 'url', - 'utf8ToInt', 'vapply', 'version', 'warning', 'warnings', 'weekdays', - 'weekdays\.Date', 'weekdays\.POSIXt', 'which', 'which\.max', - 'which\.min', 'with', 'with\.default', 'withCallingHandlers', - 'withRestarts', 'withVisible', 'within', 'within\.data\.frame', - 'within\.list', 'write', 'write\.dcf', 'writeBin', 'writeChar', - 'writeLines', 'xor', 'xor\.hexmode', 'xor\.octmode', - 'xpdrows\.data\.frame', 'xtfrm', 'xtfrm\.AsIs', 'xtfrm\.Date', - 'xtfrm\.POSIXct', 'xtfrm\.POSIXlt', 'xtfrm\.Surv', 'xtfrm\.default', - 'xtfrm\.difftime', 'xtfrm\.factor', 'xtfrm\.numeric_version', 'xzfile', - 'zapsmall' - ] + builtins_base = ( + 'Arg', 'Conj', 'Cstack_info', 'Encoding', 'FALSE', + 'Filter', 'Find', 'I', 'ISOdate', 'ISOdatetime', 'Im', 'Inf', + 'La.svd', 'Map', 'Math.Date', 'Math.POSIXt', 'Math.data.frame', + 'Math.difftime', 'Math.factor', 'Mod', 'NA_character_', + 'NA_complex_', 'NA_real_', 'NCOL', 'NROW', 'NULLNA_integer_', 'NaN', + 'Negate', 'NextMethod', 'Ops.Date', 'Ops.POSIXt', 'Ops.data.frame', + 'Ops.difftime', 'Ops.factor', 'Ops.numeric_version', 'Ops.ordered', + 'Position', 'R.Version', 'R.home', 'R.version', 'R.version.string', + 'RNGkind', 'RNGversion', 'R_system_version', 'Re', 'Recall', + 'Reduce', 'Summary.Date', 'Summary.POSIXct', 'Summary.POSIXlt', + 'Summary.data.frame', 'Summary.difftime', 'Summary.factor', + 'Summary.numeric_version', 'Summary.ordered', 'Sys.Date', + 'Sys.chmod', 'Sys.getenv', 'Sys.getlocale', 'Sys.getpid', + 'Sys.glob', 'Sys.info', 'Sys.localeconv', 'Sys.readlink', + 'Sys.setFileTime', 'Sys.setenv', 'Sys.setlocale', 'Sys.sleep', + 'Sys.time', 'Sys.timezone', 'Sys.umask', 'Sys.unsetenv', + 'Sys.which', 'TRUE', 'UseMethod', 'Vectorize', 
'abbreviate', 'abs', + 'acos', 'acosh', 'addNA', 'addTaskCallback', 'agrep', 'alist', + 'all', 'all.equal', 'all.equal.POSIXct', 'all.equal.character', + 'all.equal.default', 'all.equal.factor', 'all.equal.formula', + 'all.equal.language', 'all.equal.list', 'all.equal.numeric', + 'all.equal.raw', 'all.names', 'all.vars', 'any', 'anyDuplicated', + 'anyDuplicated.array', 'anyDuplicated.data.frame', + 'anyDuplicated.default', 'anyDuplicated.matrix', 'aperm', + 'aperm.default', 'aperm.table', 'append', 'apply', 'args', + 'arrayInd', 'as.Date', 'as.Date.POSIXct', 'as.Date.POSIXlt', + 'as.Date.character', 'as.Date.date', 'as.Date.dates', + 'as.Date.default', 'as.Date.factor', 'as.Date.numeric', + 'as.POSIXct', 'as.POSIXct.Date', 'as.POSIXct.POSIXlt', + 'as.POSIXct.date', 'as.POSIXct.dates', 'as.POSIXct.default', + 'as.POSIXct.numeric', 'as.POSIXlt', 'as.POSIXlt.Date', + 'as.POSIXlt.POSIXct', 'as.POSIXlt.character', 'as.POSIXlt.date', + 'as.POSIXlt.dates', 'as.POSIXlt.default', 'as.POSIXlt.factor', + 'as.POSIXlt.numeric', 'as.array', 'as.array.default', 'as.call', + 'as.character', 'as.character.Date', 'as.character.POSIXt', + 'as.character.condition', 'as.character.default', + 'as.character.error', 'as.character.factor', 'as.character.hexmode', + 'as.character.numeric_version', 'as.character.octmode', + 'as.character.srcref', 'as.complex', 'as.data.frame', + 'as.data.frame.AsIs', 'as.data.frame.Date', 'as.data.frame.POSIXct', + 'as.data.frame.POSIXlt', 'as.data.frame.array', + 'as.data.frame.character', 'as.data.frame.complex', + 'as.data.frame.data.frame', 'as.data.frame.default', + 'as.data.frame.difftime', 'as.data.frame.factor', + 'as.data.frame.integer', 'as.data.frame.list', + 'as.data.frame.logical', 'as.data.frame.matrix', + 'as.data.frame.model.matrix', 'as.data.frame.numeric', + 'as.data.frame.numeric_version', 'as.data.frame.ordered', + 'as.data.frame.raw', 'as.data.frame.table', 'as.data.frame.ts', + 'as.data.frame.vector', 'as.difftime', 'as.double', + 'as.double.POSIXlt', 'as.double.difftime', 'as.environment', + 'as.expression', 'as.expression.default', 'as.factor', + 'as.function', 'as.function.default', 'as.hexmode', 'as.integer', + 'as.list', 'as.list.Date', 'as.list.POSIXct', 'as.list.data.frame', + 'as.list.default', 'as.list.environment', 'as.list.factor', + 'as.list.function', 'as.list.numeric_version', 'as.logical', + 'as.logical.factor', 'as.matrix', 'as.matrix.POSIXlt', + 'as.matrix.data.frame', 'as.matrix.default', 'as.matrix.noquote', + 'as.name', 'as.null', 'as.null.default', 'as.numeric', + 'as.numeric_version', 'as.octmode', 'as.ordered', + 'as.package_version', 'as.pairlist', 'as.qr', 'as.raw', 'as.single', + 'as.single.default', 'as.symbol', 'as.table', 'as.table.default', + 'as.vector', 'as.vector.factor', 'asNamespace', 'asS3', 'asS4', + 'asin', 'asinh', 'assign', 'atan', 'atan2', 'atanh', + 'attachNamespace', 'attr', 'attr.all.equal', 'attributes', + 'autoload', 'autoloader', 'backsolve', 'baseenv', 'basename', + 'besselI', 'besselJ', 'besselK', 'besselY', 'beta', + 'bindingIsActive', 'bindingIsLocked', 'bindtextdomain', 'bitwAnd', + 'bitwNot', 'bitwOr', 'bitwShiftL', 'bitwShiftR', 'bitwXor', 'body', + 'bquote', 'browser', 'browserCondition', 'browserSetDebug', + 'browserText', 'builtins', 'by', 'by.data.frame', 'by.default', + 'bzfile', 'c.Date', 'c.POSIXct', 'c.POSIXlt', 'c.noquote', + 'c.numeric_version', 'call', 'callCC', 'capabilities', 'casefold', + 'cat', 'category', 'cbind', 'cbind.data.frame', 'ceiling', + 'char.expand', 'charToRaw', 'charmatch', 
'chartr', 'check_tzones', + 'chol', 'chol.default', 'chol2inv', 'choose', 'class', + 'clearPushBack', 'close', 'close.connection', 'close.srcfile', + 'close.srcfilealias', 'closeAllConnections', 'col', 'colMeans', + 'colSums', 'colnames', 'commandArgs', 'comment', 'computeRestarts', + 'conditionCall', 'conditionCall.condition', 'conditionMessage', + 'conditionMessage.condition', 'conflicts', 'contributors', 'cos', + 'cosh', 'crossprod', 'cummax', 'cummin', 'cumprod', 'cumsum', 'cut', + 'cut.Date', 'cut.POSIXt', 'cut.default', 'dQuote', 'data.class', + 'data.matrix', 'date', 'debug', 'debugonce', + 'default.stringsAsFactors', 'delayedAssign', 'deparse', 'det', + 'determinant', 'determinant.matrix', 'dget', 'diag', 'diff', + 'diff.Date', 'diff.POSIXt', 'diff.default', 'difftime', 'digamma', + 'dim', 'dim.data.frame', 'dimnames', 'dimnames.data.frame', 'dir', + 'dir.create', 'dirname', 'do.call', 'dput', 'drop', 'droplevels', + 'droplevels.data.frame', 'droplevels.factor', 'dump', 'duplicated', + 'duplicated.POSIXlt', 'duplicated.array', 'duplicated.data.frame', + 'duplicated.default', 'duplicated.matrix', + 'duplicated.numeric_version', 'dyn.load', 'dyn.unload', 'eapply', + 'eigen', 'else', 'emptyenv', 'enc2native', 'enc2utf8', + 'encodeString', 'enquote', 'env.profile', 'environment', + 'environmentIsLocked', 'environmentName', 'eval', 'eval.parent', + 'evalq', 'exists', 'exp', 'expand.grid', 'expm1', 'expression', + 'factor', 'factorial', 'fifo', 'file', 'file.access', 'file.append', + 'file.choose', 'file.copy', 'file.create', 'file.exists', + 'file.info', 'file.link', 'file.path', 'file.remove', 'file.rename', + 'file.show', 'file.symlink', 'find.package', 'findInterval', + 'findPackageEnv', 'findRestart', 'floor', 'flush', + 'flush.connection', 'force', 'formals', 'format', + 'format.AsIs', 'format.Date', 'format.POSIXct', 'format.POSIXlt', + 'format.data.frame', 'format.default', 'format.difftime', + 'format.factor', 'format.hexmode', 'format.info', + 'format.libraryIQR', 'format.numeric_version', 'format.octmode', + 'format.packageInfo', 'format.pval', 'format.summaryDefault', + 'formatC', 'formatDL', 'forwardsolve', 'gamma', 'gc', 'gc.time', + 'gcinfo', 'gctorture', 'gctorture2', 'get', 'getAllConnections', + 'getCallingDLL', 'getCallingDLLe', 'getConnection', + 'getDLLRegisteredRoutines', 'getDLLRegisteredRoutines.DLLInfo', + 'getDLLRegisteredRoutines.character', 'getElement', + 'getExportedValue', 'getHook', 'getLoadedDLLs', 'getNamespace', + 'getNamespaceExports', 'getNamespaceImports', 'getNamespaceInfo', + 'getNamespaceName', 'getNamespaceUsers', 'getNamespaceVersion', + 'getNativeSymbolInfo', 'getOption', 'getRversion', 'getSrcLines', + 'getTaskCallbackNames', 'geterrmessage', 'gettext', 'gettextf', + 'getwd', 'gl', 'globalenv', 'gregexpr', 'grep', 'grepRaw', 'grepl', + 'gsub', 'gzcon', 'gzfile', 'head', 'iconv', 'iconvlist', + 'icuSetCollate', 'identical', 'identity', 'ifelse', 'importIntoEnv', + 'in', 'inherits', 'intToBits', 'intToUtf8', 'interaction', 'interactive', + 'intersect', 'inverse.rle', 'invisible', 'invokeRestart', + 'invokeRestartInteractively', 'is.R', 'is.array', 'is.atomic', + 'is.call', 'is.character', 'is.complex', 'is.data.frame', + 'is.double', 'is.element', 'is.environment', 'is.expression', + 'is.factor', 'is.finite', 'is.function', 'is.infinite', + 'is.integer', 'is.language', 'is.list', 'is.loaded', 'is.logical', + 'is.matrix', 'is.na', 'is.na.POSIXlt', 'is.na.data.frame', + 'is.na.numeric_version', 'is.name', 'is.nan', 'is.null', + 'is.numeric', 
'is.numeric.Date', 'is.numeric.POSIXt', + 'is.numeric.difftime', 'is.numeric_version', 'is.object', + 'is.ordered', 'is.package_version', 'is.pairlist', 'is.primitive', + 'is.qr', 'is.raw', 'is.recursive', 'is.single', 'is.symbol', + 'is.table', 'is.unsorted', 'is.vector', 'isBaseNamespace', + 'isIncomplete', 'isNamespace', 'isOpen', 'isRestart', 'isS4', + 'isSeekable', 'isSymmetric', 'isSymmetric.matrix', 'isTRUE', + 'isatty', 'isdebugged', 'jitter', 'julian', 'julian.Date', + 'julian.POSIXt', 'kappa', 'kappa.default', 'kappa.lm', 'kappa.qr', + 'kronecker', 'l10n_info', 'labels', 'labels.default', 'lapply', + 'lazyLoad', 'lazyLoadDBexec', 'lazyLoadDBfetch', 'lbeta', 'lchoose', + 'length', 'length.POSIXlt', 'letters', 'levels', 'levels.default', + 'lfactorial', 'lgamma', 'library.dynam', 'library.dynam.unload', + 'licence', 'license', 'list.dirs', 'list.files', 'list2env', 'load', + 'loadNamespace', 'loadedNamespaces', 'loadingNamespaceInfo', + 'local', 'lockBinding', 'lockEnvironment', 'log', 'log10', 'log1p', + 'log2', 'logb', 'lower.tri', 'ls', 'make.names', 'make.unique', + 'makeActiveBinding', 'mapply', 'margin.table', 'mat.or.vec', + 'match', 'match.arg', 'match.call', 'match.fun', 'max', 'max.col', + 'mean', 'mean.Date', 'mean.POSIXct', 'mean.POSIXlt', 'mean.default', + 'mean.difftime', 'mem.limits', 'memCompress', 'memDecompress', + 'memory.profile', 'merge', 'merge.data.frame', 'merge.default', + 'message', 'mget', 'min', 'missing', 'mode', 'month.abb', + 'month.name', 'months', 'months.Date', 'months.POSIXt', + 'months.abb', 'months.nameletters', 'names', 'names.POSIXlt', + 'namespaceExport', 'namespaceImport', 'namespaceImportClasses', + 'namespaceImportFrom', 'namespaceImportMethods', 'nargs', 'nchar', + 'ncol', 'new.env', 'ngettext', 'nlevels', 'noquote', 'norm', + 'normalizePath', 'nrow', 'numeric_version', 'nzchar', 'objects', + 'oldClass', 'on.exit', 'open', 'open.connection', 'open.srcfile', + 'open.srcfilealias', 'open.srcfilecopy', 'options', 'order', + 'ordered', 'outer', 'packBits', 'packageEvent', + 'packageHasNamespace', 'packageStartupMessage', 'package_version', + 'pairlist', 'parent.env', 'parent.frame', 'parse', + 'parseNamespaceFile', 'paste', 'paste0', 'path.expand', + 'path.package', 'pipe', 'pmatch', 'pmax', 'pmax.int', 'pmin', + 'pmin.int', 'polyroot', 'pos.to.env', 'pretty', 'pretty.default', + 'prettyNum', 'print', 'print.AsIs', 'print.DLLInfo', + 'print.DLLInfoList', 'print.DLLRegisteredRoutines', 'print.Date', + 'print.NativeRoutineList', 'print.POSIXct', 'print.POSIXlt', + 'print.by', 'print.condition', 'print.connection', + 'print.data.frame', 'print.default', 'print.difftime', + 'print.factor', 'print.function', 'print.hexmode', + 'print.libraryIQR', 'print.listof', 'print.noquote', + 'print.numeric_version', 'print.octmode', 'print.packageInfo', + 'print.proc_time', 'print.restart', 'print.rle', + 'print.simple.list', 'print.srcfile', 'print.srcref', + 'print.summary.table', 'print.summaryDefault', 'print.table', + 'print.warnings', 'prmatrix', 'proc.time', 'prod', 'prop.table', + 'provideDimnames', 'psigamma', 'pushBack', 'pushBackLength', 'q', + 'qr', 'qr.Q', 'qr.R', 'qr.X', 'qr.coef', 'qr.default', 'qr.fitted', + 'qr.qty', 'qr.qy', 'qr.resid', 'qr.solve', 'quarters', + 'quarters.Date', 'quarters.POSIXt', 'quit', 'quote', 'range', + 'range.default', 'rank', 'rapply', 'raw', 'rawConnection', + 'rawConnectionValue', 'rawShift', 'rawToBits', 'rawToChar', 'rbind', + 'rbind.data.frame', 'rcond', 'read.dcf', 'readBin', 'readChar', + 'readLines', 
'readRDS', 'readRenviron', 'readline', 'reg.finalizer', + 'regexec', 'regexpr', 'registerS3method', 'registerS3methods', + 'regmatches', 'remove', 'removeTaskCallback', 'rep', 'rep.Date', + 'rep.POSIXct', 'rep.POSIXlt', 'rep.factor', 'rep.int', + 'rep.numeric_version', 'rep_len', 'replace', 'replicate', + 'requireNamespace', 'restartDescription', 'restartFormals', + 'retracemem', 'rev', 'rev.default', 'rle', 'rm', 'round', + 'round.Date', 'round.POSIXt', 'row', 'row.names', + 'row.names.data.frame', 'row.names.default', 'rowMeans', 'rowSums', + 'rownames', 'rowsum', 'rowsum.data.frame', 'rowsum.default', + 'sQuote', 'sample', 'sample.int', 'sapply', 'save', 'save.image', + 'saveRDS', 'scale', 'scale.default', 'scan', 'search', + 'searchpaths', 'seek', 'seek.connection', 'seq', 'seq.Date', + 'seq.POSIXt', 'seq.default', 'seq.int', 'seq_along', 'seq_len', + 'sequence', 'serialize', 'set.seed', 'setHook', 'setNamespaceInfo', + 'setSessionTimeLimit', 'setTimeLimit', 'setdiff', 'setequal', + 'setwd', 'shQuote', 'showConnections', 'sign', 'signalCondition', + 'signif', 'simpleCondition', 'simpleError', 'simpleMessage', + 'simpleWarning', 'simplify2array', 'sin', 'single', + 'sinh', 'sink', 'sink.number', 'slice.index', 'socketConnection', + 'socketSelect', 'solve', 'solve.default', 'solve.qr', 'sort', + 'sort.POSIXlt', 'sort.default', 'sort.int', 'sort.list', 'split', + 'split.Date', 'split.POSIXct', 'split.data.frame', 'split.default', + 'sprintf', 'sqrt', 'srcfile', 'srcfilealias', 'srcfilecopy', + 'srcref', 'standardGeneric', 'stderr', 'stdin', 'stdout', 'stop', + 'stopifnot', 'storage.mode', 'strftime', 'strptime', 'strsplit', + 'strtoi', 'strtrim', 'structure', 'strwrap', 'sub', 'subset', + 'subset.data.frame', 'subset.default', 'subset.matrix', + 'substitute', 'substr', 'substring', 'sum', 'summary', + 'summary.Date', 'summary.POSIXct', 'summary.POSIXlt', + 'summary.connection', 'summary.data.frame', 'summary.default', + 'summary.factor', 'summary.matrix', 'summary.proc_time', + 'summary.srcfile', 'summary.srcref', 'summary.table', + 'suppressMessages', 'suppressPackageStartupMessages', + 'suppressWarnings', 'svd', 'sweep', 'sys.call', 'sys.calls', + 'sys.frame', 'sys.frames', 'sys.function', 'sys.load.image', + 'sys.nframe', 'sys.on.exit', 'sys.parent', 'sys.parents', + 'sys.save.image', 'sys.source', 'sys.status', 'system', + 'system.file', 'system.time', 'system2', 't', 't.data.frame', + 't.default', 'table', 'tabulate', 'tail', 'tan', 'tanh', 'tapply', + 'taskCallbackManager', 'tcrossprod', 'tempdir', 'tempfile', + 'testPlatformEquivalence', 'textConnection', 'textConnectionValue', + 'toString', 'toString.default', 'tolower', 'topenv', 'toupper', + 'trace', 'traceback', 'tracemem', 'tracingState', 'transform', + 'transform.data.frame', 'transform.default', 'trigamma', 'trunc', + 'trunc.Date', 'trunc.POSIXt', 'truncate', 'truncate.connection', + 'try', 'tryCatch', 'typeof', 'unclass', 'undebug', 'union', + 'unique', 'unique.POSIXlt', 'unique.array', 'unique.data.frame', + 'unique.default', 'unique.matrix', 'unique.numeric_version', + 'units', 'units.difftime', 'unix.time', 'unlink', 'unlist', + 'unloadNamespace', 'unlockBinding', 'unname', 'unserialize', + 'unsplit', 'untrace', 'untracemem', 'unz', 'upper.tri', 'url', + 'utf8ToInt', 'vapply', 'version', 'warning', 'warnings', 'weekdays', + 'weekdays.Date', 'weekdays.POSIXt', 'which', 'which.max', + 'which.min', 'with', 'with.default', 'withCallingHandlers', + 'withRestarts', 'withVisible', 'within', 'within.data.frame', + 'within.list', 
'write', 'write.dcf', 'writeBin', 'writeChar', + 'writeLines', 'xor', 'xor.hexmode', 'xor.octmode', + 'xpdrows.data.frame', 'xtfrm', 'xtfrm.AsIs', 'xtfrm.Date', + 'xtfrm.POSIXct', 'xtfrm.POSIXlt', 'xtfrm.Surv', 'xtfrm.default', + 'xtfrm.difftime', 'xtfrm.factor', 'xtfrm.numeric_version', 'xzfile', + 'zapsmall' + ) tokens = { 'comments': [ @@ -1315,8 +1327,7 @@ class SLexer(RegexLexer): (r'\[{1,2}|\]{1,2}|\(|\)|;|,', Punctuation), ], 'keywords': [ - (r'(' + r'|'.join(builtins_base) + r')' - r'(?![\w\. =])', + (words(builtins_base, suffix=r'(?![\w\. =])'), Keyword.Pseudo), (r'(if|else|for|while|repeat|in|next|break|return|switch|function)' r'(?![\w\.])', @@ -1365,14 +1376,14 @@ class SLexer(RegexLexer): include('statements'), # blocks: (r'\{|\}', Punctuation), - #(r'\{', Punctuation, 'block'), + # (r'\{', Punctuation, 'block'), (r'.', Text), ], - #'block': [ + # 'block': [ # include('statements'), # ('\{', Punctuation, '#push'), # ('\}', Punctuation, '#pop') - #], + # ], 'string_squote': [ (r'([^\'\\]|\\.)*\'', String, '#pop'), ], @@ -1398,7 +1409,7 @@ class BugsLexer(RegexLexer): aliases = ['bugs', 'winbugs', 'openbugs'] filenames = ['*.bug'] - _FUNCTIONS = [ + _FUNCTIONS = ( # Scalar functions 'abs', 'arccos', 'arccosh', 'arcsin', 'arcsinh', 'arctan', 'arctanh', 'cloglog', 'cos', 'cosh', 'cumulative', 'cut', 'density', 'deviance', @@ -1411,8 +1422,8 @@ class BugsLexer(RegexLexer): 'inprod', 'interp.lin', 'inverse', 'logdet', 'mean', 'eigen.vals', 'ode', 'prod', 'p.valueM', 'rank', 'ranked', 'replicate.postM', 'sd', 'sort', 'sum', - ## Special - 'D', 'I', 'F', 'T', 'C'] + # Special + 'D', 'I', 'F', 'T', 'C') """ OpenBUGS built-in functions From http://www.openbugs.info/Manuals/ModelSpecification.html#ContentsAII @@ -1426,27 +1437,26 @@ class BugsLexer(RegexLexer): """ - _DISTRIBUTIONS = ['dbern', 'dbin', 'dcat', 'dnegbin', 'dpois', + _DISTRIBUTIONS = ('dbern', 'dbin', 'dcat', 'dnegbin', 'dpois', 'dhyper', 'dbeta', 'dchisqr', 'ddexp', 'dexp', 'dflat', 'dgamma', 'dgev', 'df', 'dggamma', 'dgpar', 'dloglik', 'dlnorm', 'dlogis', 'dnorm', 'dpar', 'dt', 'dunif', 'dweib', 'dmulti', 'ddirch', 'dmnorm', - 'dmt', 'dwish'] + 'dmt', 'dwish') """ OpenBUGS built-in distributions Functions from http://www.openbugs.info/Manuals/ModelSpecification.html#ContentsAI """ - tokens = { - 'whitespace' : [ + 'whitespace': [ (r"\s+", Text), - ], - 'comments' : [ + ], + 'comments': [ # Comments (r'#.*$', Comment.Single), - ], + ], 'root': [ # Comments include('comments'), @@ -1473,8 +1483,8 @@ class BugsLexer(RegexLexer): (r'\+|-|\*|/', Operator), # Block (r'[{}]', Punctuation), - ] - } + ] + } def analyse_text(text): if re.search(r"^\s*model\s*{", text, re.M): @@ -1482,6 +1492,7 @@ class BugsLexer(RegexLexer): else: return 0.0 + class JagsLexer(RegexLexer): """ Pygments Lexer for JAGS. 
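A recurring change in the hunks above is that keyword collections become tuples and the hand-rolled '(' + '|'.join(...) + ')' patterns are replaced by the words() helper from pygments.lexer. As a minimal sketch of what that helper does (the three-name tuple below is a stand-in, not the real builtins_base), words() defers building the pattern until the lexer class is processed and escapes every entry:

    from pygments.lexer import words

    builtins = ('TRUE', 'FALSE', 'NULL')   # stand-in for the full builtins_base tuple
    deferred = words(builtins, suffix=r'(?![\w. =])')

    # The RegexLexer machinery resolves the deferred pattern via .get(); the
    # result is an optimized, escaped alternation roughly equivalent to
    #     r'(TRUE|FALSE|NULL)(?![\w. =])'
    print(deferred.get())

Because the helper handles escaping and grouping itself, the surrounding constants can stay as plain tuples of literal names.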
@@ -1493,8 +1504,8 @@ class JagsLexer(RegexLexer): aliases = ['jags'] filenames = ['*.jag', '*.bug'] - ## JAGS - _FUNCTIONS = [ + # JAGS + _FUNCTIONS = ( 'abs', 'arccos', 'arccosh', 'arcsin', 'arcsinh', 'arctan', 'arctanh', 'cos', 'cosh', 'cloglog', 'equals', 'exp', 'icloglog', 'ifelse', 'ilogit', 'log', 'logfact', @@ -1503,32 +1514,32 @@ class JagsLexer(RegexLexer): 'logdet', 'max', 'mean', 'min', 'prod', 'sum', 'sd', 'inverse', 'rank', 'sort', 't', 'acos', 'acosh', 'asin', 'asinh', 'atan', # Truncation/Censoring (should I include) - 'T', 'I'] + 'T', 'I') # Distributions with density, probability and quartile functions - _DISTRIBUTIONS = ['[dpq]%s' % x for x in - ['bern', 'beta', 'dchiqsqr', 'ddexp', 'dexp', - 'df', 'gamma', 'gen.gamma', 'logis', 'lnorm', - 'negbin', 'nchisqr', 'norm', 'par', 'pois', 'weib']] + _DISTRIBUTIONS = tuple('[dpq]%s' % x for x in + ('bern', 'beta', 'dchiqsqr', 'ddexp', 'dexp', + 'df', 'gamma', 'gen.gamma', 'logis', 'lnorm', + 'negbin', 'nchisqr', 'norm', 'par', 'pois', 'weib')) # Other distributions without density and probability - _OTHER_DISTRIBUTIONS = [ + _OTHER_DISTRIBUTIONS = ( 'dt', 'dunif', 'dbetabin', 'dbern', 'dbin', 'dcat', 'dhyper', 'ddirch', 'dmnorm', 'dwish', 'dmt', 'dmulti', 'dbinom', 'dchisq', - 'dnbinom', 'dweibull', 'ddirich'] + 'dnbinom', 'dweibull', 'ddirich') tokens = { - 'whitespace' : [ + 'whitespace': [ (r"\s+", Text), - ], - 'names' : [ + ], + 'names': [ # Regular variable names (r'[a-zA-Z][\w.]*\b', Name), - ], - 'comments' : [ + ], + 'comments': [ # do not use stateful comments (r'(?s)/\*.*?\*/', Comment.Multiline), # Comments (r'#.*$', Comment.Single), - ], + ], 'root': [ # Comments include('comments'), @@ -1542,8 +1553,8 @@ class JagsLexer(RegexLexer): # Builtins # Need to use lookahead because . is a valid char (r'(%s)(?=\s*\()' % r'|'.join(_FUNCTIONS - + _DISTRIBUTIONS - + _OTHER_DISTRIBUTIONS), + + _DISTRIBUTIONS + + _OTHER_DISTRIBUTIONS), Name.Builtin), # Names include('names'), @@ -1555,8 +1566,8 @@ class JagsLexer(RegexLexer): # # JAGS includes many more than OpenBUGS (r'\+|-|\*|\/|\|\|[&]{2}|[<>=]=?|\^|%.*?%', Operator), (r'[{}]', Punctuation), - ] - } + ] + } def analyse_text(text): if re.search(r'^\s*model\s*\{', text, re.M): @@ -1569,6 +1580,7 @@ class JagsLexer(RegexLexer): else: return 0 + class StanLexer(RegexLexer): """Pygments Lexer for Stan models. 
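One detail worth noting in the JagsLexer hunk above: the builtin-name rule concatenates the three groups with plain tuple addition (_FUNCTIONS + _DISTRIBUTIONS + _OTHER_DISTRIBUTIONS) before joining them, and the _DISTRIBUTIONS entries are regex fragments such as '[dpq]norm' rather than literal names, which is presumably why this rule keeps an explicit '|'.join instead of switching to words(). A minimal, self-contained sketch of the same shape (the short tuples are stand-ins for the real lists):

    import re

    _FUNCTIONS = ('abs', 'exp')
    _DISTRIBUTIONS = tuple('[dpq]%s' % x for x in ('norm', 'pois'))
    _OTHER_DISTRIBUTIONS = ('dt', 'dunif')

    # Same shape as the rule in the diff: a name only counts as a builtin
    # when it is followed by an opening parenthesis.
    pattern = r'(%s)(?=\s*\()' % r'|'.join(
        _FUNCTIONS + _DISTRIBUTIONS + _OTHER_DISTRIBUTIONS)

    print(re.findall(pattern, 'y ~ dnorm(mu, tau); z <- abs(x)'))
    # -> ['dnorm', 'abs']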
@@ -1583,14 +1595,14 @@ class StanLexer(RegexLexer): filenames = ['*.stan'] tokens = { - 'whitespace' : [ + 'whitespace': [ (r"\s+", Text), - ], - 'comments' : [ + ], + 'comments': [ (r'(?s)/\*.*?\*/', Comment.Multiline), # Comments (r'(//|#).*$', Comment.Single), - ], + ], 'root': [ # Stan is more restrictive on strings than this regex (r'"[^"]*"', String), @@ -1634,8 +1646,8 @@ class StanLexer(RegexLexer): (r"\+|-|\.?\*|\.?/|\\|'|\^|==?|!=?|<=?|>=?|\|\||&&", Operator), # Block delimiters (r'[{}]', Punctuation), - ] - } + ] + } def analyse_text(text): if re.search(r'^\s*parameters\s*\{', text, re.M): @@ -1657,226 +1669,228 @@ class IDLLexer(RegexLexer): flags = re.IGNORECASE | re.MULTILINE - _RESERVED = ['and', 'begin', 'break', 'case', 'common', 'compile_opt', - 'continue', 'do', 'else', 'end', 'endcase', 'elseelse', - 'endfor', 'endforeach', 'endif', 'endrep', 'endswitch', - 'endwhile', 'eq', 'for', 'foreach', 'forward_function', - 'function', 'ge', 'goto', 'gt', 'if', 'inherits', 'le', - 'lt', 'mod', 'ne', 'not', 'of', 'on_ioerror', 'or', 'pro', - 'repeat', 'switch', 'then', 'until', 'while', 'xor'] + _RESERVED = ( + 'and', 'begin', 'break', 'case', 'common', 'compile_opt', + 'continue', 'do', 'else', 'end', 'endcase', 'elseelse', + 'endfor', 'endforeach', 'endif', 'endrep', 'endswitch', + 'endwhile', 'eq', 'for', 'foreach', 'forward_function', + 'function', 'ge', 'goto', 'gt', 'if', 'inherits', 'le', + 'lt', 'mod', 'ne', 'not', 'of', 'on_ioerror', 'or', 'pro', + 'repeat', 'switch', 'then', 'until', 'while', 'xor') """Reserved words from: http://www.exelisvis.com/docs/reswords.html""" - _BUILTIN_LIB = ['abs', 'acos', 'adapt_hist_equal', 'alog', 'alog10', - 'amoeba', 'annotate', 'app_user_dir', 'app_user_dir_query', - 'arg_present', 'array_equal', 'array_indices', 'arrow', - 'ascii_template', 'asin', 'assoc', 'atan', 'axis', - 'a_correlate', 'bandpass_filter', 'bandreject_filter', - 'barplot', 'bar_plot', 'beseli', 'beselj', 'beselk', - 'besely', 'beta', 'bilinear', 'binary_template', 'bindgen', - 'binomial', 'bin_date', 'bit_ffs', 'bit_population', - 'blas_axpy', 'blk_con', 'box_cursor', 'breakpoint', - 'broyden', 'butterworth', 'bytarr', 'byte', 'byteorder', - 'bytscl', 'caldat', 'calendar', 'call_external', - 'call_function', 'call_method', 'call_procedure', 'canny', - 'catch', 'cd', 'cdf_\w*', 'ceil', 'chebyshev', - 'check_math', - 'chisqr_cvf', 'chisqr_pdf', 'choldc', 'cholsol', 'cindgen', - 'cir_3pnt', 'close', 'cluster', 'cluster_tree', 'clust_wts', - 'cmyk_convert', 'colorbar', 'colorize_sample', - 'colormap_applicable', 'colormap_gradient', - 'colormap_rotation', 'colortable', 'color_convert', - 'color_exchange', 'color_quan', 'color_range_map', 'comfit', - 'command_line_args', 'complex', 'complexarr', 'complexround', - 'compute_mesh_normals', 'cond', 'congrid', 'conj', - 'constrained_min', 'contour', 'convert_coord', 'convol', - 'convol_fft', 'coord2to3', 'copy_lun', 'correlate', 'cos', - 'cosh', 'cpu', 'cramer', 'create_cursor', 'create_struct', - 'create_view', 'crossp', 'crvlength', 'cti_test', - 'ct_luminance', 'cursor', 'curvefit', 'cvttobm', 'cv_coord', - 'cw_animate', 'cw_animate_getp', 'cw_animate_load', - 'cw_animate_run', 'cw_arcball', 'cw_bgroup', 'cw_clr_index', - 'cw_colorsel', 'cw_defroi', 'cw_field', 'cw_filesel', - 'cw_form', 'cw_fslider', 'cw_light_editor', - 'cw_light_editor_get', 'cw_light_editor_set', 'cw_orient', - 'cw_palette_editor', 'cw_palette_editor_get', - 'cw_palette_editor_set', 'cw_pdmenu', 'cw_rgbslider', - 'cw_tmpl', 'cw_zoom', 'c_correlate', 
'dblarr', 'db_exists', - 'dcindgen', 'dcomplex', 'dcomplexarr', 'define_key', - 'define_msgblk', 'define_msgblk_from_file', 'defroi', - 'defsysv', 'delvar', 'dendrogram', 'dendro_plot', 'deriv', - 'derivsig', 'determ', 'device', 'dfpmin', 'diag_matrix', - 'dialog_dbconnect', 'dialog_message', 'dialog_pickfile', - 'dialog_printersetup', 'dialog_printjob', - 'dialog_read_image', 'dialog_write_image', 'digital_filter', - 'dilate', 'dindgen', 'dissolve', 'dist', 'distance_measure', - 'dlm_load', 'dlm_register', 'doc_library', 'double', - 'draw_roi', 'edge_dog', 'efont', 'eigenql', 'eigenvec', - 'ellipse', 'elmhes', 'emboss', 'empty', 'enable_sysrtn', - 'eof', 'eos_\w*', 'erase', 'erf', 'erfc', 'erfcx', - 'erode', 'errorplot', 'errplot', 'estimator_filter', - 'execute', 'exit', 'exp', 'expand', 'expand_path', 'expint', - 'extrac', 'extract_slice', 'factorial', 'fft', 'filepath', - 'file_basename', 'file_chmod', 'file_copy', 'file_delete', - 'file_dirname', 'file_expand_path', 'file_info', - 'file_lines', 'file_link', 'file_mkdir', 'file_move', - 'file_poll_input', 'file_readlink', 'file_same', - 'file_search', 'file_test', 'file_which', 'findgen', - 'finite', 'fix', 'flick', 'float', 'floor', 'flow3', - 'fltarr', 'flush', 'format_axis_values', 'free_lun', - 'fstat', 'fulstr', 'funct', 'fv_test', 'fx_root', - 'fz_roots', 'f_cvf', 'f_pdf', 'gamma', 'gamma_ct', - 'gauss2dfit', 'gaussfit', 'gaussian_function', 'gaussint', - 'gauss_cvf', 'gauss_pdf', 'gauss_smooth', 'getenv', - 'getwindows', 'get_drive_list', 'get_dxf_objects', - 'get_kbrd', 'get_login_info', 'get_lun', 'get_screen_size', - 'greg2jul', 'grib_\w*', 'grid3', 'griddata', - 'grid_input', 'grid_tps', 'gs_iter', - 'h5[adfgirst]_\w*', 'h5_browser', 'h5_close', - 'h5_create', 'h5_get_libversion', 'h5_open', 'h5_parse', - 'hanning', 'hash', 'hdf_\w*', 'heap_free', - 'heap_gc', 'heap_nosave', 'heap_refcount', 'heap_save', - 'help', 'hilbert', 'histogram', 'hist_2d', 'hist_equal', - 'hls', 'hough', 'hqr', 'hsv', 'h_eq_ct', 'h_eq_int', - 'i18n_multibytetoutf8', 'i18n_multibytetowidechar', - 'i18n_utf8tomultibyte', 'i18n_widechartomultibyte', - 'ibeta', 'icontour', 'iconvertcoord', 'idelete', 'identity', - 'idlexbr_assistant', 'idlitsys_createtool', 'idl_base64', - 'idl_validname', 'iellipse', 'igamma', 'igetcurrent', - 'igetdata', 'igetid', 'igetproperty', 'iimage', 'image', - 'image_cont', 'image_statistics', 'imaginary', 'imap', - 'indgen', 'intarr', 'interpol', 'interpolate', - 'interval_volume', 'int_2d', 'int_3d', 'int_tabulated', - 'invert', 'ioctl', 'iopen', 'iplot', 'ipolygon', - 'ipolyline', 'iputdata', 'iregister', 'ireset', 'iresolve', - 'irotate', 'ir_filter', 'isa', 'isave', 'iscale', - 'isetcurrent', 'isetproperty', 'ishft', 'isocontour', - 'isosurface', 'isurface', 'itext', 'itranslate', 'ivector', - 'ivolume', 'izoom', 'i_beta', 'journal', 'json_parse', - 'json_serialize', 'jul2greg', 'julday', 'keyword_set', - 'krig2d', 'kurtosis', 'kw_test', 'l64indgen', 'label_date', - 'label_region', 'ladfit', 'laguerre', 'laplacian', - 'la_choldc', 'la_cholmprove', 'la_cholsol', 'la_determ', - 'la_eigenproblem', 'la_eigenql', 'la_eigenvec', 'la_elmhes', - 'la_gm_linear_model', 'la_hqr', 'la_invert', - 'la_least_squares', 'la_least_square_equality', - 'la_linear_equation', 'la_ludc', 'la_lumprove', 'la_lusol', - 'la_svd', 'la_tridc', 'la_trimprove', 'la_triql', - 'la_trired', 'la_trisol', 'least_squares_filter', 'leefilt', - 'legend', 'legendre', 'linbcg', 'lindgen', 'linfit', - 'linkimage', 'list', 'll_arc_distance', 'lmfit', 'lmgr', - 
'lngamma', 'lnp_test', 'loadct', 'locale_get', - 'logical_and', 'logical_or', 'logical_true', 'lon64arr', - 'lonarr', 'long', 'long64', 'lsode', 'ludc', 'lumprove', - 'lusol', 'lu_complex', 'machar', 'make_array', 'make_dll', - 'make_rt', 'map', 'mapcontinents', 'mapgrid', 'map_2points', - 'map_continents', 'map_grid', 'map_image', 'map_patch', - 'map_proj_forward', 'map_proj_image', 'map_proj_info', - 'map_proj_init', 'map_proj_inverse', 'map_set', - 'matrix_multiply', 'matrix_power', 'max', 'md_test', - 'mean', 'meanabsdev', 'mean_filter', 'median', 'memory', - 'mesh_clip', 'mesh_decimate', 'mesh_issolid', 'mesh_merge', - 'mesh_numtriangles', 'mesh_obj', 'mesh_smooth', - 'mesh_surfacearea', 'mesh_validate', 'mesh_volume', - 'message', 'min', 'min_curve_surf', 'mk_html_help', - 'modifyct', 'moment', 'morph_close', 'morph_distance', - 'morph_gradient', 'morph_hitormiss', 'morph_open', - 'morph_thin', 'morph_tophat', 'multi', 'm_correlate', - 'ncdf_\w*', 'newton', 'noise_hurl', 'noise_pick', - 'noise_scatter', 'noise_slur', 'norm', 'n_elements', - 'n_params', 'n_tags', 'objarr', 'obj_class', 'obj_destroy', - 'obj_hasmethod', 'obj_isa', 'obj_new', 'obj_valid', - 'online_help', 'on_error', 'open', 'oplot', 'oploterr', - 'parse_url', 'particle_trace', 'path_cache', 'path_sep', - 'pcomp', 'plot', 'plot3d', 'ploterr', 'plots', 'plot_3dbox', - 'plot_field', 'pnt_line', 'point_lun', 'polarplot', - 'polar_contour', 'polar_surface', 'poly', 'polyfill', - 'polyfillv', 'polygon', 'polyline', 'polyshade', 'polywarp', - 'poly_2d', 'poly_area', 'poly_fit', 'popd', 'powell', - 'pref_commit', 'pref_get', 'pref_set', 'prewitt', 'primes', - 'print', 'printd', 'product', 'profile', 'profiler', - 'profiles', 'project_vol', 'psafm', 'pseudo', - 'ps_show_fonts', 'ptrarr', 'ptr_free', 'ptr_new', - 'ptr_valid', 'pushd', 'p_correlate', 'qgrid3', 'qhull', - 'qromb', 'qromo', 'qsimp', 'query_ascii', 'query_bmp', - 'query_csv', 'query_dicom', 'query_gif', 'query_image', - 'query_jpeg', 'query_jpeg2000', 'query_mrsid', 'query_pict', - 'query_png', 'query_ppm', 'query_srf', 'query_tiff', - 'query_wav', 'radon', 'randomn', 'randomu', 'ranks', - 'rdpix', 'read', 'reads', 'readu', 'read_ascii', - 'read_binary', 'read_bmp', 'read_csv', 'read_dicom', - 'read_gif', 'read_image', 'read_interfile', 'read_jpeg', - 'read_jpeg2000', 'read_mrsid', 'read_pict', 'read_png', - 'read_ppm', 'read_spr', 'read_srf', 'read_sylk', - 'read_tiff', 'read_wav', 'read_wave', 'read_x11_bitmap', - 'read_xwd', 'real_part', 'rebin', 'recall_commands', - 'recon3', 'reduce_colors', 'reform', 'region_grow', - 'register_cursor', 'regress', 'replicate', - 'replicate_inplace', 'resolve_all', 'resolve_routine', - 'restore', 'retall', 'return', 'reverse', 'rk4', 'roberts', - 'rot', 'rotate', 'round', 'routine_filepath', - 'routine_info', 'rs_test', 'r_correlate', 'r_test', - 'save', 'savgol', 'scale3', 'scale3d', 'scope_level', - 'scope_traceback', 'scope_varfetch', 'scope_varname', - 'search2d', 'search3d', 'sem_create', 'sem_delete', - 'sem_lock', 'sem_release', 'setenv', 'set_plot', - 'set_shading', 'sfit', 'shade_surf', 'shade_surf_irr', - 'shade_volume', 'shift', 'shift_diff', 'shmdebug', 'shmmap', - 'shmunmap', 'shmvar', 'show3', 'showfont', 'simplex', 'sin', - 'sindgen', 'sinh', 'size', 'skewness', 'skip_lun', - 'slicer3', 'slide_image', 'smooth', 'sobel', 'socket', - 'sort', 'spawn', 'spher_harm', 'sph_4pnt', 'sph_scat', - 'spline', 'spline_p', 'spl_init', 'spl_interp', 'sprsab', - 'sprsax', 'sprsin', 'sprstp', 'sqrt', 'standardize', - 'stddev', 
'stop', 'strarr', 'strcmp', 'strcompress', - 'streamline', 'stregex', 'stretch', 'string', 'strjoin', - 'strlen', 'strlowcase', 'strmatch', 'strmessage', 'strmid', - 'strpos', 'strput', 'strsplit', 'strtrim', 'struct_assign', - 'struct_hide', 'strupcase', 'surface', 'surfr', 'svdc', - 'svdfit', 'svsol', 'swap_endian', 'swap_endian_inplace', - 'symbol', 'systime', 's_test', 't3d', 'tag_names', 'tan', - 'tanh', 'tek_color', 'temporary', 'tetra_clip', - 'tetra_surface', 'tetra_volume', 'text', 'thin', 'threed', - 'timegen', 'time_test2', 'tm_test', 'total', 'trace', - 'transpose', 'triangulate', 'trigrid', 'triql', 'trired', - 'trisol', 'tri_surf', 'truncate_lun', 'ts_coef', 'ts_diff', - 'ts_fcast', 'ts_smooth', 'tv', 'tvcrs', 'tvlct', 'tvrd', - 'tvscl', 'typename', 't_cvt', 't_pdf', 'uindgen', 'uint', - 'uintarr', 'ul64indgen', 'ulindgen', 'ulon64arr', 'ulonarr', - 'ulong', 'ulong64', 'uniq', 'unsharp_mask', 'usersym', - 'value_locate', 'variance', 'vector', 'vector_field', 'vel', - 'velovect', 'vert_t3d', 'voigt', 'voronoi', 'voxel_proj', - 'wait', 'warp_tri', 'watershed', 'wdelete', 'wf_draw', - 'where', 'widget_base', 'widget_button', 'widget_combobox', - 'widget_control', 'widget_displaycontextmen', 'widget_draw', - 'widget_droplist', 'widget_event', 'widget_info', - 'widget_label', 'widget_list', 'widget_propertysheet', - 'widget_slider', 'widget_tab', 'widget_table', - 'widget_text', 'widget_tree', 'widget_tree_move', - 'widget_window', 'wiener_filter', 'window', 'writeu', - 'write_bmp', 'write_csv', 'write_gif', 'write_image', - 'write_jpeg', 'write_jpeg2000', 'write_nrif', 'write_pict', - 'write_png', 'write_ppm', 'write_spr', 'write_srf', - 'write_sylk', 'write_tiff', 'write_wav', 'write_wave', - 'wset', 'wshow', 'wtn', 'wv_applet', 'wv_cwt', - 'wv_cw_wavelet', 'wv_denoise', 'wv_dwt', 'wv_fn_coiflet', - 'wv_fn_daubechies', 'wv_fn_gaussian', 'wv_fn_haar', - 'wv_fn_morlet', 'wv_fn_paul', 'wv_fn_symlet', - 'wv_import_data', 'wv_import_wavelet', 'wv_plot3d_wps', - 'wv_plot_multires', 'wv_pwt', 'wv_tool_denoise', - 'xbm_edit', 'xdisplayfile', 'xdxf', 'xfont', - 'xinteranimate', 'xloadct', 'xmanager', 'xmng_tmpl', - 'xmtool', 'xobjview', 'xobjview_rotate', - 'xobjview_write_image', 'xpalette', 'xpcolor', 'xplot3d', - 'xregistered', 'xroi', 'xsq_test', 'xsurface', 'xvaredit', - 'xvolume', 'xvolume_rotate', 'xvolume_write_image', - 'xyouts', 'zoom', 'zoom_24'] + _BUILTIN_LIB = ( + 'abs', 'acos', 'adapt_hist_equal', 'alog', 'alog10', + 'amoeba', 'annotate', 'app_user_dir', 'app_user_dir_query', + 'arg_present', 'array_equal', 'array_indices', 'arrow', + 'ascii_template', 'asin', 'assoc', 'atan', 'axis', + 'a_correlate', 'bandpass_filter', 'bandreject_filter', + 'barplot', 'bar_plot', 'beseli', 'beselj', 'beselk', + 'besely', 'beta', 'bilinear', 'binary_template', 'bindgen', + 'binomial', 'bin_date', 'bit_ffs', 'bit_population', + 'blas_axpy', 'blk_con', 'box_cursor', 'breakpoint', + 'broyden', 'butterworth', 'bytarr', 'byte', 'byteorder', + 'bytscl', 'caldat', 'calendar', 'call_external', + 'call_function', 'call_method', 'call_procedure', 'canny', + 'catch', 'cd', 'cdf_\w*', 'ceil', 'chebyshev', + 'check_math', + 'chisqr_cvf', 'chisqr_pdf', 'choldc', 'cholsol', 'cindgen', + 'cir_3pnt', 'close', 'cluster', 'cluster_tree', 'clust_wts', + 'cmyk_convert', 'colorbar', 'colorize_sample', + 'colormap_applicable', 'colormap_gradient', + 'colormap_rotation', 'colortable', 'color_convert', + 'color_exchange', 'color_quan', 'color_range_map', 'comfit', + 'command_line_args', 'complex', 'complexarr', 
'complexround', + 'compute_mesh_normals', 'cond', 'congrid', 'conj', + 'constrained_min', 'contour', 'convert_coord', 'convol', + 'convol_fft', 'coord2to3', 'copy_lun', 'correlate', 'cos', + 'cosh', 'cpu', 'cramer', 'create_cursor', 'create_struct', + 'create_view', 'crossp', 'crvlength', 'cti_test', + 'ct_luminance', 'cursor', 'curvefit', 'cvttobm', 'cv_coord', + 'cw_animate', 'cw_animate_getp', 'cw_animate_load', + 'cw_animate_run', 'cw_arcball', 'cw_bgroup', 'cw_clr_index', + 'cw_colorsel', 'cw_defroi', 'cw_field', 'cw_filesel', + 'cw_form', 'cw_fslider', 'cw_light_editor', + 'cw_light_editor_get', 'cw_light_editor_set', 'cw_orient', + 'cw_palette_editor', 'cw_palette_editor_get', + 'cw_palette_editor_set', 'cw_pdmenu', 'cw_rgbslider', + 'cw_tmpl', 'cw_zoom', 'c_correlate', 'dblarr', 'db_exists', + 'dcindgen', 'dcomplex', 'dcomplexarr', 'define_key', + 'define_msgblk', 'define_msgblk_from_file', 'defroi', + 'defsysv', 'delvar', 'dendrogram', 'dendro_plot', 'deriv', + 'derivsig', 'determ', 'device', 'dfpmin', 'diag_matrix', + 'dialog_dbconnect', 'dialog_message', 'dialog_pickfile', + 'dialog_printersetup', 'dialog_printjob', + 'dialog_read_image', 'dialog_write_image', 'digital_filter', + 'dilate', 'dindgen', 'dissolve', 'dist', 'distance_measure', + 'dlm_load', 'dlm_register', 'doc_library', 'double', + 'draw_roi', 'edge_dog', 'efont', 'eigenql', 'eigenvec', + 'ellipse', 'elmhes', 'emboss', 'empty', 'enable_sysrtn', + 'eof', 'eos_\w*', 'erase', 'erf', 'erfc', 'erfcx', + 'erode', 'errorplot', 'errplot', 'estimator_filter', + 'execute', 'exit', 'exp', 'expand', 'expand_path', 'expint', + 'extrac', 'extract_slice', 'factorial', 'fft', 'filepath', + 'file_basename', 'file_chmod', 'file_copy', 'file_delete', + 'file_dirname', 'file_expand_path', 'file_info', + 'file_lines', 'file_link', 'file_mkdir', 'file_move', + 'file_poll_input', 'file_readlink', 'file_same', + 'file_search', 'file_test', 'file_which', 'findgen', + 'finite', 'fix', 'flick', 'float', 'floor', 'flow3', + 'fltarr', 'flush', 'format_axis_values', 'free_lun', + 'fstat', 'fulstr', 'funct', 'fv_test', 'fx_root', + 'fz_roots', 'f_cvf', 'f_pdf', 'gamma', 'gamma_ct', + 'gauss2dfit', 'gaussfit', 'gaussian_function', 'gaussint', + 'gauss_cvf', 'gauss_pdf', 'gauss_smooth', 'getenv', + 'getwindows', 'get_drive_list', 'get_dxf_objects', + 'get_kbrd', 'get_login_info', 'get_lun', 'get_screen_size', + 'greg2jul', 'grib_\w*', 'grid3', 'griddata', + 'grid_input', 'grid_tps', 'gs_iter', + 'h5[adfgirst]_\w*', 'h5_browser', 'h5_close', + 'h5_create', 'h5_get_libversion', 'h5_open', 'h5_parse', + 'hanning', 'hash', 'hdf_\w*', 'heap_free', + 'heap_gc', 'heap_nosave', 'heap_refcount', 'heap_save', + 'help', 'hilbert', 'histogram', 'hist_2d', 'hist_equal', + 'hls', 'hough', 'hqr', 'hsv', 'h_eq_ct', 'h_eq_int', + 'i18n_multibytetoutf8', 'i18n_multibytetowidechar', + 'i18n_utf8tomultibyte', 'i18n_widechartomultibyte', + 'ibeta', 'icontour', 'iconvertcoord', 'idelete', 'identity', + 'idlexbr_assistant', 'idlitsys_createtool', 'idl_base64', + 'idl_validname', 'iellipse', 'igamma', 'igetcurrent', + 'igetdata', 'igetid', 'igetproperty', 'iimage', 'image', + 'image_cont', 'image_statistics', 'imaginary', 'imap', + 'indgen', 'intarr', 'interpol', 'interpolate', + 'interval_volume', 'int_2d', 'int_3d', 'int_tabulated', + 'invert', 'ioctl', 'iopen', 'iplot', 'ipolygon', + 'ipolyline', 'iputdata', 'iregister', 'ireset', 'iresolve', + 'irotate', 'ir_filter', 'isa', 'isave', 'iscale', + 'isetcurrent', 'isetproperty', 'ishft', 'isocontour', + 'isosurface', 
'isurface', 'itext', 'itranslate', 'ivector', + 'ivolume', 'izoom', 'i_beta', 'journal', 'json_parse', + 'json_serialize', 'jul2greg', 'julday', 'keyword_set', + 'krig2d', 'kurtosis', 'kw_test', 'l64indgen', 'label_date', + 'label_region', 'ladfit', 'laguerre', 'laplacian', + 'la_choldc', 'la_cholmprove', 'la_cholsol', 'la_determ', + 'la_eigenproblem', 'la_eigenql', 'la_eigenvec', 'la_elmhes', + 'la_gm_linear_model', 'la_hqr', 'la_invert', + 'la_least_squares', 'la_least_square_equality', + 'la_linear_equation', 'la_ludc', 'la_lumprove', 'la_lusol', + 'la_svd', 'la_tridc', 'la_trimprove', 'la_triql', + 'la_trired', 'la_trisol', 'least_squares_filter', 'leefilt', + 'legend', 'legendre', 'linbcg', 'lindgen', 'linfit', + 'linkimage', 'list', 'll_arc_distance', 'lmfit', 'lmgr', + 'lngamma', 'lnp_test', 'loadct', 'locale_get', + 'logical_and', 'logical_or', 'logical_true', 'lon64arr', + 'lonarr', 'long', 'long64', 'lsode', 'ludc', 'lumprove', + 'lusol', 'lu_complex', 'machar', 'make_array', 'make_dll', + 'make_rt', 'map', 'mapcontinents', 'mapgrid', 'map_2points', + 'map_continents', 'map_grid', 'map_image', 'map_patch', + 'map_proj_forward', 'map_proj_image', 'map_proj_info', + 'map_proj_init', 'map_proj_inverse', 'map_set', + 'matrix_multiply', 'matrix_power', 'max', 'md_test', + 'mean', 'meanabsdev', 'mean_filter', 'median', 'memory', + 'mesh_clip', 'mesh_decimate', 'mesh_issolid', 'mesh_merge', + 'mesh_numtriangles', 'mesh_obj', 'mesh_smooth', + 'mesh_surfacearea', 'mesh_validate', 'mesh_volume', + 'message', 'min', 'min_curve_surf', 'mk_html_help', + 'modifyct', 'moment', 'morph_close', 'morph_distance', + 'morph_gradient', 'morph_hitormiss', 'morph_open', + 'morph_thin', 'morph_tophat', 'multi', 'm_correlate', + 'ncdf_\w*', 'newton', 'noise_hurl', 'noise_pick', + 'noise_scatter', 'noise_slur', 'norm', 'n_elements', + 'n_params', 'n_tags', 'objarr', 'obj_class', 'obj_destroy', + 'obj_hasmethod', 'obj_isa', 'obj_new', 'obj_valid', + 'online_help', 'on_error', 'open', 'oplot', 'oploterr', + 'parse_url', 'particle_trace', 'path_cache', 'path_sep', + 'pcomp', 'plot', 'plot3d', 'ploterr', 'plots', 'plot_3dbox', + 'plot_field', 'pnt_line', 'point_lun', 'polarplot', + 'polar_contour', 'polar_surface', 'poly', 'polyfill', + 'polyfillv', 'polygon', 'polyline', 'polyshade', 'polywarp', + 'poly_2d', 'poly_area', 'poly_fit', 'popd', 'powell', + 'pref_commit', 'pref_get', 'pref_set', 'prewitt', 'primes', + 'print', 'printd', 'product', 'profile', 'profiler', + 'profiles', 'project_vol', 'psafm', 'pseudo', + 'ps_show_fonts', 'ptrarr', 'ptr_free', 'ptr_new', + 'ptr_valid', 'pushd', 'p_correlate', 'qgrid3', 'qhull', + 'qromb', 'qromo', 'qsimp', 'query_ascii', 'query_bmp', + 'query_csv', 'query_dicom', 'query_gif', 'query_image', + 'query_jpeg', 'query_jpeg2000', 'query_mrsid', 'query_pict', + 'query_png', 'query_ppm', 'query_srf', 'query_tiff', + 'query_wav', 'radon', 'randomn', 'randomu', 'ranks', + 'rdpix', 'read', 'reads', 'readu', 'read_ascii', + 'read_binary', 'read_bmp', 'read_csv', 'read_dicom', + 'read_gif', 'read_image', 'read_interfile', 'read_jpeg', + 'read_jpeg2000', 'read_mrsid', 'read_pict', 'read_png', + 'read_ppm', 'read_spr', 'read_srf', 'read_sylk', + 'read_tiff', 'read_wav', 'read_wave', 'read_x11_bitmap', + 'read_xwd', 'real_part', 'rebin', 'recall_commands', + 'recon3', 'reduce_colors', 'reform', 'region_grow', + 'register_cursor', 'regress', 'replicate', + 'replicate_inplace', 'resolve_all', 'resolve_routine', + 'restore', 'retall', 'return', 'reverse', 'rk4', 'roberts', + 'rot', 
'rotate', 'round', 'routine_filepath', + 'routine_info', 'rs_test', 'r_correlate', 'r_test', + 'save', 'savgol', 'scale3', 'scale3d', 'scope_level', + 'scope_traceback', 'scope_varfetch', 'scope_varname', + 'search2d', 'search3d', 'sem_create', 'sem_delete', + 'sem_lock', 'sem_release', 'setenv', 'set_plot', + 'set_shading', 'sfit', 'shade_surf', 'shade_surf_irr', + 'shade_volume', 'shift', 'shift_diff', 'shmdebug', 'shmmap', + 'shmunmap', 'shmvar', 'show3', 'showfont', 'simplex', 'sin', + 'sindgen', 'sinh', 'size', 'skewness', 'skip_lun', + 'slicer3', 'slide_image', 'smooth', 'sobel', 'socket', + 'sort', 'spawn', 'spher_harm', 'sph_4pnt', 'sph_scat', + 'spline', 'spline_p', 'spl_init', 'spl_interp', 'sprsab', + 'sprsax', 'sprsin', 'sprstp', 'sqrt', 'standardize', + 'stddev', 'stop', 'strarr', 'strcmp', 'strcompress', + 'streamline', 'stregex', 'stretch', 'string', 'strjoin', + 'strlen', 'strlowcase', 'strmatch', 'strmessage', 'strmid', + 'strpos', 'strput', 'strsplit', 'strtrim', 'struct_assign', + 'struct_hide', 'strupcase', 'surface', 'surfr', 'svdc', + 'svdfit', 'svsol', 'swap_endian', 'swap_endian_inplace', + 'symbol', 'systime', 's_test', 't3d', 'tag_names', 'tan', + 'tanh', 'tek_color', 'temporary', 'tetra_clip', + 'tetra_surface', 'tetra_volume', 'text', 'thin', 'threed', + 'timegen', 'time_test2', 'tm_test', 'total', 'trace', + 'transpose', 'triangulate', 'trigrid', 'triql', 'trired', + 'trisol', 'tri_surf', 'truncate_lun', 'ts_coef', 'ts_diff', + 'ts_fcast', 'ts_smooth', 'tv', 'tvcrs', 'tvlct', 'tvrd', + 'tvscl', 'typename', 't_cvt', 't_pdf', 'uindgen', 'uint', + 'uintarr', 'ul64indgen', 'ulindgen', 'ulon64arr', 'ulonarr', + 'ulong', 'ulong64', 'uniq', 'unsharp_mask', 'usersym', + 'value_locate', 'variance', 'vector', 'vector_field', 'vel', + 'velovect', 'vert_t3d', 'voigt', 'voronoi', 'voxel_proj', + 'wait', 'warp_tri', 'watershed', 'wdelete', 'wf_draw', + 'where', 'widget_base', 'widget_button', 'widget_combobox', + 'widget_control', 'widget_displaycontextmen', 'widget_draw', + 'widget_droplist', 'widget_event', 'widget_info', + 'widget_label', 'widget_list', 'widget_propertysheet', + 'widget_slider', 'widget_tab', 'widget_table', + 'widget_text', 'widget_tree', 'widget_tree_move', + 'widget_window', 'wiener_filter', 'window', 'writeu', + 'write_bmp', 'write_csv', 'write_gif', 'write_image', + 'write_jpeg', 'write_jpeg2000', 'write_nrif', 'write_pict', + 'write_png', 'write_ppm', 'write_spr', 'write_srf', + 'write_sylk', 'write_tiff', 'write_wav', 'write_wave', + 'wset', 'wshow', 'wtn', 'wv_applet', 'wv_cwt', + 'wv_cw_wavelet', 'wv_denoise', 'wv_dwt', 'wv_fn_coiflet', + 'wv_fn_daubechies', 'wv_fn_gaussian', 'wv_fn_haar', + 'wv_fn_morlet', 'wv_fn_paul', 'wv_fn_symlet', + 'wv_import_data', 'wv_import_wavelet', 'wv_plot3d_wps', + 'wv_plot_multires', 'wv_pwt', 'wv_tool_denoise', + 'xbm_edit', 'xdisplayfile', 'xdxf', 'xfont', + 'xinteranimate', 'xloadct', 'xmanager', 'xmng_tmpl', + 'xmtool', 'xobjview', 'xobjview_rotate', + 'xobjview_write_image', 'xpalette', 'xpcolor', 'xplot3d', + 'xregistered', 'xroi', 'xsq_test', 'xsurface', 'xvaredit', + 'xvolume', 'xvolume_rotate', 'xvolume_write_image', + 'xyouts', 'zoom', 'zoom_24') """Functions from: http://www.exelisvis.com/docs/routines-1.html""" tokens = { 'root': [ (r'^\s*;.*?\n', Comment.Singleline), - (r'\b(' + '|'.join(_RESERVED) + r')\b', Keyword), - (r'\b(' + '|'.join(_BUILTIN_LIB) + r')\b', Name.Builtin), + (words(_RESERVED, prefix=r'\b', suffix=r'\b'), Keyword), + (words(_BUILTIN_LIB, prefix=r'\b', suffix=r'\b'), Name.Builtin), 
(r'\+=|-=|\^=|\*=|/=|#=|##=|<=|>=|=', Operator), (r'\+\+|--|->|\+|-|##|#|\*|/|<|>|&&|\^|~|\|\|\?|:', Operator), (r'\b(mod=|lt=|le=|eq=|ne=|ge=|gt=|not=|and=|or=|xor=)', Operator), @@ -1906,7 +1920,7 @@ class RdLexer(RegexLexer): # To account for verbatim / LaTeX-like / and R-like areas # would require parsing. tokens = { - 'root' : [ + 'root': [ # catch escaped brackets and percent sign (r'\\[\\{}%]', String.Escape), # comments @@ -1922,8 +1936,8 @@ class RdLexer(RegexLexer): # everything else (r'[^\\%\n{}]+', Text), (r'.', Text), - ] - } + ] + } class IgorLexer(RegexLexer): @@ -1941,22 +1955,22 @@ class IgorLexer(RegexLexer): flags = re.IGNORECASE | re.MULTILINE - flowControl = [ + flowControl = ( 'if', 'else', 'elseif', 'endif', 'for', 'endfor', 'strswitch', 'switch', 'case', 'default', 'endswitch', 'do', 'while', 'try', 'catch', 'endtry', 'break', 'continue', 'return', - ] - types = [ + ) + types = ( 'variable', 'string', 'constant', 'strconstant', 'NVAR', 'SVAR', 'WAVE', 'STRUCT', 'dfref' - ] - keywords = [ + ) + keywords = ( 'override', 'ThreadSafe', 'static', 'FuncFit', 'Proc', 'Picture', 'Prompt', 'DoPrompt', 'macro', 'window', 'graph', 'function', 'end', 'Structure', 'EndStructure', 'EndMacro', 'Menu', 'SubMenu', 'Prompt', 'DoPrompt', - ] - operations = [ + ) + operations = ( 'Abort', 'AddFIFOData', 'AddFIFOVectData', 'AddMovieAudio', 'AddMovieFrame', 'APMath', 'Append', 'AppendImage', 'AppendLayoutObject', 'AppendMatrixContour', 'AppendText', @@ -2058,8 +2072,8 @@ class IgorLexer(RegexLexer): 'Triangulate3d', 'Unwrap', 'ValDisplay', 'Variable', 'WaveMeanStdv', 'WaveStats', 'WaveTransform', 'wfprintf', 'WignerTransform', 'WindowFunction', - ] - functions = [ + ) + functions = ( 'abs', 'acos', 'acosh', 'AiryA', 'AiryAD', 'AiryB', 'AiryBD', 'alog', 'area', 'areaXY', 'asin', 'asinh', 'atan', 'atan2', 'atanh', 'AxisValFromPixel', 'Besseli', 'Besselj', 'Besselk', 'Bessely', 'bessi', @@ -2134,8 +2148,8 @@ class IgorLexer(RegexLexer): 'trunc', 'Variance', 'vcsr', 'WaveCRC', 'WaveDims', 'WaveExists', 'WaveMax', 'WaveMin', 'WaveRefsEqual', 'WaveType', 'WhichListItem', 'WinType', 'WNoise', 'x', 'x2pnt', 'xcsr', 'y', 'z', 'zcsr', 'ZernikeR', - ] - functions += [ + ) + functions += ( 'AddListItem', 'AnnotationInfo', 'AnnotationList', 'AxisInfo', 'AxisList', 'CaptureHistory', 'ChildWindowList', 'CleanupName', 'ContourInfo', 'ContourNameList', 'ControlNameList', 'CsrInfo', @@ -2164,22 +2178,22 @@ class IgorLexer(RegexLexer): 'ImageNameToWaveRef', 'NewFreeWave', 'TagWaveRef', 'TraceNameToWaveRef', 'WaveRefIndexed', 'XWaveRefFromTrace', 'GetDataFolderDFR', 'GetWavesDataFolderDFR', 'NewFreeDataFolder', 'ThreadGroupGetDFR', - ] + ) tokens = { 'root': [ (r'//.*$', Comment.Single), (r'"([^"\\]|\\.)*"', String), # Flow Control. - (r'\b(%s)\b' % '|'.join(flowControl), Keyword), + (words(flowControl, prefix=r'\b', suffix=r'\b'), Keyword), # Types. - (r'\b(%s)\b' % '|'.join(types), Keyword.Type), + (words(types, prefix=r'\b', suffix=r'\b'), Keyword.Type), # Keywords. - (r'\b(%s)\b' % '|'.join(keywords), Keyword.Reserved), + (words(keywords, prefix=r'\b', suffix=r'\b'), Keyword.Reserved), # Built-in operations. - (r'\b(%s)\b' % '|'.join(operations), Name.Class), + (words(operations, prefix=r'\b', suffix=r'\b'), Name.Class), # Built-in functions. - (r'\b(%s)\b' % '|'.join(functions), Name.Function), + (words(functions, prefix=r'\b', suffix=r'\b'), Name.Function), # Compiler directives. 
(r'^#(include|pragma|define|ifdef|ifndef|endif)', Name.Decorator), @@ -2235,6 +2249,7 @@ class MathematicaLexer(RegexLexer): ], } + class GAPLexer(RegexLexer): """ For `GAP `_ source code. @@ -2246,41 +2261,41 @@ class GAPLexer(RegexLexer): filenames = ['*.g', '*.gd', '*.gi', '*.gap'] tokens = { - 'root' : [ - (r'#.*$', Comment.Single), - (r'"(?:[^"\\]|\\.)*"', String), - (r'\(|\)|\[|\]|\{|\}', Punctuation), - (r'''(?x)\b(?: - if|then|elif|else|fi| - for|while|do|od| - repeat|until| - break|continue| - function|local|return|end| - rec| - quit|QUIT| - IsBound|Unbind| - TryNextMethod| - Info|Assert - )\b''', Keyword), - (r'''(?x)\b(?: - true|false|fail|infinity - )\b''', - Name.Constant), - (r'''(?x)\b(?: - (Declare|Install)([A-Z][A-Za-z]+)| - BindGlobal|BIND_GLOBAL - )\b''', - Name.Builtin), - (r'\.|,|:=|;|=|\+|-|\*|/|\^|>|<', Operator), - (r'''(?x)\b(?: - and|or|not|mod|in - )\b''', - Operator.Word), - (r'''(?x) - (?:[a-zA-Z_0-9]+|`[^`]*`) - (?:::[a-zA-Z_0-9]+|`[^`]*`)*''', Name.Variable), - (r'[0-9]+(?:\.[0-9]*)?(?:e[0-9]+)?', Number), - (r'\.[0-9]+(?:e[0-9]+)?', Number), - (r'.', Text) - ] + 'root': [ + (r'#.*$', Comment.Single), + (r'"(?:[^"\\]|\\.)*"', String), + (r'\(|\)|\[|\]|\{|\}', Punctuation), + (r'''(?x)\b(?: + if|then|elif|else|fi| + for|while|do|od| + repeat|until| + break|continue| + function|local|return|end| + rec| + quit|QUIT| + IsBound|Unbind| + TryNextMethod| + Info|Assert + )\b''', Keyword), + (r'''(?x)\b(?: + true|false|fail|infinity + )\b''', + Name.Constant), + (r'''(?x)\b(?: + (Declare|Install)([A-Z][A-Za-z]+)| + BindGlobal|BIND_GLOBAL + )\b''', + Name.Builtin), + (r'\.|,|:=|;|=|\+|-|\*|/|\^|>|<', Operator), + (r'''(?x)\b(?: + and|or|not|mod|in + )\b''', + Operator.Word), + (r'''(?x) + (?:[a-zA-Z_0-9]+|`[^`]*`) + (?:::[a-zA-Z_0-9]+|`[^`]*`)*''', Name.Variable), + (r'[0-9]+(?:\.[0-9]*)?(?:e[0-9]+)?', Number), + (r'\.[0-9]+(?:e[0-9]+)?', Number), + (r'.', Text) + ] } -- cgit v1.2.1 From 9d3184ead0ddafd2fe7278bd94eb815523773599 Mon Sep 17 00:00:00 2001 From: Georg Brandl Date: Fri, 19 Sep 2014 23:53:18 +0200 Subject: split up web lexers --- pygments/lexers/_mapping.py | 66 +- pygments/lexers/actionscript.py | 238 +++ pygments/lexers/css.py | 496 +++++ pygments/lexers/data.py | 86 +- pygments/lexers/dotnet.py | 2 +- pygments/lexers/dsls.py | 45 +- pygments/lexers/haxe.py | 934 ++++++++ pygments/lexers/html.py | 589 +++++ pygments/lexers/installers.py | 3 +- pygments/lexers/javascript.py | 1192 +++++++++++ pygments/lexers/markup.py | 2 +- pygments/lexers/modeling.py | 2 +- pygments/lexers/parsers.py | 2 +- pygments/lexers/php.py | 246 +++ pygments/lexers/templates.py | 5 +- pygments/lexers/text.py | 2 +- pygments/lexers/web.py | 4515 +-------------------------------------- pygments/lexers/webmisc.py | 920 ++++++++ 18 files changed, 4757 insertions(+), 4588 deletions(-) create mode 100644 pygments/lexers/actionscript.py create mode 100644 pygments/lexers/css.py create mode 100644 pygments/lexers/haxe.py create mode 100644 pygments/lexers/html.py create mode 100644 pygments/lexers/javascript.py create mode 100644 pygments/lexers/php.py create mode 100644 pygments/lexers/webmisc.py diff --git a/pygments/lexers/_mapping.py b/pygments/lexers/_mapping.py index f0848dd4..2c387371 100644 --- a/pygments/lexers/_mapping.py +++ b/pygments/lexers/_mapping.py @@ -18,8 +18,8 @@ from __future__ import print_function LEXERS = { 'ABAPLexer': ('pygments.lexers.business', 'ABAP', ('abap',), ('*.abap',), ('text/x-abap',)), 'APLLexer': ('pygments.lexers.misc.apl', 'APL', ('apl',), 
('*.apl',), ()), - 'ActionScript3Lexer': ('pygments.lexers.web', 'ActionScript 3', ('as3', 'actionscript3'), ('*.as',), ('application/x-actionscript3', 'text/x-actionscript3', 'text/actionscript3')), - 'ActionScriptLexer': ('pygments.lexers.web', 'ActionScript', ('as', 'actionscript'), ('*.as',), ('application/x-actionscript', 'text/x-actionscript', 'text/actionscript')), + 'ActionScript3Lexer': ('pygments.lexers.actionscript', 'ActionScript 3', ('as3', 'actionscript3'), ('*.as',), ('application/x-actionscript3', 'text/x-actionscript3', 'text/actionscript3')), + 'ActionScriptLexer': ('pygments.lexers.actionscript', 'ActionScript', ('as', 'actionscript'), ('*.as',), ('application/x-actionscript', 'text/x-actionscript', 'text/actionscript')), 'AdaLexer': ('pygments.lexers.pascal', 'Ada', ('ada', 'ada95', 'ada2005'), ('*.adb', '*.ads', '*.ada'), ('text/x-ada',)), 'AgdaLexer': ('pygments.lexers.haskell', 'Agda', ('agda',), ('*.agda',), ('text/x-agda',)), 'AlloyLexer': ('pygments.lexers.dsls', 'Alloy', ('alloy',), ('*.als',), ('text/x-alloy',)), @@ -67,13 +67,13 @@ LEXERS = { 'CheetahJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Cheetah', ('js+cheetah', 'javascript+cheetah', 'js+spitfire', 'javascript+spitfire'), (), ('application/x-javascript+cheetah', 'text/x-javascript+cheetah', 'text/javascript+cheetah', 'application/x-javascript+spitfire', 'text/x-javascript+spitfire', 'text/javascript+spitfire')), 'CheetahLexer': ('pygments.lexers.templates', 'Cheetah', ('cheetah', 'spitfire'), ('*.tmpl', '*.spt'), ('application/x-cheetah', 'application/x-spitfire')), 'CheetahXmlLexer': ('pygments.lexers.templates', 'XML+Cheetah', ('xml+cheetah', 'xml+spitfire'), (), ('application/xml+cheetah', 'application/xml+spitfire')), - 'CirruLexer': ('pygments.lexers.web', 'Cirru', ('cirru',), ('*.cirru', '*.cr'), ('text/x-cirru',)), + 'CirruLexer': ('pygments.lexers.webmisc', 'Cirru', ('cirru',), ('*.cirru', '*.cr'), ('text/x-cirru',)), 'ClayLexer': ('pygments.lexers.c_like.other', 'Clay', ('clay',), ('*.clay',), ('text/x-clay',)), 'ClojureLexer': ('pygments.lexers.jvm', 'Clojure', ('clojure', 'clj'), ('*.clj',), ('text/x-clojure', 'application/x-clojure')), 'ClojureScriptLexer': ('pygments.lexers.jvm', 'ClojureScript', ('clojurescript', 'cljs'), ('*.cljs',), ('text/x-clojurescript', 'application/x-clojurescript')), 'CobolFreeformatLexer': ('pygments.lexers.business', 'COBOLFree', ('cobolfree',), ('*.cbl', '*.CBL'), ()), 'CobolLexer': ('pygments.lexers.business', 'COBOL', ('cobol',), ('*.cob', '*.COB', '*.cpy', '*.CPY'), ('text/x-cobol',)), - 'CoffeeScriptLexer': ('pygments.lexers.web', 'CoffeeScript', ('coffee-script', 'coffeescript', 'coffee'), ('*.coffee',), ('text/coffeescript',)), + 'CoffeeScriptLexer': ('pygments.lexers.javascript', 'CoffeeScript', ('coffee-script', 'coffeescript', 'coffee'), ('*.coffee',), ('text/coffeescript',)), 'ColdfusionCFCLexer': ('pygments.lexers.templates', 'Coldfusion CFC', ('cfc',), ('*.cfc',), ()), 'ColdfusionHtmlLexer': ('pygments.lexers.templates', 'Coldfusion HTML', ('cfm',), ('*.cfm', '*.cfml'), ('application/x-coldfusion',)), 'ColdfusionLexer': ('pygments.lexers.templates', 'cfstatement', ('cfs',), (), ()), @@ -86,7 +86,7 @@ LEXERS = { 'CssDjangoLexer': ('pygments.lexers.templates', 'CSS+Django/Jinja', ('css+django', 'css+jinja'), (), ('text/css+django', 'text/css+jinja')), 'CssErbLexer': ('pygments.lexers.templates', 'CSS+Ruby', ('css+erb', 'css+ruby'), (), ('text/css+ruby',)), 'CssGenshiLexer': ('pygments.lexers.templates', 'CSS+Genshi Text', 
('css+genshitext', 'css+genshi'), (), ('text/css+genshi',)), - 'CssLexer': ('pygments.lexers.web', 'CSS', ('css',), ('*.css',), ('text/css',)), + 'CssLexer': ('pygments.lexers.css', 'CSS', ('css',), ('*.css',), ('text/css',)), 'CssPhpLexer': ('pygments.lexers.templates', 'CSS+PHP', ('css+php',), (), ('text/css+php',)), 'CssSmartyLexer': ('pygments.lexers.templates', 'CSS+Smarty', ('css+smarty',), (), ('text/css+smarty',)), 'CudaLexer': ('pygments.lexers.c_like.other', 'CUDA', ('cuda', 'cu'), ('*.cu', '*.cuh'), ('text/x-cuda',)), @@ -95,14 +95,15 @@ LEXERS = { 'DLexer': ('pygments.lexers.c_like.d', 'D', ('d',), ('*.d', '*.di'), ('text/x-dsrc',)), 'DObjdumpLexer': ('pygments.lexers.asm', 'd-objdump', ('d-objdump',), ('*.d-objdump',), ('text/x-d-objdump',)), 'DarcsPatchLexer': ('pygments.lexers.diff', 'Darcs Patch', ('dpatch',), ('*.dpatch', '*.darcspatch'), ()), - 'DartLexer': ('pygments.lexers.web', 'Dart', ('dart',), ('*.dart',), ('text/x-dart',)), + 'DartLexer': ('pygments.lexers.javascript', 'Dart', ('dart',), ('*.dart',), ('text/x-dart',)), + 'DebianControlLexer': ('pygments.lexers.installers', 'Debian Control file', ('control', 'debcontrol'), ('control',), ()), 'DelphiLexer': ('pygments.lexers.pascal', 'Delphi', ('delphi', 'pas', 'pascal', 'objectpascal'), ('*.pas',), ('text/x-pascal',)), 'DgLexer': ('pygments.lexers.python', 'dg', ('dg',), ('*.dg',), ('text/x-dg',)), 'DiffLexer': ('pygments.lexers.diff', 'Diff', ('diff', 'udiff'), ('*.diff', '*.patch'), ('text/x-diff', 'text/x-patch')), 'DjangoLexer': ('pygments.lexers.templates', 'Django/Jinja', ('django', 'jinja'), (), ('application/x-django-templating', 'application/x-jinja')), 'DockerLexer': ('pygments.lexers.configs', 'Docker', ('docker', 'dockerfile'), ('Dockerfile', '*.docker'), ('text/x-dockerfile-config',)), - 'DtdLexer': ('pygments.lexers.web', 'DTD', ('dtd',), ('*.dtd',), ('application/xml-dtd',)), - 'DuelLexer': ('pygments.lexers.web', 'Duel', ('duel', 'jbst', 'jsonml+bst'), ('*.duel', '*.jbst'), ('text/x-duel', 'text/x-jbst')), + 'DtdLexer': ('pygments.lexers.html', 'DTD', ('dtd',), ('*.dtd',), ('application/xml-dtd',)), + 'DuelLexer': ('pygments.lexers.webmisc', 'Duel', ('duel', 'jbst', 'jsonml+bst'), ('*.duel', '*.jbst'), ('text/x-duel', 'text/x-jbst')), 'DylanConsoleLexer': ('pygments.lexers.misc.dylan', 'Dylan session', ('dylan-console', 'dylan-repl'), ('*.dylan-console',), ('text/x-dylan-console',)), 'DylanLexer': ('pygments.lexers.misc.dylan', 'Dylan', ('dylan',), ('*.dylan', '*.dyl', '*.intr'), ('text/x-dylan',)), 'DylanLidLexer': ('pygments.lexers.misc.dylan', 'DylanLID', ('dylan-lid', 'lid'), ('*.lid', '*.hdp'), ('text/x-dylan-lid',)), @@ -140,18 +141,18 @@ LEXERS = { 'GosuTemplateLexer': ('pygments.lexers.jvm', 'Gosu Template', ('gst',), ('*.gst',), ('text/x-gosu-template',)), 'GroffLexer': ('pygments.lexers.markup', 'Groff', ('groff', 'nroff', 'man'), ('*.[1234567]', '*.man'), ('application/x-troff', 'text/troff')), 'GroovyLexer': ('pygments.lexers.jvm', 'Groovy', ('groovy',), ('*.groovy',), ('text/x-groovy',)), - 'HamlLexer': ('pygments.lexers.web', 'Haml', ('haml',), ('*.haml',), ('text/x-haml',)), + 'HamlLexer': ('pygments.lexers.html', 'Haml', ('haml',), ('*.haml',), ('text/x-haml',)), 'HandlebarsHtmlLexer': ('pygments.lexers.templates', 'HTML+Handlebars', ('html+handlebars',), ('*.handlebars', '*.hbs'), ('text/html+handlebars', 'text/x-handlebars-template')), 'HandlebarsLexer': ('pygments.lexers.templates', 'Handlebars', ('handlebars',), (), ()), 'HaskellLexer': ('pygments.lexers.haskell', 'Haskell', 
('haskell', 'hs'), ('*.hs',), ('text/x-haskell',)), - 'HaxeLexer': ('pygments.lexers.web', 'Haxe', ('hx', 'haxe', 'hxsl'), ('*.hx', '*.hxsl'), ('text/haxe', 'text/x-haxe', 'text/x-hx')), + 'HaxeLexer': ('pygments.lexers.haxe', 'Haxe', ('hx', 'haxe', 'hxsl'), ('*.hx', '*.hxsl'), ('text/haxe', 'text/x-haxe', 'text/x-hx')), 'HtmlDjangoLexer': ('pygments.lexers.templates', 'HTML+Django/Jinja', ('html+django', 'html+jinja', 'htmldjango'), (), ('text/html+django', 'text/html+jinja')), 'HtmlGenshiLexer': ('pygments.lexers.templates', 'HTML+Genshi', ('html+genshi', 'html+kid'), (), ('text/html+genshi',)), - 'HtmlLexer': ('pygments.lexers.web', 'HTML', ('html',), ('*.html', '*.htm', '*.xhtml', '*.xslt'), ('text/html', 'application/xhtml+xml')), + 'HtmlLexer': ('pygments.lexers.html', 'HTML', ('html',), ('*.html', '*.htm', '*.xhtml', '*.xslt'), ('text/html', 'application/xhtml+xml')), 'HtmlPhpLexer': ('pygments.lexers.templates', 'HTML+PHP', ('html+php',), ('*.phtml',), ('application/x-php', 'application/x-httpd-php', 'application/x-httpd-php3', 'application/x-httpd-php4', 'application/x-httpd-php5')), 'HtmlSmartyLexer': ('pygments.lexers.templates', 'HTML+Smarty', ('html+smarty',), (), ('text/html+smarty',)), 'HttpLexer': ('pygments.lexers.textfmts', 'HTTP', ('http',), (), ()), - 'HxmlLexer': ('pygments.lexers.dsls', 'Hxml', ('haxeml', 'hxml'), ('*.hxml',), ()), + 'HxmlLexer': ('pygments.lexers.haxe', 'Hxml', ('haxeml', 'hxml'), ('*.hxml',), ()), 'HyLexer': ('pygments.lexers.lisp', 'Hy', ('hylang',), ('*.hy',), ('text/x-hy', 'application/x-hy')), 'HybrisLexer': ('pygments.lexers.scripting', 'Hybris', ('hybris', 'hy'), ('*.hy', '*.hyb'), ('text/x-hybris', 'application/x-hybris')), 'IDLLexer': ('pygments.lexers.math', 'IDL', ('idl',), ('*.pro',), ('text/idl',)), @@ -164,21 +165,21 @@ LEXERS = { 'IoLexer': ('pygments.lexers.misc.iolang', 'Io', ('io',), ('*.io',), ('text/x-iosrc',)), 'IokeLexer': ('pygments.lexers.jvm', 'Ioke', ('ioke', 'ik'), ('*.ik',), ('text/x-iokesrc',)), 'IrcLogsLexer': ('pygments.lexers.textfmts', 'IRC logs', ('irc',), ('*.weechatlog',), ('text/x-irclog',)), - 'JadeLexer': ('pygments.lexers.web', 'Jade', ('jade',), ('*.jade',), ('text/x-jade',)), + 'JadeLexer': ('pygments.lexers.html', 'Jade', ('jade',), ('*.jade',), ('text/x-jade',)), 'JagsLexer': ('pygments.lexers.math', 'JAGS', ('jags',), ('*.jag', '*.bug'), ()), 'JasminLexer': ('pygments.lexers.jvm', 'Jasmin', ('jasmin', 'jasminxt'), ('*.j',), ()), 'JavaLexer': ('pygments.lexers.jvm', 'Java', ('java',), ('*.java',), ('text/x-java',)), 'JavascriptDjangoLexer': ('pygments.lexers.templates', 'JavaScript+Django/Jinja', ('js+django', 'javascript+django', 'js+jinja', 'javascript+jinja'), (), ('application/x-javascript+django', 'application/x-javascript+jinja', 'text/x-javascript+django', 'text/x-javascript+jinja', 'text/javascript+django', 'text/javascript+jinja')), 'JavascriptErbLexer': ('pygments.lexers.templates', 'JavaScript+Ruby', ('js+erb', 'javascript+erb', 'js+ruby', 'javascript+ruby'), (), ('application/x-javascript+ruby', 'text/x-javascript+ruby', 'text/javascript+ruby')), 'JavascriptGenshiLexer': ('pygments.lexers.templates', 'JavaScript+Genshi Text', ('js+genshitext', 'js+genshi', 'javascript+genshitext', 'javascript+genshi'), (), ('application/x-javascript+genshi', 'text/x-javascript+genshi', 'text/javascript+genshi')), - 'JavascriptLexer': ('pygments.lexers.web', 'JavaScript', ('js', 'javascript'), ('*.js',), ('application/javascript', 'application/x-javascript', 'text/x-javascript', 'text/javascript')), + 
'JavascriptLexer': ('pygments.lexers.javascript', 'JavaScript', ('js', 'javascript'), ('*.js',), ('application/javascript', 'application/x-javascript', 'text/x-javascript', 'text/javascript')), 'JavascriptPhpLexer': ('pygments.lexers.templates', 'JavaScript+PHP', ('js+php', 'javascript+php'), (), ('application/x-javascript+php', 'text/x-javascript+php', 'text/javascript+php')), 'JavascriptSmartyLexer': ('pygments.lexers.templates', 'JavaScript+Smarty', ('js+smarty', 'javascript+smarty'), (), ('application/x-javascript+smarty', 'text/x-javascript+smarty', 'text/javascript+smarty')), - 'JsonLexer': ('pygments.lexers.web', 'JSON', ('json',), ('*.json',), ('application/json',)), + 'JsonLexer': ('pygments.lexers.data', 'JSON', ('json',), ('*.json',), ('application/json',)), 'JspLexer': ('pygments.lexers.templates', 'Java Server Page', ('jsp',), ('*.jsp',), ('application/x-jsp',)), 'JuliaConsoleLexer': ('pygments.lexers.math', 'Julia console', ('jlcon',), (), ()), 'JuliaLexer': ('pygments.lexers.math', 'Julia', ('julia', 'jl'), ('*.jl',), ('text/x-julia', 'application/x-julia')), - 'KalLexer': ('pygments.lexers.web', 'Kal', ('kal',), ('*.kal',), ('text/kal', 'application/kal')), + 'KalLexer': ('pygments.lexers.javascript', 'Kal', ('kal',), ('*.kal',), ('text/kal', 'application/kal')), 'KconfigLexer': ('pygments.lexers.configs', 'Kconfig', ('kconfig', 'menuconfig', 'linux-config', 'kernel-config'), ('Kconfig', '*Config.in*', 'external.in*', 'standard-modules.in'), ('text/x-kconfig',)), 'KokaLexer': ('pygments.lexers.haskell', 'Koka', ('koka',), ('*.kk', '*.kki'), ('text/x-koka',)), 'KotlinLexer': ('pygments.lexers.jvm', 'Kotlin', ('kotlin',), ('*.kt',), ('text/x-kotlin',)), @@ -186,7 +187,7 @@ LEXERS = { 'LassoCssLexer': ('pygments.lexers.templates', 'CSS+Lasso', ('css+lasso',), (), ('text/css+lasso',)), 'LassoHtmlLexer': ('pygments.lexers.templates', 'HTML+Lasso', ('html+lasso',), (), ('text/html+lasso', 'application/x-httpd-lasso', 'application/x-httpd-lasso[89]')), 'LassoJavascriptLexer': ('pygments.lexers.templates', 'JavaScript+Lasso', ('js+lasso', 'javascript+lasso'), (), ('application/x-javascript+lasso', 'text/x-javascript+lasso', 'text/javascript+lasso')), - 'LassoLexer': ('pygments.lexers.web', 'Lasso', ('lasso', 'lassoscript'), ('*.lasso', '*.lasso[89]'), ('text/x-lasso',)), + 'LassoLexer': ('pygments.lexers.javascript', 'Lasso', ('lasso', 'lassoscript'), ('*.lasso', '*.lasso[89]'), ('text/x-lasso',)), 'LassoXmlLexer': ('pygments.lexers.templates', 'XML+Lasso', ('xml+lasso',), (), ('application/xml+lasso',)), 'LighttpdConfLexer': ('pygments.lexers.configs', 'Lighttpd configuration file', ('lighty', 'lighttpd'), (), ('text/x-lighttpd-conf',)), 'LimboLexer': ('pygments.lexers.inferno', 'Limbo', ('limbo',), ('*.b',), ('text/limbo',)), @@ -195,7 +196,7 @@ LEXERS = { 'LiterateCryptolLexer': ('pygments.lexers.haskell', 'Literate Cryptol', ('lcry', 'literate-cryptol', 'lcryptol'), ('*.lcry',), ('text/x-literate-cryptol',)), 'LiterateHaskellLexer': ('pygments.lexers.haskell', 'Literate Haskell', ('lhs', 'literate-haskell', 'lhaskell'), ('*.lhs',), ('text/x-literate-haskell',)), 'LiterateIdrisLexer': ('pygments.lexers.haskell', 'Literate Idris', ('lidr', 'literate-idris', 'lidris'), ('*.lidr',), ('text/x-literate-idris',)), - 'LiveScriptLexer': ('pygments.lexers.web', 'LiveScript', ('live-script', 'livescript'), ('*.ls',), ('text/livescript',)), + 'LiveScriptLexer': ('pygments.lexers.javascript', 'LiveScript', ('live-script', 'livescript'), ('*.ls',), ('text/livescript',)), 'LlvmLexer': 
('pygments.lexers.asm', 'LLVM', ('llvm',), ('*.ll',), ('text/x-llvm',)), 'LogosLexer': ('pygments.lexers.c_like.objective', 'Logos', ('logos',), ('*.x', '*.xi', '*.xm', '*.xmi'), ('text/x-logos',)), 'LogtalkLexer': ('pygments.lexers.prolog', 'Logtalk', ('logtalk',), ('*.lgt', '*.logtalk'), ('text/x-logtalk',)), @@ -208,7 +209,7 @@ LEXERS = { 'MakoLexer': ('pygments.lexers.templates', 'Mako', ('mako',), ('*.mao',), ('application/x-mako',)), 'MakoXmlLexer': ('pygments.lexers.templates', 'XML+Mako', ('xml+mako',), (), ('application/xml+mako',)), 'MaqlLexer': ('pygments.lexers.business', 'MAQL', ('maql',), ('*.maql',), ('text/x-gooddata-maql', 'application/x-gooddata-maql')), - 'MaskLexer': ('pygments.lexers.web', 'Mask', ('mask',), ('*.mask',), ('text/x-mask',)), + 'MaskLexer': ('pygments.lexers.javascript', 'Mask', ('mask',), ('*.mask',), ('text/x-mask',)), 'MasonLexer': ('pygments.lexers.templates', 'Mason', ('mason',), ('*.m', '*.mhtml', '*.mc', '*.mi', 'autohandler', 'dhandler'), ('application/x-mason',)), 'MathematicaLexer': ('pygments.lexers.math', 'Mathematica', ('mathematica', 'mma', 'nb'), ('*.nb', '*.cdf', '*.nbp', '*.ma'), ('application/mathematica', 'application/vnd.wolfram.mathematica', 'application/vnd.wolfram.mathematica.package', 'application/vnd.wolfram.cdf')), 'MatlabLexer': ('pygments.lexers.math', 'Matlab', ('matlab',), ('*.m',), ('text/matlab',)), @@ -222,7 +223,7 @@ LEXERS = { 'MqlLexer': ('pygments.lexers.c_like.other', 'MQL', ('mql', 'mq4', 'mq5', 'mql4', 'mql5'), ('*.mq4', '*.mq5', '*.mqh'), ('text/x-mql',)), 'MscgenLexer': ('pygments.lexers.dsls', 'Mscgen', ('mscgen', 'msc'), ('*.msc',), ()), 'MuPADLexer': ('pygments.lexers.math', 'MuPAD', ('mupad',), ('*.mu',), ()), - 'MxmlLexer': ('pygments.lexers.web', 'MXML', ('mxml',), ('*.mxml',), ()), + 'MxmlLexer': ('pygments.lexers.actionscript', 'MXML', ('mxml',), ('*.mxml',), ()), 'MySqlLexer': ('pygments.lexers.sql', 'MySQL', ('mysql',), (), ('text/x-mysql',)), 'MyghtyCssLexer': ('pygments.lexers.templates', 'CSS+Myghty', ('css+myghty',), (), ('text/css+myghty',)), 'MyghtyHtmlLexer': ('pygments.lexers.templates', 'HTML+Myghty', ('html+myghty',), (), ('text/html+myghty',)), @@ -244,7 +245,7 @@ LEXERS = { 'ObjdumpLexer': ('pygments.lexers.asm', 'objdump', ('objdump',), ('*.objdump',), ('text/x-objdump',)), 'ObjectiveCLexer': ('pygments.lexers.c_like.objective', 'Objective-C', ('objective-c', 'objectivec', 'obj-c', 'objc'), ('*.m', '*.h'), ('text/x-objective-c',)), 'ObjectiveCppLexer': ('pygments.lexers.c_like.objective', 'Objective-C++', ('objective-c++', 'objectivec++', 'obj-c++', 'objc++'), ('*.mm', '*.hh'), ('text/x-objective-c++',)), - 'ObjectiveJLexer': ('pygments.lexers.web', 'Objective-J', ('objective-j', 'objectivej', 'obj-j', 'objj'), ('*.j',), ('text/x-objective-j',)), + 'ObjectiveJLexer': ('pygments.lexers.javascript', 'Objective-J', ('objective-j', 'objectivej', 'obj-j', 'objj'), ('*.j',), ('text/x-objective-j',)), 'OcamlLexer': ('pygments.lexers.misc.ml', 'OCaml', ('ocaml',), ('*.ml', '*.mli', '*.mll', '*.mly'), ('text/x-ocaml',)), 'OctaveLexer': ('pygments.lexers.math', 'Octave', ('octave',), ('*.m',), ('text/octave',)), 'OocLexer': ('pygments.lexers.misc.ooc', 'Ooc', ('ooc',), ('*.ooc',), ('text/x-ooc',)), @@ -254,7 +255,7 @@ LEXERS = { 'PawnLexer': ('pygments.lexers.misc.pawn', 'Pawn', ('pawn',), ('*.p', '*.pwn', '*.inc'), ('text/x-pawn',)), 'Perl6Lexer': ('pygments.lexers.perl', 'Perl6', ('perl6', 'pl6'), ('*.pl', '*.pm', '*.nqp', '*.p6', '*.6pl', '*.p6l', '*.pl6', '*.6pm', '*.p6m', '*.pm6', '*.t'), 
('text/x-perl6', 'application/x-perl6')), 'PerlLexer': ('pygments.lexers.perl', 'Perl', ('perl', 'pl'), ('*.pl', '*.pm', '*.t'), ('text/x-perl', 'application/x-perl')), - 'PhpLexer': ('pygments.lexers.web', 'PHP', ('php', 'php3', 'php4', 'php5'), ('*.php', '*.php[345]', '*.inc'), ('text/x-php',)), + 'PhpLexer': ('pygments.lexers.php', 'PHP', ('php', 'php3', 'php4', 'php5'), ('*.php', '*.php[345]', '*.inc'), ('text/x-php',)), 'PigLexer': ('pygments.lexers.jvm', 'Pig', ('pig',), ('*.pig',), ('text/x-pig',)), 'PikeLexer': ('pygments.lexers.c_like.other', 'Pike', ('pike',), ('*.pike', '*.pmod'), ('text/x-pike',)), 'PlPgsqlLexer': ('pygments.lexers.sql', 'PL/pgSQL', ('plpgsql',), (), ('text/x-plpgsql',)), @@ -274,7 +275,7 @@ LEXERS = { 'PythonLexer': ('pygments.lexers.python', 'Python', ('python', 'py', 'sage'), ('*.py', '*.pyw', '*.sc', 'SConstruct', 'SConscript', '*.tac', '*.sage'), ('text/x-python', 'application/x-python')), 'PythonTracebackLexer': ('pygments.lexers.python', 'Python Traceback', ('pytb',), ('*.pytb',), ('text/x-python-traceback',)), 'QBasicLexer': ('pygments.lexers.misc.basic', 'QBasic', ('qbasic', 'basic'), ('*.BAS', '*.bas'), ('text/basic',)), - 'QmlLexer': ('pygments.lexers.web', 'QML', ('qml',), ('*.qml',), ('application/x-qml',)), + 'QmlLexer': ('pygments.lexers.webmisc', 'QML', ('qml',), ('*.qml',), ('application/x-qml',)), 'RConsoleLexer': ('pygments.lexers.math', 'RConsole', ('rconsole', 'rout'), ('*.Rout',), ()), 'RPMSpecLexer': ('pygments.lexers.installers', 'RPMSpec', ('spec',), ('*.spec',), ('text/x-rpm-spec',)), 'RacketLexer': ('pygments.lexers.lisp', 'Racket', ('racket', 'rkt'), ('*.rkt', '*.rktd', '*.rktl'), ('text/x-racket', 'application/x-racket')), @@ -303,19 +304,20 @@ LEXERS = { 'RustLexer': ('pygments.lexers.c_like.rust', 'Rust', ('rust',), ('*.rs',), ('text/x-rustsrc',)), 'SLexer': ('pygments.lexers.math', 'S', ('splus', 's', 'r'), ('*.S', '*.R', '.Rhistory', '.Rprofile', '.Renviron'), ('text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r', 'text/x-R', 'text/x-r-history', 'text/x-r-profile')), 'SMLLexer': ('pygments.lexers.misc.ml', 'Standard ML', ('sml',), ('*.sml', '*.sig', '*.fun'), ('text/x-standardml', 'application/x-standardml')), - 'SassLexer': ('pygments.lexers.web', 'Sass', ('sass',), ('*.sass',), ('text/x-sass',)), + 'SassLexer': ('pygments.lexers.css', 'Sass', ('sass',), ('*.sass',), ('text/x-sass',)), 'ScalaLexer': ('pygments.lexers.jvm', 'Scala', ('scala',), ('*.scala',), ('text/x-scala',)), - 'ScamlLexer': ('pygments.lexers.web', 'Scaml', ('scaml',), ('*.scaml',), ('text/x-scaml',)), + 'ScamlLexer': ('pygments.lexers.html', 'Scaml', ('scaml',), ('*.scaml',), ('text/x-scaml',)), 'SchemeLexer': ('pygments.lexers.lisp', 'Scheme', ('scheme', 'scm'), ('*.scm', '*.ss'), ('text/x-scheme', 'application/x-scheme')), 'ScilabLexer': ('pygments.lexers.math', 'Scilab', ('scilab',), ('*.sci', '*.sce', '*.tst'), ('text/scilab',)), - 'ScssLexer': ('pygments.lexers.web', 'SCSS', ('scss',), ('*.scss',), ('text/x-scss',)), + 'ScssLexer': ('pygments.lexers.css', 'SCSS', ('scss',), ('*.scss',), ('text/x-scss',)), 'ShellSessionLexer': ('pygments.lexers.shell', 'Shell Session', ('shell-session',), ('*.shell-session',), ('application/x-sh-session',)), - 'SlimLexer': ('pygments.lexers.web', 'Slim', ('slim',), ('*.slim',), ('text/x-slim',)), + 'SlimLexer': ('pygments.lexers.webmisc', 'Slim', ('slim',), ('*.slim',), ('text/x-slim',)), 'SmaliLexer': ('pygments.lexers.dalvik', 'Smali', ('smali',), ('*.smali',), ('text/smali',)), 'SmalltalkLexer': 
('pygments.lexers.misc.smalltalk', 'Smalltalk', ('smalltalk', 'squeak', 'st'), ('*.st',), ('text/x-smalltalk',)), 'SmartyLexer': ('pygments.lexers.templates', 'Smarty', ('smarty',), ('*.tpl',), ('application/x-smarty',)), 'SnobolLexer': ('pygments.lexers.misc.snobol', 'Snobol', ('snobol',), ('*.snobol',), ('text/x-snobol',)), 'SourcePawnLexer': ('pygments.lexers.misc.pawn', 'SourcePawn', ('sp',), ('*.sp',), ('text/x-sourcepawn',)), + 'SourcesListLexer': ('pygments.lexers.installers', 'Debian Sourcelist', ('sourceslist', 'sources.list', 'debsources'), ('sources.list',), ()), 'SparqlLexer': ('pygments.lexers.rdf', 'SPARQL', ('sparql',), ('*.rq', '*.sparql'), ('application/sparql-query',)), 'SqlLexer': ('pygments.lexers.sql', 'SQL', ('sql',), ('*.sql',), ('text/x-sql',)), 'SqliteConsoleLexer': ('pygments.lexers.sql', 'sqlite3con', ('sqlite3',), ('*.sqlite3-console',), ('text/x-sqlite3-console',)), @@ -332,7 +334,7 @@ LEXERS = { 'TextLexer': ('pygments.lexers.special', 'Text only', ('text',), ('*.txt',), ('text/plain',)), 'TodotxtLexer': ('pygments.lexers.textfmts', 'Todotxt', ('todotxt',), ('todo.txt', '*.todotxt'), ('text/x-todo',)), 'TreetopLexer': ('pygments.lexers.parsers', 'Treetop', ('treetop',), ('*.treetop', '*.tt'), ()), - 'TypeScriptLexer': ('pygments.lexers.web', 'TypeScript', ('ts',), ('*.ts',), ('text/x-typescript',)), + 'TypeScriptLexer': ('pygments.lexers.javascript', 'TypeScript', ('ts',), ('*.ts',), ('text/x-typescript',)), 'UrbiscriptLexer': ('pygments.lexers.misc.urbi', 'UrbiScript', ('urbiscript',), ('*.u',), ('application/x-urbiscript',)), 'VCTreeStatusLexer': ('pygments.lexers.console', 'VCTreeStatus', ('vctreestatus',), (), ()), 'VGLLexer': ('pygments.lexers.dsls', 'VGL', ('vgl',), ('*.rpf',), ()), @@ -345,17 +347,17 @@ LEXERS = { 'VerilogLexer': ('pygments.lexers.hdl', 'verilog', ('verilog', 'v'), ('*.v',), ('text/x-verilog',)), 'VhdlLexer': ('pygments.lexers.hdl', 'vhdl', ('vhdl',), ('*.vhdl', '*.vhd'), ('text/x-vhdl',)), 'VimLexer': ('pygments.lexers.textedit', 'VimL', ('vim',), ('*.vim', '.vimrc', '.exrc', '.gvimrc', '_vimrc', '_exrc', '_gvimrc', 'vimrc', 'gvimrc'), ('text/x-vim',)), - 'XQueryLexer': ('pygments.lexers.web', 'XQuery', ('xquery', 'xqy', 'xq', 'xql', 'xqm'), ('*.xqy', '*.xquery', '*.xq', '*.xql', '*.xqm'), ('text/xquery', 'application/xquery')), + 'XQueryLexer': ('pygments.lexers.webmisc', 'XQuery', ('xquery', 'xqy', 'xq', 'xql', 'xqm'), ('*.xqy', '*.xquery', '*.xq', '*.xql', '*.xqm'), ('text/xquery', 'application/xquery')), 'XmlDjangoLexer': ('pygments.lexers.templates', 'XML+Django/Jinja', ('xml+django', 'xml+jinja'), (), ('application/xml+django', 'application/xml+jinja')), 'XmlErbLexer': ('pygments.lexers.templates', 'XML+Ruby', ('xml+erb', 'xml+ruby'), (), ('application/xml+ruby',)), - 'XmlLexer': ('pygments.lexers.web', 'XML', ('xml',), ('*.xml', '*.xsl', '*.rss', '*.xslt', '*.xsd', '*.wsdl', '*.wsf'), ('text/xml', 'application/xml', 'image/svg+xml', 'application/rss+xml', 'application/atom+xml')), + 'XmlLexer': ('pygments.lexers.html', 'XML', ('xml',), ('*.xml', '*.xsl', '*.rss', '*.xslt', '*.xsd', '*.wsdl', '*.wsf'), ('text/xml', 'application/xml', 'image/svg+xml', 'application/rss+xml', 'application/atom+xml')), 'XmlPhpLexer': ('pygments.lexers.templates', 'XML+PHP', ('xml+php',), (), ('application/xml+php',)), 'XmlSmartyLexer': ('pygments.lexers.templates', 'XML+Smarty', ('xml+smarty',), (), ('application/xml+smarty',)), - 'XsltLexer': ('pygments.lexers.web', 'XSLT', ('xslt',), ('*.xsl', '*.xslt', '*.xpl'), ('application/xsl+xml', 
'application/xslt+xml')), + 'XsltLexer': ('pygments.lexers.html', 'XSLT', ('xslt',), ('*.xsl', '*.xslt', '*.xpl'), ('application/xsl+xml', 'application/xslt+xml')), 'XtendLexer': ('pygments.lexers.jvm', 'Xtend', ('xtend',), ('*.xtend',), ('text/x-xtend',)), 'YamlJinjaLexer': ('pygments.lexers.templates', 'YAML+Jinja', ('yaml+jinja', 'salt', 'sls'), ('*.sls',), ('text/x-yaml+jinja', 'text/x-sls')), 'YamlLexer': ('pygments.lexers.data', 'YAML', ('yaml',), ('*.yaml', '*.yml'), ('text/x-yaml',)), - 'ZephirLexer': ('pygments.lexers.web', 'Zephir', ('zephir',), ('*.zep',), ()), + 'ZephirLexer': ('pygments.lexers.php', 'Zephir', ('zephir',), ('*.zep',), ()), } if __name__ == '__main__': diff --git a/pygments/lexers/actionscript.py b/pygments/lexers/actionscript.py new file mode 100644 index 00000000..7b866865 --- /dev/null +++ b/pygments/lexers/actionscript.py @@ -0,0 +1,238 @@ +# -*- coding: utf-8 -*- +""" + pygments.lexers.actionscript + ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + + Lexers for ActionScript and MXML. + + :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" + +import re + +from pygments.lexer import RegexLexer, bygroups, using, this, words +from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ + Number, Punctuation + +__all__ = ['ActionScriptLexer', 'ActionScript3Lexer', 'MxmlLexer'] + + +class ActionScriptLexer(RegexLexer): + """ + For ActionScript source code. + + .. versionadded:: 0.9 + """ + + name = 'ActionScript' + aliases = ['as', 'actionscript'] + filenames = ['*.as'] + mimetypes = ['application/x-actionscript', 'text/x-actionscript', + 'text/actionscript'] + + flags = re.DOTALL + tokens = { + 'root': [ + (r'\s+', Text), + (r'//.*?\n', Comment.Single), + (r'/\*.*?\*/', Comment.Multiline), + (r'/(\\\\|\\/|[^/\n])*/[gim]*', String.Regex), + (r'[~\^\*!%&<>\|+=:;,/?\\-]+', Operator), + (r'[{}\[\]();.]+', Punctuation), + (words(( + 'case', 'default', 'for', 'each', 'in', 'while', 'do', 'break', + 'return', 'continue', 'if', 'else', 'throw', 'try', 'catch', + 'var', 'with', 'new', 'typeof', 'arguments', 'instanceof', 'this', + 'switch'), suffix=r'\b'), + Keyword), + (words(( + 'class', 'public', 'final', 'internal', 'native', 'override', 'private', + 'protected', 'static', 'import', 'extends', 'implements', 'interface', + 'intrinsic', 'return', 'super', 'dynamic', 'function', 'const', 'get', + 'namespace', 'package', 'set'), suffix=r'\b'), + Keyword.Declaration), + (r'(true|false|null|NaN|Infinity|-Infinity|undefined|Void)\b', + Keyword.Constant), + (words(( + 'Accessibility', 'AccessibilityProperties', 'ActionScriptVersion', + 'ActivityEvent', 'AntiAliasType', 'ApplicationDomain', 'AsBroadcaster', 'Array', + 'AsyncErrorEvent', 'AVM1Movie', 'BevelFilter', 'Bitmap', 'BitmapData', + 'BitmapDataChannel', 'BitmapFilter', 'BitmapFilterQuality', 'BitmapFilterType', + 'BlendMode', 'BlurFilter', 'Boolean', 'ByteArray', 'Camera', 'Capabilities', 'CapsStyle', + 'Class', 'Color', 'ColorMatrixFilter', 'ColorTransform', 'ContextMenu', + 'ContextMenuBuiltInItems', 'ContextMenuEvent', 'ContextMenuItem', + 'ConvultionFilter', 'CSMSettings', 'DataEvent', 'Date', 'DefinitionError', + 'DeleteObjectSample', 'Dictionary', 'DisplacmentMapFilter', 'DisplayObject', + 'DisplacmentMapFilterMode', 'DisplayObjectContainer', 'DropShadowFilter', + 'Endian', 'EOFError', 'Error', 'ErrorEvent', 'EvalError', 'Event', 'EventDispatcher', + 'EventPhase', 'ExternalInterface', 'FileFilter', 'FileReference', + 'FileReferenceList', 
'FocusDirection', 'FocusEvent', 'Font', 'FontStyle', 'FontType', + 'FrameLabel', 'FullScreenEvent', 'Function', 'GlowFilter', 'GradientBevelFilter', + 'GradientGlowFilter', 'GradientType', 'Graphics', 'GridFitType', 'HTTPStatusEvent', + 'IBitmapDrawable', 'ID3Info', 'IDataInput', 'IDataOutput', 'IDynamicPropertyOutput', + 'IDynamicPropertyWriter', 'IEventDispatcher', 'IExternalizable', + 'IllegalOperationError', 'IME', 'IMEConversionMode', 'IMEEvent', 'int', + 'InteractiveObject', 'InterpolationMethod', 'InvalidSWFError', 'InvokeEvent', + 'IOError', 'IOErrorEvent', 'JointStyle', 'Key', 'Keyboard', 'KeyboardEvent', 'KeyLocation', + 'LineScaleMode', 'Loader', 'LoaderContext', 'LoaderInfo', 'LoadVars', 'LocalConnection', + 'Locale', 'Math', 'Matrix', 'MemoryError', 'Microphone', 'MorphShape', 'Mouse', 'MouseEvent', + 'MovieClip', 'MovieClipLoader', 'Namespace', 'NetConnection', 'NetStatusEvent', + 'NetStream', 'NewObjectSample', 'Number', 'Object', 'ObjectEncoding', 'PixelSnapping', + 'Point', 'PrintJob', 'PrintJobOptions', 'PrintJobOrientation', 'ProgressEvent', 'Proxy', + 'QName', 'RangeError', 'Rectangle', 'ReferenceError', 'RegExp', 'Responder', 'Sample', 'Scene', + 'ScriptTimeoutError', 'Security', 'SecurityDomain', 'SecurityError', + 'SecurityErrorEvent', 'SecurityPanel', 'Selection', 'Shape', 'SharedObject', + 'SharedObjectFlushStatus', 'SimpleButton', 'Socket', 'Sound', 'SoundChannel', + 'SoundLoaderContext', 'SoundMixer', 'SoundTransform', 'SpreadMethod', 'Sprite', + 'StackFrame', 'StackOverflowError', 'Stage', 'StageAlign', 'StageDisplayState', + 'StageQuality', 'StageScaleMode', 'StaticText', 'StatusEvent', 'String', 'StyleSheet', + 'SWFVersion', 'SyncEvent', 'SyntaxError', 'System', 'TextColorType', 'TextField', + 'TextFieldAutoSize', 'TextFieldType', 'TextFormat', 'TextFormatAlign', + 'TextLineMetrics', 'TextRenderer', 'TextSnapshot', 'Timer', 'TimerEvent', 'Transform', + 'TypeError', 'uint', 'URIError', 'URLLoader', 'URLLoaderDataFormat', 'URLRequest', + 'URLRequestHeader', 'URLRequestMethod', 'URLStream', 'URLVariabeles', 'VerifyError', + 'Video', 'XML', 'XMLDocument', 'XMLList', 'XMLNode', 'XMLNodeType', 'XMLSocket', + 'XMLUI'), suffix=r'\b'), + Name.Builtin), + (words(( + 'decodeURI', 'decodeURIComponent', 'encodeURI', 'escape', 'eval', 'isFinite', 'isNaN', + 'isXMLName', 'clearInterval', 'fscommand', 'getTimer', 'getURL', 'getVersion', + 'isFinite', 'parseFloat', 'parseInt', 'setInterval', 'trace', 'updateAfterEvent', + 'unescape'), suffix=r'\b'), + Name.Function), + (r'[$a-zA-Z_]\w*', Name.Other), + (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float), + (r'0x[0-9a-f]+', Number.Hex), + (r'[0-9]+', Number.Integer), + (r'"(\\\\|\\"|[^"])*"', String.Double), + (r"'(\\\\|\\'|[^'])*'", String.Single), + ] + } + + +class ActionScript3Lexer(RegexLexer): + """ + For ActionScript 3 source code. + + .. versionadded:: 0.11 + """ + + name = 'ActionScript 3' + aliases = ['as3', 'actionscript3'] + filenames = ['*.as'] + mimetypes = ['application/x-actionscript3', 'text/x-actionscript3', + 'text/actionscript3'] + + identifier = r'[$a-zA-Z_]\w*' + typeidentifier = identifier + '(?:\.<\w+>)?'
+ + flags = re.DOTALL | re.MULTILINE + tokens = { + 'root': [ + (r'\s+', Text), + (r'(function\s+)(' + identifier + r')(\s*)(\()', + bygroups(Keyword.Declaration, Name.Function, Text, Operator), + 'funcparams'), + (r'(var|const)(\s+)(' + identifier + r')(\s*)(:)(\s*)(' + + typeidentifier + r')', + bygroups(Keyword.Declaration, Text, Name, Text, Punctuation, Text, + Keyword.Type)), + (r'(import|package)(\s+)((?:' + identifier + r'|\.)+)(\s*)', + bygroups(Keyword, Text, Name.Namespace, Text)), + (r'(new)(\s+)(' + typeidentifier + r')(\s*)(\()', + bygroups(Keyword, Text, Keyword.Type, Text, Operator)), + (r'//.*?\n', Comment.Single), + (r'/\*.*?\*/', Comment.Multiline), + (r'/(\\\\|\\/|[^\n])*/[gisx]*', String.Regex), + (r'(\.)(' + identifier + r')', bygroups(Operator, Name.Attribute)), + (r'(case|default|for|each|in|while|do|break|return|continue|if|else|' + r'throw|try|catch|with|new|typeof|arguments|instanceof|this|' + r'switch|import|include|as|is)\b', + Keyword), + (r'(class|public|final|internal|native|override|private|protected|' + r'static|import|extends|implements|interface|intrinsic|return|super|' + r'dynamic|function|const|get|namespace|package|set)\b', + Keyword.Declaration), + (r'(true|false|null|NaN|Infinity|-Infinity|undefined|void)\b', + Keyword.Constant), + (r'(decodeURI|decodeURIComponent|encodeURI|escape|eval|isFinite|isNaN|' + r'isXMLName|clearInterval|fscommand|getTimer|getURL|getVersion|' + r'isFinite|parseFloat|parseInt|setInterval|trace|updateAfterEvent|' + r'unescape)\b', Name.Function), + (identifier, Name), + (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float), + (r'0x[0-9a-f]+', Number.Hex), + (r'[0-9]+', Number.Integer), + (r'"(\\\\|\\"|[^"])*"', String.Double), + (r"'(\\\\|\\'|[^'])*'", String.Single), + (r'[~\^\*!%&<>\|+=:;,/?\\{}\[\]().-]+', Operator), + ], + 'funcparams': [ + (r'\s+', Text), + (r'(\s*)(\.\.\.)?(' + identifier + r')(\s*)(:)(\s*)(' + + typeidentifier + r'|\*)(\s*)', + bygroups(Text, Punctuation, Name, Text, Operator, Text, + Keyword.Type, Text), 'defval'), + (r'\)', Operator, 'type') + ], + 'type': [ + (r'(\s*)(:)(\s*)(' + typeidentifier + r'|\*)', + bygroups(Text, Operator, Text, Keyword.Type), '#pop:2'), + (r'\s*', Text, '#pop:2') + ], + 'defval': [ + (r'(=)(\s*)([^(),]+)(\s*)(,?)', + bygroups(Operator, Text, using(this), Text, Operator), '#pop'), + (r',?', Operator, '#pop') + ] + } + + def analyse_text(text): + if re.match(r'\w+\s*:\s*\w', text): + return 0.3 + return 0 + + +class MxmlLexer(RegexLexer): + """ + For MXML markup. + Nested AS3 in )', - bygroups(using(HtmlLexer), - using(JavascriptLexer), using(HtmlLexer))), - (r'(.+?)(?=<)', using(HtmlLexer)), - (r'.+', using(HtmlLexer)), - ], - } - - -class ScamlLexer(ExtendedRegexLexer): - """ - For `Scaml markup `_. Scaml is Haml for Scala. - - .. versionadded:: 1.4 - """ - - name = 'Scaml' - aliases = ['scaml'] - filenames = ['*.scaml'] - mimetypes = ['text/x-scaml'] - - flags = re.IGNORECASE - # Scaml does not yet support the " |\n" notation to - # wrap long lines. Once it does, use the custom faux - # dot instead. - # _dot = r'(?: \|\n(?=.* \|)|.)' - _dot = r'.' 
- - tokens = { - 'root': [ - (r'[ \t]*\n', Text), - (r'[ \t]*', _indentation), - ], - - 'css': [ - (r'\.[\w:-]+', Name.Class, 'tag'), - (r'\#[\w:-]+', Name.Function, 'tag'), - ], - - 'eval-or-plain': [ - (r'[&!]?==', Punctuation, 'plain'), - (r'([&!]?[=~])(' + _dot + r'*\n)', - bygroups(Punctuation, using(ScalaLexer)), - 'root'), - default('plain'), - ], - - 'content': [ - include('css'), - (r'%[\w:-]+', Name.Tag, 'tag'), - (r'!!!' + _dot + r'*\n', Name.Namespace, '#pop'), - (r'(/)(\[' + _dot + '*?\])(' + _dot + r'*\n)', - bygroups(Comment, Comment.Special, Comment), - '#pop'), - (r'/' + _dot + r'*\n', _starts_block(Comment, 'html-comment-block'), - '#pop'), - (r'-#' + _dot + r'*\n', _starts_block(Comment.Preproc, - 'scaml-comment-block'), '#pop'), - (r'(-@\s*)(import)?(' + _dot + r'*\n)', - bygroups(Punctuation, Keyword, using(ScalaLexer)), - '#pop'), - (r'(-)(' + _dot + r'*\n)', - bygroups(Punctuation, using(ScalaLexer)), - '#pop'), - (r':' + _dot + r'*\n', _starts_block(Name.Decorator, 'filter-block'), - '#pop'), - include('eval-or-plain'), - ], - - 'tag': [ - include('css'), - (r'\{(,\n|' + _dot + ')*?\}', using(ScalaLexer)), - (r'\[' + _dot + '*?\]', using(ScalaLexer)), - (r'\(', Text, 'html-attributes'), - (r'/[ \t]*\n', Punctuation, '#pop:2'), - (r'[<>]{1,2}(?=[ \t=])', Punctuation), - include('eval-or-plain'), - ], - - 'plain': [ - (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text), - (r'(#\{)(' + _dot + '*?)(\})', - bygroups(String.Interpol, using(ScalaLexer), String.Interpol)), - (r'\n', Text, 'root'), - ], - - 'html-attributes': [ - (r'\s+', Text), - (r'[\w:-]+[ \t]*=', Name.Attribute, 'html-attribute-value'), - (r'[\w:-]+', Name.Attribute), - (r'\)', Text, '#pop'), - ], - - 'html-attribute-value': [ - (r'[ \t]+', Text), - (r'\w+', Name.Variable, '#pop'), - (r'@\w+', Name.Variable.Instance, '#pop'), - (r'\$\w+', Name.Variable.Global, '#pop'), - (r"'(\\\\|\\'|[^'\n])*'", String, '#pop'), - (r'"(\\\\|\\"|[^"\n])*"', String, '#pop'), - ], - - 'html-comment-block': [ - (_dot + '+', Comment), - (r'\n', Text, 'root'), - ], - - 'scaml-comment-block': [ - (_dot + '+', Comment.Preproc), - (r'\n', Text, 'root'), - ], - - 'filter-block': [ - (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Name.Decorator), - (r'(#\{)(' + _dot + '*?)(\})', - bygroups(String.Interpol, using(ScalaLexer), String.Interpol)), - (r'\n', Text, 'root'), - ], - } - - -class JadeLexer(ExtendedRegexLexer): - """ - For Jade markup. - Jade is a variant of Scaml, see: - http://scalate.fusesource.org/documentation/scaml-reference.html - - .. versionadded:: 1.4 - """ - - name = 'Jade' - aliases = ['jade'] - filenames = ['*.jade'] - mimetypes = ['text/x-jade'] - - flags = re.IGNORECASE - _dot = r'.' - - tokens = { - 'root': [ - (r'[ \t]*\n', Text), - (r'[ \t]*', _indentation), - ], - - 'css': [ - (r'\.[\w:-]+', Name.Class, 'tag'), - (r'\#[\w:-]+', Name.Function, 'tag'), - ], - - 'eval-or-plain': [ - (r'[&!]?==', Punctuation, 'plain'), - (r'([&!]?[=~])(' + _dot + r'*\n)', - bygroups(Punctuation, using(ScalaLexer)), 'root'), - default('plain'), - ], - - 'content': [ - include('css'), - (r'!!!' 
+ _dot + r'*\n', Name.Namespace, '#pop'), - (r'(/)(\[' + _dot + '*?\])(' + _dot + r'*\n)', - bygroups(Comment, Comment.Special, Comment), - '#pop'), - (r'/' + _dot + r'*\n', _starts_block(Comment, 'html-comment-block'), - '#pop'), - (r'-#' + _dot + r'*\n', _starts_block(Comment.Preproc, - 'scaml-comment-block'), '#pop'), - (r'(-@\s*)(import)?(' + _dot + r'*\n)', - bygroups(Punctuation, Keyword, using(ScalaLexer)), - '#pop'), - (r'(-)(' + _dot + r'*\n)', - bygroups(Punctuation, using(ScalaLexer)), - '#pop'), - (r':' + _dot + r'*\n', _starts_block(Name.Decorator, 'filter-block'), - '#pop'), - (r'[\w:-]+', Name.Tag, 'tag'), - (r'\|', Text, 'eval-or-plain'), - ], - - 'tag': [ - include('css'), - (r'\{(,\n|' + _dot + ')*?\}', using(ScalaLexer)), - (r'\[' + _dot + '*?\]', using(ScalaLexer)), - (r'\(', Text, 'html-attributes'), - (r'/[ \t]*\n', Punctuation, '#pop:2'), - (r'[<>]{1,2}(?=[ \t=])', Punctuation), - include('eval-or-plain'), - ], - - 'plain': [ - (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text), - (r'(#\{)(' + _dot + '*?)(\})', - bygroups(String.Interpol, using(ScalaLexer), String.Interpol)), - (r'\n', Text, 'root'), - ], - - 'html-attributes': [ - (r'\s+', Text), - (r'[\w:-]+[ \t]*=', Name.Attribute, 'html-attribute-value'), - (r'[\w:-]+', Name.Attribute), - (r'\)', Text, '#pop'), - ], - - 'html-attribute-value': [ - (r'[ \t]+', Text), - (r'\w+', Name.Variable, '#pop'), - (r'@\w+', Name.Variable.Instance, '#pop'), - (r'\$\w+', Name.Variable.Global, '#pop'), - (r"'(\\\\|\\'|[^'\n])*'", String, '#pop'), - (r'"(\\\\|\\"|[^"\n])*"', String, '#pop'), - ], - - 'html-comment-block': [ - (_dot + '+', Comment), - (r'\n', Text, 'root'), - ], - - 'scaml-comment-block': [ - (_dot + '+', Comment.Preproc), - (r'\n', Text, 'root'), - ], - - 'filter-block': [ - (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Name.Decorator), - (r'(#\{)(' + _dot + '*?)(\})', - bygroups(String.Interpol, using(ScalaLexer), String.Interpol)), - (r'\n', Text, 'root'), - ], - } - - -class XQueryLexer(ExtendedRegexLexer): - """ - An XQuery lexer, parsing a stream and outputting the tokens needed to - highlight xquery code. - - .. 
versionadded:: 1.4 - """ - name = 'XQuery' - aliases = ['xquery', 'xqy', 'xq', 'xql', 'xqm'] - filenames = ['*.xqy', '*.xquery', '*.xq', '*.xql', '*.xqm'] - mimetypes = ['text/xquery', 'application/xquery'] - - xquery_parse_state = [] - - # FIX UNICODE LATER - #ncnamestartchar = ( - # ur"[A-Z]|_|[a-z]|[\u00C0-\u00D6]|[\u00D8-\u00F6]|[\u00F8-\u02FF]|" - # ur"[\u0370-\u037D]|[\u037F-\u1FFF]|[\u200C-\u200D]|[\u2070-\u218F]|" - # ur"[\u2C00-\u2FEF]|[\u3001-\uD7FF]|[\uF900-\uFDCF]|[\uFDF0-\uFFFD]|" - # ur"[\u10000-\uEFFFF]" - #) - ncnamestartchar = r"(?:[A-Z]|_|[a-z])" - # FIX UNICODE LATER - #ncnamechar = ncnamestartchar + (ur"|-|\.|[0-9]|\u00B7|[\u0300-\u036F]|" - # ur"[\u203F-\u2040]") - ncnamechar = r"(?:" + ncnamestartchar + r"|-|\.|[0-9])" - ncname = "(?:%s+%s*)" % (ncnamestartchar, ncnamechar) - pitarget_namestartchar = r"(?:[A-KN-WY-Z]|_|:|[a-kn-wy-z])" - pitarget_namechar = r"(?:" + pitarget_namestartchar + r"|-|\.|[0-9])" - pitarget = "%s+%s*" % (pitarget_namestartchar, pitarget_namechar) - prefixedname = "%s:%s" % (ncname, ncname) - unprefixedname = ncname - qname = "(?:%s|%s)" % (prefixedname, unprefixedname) - - entityref = r'(?:&(?:lt|gt|amp|quot|apos|nbsp);)' - charref = r'(?:&#[0-9]+;|&#x[0-9a-fA-F]+;)' - - stringdouble = r'(?:"(?:' + entityref + r'|' + charref + r'|""|[^&"])*")' - stringsingle = r"(?:'(?:" + entityref + r"|" + charref + r"|''|[^&'])*')" - - # FIX UNICODE LATER - #elementcontentchar = (ur'\t|\r|\n|[\u0020-\u0025]|[\u0028-\u003b]|' - # ur'[\u003d-\u007a]|\u007c|[\u007e-\u007F]') - elementcontentchar = r'[A-Za-z]|\s|\d|[!"#$%\(\)\*\+,\-\./\:;=\?\@\[\\\]^_\'`\|~]' - #quotattrcontentchar = (ur'\t|\r|\n|[\u0020-\u0021]|[\u0023-\u0025]|' - # ur'[\u0027-\u003b]|[\u003d-\u007a]|\u007c|[\u007e-\u007F]') - quotattrcontentchar = r'[A-Za-z]|\s|\d|[!#$%\(\)\*\+,\-\./\:;=\?\@\[\\\]^_\'`\|~]' - #aposattrcontentchar = (ur'\t|\r|\n|[\u0020-\u0025]|[\u0028-\u003b]|' - # ur'[\u003d-\u007a]|\u007c|[\u007e-\u007F]') - aposattrcontentchar = r'[A-Za-z]|\s|\d|[!"#$%\(\)\*\+,\-\./\:;=\?\@\[\\\]^_`\|~]' - - - # CHAR elements - fix the above elementcontentchar, quotattrcontentchar, - # aposattrcontentchar - #x9 | #xA | #xD | [#x20-#xD7FF] | [#xE000-#xFFFD] | [#x10000-#x10FFFF] - - flags = re.DOTALL | re.MULTILINE | re.UNICODE - - def punctuation_root_callback(lexer, match, ctx): - yield match.start(), Punctuation, match.group(1) - # transition to root always - don't pop off stack - ctx.stack = ['root'] - ctx.pos = match.end() - - def operator_root_callback(lexer, match, ctx): - yield match.start(), Operator, match.group(1) - # transition to root always - don't pop off stack - ctx.stack = ['root'] - ctx.pos = match.end() - - def popstate_tag_callback(lexer, match, ctx): - yield match.start(), Name.Tag, match.group(1) - ctx.stack.append(lexer.xquery_parse_state.pop()) - ctx.pos = match.end() - - def popstate_xmlcomment_callback(lexer, match, ctx): - yield match.start(), String.Doc, match.group(1) - ctx.stack.append(lexer.xquery_parse_state.pop()) - ctx.pos = match.end() - - def popstate_kindtest_callback(lexer, match, ctx): - yield match.start(), Punctuation, match.group(1) - next_state = lexer.xquery_parse_state.pop() - if next_state == 'occurrenceindicator': - if re.match("[?*+]+", match.group(2)): - yield match.start(), Punctuation, match.group(2) - ctx.stack.append('operator') - ctx.pos = match.end() - else: - ctx.stack.append('operator') - ctx.pos = match.end(1) - else: - ctx.stack.append(next_state) - ctx.pos = match.end(1) - - def popstate_callback(lexer, match, ctx): - yield 
match.start(), Punctuation, match.group(1) - # if we have run out of our state stack, pop whatever is on the pygments - # state stack - if len(lexer.xquery_parse_state) == 0: - ctx.stack.pop() - elif len(ctx.stack) > 1: - ctx.stack.append(lexer.xquery_parse_state.pop()) - else: - # i don't know if i'll need this, but in case, default back to root - ctx.stack = ['root'] - ctx.pos = match.end() - - def pushstate_element_content_starttag_callback(lexer, match, ctx): - yield match.start(), Name.Tag, match.group(1) - lexer.xquery_parse_state.append('element_content') - ctx.stack.append('start_tag') - ctx.pos = match.end() - - def pushstate_cdata_section_callback(lexer, match, ctx): - yield match.start(), String.Doc, match.group(1) - ctx.stack.append('cdata_section') - lexer.xquery_parse_state.append(ctx.state.pop) - ctx.pos = match.end() - - def pushstate_starttag_callback(lexer, match, ctx): - yield match.start(), Name.Tag, match.group(1) - lexer.xquery_parse_state.append(ctx.state.pop) - ctx.stack.append('start_tag') - ctx.pos = match.end() - - def pushstate_operator_order_callback(lexer, match, ctx): - yield match.start(), Keyword, match.group(1) - yield match.start(), Text, match.group(2) - yield match.start(), Punctuation, match.group(3) - ctx.stack = ['root'] - lexer.xquery_parse_state.append('operator') - ctx.pos = match.end() - - def pushstate_operator_root_validate(lexer, match, ctx): - yield match.start(), Keyword, match.group(1) - yield match.start(), Text, match.group(2) - yield match.start(), Punctuation, match.group(3) - ctx.stack = ['root'] - lexer.xquery_parse_state.append('operator') - ctx.pos = match.end() - - def pushstate_operator_root_validate_withmode(lexer, match, ctx): - yield match.start(), Keyword, match.group(1) - yield match.start(), Text, match.group(2) - yield match.start(), Keyword, match.group(3) - ctx.stack = ['root'] - lexer.xquery_parse_state.append('operator') - ctx.pos = match.end() - - def pushstate_operator_processing_instruction_callback(lexer, match, ctx): - yield match.start(), String.Doc, match.group(1) - ctx.stack.append('processing_instruction') - lexer.xquery_parse_state.append('operator') - ctx.pos = match.end() - - def pushstate_element_content_processing_instruction_callback(lexer, match, ctx): - yield match.start(), String.Doc, match.group(1) - ctx.stack.append('processing_instruction') - lexer.xquery_parse_state.append('element_content') - ctx.pos = match.end() - - def pushstate_element_content_cdata_section_callback(lexer, match, ctx): - yield match.start(), String.Doc, match.group(1) - ctx.stack.append('cdata_section') - lexer.xquery_parse_state.append('element_content') - ctx.pos = match.end() - - def pushstate_operator_cdata_section_callback(lexer, match, ctx): - yield match.start(), String.Doc, match.group(1) - ctx.stack.append('cdata_section') - lexer.xquery_parse_state.append('operator') - ctx.pos = match.end() - - def pushstate_element_content_xmlcomment_callback(lexer, match, ctx): - yield match.start(), String.Doc, match.group(1) - ctx.stack.append('xml_comment') - lexer.xquery_parse_state.append('element_content') - ctx.pos = match.end() - - def pushstate_operator_xmlcomment_callback(lexer, match, ctx): - yield match.start(), String.Doc, match.group(1) - ctx.stack.append('xml_comment') - lexer.xquery_parse_state.append('operator') - ctx.pos = match.end() - - def pushstate_kindtest_callback(lexer, match, ctx): - yield match.start(), Keyword, match.group(1) - yield match.start(), Text, match.group(2) - yield match.start(), Punctuation, 
match.group(3) - lexer.xquery_parse_state.append('kindtest') - ctx.stack.append('kindtest') - ctx.pos = match.end() - - def pushstate_operator_kindtestforpi_callback(lexer, match, ctx): - yield match.start(), Keyword, match.group(1) - yield match.start(), Text, match.group(2) - yield match.start(), Punctuation, match.group(3) - lexer.xquery_parse_state.append('operator') - ctx.stack.append('kindtestforpi') - ctx.pos = match.end() - - def pushstate_operator_kindtest_callback(lexer, match, ctx): - yield match.start(), Keyword, match.group(1) - yield match.start(), Text, match.group(2) - yield match.start(), Punctuation, match.group(3) - lexer.xquery_parse_state.append('operator') - ctx.stack.append('kindtest') - ctx.pos = match.end() - - def pushstate_occurrenceindicator_kindtest_callback(lexer, match, ctx): - yield match.start(), Name.Tag, match.group(1) - yield match.start(), Text, match.group(2) - yield match.start(), Punctuation, match.group(3) - lexer.xquery_parse_state.append('occurrenceindicator') - ctx.stack.append('kindtest') - ctx.pos = match.end() - - def pushstate_operator_starttag_callback(lexer, match, ctx): - yield match.start(), Name.Tag, match.group(1) - lexer.xquery_parse_state.append('operator') - ctx.stack.append('start_tag') - ctx.pos = match.end() - - def pushstate_operator_root_callback(lexer, match, ctx): - yield match.start(), Punctuation, match.group(1) - lexer.xquery_parse_state.append('operator') - ctx.stack = ['root']#.append('root') - ctx.pos = match.end() - - def pushstate_operator_root_construct_callback(lexer, match, ctx): - yield match.start(), Keyword, match.group(1) - yield match.start(), Text, match.group(2) - yield match.start(), Punctuation, match.group(3) - lexer.xquery_parse_state.append('operator') - ctx.stack = ['root'] - ctx.pos = match.end() - - def pushstate_root_callback(lexer, match, ctx): - yield match.start(), Punctuation, match.group(1) - cur_state = ctx.stack.pop() - lexer.xquery_parse_state.append(cur_state) - ctx.stack = ['root']#.append('root') - ctx.pos = match.end() - - def pushstate_operator_attribute_callback(lexer, match, ctx): - yield match.start(), Name.Attribute, match.group(1) - ctx.stack.append('operator') - ctx.pos = match.end() - - def pushstate_operator_callback(lexer, match, ctx): - yield match.start(), Keyword, match.group(1) - yield match.start(), Text, match.group(2) - yield match.start(), Punctuation, match.group(3) - lexer.xquery_parse_state.append('operator') - ctx.pos = match.end() - - tokens = { - 'comment': [ - # xquery comments - (r'(:\))', Comment, '#pop'), - (r'(\(:)', Comment, '#push'), - (r'[^:)]', Comment), - (r'([^:)]|:|\))', Comment), - ], - 'whitespace': [ - (r'\s+', Text), - ], - 'operator': [ - include('whitespace'), - (r'(\})', popstate_callback), - (r'\(:', Comment, 'comment'), - - (r'(\{)', pushstate_root_callback), - (r'then|else|external|at|div|except', Keyword, 'root'), - (r'order by', Keyword, 'root'), - (r'is|mod|order\s+by|stable\s+order\s+by', Keyword, 'root'), - (r'and|or', Operator.Word, 'root'), - (r'(eq|ge|gt|le|lt|ne|idiv|intersect|in)(?=\b)', - Operator.Word, 'root'), - (r'return|satisfies|to|union|where|preserve\s+strip', - Keyword, 'root'), - (r'(>=|>>|>|<=|<<|<|-|\*|!=|\+|\||:=|=)', - operator_root_callback), - (r'(::|;|\[|//|/|,)', - punctuation_root_callback), - (r'(castable|cast)(\s+)(as)\b', - bygroups(Keyword, Text, Keyword), 'singletype'), - (r'(instance)(\s+)(of)\b', - bygroups(Keyword, Text, Keyword), 'itemtype'), - (r'(treat)(\s+)(as)\b', - bygroups(Keyword, Text, Keyword), 
'itemtype'), - (r'(case|as)\b', Keyword, 'itemtype'), - (r'(\))(\s*)(as)', - bygroups(Punctuation, Text, Keyword), 'itemtype'), - (r'\$', Name.Variable, 'varname'), - (r'(for|let)(\s+)(\$)', - bygroups(Keyword, Text, Name.Variable), 'varname'), - #(r'\)|\?|\]', Punctuation, '#push'), - (r'\)|\?|\]', Punctuation), - (r'(empty)(\s+)(greatest|least)', bygroups(Keyword, Text, Keyword)), - (r'ascending|descending|default', Keyword, '#push'), - (r'external', Keyword), - (r'collation', Keyword, 'uritooperator'), - # finally catch all string literals and stay in operator state - (stringdouble, String.Double), - (stringsingle, String.Single), - - (r'(catch)(\s*)', bygroups(Keyword, Text), 'root'), - ], - 'uritooperator': [ - (stringdouble, String.Double, '#pop'), - (stringsingle, String.Single, '#pop'), - ], - 'namespacedecl': [ - include('whitespace'), - (r'\(:', Comment, 'comment'), - (r'(at)(\s+)('+stringdouble+')', bygroups(Keyword, Text, String.Double)), - (r"(at)(\s+)("+stringsingle+')', bygroups(Keyword, Text, String.Single)), - (stringdouble, String.Double), - (stringsingle, String.Single), - (r',', Punctuation), - (r'=', Operator), - (r';', Punctuation, 'root'), - (ncname, Name.Namespace), - ], - 'namespacekeyword': [ - include('whitespace'), - (r'\(:', Comment, 'comment'), - (stringdouble, String.Double, 'namespacedecl'), - (stringsingle, String.Single, 'namespacedecl'), - (r'inherit|no-inherit', Keyword, 'root'), - (r'namespace', Keyword, 'namespacedecl'), - (r'(default)(\s+)(element)', bygroups(Keyword, Text, Keyword)), - (r'preserve|no-preserve', Keyword), - (r',', Punctuation), - ], - 'varname': [ - (r'\(:', Comment, 'comment'), - (qname, Name.Variable, 'operator'), - ], - 'singletype': [ - (r'\(:', Comment, 'comment'), - (ncname + r'(:\*)', Name.Variable, 'operator'), - (qname, Name.Variable, 'operator'), - ], - 'itemtype': [ - include('whitespace'), - (r'\(:', Comment, 'comment'), - (r'\$', Punctuation, 'varname'), - (r'(void)(\s*)(\()(\s*)(\))', - bygroups(Keyword, Text, Punctuation, Text, Punctuation), 'operator'), - (r'(element|attribute|schema-element|schema-attribute|comment|text|' - r'node|binary|document-node|empty-sequence)(\s*)(\()', - pushstate_occurrenceindicator_kindtest_callback), - # Marklogic specific type? 
- (r'(processing-instruction)(\s*)(\()', - bygroups(Keyword, Text, Punctuation), - ('occurrenceindicator', 'kindtestforpi')), - (r'(item)(\s*)(\()(\s*)(\))(?=[*+?])', - bygroups(Keyword, Text, Punctuation, Text, Punctuation), - 'occurrenceindicator'), - (r'\(\#', Punctuation, 'pragma'), - (r';', Punctuation, '#pop'), - (r'then|else', Keyword, '#pop'), - (r'(at)(\s+)(' + stringdouble + ')', - bygroups(Keyword, Text, String.Double), 'namespacedecl'), - (r'(at)(\s+)(' + stringsingle + ')', - bygroups(Keyword, Text, String.Single), 'namespacedecl'), - (r'except|intersect|in|is|return|satisfies|to|union|where', - Keyword, 'root'), - (r'and|div|eq|ge|gt|le|lt|ne|idiv|mod|or', Operator.Word, 'root'), - (r':=|=|,|>=|>>|>|\[|\(|<=|<<|<|-|!=|\|', Operator, 'root'), - (r'external|at', Keyword, 'root'), - (r'(stable)(\s+)(order)(\s+)(by)', - bygroups(Keyword, Text, Keyword, Text, Keyword), 'root'), - (r'(castable|cast)(\s+)(as)', - bygroups(Keyword, Text, Keyword), 'singletype'), - (r'(treat)(\s+)(as)', bygroups(Keyword, Text, Keyword)), - (r'(instance)(\s+)(of)', bygroups(Keyword, Text, Keyword)), - (r'case|as', Keyword, 'itemtype'), - (r'(\))(\s*)(as)', bygroups(Operator, Text, Keyword), 'itemtype'), - (ncname + r':\*', Keyword.Type, 'operator'), - (qname, Keyword.Type, 'occurrenceindicator'), - ], - 'kindtest': [ - (r'\(:', Comment, 'comment'), - (r'{', Punctuation, 'root'), - (r'(\))([*+?]?)', popstate_kindtest_callback), - (r'\*', Name, 'closekindtest'), - (qname, Name, 'closekindtest'), - (r'(element|schema-element)(\s*)(\()', pushstate_kindtest_callback), - ], - 'kindtestforpi': [ - (r'\(:', Comment, 'comment'), - (r'\)', Punctuation, '#pop'), - (ncname, Name.Variable), - (stringdouble, String.Double), - (stringsingle, String.Single), - ], - 'closekindtest': [ - (r'\(:', Comment, 'comment'), - (r'(\))', popstate_callback), - (r',', Punctuation), - (r'(\{)', pushstate_operator_root_callback), - (r'\?', Punctuation), - ], - 'xml_comment': [ - (r'(-->)', popstate_xmlcomment_callback), - (r'[^-]{1,2}', Literal), - (u'\\t|\\r|\\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' + - unirange(0x10000, 0x10ffff), Literal), - ], - 'processing_instruction': [ - (r'\s+', Text, 'processing_instruction_content'), - (r'\?>', String.Doc, '#pop'), - (pitarget, Name), - ], - 'processing_instruction_content': [ - (r'\?>', String.Doc, '#pop'), - (u'\\t|\\r|\\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' + - unirange(0x10000, 0x10ffff), Literal), - ], - 'cdata_section': [ - (r']]>', String.Doc, '#pop'), - (u'\\t|\\r|\\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' + - unirange(0x10000, 0x10ffff), Literal), - ], - 'start_tag': [ - include('whitespace'), - (r'(/>)', popstate_tag_callback), - (r'>', Name.Tag, 'element_content'), - (r'"', Punctuation, 'quot_attribute_content'), - (r"'", Punctuation, 'apos_attribute_content'), - (r'=', Operator), - (qname, Name.Tag), - ], - 'quot_attribute_content': [ - (r'"', Punctuation, 'start_tag'), - (r'(\{)', pushstate_root_callback), - (r'""', Name.Attribute), - (quotattrcontentchar, Name.Attribute), - (entityref, Name.Attribute), - (charref, Name.Attribute), - (r'\{\{|\}\}', Name.Attribute), - ], - 'apos_attribute_content': [ - (r"'", Punctuation, 'start_tag'), - (r'\{', Punctuation, 'root'), - (r"''", Name.Attribute), - (aposattrcontentchar, Name.Attribute), - (entityref, Name.Attribute), - (charref, Name.Attribute), - (r'\{\{|\}\}', Name.Attribute), - ], - 'element_content': [ - (r')', popstate_tag_callback), - (qname, Name.Tag), - ], - 'xmlspace_decl': [ - (r'\(:', Comment, 'comment'), - (r'preserve|strip', 
Keyword, '#pop'), - ], - 'declareordering': [ - (r'\(:', Comment, 'comment'), - include('whitespace'), - (r'ordered|unordered', Keyword, '#pop'), - ], - 'xqueryversion': [ - include('whitespace'), - (r'\(:', Comment, 'comment'), - (stringdouble, String.Double), - (stringsingle, String.Single), - (r'encoding', Keyword), - (r';', Punctuation, '#pop'), - ], - 'pragma': [ - (qname, Name.Variable, 'pragmacontents'), - ], - 'pragmacontents': [ - (r'#\)', Punctuation, 'operator'), - (u'\\t|\\r|\\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' + - unirange(0x10000, 0x10ffff), Literal), - (r'(\s+)', Text), - ], - 'occurrenceindicator': [ - include('whitespace'), - (r'\(:', Comment, 'comment'), - (r'\*|\?|\+', Operator, 'operator'), - (r':=', Operator, 'root'), - default('operator'), - ], - 'option': [ - include('whitespace'), - (qname, Name.Variable, '#pop'), - ], - 'qname_braren': [ - include('whitespace'), - (r'(\{)', pushstate_operator_root_callback), - (r'(\()', Punctuation, 'root'), - ], - 'element_qname': [ - (qname, Name.Variable, 'root'), - ], - 'attribute_qname': [ - (qname, Name.Variable, 'root'), - ], - 'root': [ - include('whitespace'), - (r'\(:', Comment, 'comment'), - - # handle operator state - # order on numbers matters - handle most complex first - (r'\d+(\.\d*)?[eE][\+\-]?\d+', Number.Float, 'operator'), - (r'(\.\d+)[eE][\+\-]?\d+', Number.Float, 'operator'), - (r'(\.\d+|\d+\.\d*)', Number.Float, 'operator'), - (r'(\d+)', Number.Integer, 'operator'), - (r'(\.\.|\.|\))', Punctuation, 'operator'), - (r'(declare)(\s+)(construction)', - bygroups(Keyword, Text, Keyword), 'operator'), - (r'(declare)(\s+)(default)(\s+)(order)', - bygroups(Keyword, Text, Keyword, Text, Keyword), 'operator'), - (ncname + ':\*', Name, 'operator'), - ('\*:'+ncname, Name.Tag, 'operator'), - ('\*', Name.Tag, 'operator'), - (stringdouble, String.Double, 'operator'), - (stringsingle, String.Single, 'operator'), - - (r'(\})', popstate_callback), - - #NAMESPACE DECL - (r'(declare)(\s+)(default)(\s+)(collation)', - bygroups(Keyword, Text, Keyword, Text, Keyword)), - (r'(module|declare)(\s+)(namespace)', - bygroups(Keyword, Text, Keyword), 'namespacedecl'), - (r'(declare)(\s+)(base-uri)', - bygroups(Keyword, Text, Keyword), 'namespacedecl'), - - #NAMESPACE KEYWORD - (r'(declare)(\s+)(default)(\s+)(element|function)', - bygroups(Keyword, Text, Keyword, Text, Keyword), 'namespacekeyword'), - (r'(import)(\s+)(schema|module)', - bygroups(Keyword.Pseudo, Text, Keyword.Pseudo), 'namespacekeyword'), - (r'(declare)(\s+)(copy-namespaces)', - bygroups(Keyword, Text, Keyword), 'namespacekeyword'), - - #VARNAMEs - (r'(for|let|some|every)(\s+)(\$)', - bygroups(Keyword, Text, Name.Variable), 'varname'), - (r'\$', Name.Variable, 'varname'), - (r'(declare)(\s+)(variable)(\s+)(\$)', - bygroups(Keyword, Text, Keyword, Text, Name.Variable), 'varname'), - - #ITEMTYPE - (r'(\))(\s+)(as)', bygroups(Operator, Text, Keyword), 'itemtype'), - - (r'(element|attribute|schema-element|schema-attribute|comment|' - r'text|node|document-node|empty-sequence)(\s+)(\()', - pushstate_operator_kindtest_callback), - - (r'(processing-instruction)(\s+)(\()', - pushstate_operator_kindtestforpi_callback), - - (r'()?', Other, 'delimiters'), - (r'\s+', Other), - default(('delimiters', 'lassofile')), - ], - 'delimiters': [ - (r'\[no_square_brackets\]', Comment.Preproc, 'nosquarebrackets'), - (r'\[noprocess\]', Comment.Preproc, 'noprocess'), - (r'\[', Comment.Preproc, 'squarebrackets'), - (r'<\?(LassoScript|lasso|=)', Comment.Preproc, 'anglebrackets'), - (r'<(!--.*?-->)?', 
Other), - (r'[^[<]+', Other), - ], - 'nosquarebrackets': [ - (r'<\?(LassoScript|lasso|=)', Comment.Preproc, 'anglebrackets'), - (r'<', Other), - (r'[^<]+', Other), - ], - 'noprocess': [ - (r'\[/noprocess\]', Comment.Preproc, '#pop'), - (r'\[', Other), - (r'[^[]', Other), - ], - 'squarebrackets': [ - (r'\]', Comment.Preproc, '#pop'), - include('lasso'), - ], - 'anglebrackets': [ - (r'\?>', Comment.Preproc, '#pop'), - include('lasso'), - ], - 'lassofile': [ - (r'\]|\?>', Comment.Preproc, '#pop'), - include('lasso'), - ], - 'whitespacecomments': [ - (r'\s+', Text), - (r'//.*?\n', Comment.Single), - (r'/\*\*!.*?\*/', String.Doc), - (r'/\*.*?\*/', Comment.Multiline), - ], - 'lasso': [ - # whitespace/comments - include('whitespacecomments'), - - # literals - (r'\d*\.\d+(e[+-]?\d+)?', Number.Float), - (r'0x[\da-f]+', Number.Hex), - (r'\d+', Number.Integer), - (r'([+-]?)(infinity|NaN)\b', bygroups(Operator, Number)), - (r"'", String.Single, 'singlestring'), - (r'"', String.Double, 'doublestring'), - (r'`[^`]*`', String.Backtick), - - # names - (r'\$[a-z_][\w.]*', Name.Variable), - (r'#([a-z_][\w.]*|\d+)', Name.Variable.Instance), - (r"(\.)('[a-z_][\w.]*')", - bygroups(Name.Builtin.Pseudo, Name.Variable.Class)), - (r"(self)(\s*->\s*)('[a-z_][\w.]*')", - bygroups(Name.Builtin.Pseudo, Operator, Name.Variable.Class)), - (r'(\.\.?)([a-z_][\w.]*(=(?!=))?)', - bygroups(Name.Builtin.Pseudo, Name.Other.Member)), - (r'(->\\?\s*|&\s*)([a-z_][\w.]*(=(?!=))?)', - bygroups(Operator, Name.Other.Member)), - (r'(self|inherited)\b', Name.Builtin.Pseudo), - (r'-[a-z_][\w.]*', Name.Attribute), - (r'::\s*[a-z_][\w.]*', Name.Label), - (r'(error_(code|msg)_\w+|Error_AddError|Error_ColumnRestriction|' - r'Error_DatabaseConnectionUnavailable|Error_DatabaseTimeout|' - r'Error_DeleteError|Error_FieldRestriction|Error_FileNotFound|' - r'Error_InvalidDatabase|Error_InvalidPassword|' - r'Error_InvalidUsername|Error_ModuleNotFound|' - r'Error_NoError|Error_NoPermission|Error_OutOfMemory|' - r'Error_ReqColumnMissing|Error_ReqFieldMissing|' - r'Error_RequiredColumnMissing|Error_RequiredFieldMissing|' - r'Error_UpdateError)\b', Name.Exception), - - # definitions - (r'(define)(\s+)([a-z_][\w.]*)(\s*=>\s*)(type|trait|thread)\b', - bygroups(Keyword.Declaration, Text, Name.Class, Operator, Keyword)), - (r'(define)(\s+)([a-z_][\w.]*)(\s*->\s*)([a-z_][\w.]*=?|[-+*/%])', - bygroups(Keyword.Declaration, Text, Name.Class, Operator, - Name.Function), 'signature'), - (r'(define)(\s+)([a-z_][\w.]*)', - bygroups(Keyword.Declaration, Text, Name.Function), 'signature'), - (r'(public|protected|private|provide)(\s+)(([a-z_][\w.]*=?|[-+*/%])' - r'(?=\s*\())', bygroups(Keyword, Text, Name.Function), - 'signature'), - (r'(public|protected|private|provide)(\s+)([a-z_][\w.]*)', - bygroups(Keyword, Text, Name.Function)), - - # keywords - (r'(true|false|none|minimal|full|all|void)\b', Keyword.Constant), - (r'(local|var|variable|global|data(?=\s))\b', Keyword.Declaration), - (r'(array|date|decimal|duration|integer|map|pair|string|tag|xml|' - r'null|bytes|list|queue|set|stack|staticarray|tie)\b', Keyword.Type), - (r'([a-z_][\w.]*)(\s+)(in)\b', bygroups(Name, Text, Keyword)), - (r'(let|into)(\s+)([a-z_][\w.]*)', bygroups(Keyword, Text, Name)), - (r'require\b', Keyword, 'requiresection'), - (r'(/?)(Namespace_Using)\b', bygroups(Punctuation, Keyword.Namespace)), - (r'(/?)(Cache|Database_Names|Database_SchemaNames|' - r'Database_TableNames|Define_Tag|Define_Type|Email_Batch|' - r'Encode_Set|HTML_Comment|Handle|Handle_Error|Header|If|Inline|' - 
r'Iterate|LJAX_Target|Link|Link_CurrentAction|Link_CurrentGroup|' - r'Link_CurrentRecord|Link_Detail|Link_FirstGroup|' - r'Link_FirstRecord|Link_LastGroup|Link_LastRecord|Link_NextGroup|' - r'Link_NextRecord|Link_PrevGroup|Link_PrevRecord|Log|Loop|' - r'NoProcess|Output_None|Portal|Private|Protect|Records|Referer|' - r'Referrer|Repeating|ResultSet|Rows|Search_Args|Search_Arguments|' - r'Select|Sort_Args|Sort_Arguments|Thread_Atomic|Value_List|While|' - r'Abort|Case|Else|If_Empty|If_False|If_Null|If_True|Loop_Abort|' - r'Loop_Continue|Loop_Count|Params|Params_Up|Return|Return_Value|' - r'Run_Children|SOAP_DefineTag|SOAP_LastRequest|SOAP_LastResponse|' - r'Tag_Name|ascending|average|by|define|descending|do|equals|' - r'frozen|group|handle_failure|import|in|into|join|let|match|max|' - r'min|on|order|parent|protected|provide|public|require|returnhome|' - r'skip|split_thread|sum|take|thread|to|trait|type|where|with|' - r'yield|yieldhome)\b', - bygroups(Punctuation, Keyword)), - - # other - (r',', Punctuation, 'commamember'), - (r'(and|or|not)\b', Operator.Word), - (r'([a-z_][\w.]*)(\s*::\s*[a-z_][\w.]*)?(\s*=(?!=))', - bygroups(Name, Name.Label, Operator)), - (r'(/?)([\w.]+)', bygroups(Punctuation, Name.Other)), - (r'(=)(n?bw|n?ew|n?cn|lte?|gte?|n?eq|n?rx|ft)\b', - bygroups(Operator, Operator.Word)), - (r':=|[-+*/%=<>&|!?\\]+', Operator), - (r'[{}():;,@^]', Punctuation), - ], - 'singlestring': [ - (r"'", String.Single, '#pop'), - (r"[^'\\]+", String.Single), - include('escape'), - (r"\\", String.Single), - ], - 'doublestring': [ - (r'"', String.Double, '#pop'), - (r'[^"\\]+', String.Double), - include('escape'), - (r'\\', String.Double), - ], - 'escape': [ - (r'\\(U[\da-f]{8}|u[\da-f]{4}|x[\da-f]{1,2}|[0-7]{1,3}|:[^:]+:|' - r'[abefnrtv?\"\'\\]|$)', String.Escape), - ], - 'signature': [ - (r'=>', Operator, '#pop'), - (r'\)', Punctuation, '#pop'), - (r'[(,]', Punctuation, 'parameter'), - include('lasso'), - ], - 'parameter': [ - (r'\)', Punctuation, '#pop'), - (r'-?[a-z_][\w.]*', Name.Attribute, '#pop'), - (r'\.\.\.', Name.Builtin.Pseudo), - include('lasso'), - ], - 'requiresection': [ - (r'(([a-z_][\w.]*=?|[-+*/%])(?=\s*\())', Name, 'requiresignature'), - (r'(([a-z_][\w.]*=?|[-+*/%])(?=(\s*::\s*[\w.]+)?\s*,))', Name), - (r'[a-z_][\w.]*=?|[-+*/%]', Name, '#pop'), - (r'::\s*[a-z_][\w.]*', Name.Label), - (r',', Punctuation), - include('whitespacecomments'), - ], - 'requiresignature': [ - (r'(\)(?=(\s*::\s*[\w.]+)?\s*,))', Punctuation, '#pop'), - (r'\)', Punctuation, '#pop:2'), - (r'-?[a-z_][\w.]*', Name.Attribute), - (r'::\s*[a-z_][\w.]*', Name.Label), - (r'\.\.\.', Name.Builtin.Pseudo), - (r'[(,]', Punctuation), - include('whitespacecomments'), - ], - 'commamember': [ - (r'(([a-z_][\w.]*=?|[-+*/%])' - r'(?=\s*(\(([^()]*\([^()]*\))*[^)]*\)\s*)?(::[\w.\s]+)?=>))', - Name.Function, 'signature'), - include('whitespacecomments'), - default('#pop'), - ], - } - - def __init__(self, **options): - self.builtinshighlighting = get_bool_opt( - options, 'builtinshighlighting', True) - self.requiredelimiters = get_bool_opt( - options, 'requiredelimiters', False) - - self._builtins = set() - self._members = set() - if self.builtinshighlighting: - from pygments.lexers._lassobuiltins import BUILTINS, MEMBERS - for key, value in iteritems(BUILTINS): - self._builtins.update(value) - for key, value in iteritems(MEMBERS): - self._members.update(value) - RegexLexer.__init__(self, **options) - - def get_tokens_unprocessed(self, text): - stack = ['root'] - if self.requiredelimiters: - stack.append('delimiters') - for 
-                RegexLexer.get_tokens_unprocessed(self, text, stack):
-            if (token is Name.Other and value.lower() in self._builtins or
-                    token is Name.Other.Member and
-                    value.lower().rstrip('=') in self._members):
-                yield index, Name.Builtin, value
-                continue
-            yield index, token, value
-
-    def analyse_text(text):
-        rv = 0.0
-        if 'bin/lasso9' in text:
-            rv += 0.8
-        if re.search(r'<\?(=|lasso)|\A\[', text, re.I):
-            rv += 0.4
-        if re.search(r'local\(', text, re.I):
-            rv += 0.4
-        if '?>' in text:
-            rv += 0.1
-        return rv
-
-
-class QmlLexer(RegexLexer):
-    """
-    For QML files. See http://doc.qt.digia.com/4.7/qdeclarativeintroduction.html.
-
-    .. versionadded:: 1.6
-    """
-
-    # QML is based on javascript, so much of this is taken from the
-    # JavascriptLexer above.
-
-    name = 'QML'
-    aliases = ['qml']
-    filenames = ['*.qml',]
-    mimetypes = [ 'application/x-qml',]
-
-
-    # pasted from JavascriptLexer, with some additions
-    flags = re.DOTALL
-    tokens = {
-        'commentsandwhitespace': [
-            (r'\s+', Text),
-            (r')', popstate_xmlcomment_callback),
+            (r'[^-]{1,2}', Literal),
+            (u'\\t|\\r|\\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' +
+             unirange(0x10000, 0x10ffff), Literal),
+        ],
+        'processing_instruction': [
+            (r'\s+', Text, 'processing_instruction_content'),
+            (r'\?>', String.Doc, '#pop'),
+            (pitarget, Name),
+        ],
+        'processing_instruction_content': [
+            (r'\?>', String.Doc, '#pop'),
+            (u'\\t|\\r|\\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' +
+             unirange(0x10000, 0x10ffff), Literal),
+        ],
+        'cdata_section': [
+            (r']]>', String.Doc, '#pop'),
+            (u'\\t|\\r|\\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' +
+             unirange(0x10000, 0x10ffff), Literal),
+        ],
+        'start_tag': [
+            include('whitespace'),
+            (r'(/>)', popstate_tag_callback),
+            (r'>', Name.Tag, 'element_content'),
+            (r'"', Punctuation, 'quot_attribute_content'),
+            (r"'", Punctuation, 'apos_attribute_content'),
+            (r'=', Operator),
+            (qname, Name.Tag),
+        ],
+        'quot_attribute_content': [
+            (r'"', Punctuation, 'start_tag'),
+            (r'(\{)', pushstate_root_callback),
+            (r'""', Name.Attribute),
+            (quotattrcontentchar, Name.Attribute),
+            (entityref, Name.Attribute),
+            (charref, Name.Attribute),
+            (r'\{\{|\}\}', Name.Attribute),
+        ],
+        'apos_attribute_content': [
+            (r"'", Punctuation, 'start_tag'),
+            (r'\{', Punctuation, 'root'),
+            (r"''", Name.Attribute),
+            (aposattrcontentchar, Name.Attribute),
+            (entityref, Name.Attribute),
+            (charref, Name.Attribute),
+            (r'\{\{|\}\}', Name.Attribute),
+        ],
+        'element_content': [
+            (r')', popstate_tag_callback),
+            (qname, Name.Tag),
+        ],
+        'xmlspace_decl': [
+            (r'\(:', Comment, 'comment'),
+            (r'preserve|strip', Keyword, '#pop'),
+        ],
+        'declareordering': [
+            (r'\(:', Comment, 'comment'),
+            include('whitespace'),
+            (r'ordered|unordered', Keyword, '#pop'),
+        ],
+        'xqueryversion': [
+            include('whitespace'),
+            (r'\(:', Comment, 'comment'),
+            (stringdouble, String.Double),
+            (stringsingle, String.Single),
+            (r'encoding', Keyword),
+            (r';', Punctuation, '#pop'),
+        ],
+        'pragma': [
+            (qname, Name.Variable, 'pragmacontents'),
+        ],
+        'pragmacontents': [
+            (r'#\)', Punctuation, 'operator'),
+            (u'\\t|\\r|\\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' +
+             unirange(0x10000, 0x10ffff), Literal),
+            (r'(\s+)', Text),
+        ],
+        'occurrenceindicator': [
+            include('whitespace'),
+            (r'\(:', Comment, 'comment'),
+            (r'\*|\?|\+', Operator, 'operator'),
+            (r':=', Operator, 'root'),
+            default('operator'),
+        ],
+        'option': [
+            include('whitespace'),
+            (qname, Name.Variable, '#pop'),
+        ],
+        'qname_braren': [
+            include('whitespace'),
+            (r'(\{)', pushstate_operator_root_callback),
+            (r'(\()', Punctuation, 'root'),
+        ],
+        'element_qname': [
+            (qname, Name.Variable, 'root'),
+        ],
+        'attribute_qname': [
+            (qname, Name.Variable, 'root'),
+        ],
+        'root': [
+            include('whitespace'),
+            (r'\(:', Comment, 'comment'),
+
+            # handle operator state
+            # order on numbers matters - handle most complex first
+            (r'\d+(\.\d*)?[eE][\+\-]?\d+', Number.Float, 'operator'),
+            (r'(\.\d+)[eE][\+\-]?\d+', Number.Float, 'operator'),
+            (r'(\.\d+|\d+\.\d*)', Number.Float, 'operator'),
+            (r'(\d+)', Number.Integer, 'operator'),
+            (r'(\.\.|\.|\))', Punctuation, 'operator'),
+            (r'(declare)(\s+)(construction)',
+             bygroups(Keyword, Text, Keyword), 'operator'),
+            (r'(declare)(\s+)(default)(\s+)(order)',
+             bygroups(Keyword, Text, Keyword, Text, Keyword), 'operator'),
+            (ncname + ':\*', Name, 'operator'),
+            ('\*:'+ncname, Name.Tag, 'operator'),
+            ('\*', Name.Tag, 'operator'),
+            (stringdouble, String.Double, 'operator'),
+            (stringsingle, String.Single, 'operator'),
+
+            (r'(\})', popstate_callback),
+
+            # NAMESPACE DECL
+            (r'(declare)(\s+)(default)(\s+)(collation)',
+             bygroups(Keyword, Text, Keyword, Text, Keyword)),
+            (r'(module|declare)(\s+)(namespace)',
+             bygroups(Keyword, Text, Keyword), 'namespacedecl'),
+            (r'(declare)(\s+)(base-uri)',
+             bygroups(Keyword, Text, Keyword), 'namespacedecl'),
+
+            # NAMESPACE KEYWORD
+            (r'(declare)(\s+)(default)(\s+)(element|function)',
+             bygroups(Keyword, Text, Keyword, Text, Keyword), 'namespacekeyword'),
+            (r'(import)(\s+)(schema|module)',
+             bygroups(Keyword.Pseudo, Text, Keyword.Pseudo), 'namespacekeyword'),
+            (r'(declare)(\s+)(copy-namespaces)',
+             bygroups(Keyword, Text, Keyword), 'namespacekeyword'),
+
+            # VARNAMEs
+            (r'(for|let|some|every)(\s+)(\$)',
+             bygroups(Keyword, Text, Name.Variable), 'varname'),
+            (r'\$', Name.Variable, 'varname'),
+            (r'(declare)(\s+)(variable)(\s+)(\$)',
+             bygroups(Keyword, Text, Keyword, Text, Name.Variable), 'varname'),
+
+            # ITEMTYPE
+            (r'(\))(\s+)(as)', bygroups(Operator, Text, Keyword), 'itemtype'),
+
+            (r'(element|attribute|schema-element|schema-attribute|comment|'
+             r'text|node|document-node|empty-sequence)(\s+)(\()',
+             pushstate_operator_kindtest_callback),
+
+            (r'(processing-instruction)(\s+)(\()',
+             pushstate_operator_kindtestforpi_callback),
+
+            (r'(