author     Adrian Thurston <thurston@complang.org>    2008-11-01 17:35:50 +0000
committer  Adrian Thurston <thurston@complang.org>    2008-11-01 17:35:50 +0000
commit     10ff0e06801af15050848c701f606ac5de3ebc06 (patch)
tree       21e08beb963d6208ef236afe8c9ca16469895547 /test
download   colm-10ff0e06801af15050848c701f606ac5de3ebc06.tar.gz
Moved from private repository.
Diffstat (limited to 'test')
-rw-r--r--  test/Makefile                46
-rw-r--r--  test/backtrack1.lm           24
-rw-r--r--  test/backtrack2.lm           24
-rw-r--r--  test/backtrack3.lm           27
-rw-r--r--  test/btscan.in                2
-rw-r--r--  test/btscan.lm               34
-rw-r--r--  test/constructex.in           3
-rw-r--r--  test/constructex.lm          37
-rw-r--r--  test/counting1.in             1
-rw-r--r--  test/counting1.lm            91
-rw-r--r--  test/counting2.lm            82
-rw-r--r--  test/counting3.lm            92
-rw-r--r--  test/counting4.lm            89
-rw-r--r--  test/cxx/Makefile            34
-rw-r--r--  test/cxx/cxx.lm            2163
-rw-r--r--  test/cxx/input01.cpp         17
-rw-r--r--  test/cxx/input02.cpp         16
-rw-r--r--  test/cxx/input03.cpp         19
-rw-r--r--  test/cxx/input04.cpp         17
-rw-r--r--  test/cxx/input05.cpp          8
-rw-r--r--  test/cxx/input06.cpp          7
-rw-r--r--  test/cxx/input07.cpp         18
-rw-r--r--  test/cxx/input08.cpp         13
-rw-r--r--  test/cxx/input09.cpp          7
-rw-r--r--  test/cxx/input10.cpp         11
-rw-r--r--  test/cxx/input11.cpp          2
-rw-r--r--  test/cxx/input12.cpp          8
-rw-r--r--  test/cxx/input13.cpp         14
-rwxr-xr-x  test/cxx/preproc              4
-rw-r--r--  test/diff/Makefile           34
-rw-r--r--  test/diff/diff.lm            84
-rw-r--r--  test/diff/input1.diff        86
-rw-r--r--  test/dns/Makefile            20
-rw-r--r--  test/dns/dns.lm             488
-rw-r--r--  test/dns/dumpdns             11
-rw-r--r--  test/dns/extract.c           48
-rw-r--r--  test/heredoc.in               3
-rw-r--r--  test/heredoc.lm              45
-rw-r--r--  test/html/Makefile           34
-rw-r--r--  test/html/html-lextag.lm    324
-rw-r--r--  test/html/html.lm           307
-rw-r--r--  test/html/input01.html        8
-rw-r--r--  test/http/Makefile           34
-rw-r--r--  test/http/http.lm            68
-rw-r--r--  test/http/input1              2
-rw-r--r--  test/http/input2             13
-rw-r--r--  test/http/input3              8
-rw-r--r--  test/http/xinetd.conf        10
-rw-r--r--  test/island.in               19
-rw-r--r--  test/island.lm               57
-rw-r--r--  test/liftattrs.in             3
-rw-r--r--  test/liftattrs.lm            74
-rw-r--r--  test/mailbox.in              29
-rw-r--r--  test/mailbox.lm              44
-rw-r--r--  test/matchex.in               3
-rw-r--r--  test/matchex.lm              34
-rw-r--r--  test/maxlen.lm               44
-rw-r--r--  test/nestedcomm.in            1
-rw-r--r--  test/nestedcomm.lm           41
-rw-r--r--  test/python/Makefile         18
-rw-r--r--  test/python/input1.py        18
-rw-r--r--  test/python/input2.py        20
-rw-r--r--  test/python/input3.py         1
-rw-r--r--  test/python/input4.py        10
-rw-r--r--  test/python/python.lm       726
-rw-r--r--  test/ragelambig.in            1
-rw-r--r--  test/ragelambig1.lm          65
-rw-r--r--  test/ragelambig2.lm          65
-rw-r--r--  test/ragelambig3.lm          64
-rw-r--r--  test/ragelambig4.lm          69
-rw-r--r--  test/rediv.in                 1
-rw-r--r--  test/rediv.lm                92
-rw-r--r--  test/ruby/Makefile           34
-rw-r--r--  test/ruby/ruby.lm           627
-rw-r--r--  test/rubyhere.in              8
-rw-r--r--  test/rubyhere.lm             89
-rw-r--r--  test/string.in                2
-rw-r--r--  test/string.lm               54
-rw-r--r--  test/superid.in               1
-rw-r--r--  test/superid.lm              59
-rw-r--r--  test/tags.in                  1
-rw-r--r--  test/tags.lm                 82
-rw-r--r--  test/til.in                  14
-rw-r--r--  test/til.lm                 124
-rw-r--r--  test/travs1.in                1
-rw-r--r--  test/travs1.lm              144
-rw-r--r--  test/travs2.in                1
-rw-r--r--  test/travs2.lm               93
-rw-r--r--  test/xml/Makefile            34
-rw-r--r--  test/xml/xml.in            3962
-rw-r--r--  test/xml/xml.lm             167
91 files changed, 11533 insertions, 0 deletions
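The test programs added by this patch follow the same basic shape: a lex region declares tokens and ignored whitespace, def productions build up the grammar, the program parses stdin, and the resulting tree is matched or printed. As a minimal orientation sketch only (this is not one of the files in the commit; the names word, ws and item are illustrative), a test in that style looks like:

    lex start
    {
        token word /[a-z]+/
        ignore ws /[ \t\n]+/
    }

    def item
        [word]

    def start
        [item*]

    start S = parse start( stdin )
    print_xml( S )

The top-level Makefile that follows compiles each such .lm file into a corresponding .bin by running ../colm/colm on it, and recurses into the grammar subdirectories (xml, python, http, dns, diff, html, cxx, ruby).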
diff --git a/test/Makefile b/test/Makefile
new file mode 100644
index 00000000..8e420541
--- /dev/null
+++ b/test/Makefile
@@ -0,0 +1,46 @@
+#
+# Copyright 2002-2006 Adrian Thurston <thurston@cs.queensu.ca>
+#
+
+# This file is part of Ragel.
+#
+# Ragel is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# Ragel is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ragel; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+
+SRC = $(wildcard *.lm)
+BIN = $(SRC:%.lm=%.bin)
+SUBDIRS = xml python http dns diff html cxx ruby
+
+all: $(BIN) $(SUBDIRS)
+
+.PHONY: $(SUBDIRS)
+
+$(SUBDIRS):
+ @cd $@ && $(MAKE)
+
+$(BIN): %.bin: %.lm
+ ../colm/colm $<
+
+# clean targets
+
+CLEAN_SUBDIRS = $(SUBDIRS:%=%-clean)
+
+.PHONY: $(CLEAN_SUBDIRS)
+
+$(CLEAN_SUBDIRS):
+ @cd $(@:%-clean=%) && $(MAKE) clean
+
+clean: $(CLEAN_SUBDIRS)
+ rm -f *.cpp *.bin
diff --git a/test/backtrack1.lm b/test/backtrack1.lm
new file mode 100644
index 00000000..0f3d8e88
--- /dev/null
+++ b/test/backtrack1.lm
@@ -0,0 +1,24 @@
+# Token names.
+
+lex start
+{
+ literal '+', '*'
+ token number /[0-9]+/
+ ignore ws / [ \t\n]+ /
+}
+
+def F
+ [number '+']
+| [number]
+| [F '*' number]
+
+def E
+ [E '+' F]
+| [F]
+
+def start
+ [E]
+
+start S = parse start( stdin )
+start R = match S ~ 9 + 9
+print_xml( R )
diff --git a/test/backtrack2.lm b/test/backtrack2.lm
new file mode 100644
index 00000000..fc63c7a6
--- /dev/null
+++ b/test/backtrack2.lm
@@ -0,0 +1,24 @@
+
+# Token names.
+lex start
+{
+ token id /[a-z]+/
+ ignore ws /[ \t\n]+/
+}
+
+token bang1 /'!'/
+token bang2 /'!'/
+
+def one [bang1 id id id]
+
+def two [bang2 id id id id]
+
+def prods
+ [one]
+| [two]
+
+def start
+ [prods]
+
+start S = parse start( stdin )
+match S "!aa bb cc dd"
diff --git a/test/backtrack3.lm b/test/backtrack3.lm
new file mode 100644
index 00000000..1f5e6e81
--- /dev/null
+++ b/test/backtrack3.lm
@@ -0,0 +1,27 @@
+
+# Token names.
+lex start
+{
+ token number /[0-9]+/
+ token id /[a-z]+/
+ token string /'"' [^"]* '"'/
+ ignore ws / [ \t\n]+ /
+}
+
+def prefix [id]
+
+def choice1
+ [number number]
+| [number]
+
+def choice2
+ [string id]
+| [number number]
+| [id number]
+| [number]
+
+def start
+ [prefix choice1 choice2 string id id]
+ {
+ match lhs "id 77 88 \"hello\" dude dude"
+ }
diff --git a/test/btscan.in b/test/btscan.in
new file mode 100644
index 00000000..88cec9d5
--- /dev/null
+++ b/test/btscan.in
@@ -0,0 +1,2 @@
+!abb !abba !aab
+
diff --git a/test/btscan.lm b/test/btscan.lm
new file mode 100644
index 00000000..ac7914c2
--- /dev/null
+++ b/test/btscan.lm
@@ -0,0 +1,34 @@
+namespace r1
+{
+ lex r1
+ {
+ literal '!', 'a', 'b'
+ ignore /[ \n\t]+/
+ }
+
+ def line [ '!' 'a' 'b' 'b' 'a']
+}
+
+namespace r2
+{
+ lex r2
+ {
+ literal '!'
+ token id /[a-zA-Z_]+/
+ ignore /[ \n\t]+/
+ }
+
+ def line [ '!' id ]
+}
+
+def item
+ [r1::line]
+| [r2::line]
+
+def btscan
+ [item*]
+
+btscan P = parse btscan( stdin )
+
+match P ~!abb !abba !aab
+print_xml(P)
diff --git a/test/constructex.in b/test/constructex.in
new file mode 100644
index 00000000..f458f2ad
--- /dev/null
+++ b/test/constructex.in
@@ -0,0 +1,3 @@
+<person name=adrian hometown=kingston>
+ <t1 foo=bar2 e=f></t2>
+</person> \ No newline at end of file
diff --git a/test/constructex.lm b/test/constructex.lm
new file mode 100644
index 00000000..01d71f37
--- /dev/null
+++ b/test/constructex.lm
@@ -0,0 +1,37 @@
+lex start
+{
+ token id /[a-zA-Z_][a-zA-Z0-9_]*/
+ literal '=', '<', '>', '/'
+ ignore /[ \t\n\r\v]+/
+}
+
+def attr
+ [id '=' id]
+
+def open_tag
+ ['<' id attr* '>']
+
+def close_tag
+ ['<' '/' id '>']
+
+def tag
+ [open_tag item* close_tag]
+
+def item
+ [tag]
+| [id]
+
+tag PersonTag = parse tag( stdin )
+
+match PersonTag
+ ["<person name=" Val:id attr*">" item* "</person>"]
+
+tag NameTag1 = construct tag
+ ["<name type=person>" Val "</name>"]
+
+tag NameTag2 = construct tag
+ "<name type=person>[Val]</name>"
+
+print( NameTag1, '\n' )
+print( NameTag2, '\n' )
+
diff --git a/test/counting1.in b/test/counting1.in
new file mode 100644
index 00000000..45eeecde
--- /dev/null
+++ b/test/counting1.in
@@ -0,0 +1 @@
+3 1 b c 1 1 0 3 a b c
diff --git a/test/counting1.lm b/test/counting1.lm
new file mode 100644
index 00000000..bd0154ec
--- /dev/null
+++ b/test/counting1.lm
@@ -0,0 +1,91 @@
+
+#
+# Regular Definitions
+#
+rl rl_ws /[ \t\n\r\v]+/
+rl rl_id /[a-zA-Z_][a-zA-Z0-9_]*/
+rl rl_num /[0-9]+/
+
+#
+# Tokens
+#
+
+lex start
+{
+ # Ignore whitespace.
+ ignore /rl_ws/
+
+ # Tokens.
+ token id /rl_id/
+ token number /rl_num/
+}
+
+#
+# Global Data
+#
+
+global int target
+
+#
+# Productions
+#
+
+
+def get_target
+ [number]
+ {
+ match lhs [Number:number]
+ target = Number.data.atoi()
+ }
+
+# Arbitrary item.
+def item
+ [number]
+| [id]
+
+# Type definition for the count_items nonterminal.
+def count_items
+ int count
+
+ # List production one. The condition stops the
+ # greedy list when it has gone too far.
+ [count_items item]
+ {
+ # Pass up the data
+ lhs.count = r1.count + 1
+ if lhs.count > target {
+ reject
+ }
+ }
+
+ # List production two, the base.
+| []
+ {
+ lhs.count = 0
+ }
+
+# Wrapper which prevents short lists from getting through if the parser
+# encounters an error and needs to backtrack over a counted list.
+def counted_list
+ [get_target count_items]
+ {
+ if r2.count < target {
+ reject
+ }
+ }
+
+
+def start
+ [counted_list*]
+ {
+ for List:counted_list in lhs {
+ match List [Count:number Items:count_items]
+ print( 'num items: ', Count.data.atoi(), '\n' )
+
+ int i = 1
+ for Item:item in Items {
+ print( ' item ', i, ': ', Item, '\n' )
+ i = i + 1
+ }
+ }
+ }
diff --git a/test/counting2.lm b/test/counting2.lm
new file mode 100644
index 00000000..1044e5cb
--- /dev/null
+++ b/test/counting2.lm
@@ -0,0 +1,82 @@
+
+#
+# Regular Definitions
+#
+
+rl rl_ws /[ \t\n\r\v]+/
+rl rl_id /[a-zA-Z_][a-zA-Z0-9_]*/
+rl rl_num /[0-9]+/
+
+#
+# Tokens
+#
+
+lex start
+{
+ # Ignore whitespace.
+ ignore /rl_ws/
+
+ # Tokens.
+ token id /rl_id/
+ token number /rl_num/
+}
+
+#
+# Productions
+#
+
+# Arbitrary item.
+def item
+ [id]
+| [number]
+
+# List production one. The condition stops the
+# greedy list when it has gone too far.
+def count_items
+ int target
+ int count
+
+ [count_items item]
+ {
+ # Pass up the data
+ lhs.target = r1.target
+ lhs.count = r1.count + 1
+
+ if lhs.count > lhs.target {
+ reject
+ }
+ }
+
+ # List production two, the base.
+| [number]
+ {
+ match lhs [Number:number]
+ lhs.target = Number.data.atoi()
+ lhs.count = 0
+ }
+
+
+# Wrapper which prevents short lists from getting through if the parser
+# encounters an error and needs to backtrack over a counted list.
+def counted_list
+ [count_items]
+ {
+ if r1.count < r1.target {
+ reject
+ }
+ }
+
+def start
+ [counted_list*]
+ {
+ for List:counted_list in lhs {
+ match List [CountItems:count_items]
+ print( 'num items: ', CountItems.target, '\n' )
+
+ int i = 1
+ for Item:item in CountItems {
+ print( ' item ', i, ': ', Item, '\n' )
+ i = i + 1
+ }
+ }
+ }
diff --git a/test/counting3.lm b/test/counting3.lm
new file mode 100644
index 00000000..d925b732
--- /dev/null
+++ b/test/counting3.lm
@@ -0,0 +1,92 @@
+
+#
+# Regular Definitions
+#
+rl rl_ws /[ \t\n\r\v]+/
+rl rl_id /[a-zA-Z_][a-zA-Z0-9_]*/
+rl rl_num /[0-9]+/
+
+#
+# Tokens
+#
+
+lex start
+{
+ # Ignore whitespace.
+ ignore /rl_ws/
+
+ literal ';'
+
+ # Tokens.
+ token id /rl_id/
+ token number /rl_num/
+}
+
+#
+# Global Data
+#
+
+global int target
+global int count
+
+#
+# Productions
+#
+
+
+def get_target
+ [number]
+ {
+ count = 0
+ target = r1.data.atoi()
+ print( 'target: ', target, '\n' )
+ }
+
+# Arbitrary item.
+def item
+ [number]
+| [id]
+
+def count_items
+ [one_item count_items]
+| []
+
+def one_item
+ [item]
+ {
+ count = count + 1
+ if count > target {
+ reject
+ }
+ print( 'ITEM\n' )
+ }
+
+
+# Wrapper which prevents short lists from getting through if the parser
+# encounters an error and needs to backtrack over a counted list.
+def counted_list
+ [get_target count_items]
+ {
+ print( 'trying: ', count, ' for: ', target, '\n' )
+ if count < target {
+ reject
+ }
+ }
+
+
+def start
+ [counted_list*]
+ {
+
+ for List:counted_list in lhs {
+ match List [Count:number Items:count_items]
+ print( 'num items: ', Count.data.atoi(), '\n' )
+
+ int i = 1
+ for Item:item in Items {
+ print( ' item ', i, ': ', Item, '\n' )
+ i = i + 1
+ }
+ }
+ print( '*** SUCCESS ***\n' )
+ }
diff --git a/test/counting4.lm b/test/counting4.lm
new file mode 100644
index 00000000..b1a75130
--- /dev/null
+++ b/test/counting4.lm
@@ -0,0 +1,89 @@
+
+#
+# Regular Definitions
+#
+rl rl_ws /[ \t\n\r\v]+/
+rl rl_id /[a-zA-Z_][a-zA-Z0-9_]*/
+rl rl_num /[0-9]+/
+
+#
+# Tokens
+#
+
+lex start
+{
+ # Ignore whitespace.
+ ignore /rl_ws/
+
+ literal ';'
+
+ # Tokens.
+ token id /rl_id/
+ token number /rl_num/
+}
+
+#
+# Global Data
+#
+
+global int target
+global int count
+
+#
+# Productions
+#
+
+
+def get_target
+ [number]
+ {
+ count = 0
+ target = r1.data.atoi()
+ print( 'target: ', target, '\n' )
+ }
+
+# Arbitrary item.
+def item
+ [number]
+| [id]
+
+def count_items
+ [count_inc item count_items]
+| [count_end]
+
+def count_inc
+ []
+ {
+ if count < target
+ count = count + 1
+ else
+ reject
+ }
+
+def count_end
+ []
+ {
+ if count < target
+ reject
+ }
+
+def counted_list
+ [get_target count_items]
+
+def start
+ [counted_list*]
+ {
+ for List:counted_list in lhs {
+ match List [Count:number Items:count_items]
+ print( 'num items: ', Count.data.atoi(), '\n' )
+
+ int i = 1
+ for Item:item in Items {
+ print( ' item ', i, ': ', Item, '\n' )
+ i = i + 1
+ }
+ }
+ print( '*** SUCCESS ***\n' )
+ }
+
+parse start(stdin)
diff --git a/test/cxx/Makefile b/test/cxx/Makefile
new file mode 100644
index 00000000..a775f8c7
--- /dev/null
+++ b/test/cxx/Makefile
@@ -0,0 +1,34 @@
+#
+# Copyright 2002-2006 Adrian Thurston <thurston@cs.queensu.ca>
+#
+
+# This file is part of Ragel.
+#
+# Ragel is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# Ragel is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ragel; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+
+SRC = $(wildcard *.lm)
+BIN = $(SRC:%.lm=%.bin)
+COLM = ../../colm/colm
+
+all: $(BIN)
+
+$(BIN): $(COLM)
+
+$(BIN): %.bin: %.lm
+ $(COLM) $<
+
+clean:
+ rm -f cxx.cpp *.bin
diff --git a/test/cxx/cxx.lm b/test/cxx/cxx.lm
new file mode 100644
index 00000000..560bfde2
--- /dev/null
+++ b/test/cxx/cxx.lm
@@ -0,0 +1,2163 @@
+#
+# Data types for global data.
+#
+
+# Map definition
+map object_map [str object_list]
+
+# Language objects.
+def lang_object
+ int typeId
+ str name
+
+ # If the object is a typedef, this points to the real object.
+ ptr lang_object typedefOf
+
+ object_map objectMap
+ object_list inherited
+ ptr lang_object lookupParent
+ ptr lang_object specializationOf
+ []
+
+# This structure is used to keep track of information necessary to make a
+# declaration. While parsing a declaration it records the declaration's
+# attributes.
+def declaration_data
+ int isTypedef
+ int isFriend
+ int isTemplate
+
+ ptr lang_object typeObj
+ []
+
+def declarator_data
+ ptr lang_object qualObj
+ ptr lang_object pdcScope
+ ptr lang_object lookupObj
+ []
+
+list declaration_data_list [declaration_data]
+list declarator_data_list [declarator_data]
+
+# Constants for language object types.
+global int NamespaceType = typeid namespace_id
+global int ClassType = typeid class_id
+global int TemplateClassType = typeid templ_class_id
+global int EnumType = typeid enum_id
+global int IdType = typeid identifier
+global int TypedefType = typeid typedef_id
+global int TemplateIdType = typeid template_id
+
+# Object stack definition. Uses references to objects.
+list object_list [ptr lang_object]
+
+# Stack of integers.
+list int_stack [int]
+
+#
+# Global data declarations
+#
+
+# Object stacks.
+global object_list curNamespace = construct object_list []
+global object_list declNs = construct object_list []
+global object_list lookupNs = construct object_list []
+global object_list qualNs = construct object_list []
+global object_list templateParamNs = construct object_list []
+
+# Declaration, declarator data.
+global declaration_data_list declarationData = construct declaration_data_list []
+global declarator_data_list declaratorData = construct declarator_data_list []
+
+# Template declarations
+global int_stack templDecl = construct int_stack []
+
+# Root namespace object
+global ptr lang_object rootNamespace = createLangObject( NamespaceType,
+ '<root_namespace>', nil )
+
+# Initialize the namespace and declaration stacks with the root namespace
+curNamespace.push( rootNamespace )
+declNs.push( rootNamespace )
+lookupNs.push( rootNamespace )
+
+# Start with no qualification (note variables are initialized to zero)
+qualNs.push( nil )
+
+templDecl.push( 0 )
+declarationData.push( construct declaration_data(
+ isTypedef: 0, isFriend: 0, isTemplate: 0 ) [] )
+
+#
+# Identifier lookup.
+#
+
+# Lookup the token in the members of an object.
+ptr lang_object lookupInObject( ptr lang_object obj, str name )
+{
+ # LOG print( ' looking in ', obj->name, '\n' )
+
+ object_list ol = obj->objectMap.find( name )
+ if ol {
+ # LOG print( ' * found an object: ', ol.head, '\n' )
+ return ol.head
+ }
+
+ return nil
+}
+
+# Lookup in an object and all the objects beneath it in the inheritance
+# tree.
+ptr lang_object lookupWithInheritance( ptr lang_object obj, str name )
+{
+ ptr lang_object found = lookupInObject( obj, name )
+ if found
+ return found
+
+ object_list localObjInherited = obj->inherited
+ for II: ptr lang_object in localObjInherited {
+ ptr lang_object inh = II
+
+ # First check if the inherited object is the one we are after.
+ if inh->name == name && inh->typeId == ClassType {
+ # LOG print( ' * found a class name\n' )
+ return inh
+ }
+
+ # Otherwise look inside the inherited object.
+ found = lookupWithInheritance( inh, name )
+ if found
+ return found
+ }
+
+ return nil
+}
+
+ptr lang_object unqualifiedLookup( str name )
+{
+ ptr lang_object found
+
+ # Start with the objects in the templateParamNs.
+ object_list localTemplateParamNs = templateParamNs
+ for TemplParaObjIter: ptr lang_object in rev_child(localTemplateParamNs) {
+ found = lookupWithInheritance( TemplParaObjIter, name )
+ if found
+ break
+ }
+
+ if !found {
+ # Iterate over the objects starting at the head of the lookup stack
+ # and going up through the lookup parents.
+ ptr lang_object lookupIn = lookupNs.top
+ while lookupIn {
+ found = lookupWithInheritance( lookupIn, name )
+ if found
+ break
+ lookupIn = lookupIn->lookupParent
+ }
+ }
+
+ return found
+}
+
+# The C++ scanner.
+lex start
+{
+ rl fract_const / digit* '.' digit+ | digit+ '.' /
+ rl exponent / [eE] [+\-]? digit+ /
+ rl float_suffix / [flFL] /
+
+ # Single and double literals.
+ token TK_SingleLit /( 'L'? "'" ( [^'\\\n] | '\\' any )* "'" )/
+ token TK_DoubleLit /( 'L'? '"' ( [^"\\\n] | '\\' any )* '"' )/
+
+ literal 'extern', 'namespace', 'friend', 'typedef', 'auto', 'register',
+ 'static', 'mutable', 'inline', 'virtual', 'explicit', 'const',
+ 'volatile', 'restrict', 'class', 'struct', 'union', 'template',
+ 'private', 'protected', 'public', 'using', 'void', 'char',
+ 'wchar_t', 'bool', 'int', 'float', 'double', 'short', 'long',
+ 'signed', 'unsigned', 'enum', 'new', 'delete', 'operator',
+ 'typename', 'export', 'throw', 'try', 'catch', 'sizeof',
+ 'dynamic_cast', 'static_cast', 'reinterpret_cast', 'const_cast',
+ 'typeid', 'this', 'true', 'false', 'switch', 'case', 'default',
+ 'if', 'else', 'while', 'do', 'for', 'break', 'continue',
+ 'return', 'goto'
+
+ literal '__typeof'
+
+ literal '{', '}', ';', ',', '=', '(', ')', ':', '&', '*', '[', ']', '~', '+', '-',
+ '/', '<', '>', '|', '^', '%', '!', '?', '.'
+
+ literal '::', '==', '!=', '&&', '||', '*=', '/=', '%=', '+=', '-=', '&=',
+ '^=', '|=', '++', '--', '->', '->*', '.*', '...', '<<=', '>>='
+
+ # Token translation targets.
+ def unknown_id [lookup_id]
+ def class_id [lookup_id]
+ def namespace_id [lookup_id]
+ def templ_class_id [lookup_id]
+ def enum_id [lookup_id]
+ def typedef_id [lookup_id]
+ def identifier [lookup_id]
+ def template_id [lookup_id]
+
+ # Identifiers
+ token lookup_id
+ ptr lang_object obj
+ ptr lang_object qualObj
+
+ /( [a-zA-Z_] [a-zA-Z0-9_]* )/
+ {
+ str name = match_text
+ ptr lang_object found = nil
+ ptr lang_object qualObj = nil
+ if qualNs.top {
+ # LOG print( 'qualified lookup of ', name, '\n' )
+
+ # Transfer the qualification to the token and reset it.
+ qualObj = qualNs.top
+ qualNs.top = nil
+
+ # Lookup using the qualification.
+ found = lookupWithInheritance( qualObj, name )
+ }
+ else {
+ # No qualification, full search.
+ # LOG print( 'unqualified lookup of ', name, '\n' )
+ found = unqualifiedLookup( name )
+ }
+
+ # If no match, return an Unknown ID
+ int id = typeid unknown_id
+ if found
+ id = found->typeId
+
+ any LookupId = make_token( typeid lookup_id,
+ pull(stdin, match_length), found, qualObj )
+ send( make_tree( id, LookupId ) )
+
+ }
+
+ # Floats.
+ token TK_Float /( fract_const exponent? float_suffix? |
+ digit+ exponent float_suffix? )/
+
+ # Integer decimal. Leading part buffered by float.
+ token TK_IntegerDecimal /( ( '0' | [1-9] [0-9]* ) [ulUL]{0,3} )/
+
+ # Integer octal. Leading part buffered by float.
+ token TK_IntegerOctal /( '0' [0-9]+ [ulUL]{0,2} )/
+
+ # Integer hex. Leading 0 buffered by float.
+ token TK_IntegerHex /( '0x' [0-9a-fA-F]+ [ulUL]{0,2} )/
+
+ # Preprocessor line.
+ ignore /'#' [^\n]* '\n'/
+
+ # Comments and whitespace.
+ ignore /( '/*' (any | '\n')* :>> '*/' )/
+ ignore /( '//' any* :> '\n' )/
+ ignore /( any - 33..126 )+/
+}
+
+#
+# Support functions
+#
+
+ptr lang_object createLangObject( int typeId, str name, ptr lang_object lookupParent )
+{
+ ptr lang_object obj = new construct lang_object(
+ typeId: typeId,
+ name: name,
+ objectMap: construct object_map [],
+ inherited: construct object_list [],
+ lookupParent: lookupParent ) []
+ return obj
+}
+
+# Building the language object tree.
+int insertObject( ptr lang_object definedIn, str name, ptr lang_object obj )
+{
+ object_list ol = definedIn->objectMap.find( name )
+ if !ol {
+ # Element not in the map already
+ ol = construct object_list []
+ }
+ ol.append( obj )
+ definedIn->objectMap.store( name, ol )
+}
+
+ptr lang_object findClass( ptr lang_object inObj, str name )
+{
+ object_list ol = inObj->objectMap.find( name )
+ if ol {
+ for ObjIter: ptr lang_object in ol {
+ ptr lang_object obj = ObjIter
+ if obj->typeId == ClassType {
+ return obj
+ }
+ }
+ }
+ return nil
+}
+
+ptr lang_object findTemplateClass( ptr lang_object inObj, str name )
+{
+ object_list ol = inObj->objectMap.find( name )
+ if ol {
+ for ObjIter: ptr lang_object in ol {
+ ptr lang_object obj = ObjIter
+ if obj->typeId == TemplateClassType
+ return obj
+ }
+ }
+ return nil
+}
+
+def root_qual_opt
+ []
+| ['::']
+
+def nested_name_specifier_opt
+ [nested_name_specifier_opt qualifying_name '::' designated_qualifying_name '::']
+| [nested_name_specifier_opt qualifying_name '::']
+| []
+
+def nested_name_specifier
+ [nested_name_specifier designated_qualifying_name '::']
+| [nested_name_specifier qualifying_name '::']
+| [qualifying_name '::']
+
+def qualifying_name
+ [class_name]
+ {
+ qualNs.top = r1.lookupId.obj
+ }
+
+| [namespace_id]
+ {
+ match r1 [Id: lookup_id]
+ qualNs.top = Id.obj
+ }
+
+| [typedef_id]
+ {
+ match r1 [Id: lookup_id]
+ qualNs.top = Id.obj->typedefOf
+ }
+
+def designated_qualifying_name
+ ['template' any_id]
+ {
+ # FIXME: nulling qualNs is not the right thing to do here.
+ qualNs.top = nil
+ }
+
+| ['template' any_id
+ templ_arg_open template_argument_list_opt templ_arg_close]
+ {
+ # FIXME: nulling qualNs is not the right thing to do here.
+ qualNs.top = nil
+ }
+
+#
+# Id Expression
+#
+
+def id_expression
+ lookup_id lookupId
+
+ [root_qual_opt nested_name_specifier_opt unknown_id]
+ {
+ lhs.lookupId = lookup_id in r3
+ }
+
+| [root_qual_opt nested_name_specifier_opt identifier]
+ {
+ lhs.lookupId = lookup_id in r3
+ }
+
+| [root_qual_opt nested_name_specifier_opt operator_function_id]
+ {
+ # Normally the token translation transfers the qualification. Since
+ # the operator_function_id does not end in a lookup we must do it ourselves.
+ ptr lang_object qualObj = qualNs.top
+ qualNs.top = nil
+
+ lhs.lookupId = construct lookup_id ["x"]
+ lhs.lookupId.data = '<operator_function_id>'
+ lhs.lookupId.qualObj = qualObj
+ }
+
+| [root_qual_opt nested_name_specifier_opt conversion_function_id]
+ {
+ # Normally the token translation transfers the qualification. Since
+ # the conversion_function_id does not end in a lookup we must do it ourselves.
+ ptr lang_object qualObj = qualNs.top
+ qualNs.top = nil
+
+ # Do we need qual reset here because operator_function_id does not do it?
+ lhs.lookupId = construct lookup_id ["x"]
+ lhs.lookupId.data = '<conversion_function_id>'
+ lhs.lookupId.qualObj = qualObj
+ }
+
+| [root_qual_opt nested_name_specifier_opt '~' class_name]
+ {
+ lhs.lookupId = r4.lookupId
+ }
+
+| [root_qual_opt nested_name_specifier_opt template_name]
+ {
+ lhs.lookupId = r3.lookupId
+ }
+
+def template_name
+ lookup_id lookupId
+
+ [template_id templ_arg_open template_argument_list_opt templ_arg_close]
+ {
+ lhs.lookupId = lookup_id in r1
+ }
+
+| [template_id]
+ {
+ lhs.lookupId = lookup_id in r1
+ }
+
+
+#
+# Class Names
+#
+
+def class_name
+ lookup_id lookupId
+
+ [class_id]
+ {
+ lhs.lookupId = lookup_id in r1
+ }
+
+| [templ_class_id]
+ {
+ lhs.lookupId = lookup_id in r1
+ }
+
+| [templ_class_id templ_arg_open template_argument_list_opt templ_arg_close]
+ {
+ # TODO: Look for a specialization.
+ lhs.lookupId = lookup_id in r1
+ }
+
+def templ_arg_open
+ ['<']
+ {
+ qualNs.push( nil )
+ }
+
+def templ_arg_close
+ ['>']
+ {
+ qualNs.pop()
+ }
+
+def declaration
+ [block_declaration] commit
+| [function_definition] commit
+| [template_declaration] commit
+| [explicit_instantiation] commit
+| [explicit_specialization] commit
+| [linkage_specification] commit
+| [namespace_definition] commit
+
+#
+# Declarations
+#
+
+def block_declaration
+ [simple_declaration]
+| [using_declaration]
+| [using_directive]
+
+def simple_declaration
+ [declaration_start simple_declaration_forms declaration_end ';']
+
+# Ordering is important for optimization. The form with the optional
+# decl_specifier_sing should go second.
+def simple_declaration_forms
+ [decl_specifier_mult_seq_opt decl_specifier_sing
+ decl_specifier_mult_seq_opt init_declarator_list_opt]
+
+| [decl_specifier_mult_seq_opt init_declarator_list_opt]
+
+def declaration_start
+ []
+ {
+ # LOG print( 'opening new declaration_data with templDecl: ', templDecl.top, '\n' )
+ declarationData.push( construct declaration_data (
+ isTypedef: 0, isFriend: 0, isTemplate: 0 ) [] )
+
+ # Transfer the template flag and reset it.
+ declarationData.top.isTemplate = templDecl.top
+ templDecl.push( 0 )
+ }
+
+def declaration_end
+ []
+ {
+ # LOG print( 'closing declaration_data\n' )
+ declarationData.pop()
+ templDecl.pop()
+ }
+
+def decl_specifier_sing
+ [type_specifier_sing]
+ {
+ # Store the object type of the declaration (if any) for use
+ # by typedefs.
+ declarationData.top.typeObj = r1.lookupId.obj
+ }
+
+def type_specifier_seq
+ lookup_id lookupId
+
+ [type_specifier_mult_seq_opt type_specifier_sing type_specifier_mult_seq_opt]
+ {
+ lhs.lookupId = r2.lookupId
+ }
+
+def type_specifier_sing
+ lookup_id lookupId
+
+ [simple_type_specifier]
+ {
+ lhs.lookupId = r1.lookupId
+ }
+
+| [class_specifier]
+ {
+ lhs.lookupId = construct lookup_id ["x"]
+ lhs.lookupId.data = '<class_specifier>'
+ }
+
+| [enum_specifier]
+ {
+ lhs.lookupId = construct lookup_id ["x"]
+ lhs.lookupId.data = '<enum_specifier>'
+ }
+
+| [elaborated_type_specifier]
+ {
+ lhs.lookupId = construct lookup_id ["x"]
+ lhs.lookupId.data = '<elaborated_type_specifier>'
+ }
+
+# Type specifier sequence without enum specifier or class specifier.
+def necs_type_specifier_seq
+ [type_specifier_mult_seq_opt necs_type_specifier_sing type_specifier_mult_seq_opt]
+
+# Type specifier singular without enum specifier or class specifier.
+def necs_type_specifier_sing
+ [simple_type_specifier]
+| [elaborated_type_specifier]
+
+def type_specifier_mult_seq_opt
+ [type_specifier_mult_seq_opt type_specifier_mult]
+| []
+
+def type_specifier_mult_seq
+ [type_specifier_mult_seq type_specifier_mult]
+| [type_specifier_mult]
+
+def simple_type_specifier
+ lookup_id lookupId
+
+ [simple_type_specifier_name]
+ {
+ lhs.lookupId = r1.lookupId
+ }
+
+| [simple_type_specifier_kw_seq]
+ {
+ lhs.lookupId = construct lookup_id ["x"]
+ lhs.lookupId.data = '<simple_type_specifier_kw_seq>'
+ }
+
+| ['typename' root_qual_opt nested_name_specifier type_name]
+ {
+ lhs.lookupId = r4.lookupId
+ }
+
+| ['typename' root_qual_opt nested_name_specifier identifier]
+ {
+ lhs.lookupId = lookup_id in r4
+ }
+
+| ['typename' root_qual_opt nested_name_specifier unknown_id]
+ {
+ lhs.lookupId = lookup_id in r4
+ }
+
+ # Extension.
+| ['__typeof' '(' identifier ')']
+ {
+ lhs.lookupId = construct lookup_id ["x"]
+ lhs.lookupId.data = '<simple_type_specifier_kw_seq>'
+ }
+
+def simple_type_specifier_name
+ lookup_id lookupId
+
+ [qual_type_name]
+ {
+ lhs.lookupId = r1.lookupId
+ }
+
+def simple_type_specifier_kw_seq
+ [simple_type_specifier_kw_seq simple_type_specifier_kw]
+| [simple_type_specifier_kw]
+
+def simple_type_specifier_kw
+ ['void']
+| ['char']
+| ['wchar_t']
+| ['bool']
+| ['int']
+| ['float']
+| ['double']
+| ['short']
+| ['long']
+| ['signed']
+| ['unsigned']
+
+def qual_type_name
+ lookup_id lookupId
+
+ [root_qual_opt nested_name_specifier_opt type_name]
+ {
+ lhs.lookupId = r3.lookupId
+ }
+
+def type_name
+ lookup_id lookupId
+
+ [class_name]
+ {
+ lhs.lookupId = r1.lookupId
+ }
+
+| [enum_id]
+ {
+ lhs.lookupId = lookup_id in r1
+ }
+
+| [typedef_id]
+ {
+ lhs.lookupId = lookup_id in r1
+ }
+
+# NOTE: the typename case is moved to simple type specifier
+# to take advantage of its conflict resolution.
+def elaborated_type_specifier
+ [class_key nested_name_specifier_opt class_head_name]
+ {
+ lookup_id Id = lookup_id in r3
+ str name = Id.data
+
+ # Get the ns the class is declared in.
+ ptr lang_object parentObj = declNs.top
+ if Id.qualObj
+ parentObj = Id.qualObj
+
+ # Look for the class in the given scope.
+ ptr lang_object declaredClass = findClass( parentObj, name )
+ if !declaredClass
+ declaredClass = findTemplateClass( parentObj, name )
+
+ if !declaredClass {
+ # LOG print( 'creating new class: ', name, '\n' )
+
+ # Class does not exist in the parent scope, create it.
+ int nsType = declaredClassType()
+
+ declaredClass = createLangObject( nsType, name, lookupNs.top )
+
+ # FIXME: handle friends. Make the class visible only if we are NOT
+ # in a friend declaration. The new class object is necessary to
+ # properly process the body of the class.
+ if declarationData.top.isFriend == 0
+ insertObject( parentObj, name, declaredClass )
+ }
+ }
+
+ # TODO: Lookup type specialization.
+| [class_key nested_name_specifier_opt templ_class_id
+ templ_arg_open template_argument_list_opt templ_arg_close]
+
+| ['enum' nested_name_specifier_opt enum_head_name]
+ {
+ # TODO: should look for existing enums of the same name.
+ lookup_id Id = lookup_id in r3
+ # LOG print( 'creating enumeration ', Id.data, '\n' )
+ ptr lang_object enum = createLangObject( EnumType, Id.data, lookupNs.top )
+ insertObject( declNs.top, Id.data, enum )
+ }
+
+def decl_specifier_mult_seq_opt
+ [decl_specifier_mult_seq_opt decl_specifier_mult]
+| []
+
+def decl_specifier_mult_seq
+ [decl_specifier_mult_seq decl_specifier_mult]
+| [decl_specifier_mult]
+
+def decl_specifier_mult
+ [type_specifier_mult]
+| [storage_class_specifier]
+| [function_specifier]
+
+| ['friend']
+ {
+ declarationData.top.isFriend = 1
+ }
+
+| ['typedef']
+ {
+ declarationData.top.isTypedef = 1
+ }
+
+def storage_class_specifier
+ ['auto']
+| ['register']
+| ['static']
+| ['extern']
+| ['mutable']
+
+def function_specifier
+ ['inline']
+| ['virtual']
+| ['explicit']
+
+def type_specifier_mult
+ [cv_qualifier]
+
+def cv_qualifier
+ ['const']
+| ['volatile']
+| ['restrict']
+
+def cv_qualifier_rep
+ [cv_qualifier_rep cv_qualifier]
+| []
+
+def namespace_definition
+ [named_namespace_definition]
+| [unnamed_namespace_definition]
+
+def named_namespace_definition
+ [original_namespace_definition]
+| [extension_namespace_definition]
+
+#
+# Enumerations
+#
+
+def enum_specifier
+ ['enum' nested_name_specifier_opt
+ enum_head_name '{' enumerator_list_opt '}']
+ {
+ # TODO: should look for existing enums of the same name.
+ lookup_id Id = lookup_id in r3
+ # LOG print( 'creating enumeration ', Id.data, '\n' )
+ ptr lang_object enum = createLangObject( EnumType, Id.data, lookupNs.top )
+ insertObject( declNs.top, Id.data, enum )
+ }
+
+| ['enum' '{' enumerator_list_opt '}']
+
+def enum_head_name
+ [class_id]
+| [templ_class_id]
+| [namespace_id]
+| [typedef_id]
+| [enum_id]
+| [identifier]
+| [template_id]
+| [unknown_id]
+
+def enumerator_list_opt
+ [enumerator_list]
+| [enumerator_list ',']
+| []
+
+def enumerator_list
+ [enumerator_list ',' enumerator_definition]
+| [enumerator_definition]
+
+def enumerator_definition
+ [enumerator_id]
+ {
+ lookup_id Id = lookup_id in r1
+ ptr lang_object enumId = createLangObject( IdType, Id.data, lookupNs.top )
+ insertObject( declNs.top, Id.data, enumId )
+ }
+
+| [enumerator_id '=' constant_expression]
+ {
+ lookup_id Id = lookup_id in r1
+ ptr lang_object enumId = createLangObject( IdType, Id.data, lookupNs.top )
+ insertObject( declNs.top, Id.data, enumId )
+ }
+
+def enumerator_id
+ [namespace_id]
+| [typedef_id]
+| [enum_id]
+| [class_id]
+| [templ_class_id]
+| [template_id]
+| [identifier]
+| [unknown_id]
+
+#
+# Declarators
+#
+
+def init_declarator_list_opt
+ [init_declarator_list]
+| []
+
+def init_declarator_list
+ [init_declarator_list ',' init_declarator]
+| [init_declarator]
+
+def init_declarator
+ [declarator initializer_opt]
+
+def initializer_opt
+ ['=' initializer_clause]
+| ['(' expression ')']
+| []
+
+def initializer_clause
+ [assignment_expression]
+| ['{' initializer_list '}']
+| ['{' initializer_list ',' '}']
+| ['{' '}']
+
+def initializer_list
+ [initializer_list ',' initializer_clause]
+| [initializer_clause]
+
+#
+# Expressions
+#
+
+def expression
+ [expression ',' assignment_expression]
+| [assignment_expression]
+
+def expression_opt
+ [expression]
+| []
+
+def constant_expression
+ [conditional_expression]
+
+def constant_expression_opt
+ [constant_expression]
+| []
+
+def assignment_expression
+ [conditional_expression]
+| [logical_or_expression assignment_op assignment_expression]
+| [throw_expression]
+
+def assignment_op
+ ['=']
+| ['*=']
+| ['/=']
+| ['%=']
+| ['+=']
+| ['-=']
+| ['>>=']
+| ['<<=']
+| ['&=']
+| ['^=']
+| ['|=']
+
+def conditional_expression
+ [logical_or_expression]
+| [logical_or_expression '?' expression ':' assignment_expression]
+
+def logical_or_expression
+ [logical_or_expression '||' logical_and_expression]
+| [logical_and_expression]
+
+def logical_and_expression
+ [logical_and_expression '&&' inclusive_or_expression]
+| [inclusive_or_expression]
+
+def inclusive_or_expression
+ [inclusive_or_expression '|' exclusive_or_expression]
+| [exclusive_or_expression]
+
+def exclusive_or_expression
+ [exclusive_or_expression '^' and_expression]
+| [and_expression]
+
+def and_expression
+ [and_expression '&' equality_expression]
+| [equality_expression]
+
+def equality_expression
+ [equality_expression '==' relational_expression]
+| [equality_expression '!=' relational_expression]
+| [relational_expression]
+
+def relational_expression
+ [relational_expression '<' shift_expression]
+| [relational_expression '>' shift_expression]
+| [relational_expression lt_eq shift_expression]
+| [relational_expression gt_eq shift_expression]
+| [shift_expression]
+
+def shift_expression
+ [shift_expression shift_left additive_expression]
+| [shift_expression shift_right additive_expression]
+| [additive_expression]
+
+def additive_expression
+ [additive_expression '+' multiplicative_expression]
+| [additive_expression '-' multiplicative_expression]
+| [multiplicative_expression]
+
+def multiplicative_expression
+ [multiplicative_expression '*' pm_expression]
+| [multiplicative_expression '/' pm_expression]
+| [multiplicative_expression '%' pm_expression]
+| [pm_expression]
+
+def pm_expression
+ [pm_expression '->*' cast_expression]
+| [pm_expression '.*' cast_expression]
+| [cast_expression]
+
+def cast_expression
+ [unary_expression]
+| ['(' type_id ')' cast_expression]
+
+def delete_expression
+ [root_qual_opt 'delete' cast_expression]
+| [root_qual_opt 'delete' '[' ']' cast_expression]
+
+def new_initializer_opt
+ [new_initializer]
+| []
+
+def new_initializer
+ ['(' expression_opt ')']
+
+def direct_new_declarator
+ ['[' expression ']']
+| [direct_new_declarator '[' constant_expression ']']
+
+def new_declarator_opt
+ [new_declarator]
+| []
+
+def new_declarator
+ [direct_new_declarator]
+| [ptr_operator_seq direct_new_declarator]
+| [ptr_operator_seq]
+
+def new_type_id
+ [necs_type_specifier_seq new_declarator_opt]
+
+def new_placement
+ ['(' expression ')']
+
+def new_expression
+ [root_qual_opt 'new' new_type_id new_initializer_opt]
+| [root_qual_opt 'new' new_placement new_type_id new_initializer_opt]
+| [root_qual_opt 'new' '(' type_id ')' new_initializer_opt]
+| [root_qual_opt 'new' new_placement '(' type_id ')' new_initializer_opt]
+
+def unary_operator
+ ['*']
+| ['&']
+| ['+']
+| ['-']
+| ['!']
+| ['~']
+
+def unary_expression
+ [postfix_expression]
+| ['++' cast_expression]
+| ['--' cast_expression]
+| [unary_operator cast_expression]
+| ['sizeof' '(' type_id ')']
+| ['sizeof' unary_expression]
+| [new_expression]
+| [delete_expression]
+
+def function_style_type_conv
+ [simple_type_specifier]
+
+
+def postfix_expression
+ [primary_expression]
+| [postfix_expression '[' expression ']']
+| [postfix_expression '(' expression_opt ')']
+| [function_style_type_conv '(' expression_opt ')']
+| [member_request_expr dot_arrow id_expression]
+| [member_request_expr dot_arrow pseudo_destructor_call]
+| [postfix_expression '++']
+| [postfix_expression '--']
+| ['dynamic_cast' templ_arg_open type_id templ_arg_close '(' expression ')']
+| ['static_cast' templ_arg_open type_id templ_arg_close '(' expression ')']
+| ['reinterpret_cast' templ_arg_open type_id templ_arg_close '(' expression ')']
+| ['const_cast' templ_arg_open type_id templ_arg_close '(' expression ')']
+| ['typeid' '(' expression ')']
+| ['typeid' '(' type_id ')']
+
+def pseudo_destructor_call
+ [root_qual_opt nested_name_specifier_opt '~' pdc_type_name]
+
+def primary_expression
+ [expr_lit]
+| ['this']
+| ['(' expression ')']
+| [id_expression]
+
+# This is a GNU extension.
+def primary_expression
+ ['(' '{' statement_rep '}' ')']
+
+def expr_lit
+ [TK_IntegerDecimal]
+| [TK_IntegerOctal]
+| [TK_IntegerHex]
+| [TK_SingleLit]
+| [TK_Float]
+| [double_lit_list]
+| ['true']
+| ['false']
+
+def double_lit_list
+ [TK_DoubleLit double_lit_list]
+| [TK_DoubleLit]
+
+def member_request_expr
+ [postfix_expression]
+# {
+# # FIXME: If no proper type is found, we must fail.
+# # LOG print( 'setting member request scope\n' )
+# # qualNs.set( $1->type != 0 ? $1->type->getObject() : 0 );
+# }
+
+def dot_arrow
+ ['->']
+| ['.']
+
+def pdc_type_name
+ [enum_id]
+| [typedef_id]
+
+#
+# Statements
+#
+
+def statement_rep
+ [statement_rep statement]
+| []
+
+def statement
+ [declaration_statement]
+| [labeled_statement]
+| [expression_statement]
+| [compound_statement]
+| [selection_statement]
+| [iteration_statement]
+| [jump_statement]
+| [try_block]
+
+def labeled_statement
+ [label_id ':' statement]
+| ['case' constant_expression ':' statement]
+| ['default' ':' statement]
+
+def label_id
+ [unknown_id]
+| [identifier]
+| [class_id]
+| [templ_class_id]
+| [namespace_id]
+| [typedef_id]
+| [enum_id]
+| [template_id]
+
+def compound_statement
+ ['{' compound_begin statement_rep compound_end '}']
+
+def compound_begin
+ []
+ {
+ ptr lang_object newCompound = createLangObject( 0, '<compound_begin>', lookupNs.top )
+ lookupNs.push( newCompound )
+ declNs.push( newCompound )
+ # LOG print( 'opening <compound>\n' )
+ }
+
+def compound_end
+ []
+ {
+ lookupNs.pop()
+ declNs.pop()
+ # LOG print( 'closing <compound>\n' )
+ }
+
+def selection_statement
+ ['if' '(' condition ')' statement elseif_clauses else_clause]
+| ['switch' '(' condition ')' statement]
+
+def elseif_clauses
+ [elseif_clauses 'else' 'if' '(' condition ')' statement]
+| []
+
+def else_clause
+ ['else' statement]
+| []
+
+def iteration_statement
+ ['while' '(' condition ')' statement]
+| ['do' statement 'while' '(' expression ')' ';']
+| ['for' '(' for_init_statement condition_opt ';' expression_opt ')' statement]
+
+def jump_statement
+ ['break' ';']
+| ['continue' ';']
+| ['return' expression_opt ';']
+| ['goto' any_id ';']
+
+def any_id
+ [unknown_id]
+| [class_id]
+| [namespace_id]
+| [templ_class_id]
+| [enum_id]
+| [typedef_id]
+| [identifier]
+| [template_id]
+
+
+def for_init_statement
+ [expression_statement]
+| [stmt_block_declaration_forms ';']
+
+def condition
+ [expression]
+| [type_specifier_seq declarator '=' assignment_expression]
+
+def condition_opt
+ [condition]
+| []
+
+def expression_statement
+ [expression ';']
+| [';']
+
+def declaration_statement
+ [stmt_block_declaration]
+
+def stmt_block_declaration
+ [declaration_start stmt_block_declaration_forms declaration_end ';']
+| [using_declaration]
+| [using_directive]
+
+def stmt_block_declaration_forms
+ [decl_specifier_mult_seq_opt decl_specifier_sing decl_specifier_mult_seq_opt
+ init_declarator_list_opt]
+| [decl_specifier_mult_seq init_declarator_list_opt]
+
+#
+# Declarators
+#
+
+def declarator
+ ptr lang_object lookupObj
+
+ [ptr_operator_seq_opt declarator_id decl_array_or_param_rep declarator_end]
+ {
+ lhs.lookupObj = r4.lookupObj
+ }
+
+| [ptr_operator_seq_opt '(' sub_declarator ')' decl_array_or_param_rep declarator_end]
+ {
+ lhs.lookupObj = r6.lookupObj
+ }
+
+def sub_declarator
+ [ptr_operator_seq declarator_id decl_array_or_param_rep]
+| [ptr_operator_seq '(' sub_declarator ')' decl_array_or_param_rep]
+| ['(' sub_declarator ')' decl_array_or_param_rep]
+| [declarator_id decl_array_or_param_rep]
+
+def decl_array_or_param_rep
+ [decl_array_or_param_rep decl_array_or_param]
+| []
+
+def decl_array_or_param
+ ['[' constant_expression_opt ']']
+| ['(' parameter_declaration_clause ')' cv_qualifier_rep exception_specification_opt]
+
+def declarator_id
+ [declarator_id_forms]
+ {
+ str name = r1.lookupId.data
+ ptr lang_object qualObj = r1.lookupId.qualObj
+
+ ptr lang_object parentObj = declNs.top
+ if qualObj {
+ parentObj = qualObj
+ }
+
+ # Decide if we are declaring a constructor/destructor.
+ bool isConstructor
+ if parentObj == r1.lookupId.obj {
+ isConstructor = true
+ # LOG print( 'making declarator ', name, ' a constructor/destructor\n' )
+ }
+
+ if parentObj->specializationOf &&
+ parentObj->specializationOf == r1.lookupId.obj
+ {
+ isConstructor = true
+ # LOG print( 'making declarator ', name, ' a constructor/destructor\n' )
+ }
+
+ ptr lang_object obj = nil
+ if name && !isConstructor && declarationData.top.isFriend == 0 {
+ if declarationData.top.isTypedef {
+ obj = createLangObject( TypedefType, name, lookupNs.top )
+ obj->typedefOf = declarationData.top.typeObj
+ insertObject( parentObj, name, obj )
+
+ # LOG print( 'making declarator ', name, ' a typedef\n' )
+ }
+ else {
+ if !qualObj {
+ if declarationData.top.isTemplate {
+ # If in a template declaration and the name is not qualified then
+ # create the template id.
+ obj = createLangObject( TemplateIdType, name, lookupNs.top )
+ #object->objType = declarationData.top.type
+ insertObject( declNs.top, name, obj )
+
+ # LOG print( 'making declarator ', name, ' a template id\n' )
+ }
+ else {
+ obj = createLangObject( IdType, name, lookupNs.top )
+ #object->objType = declarationData.top().type;
+ insertObject( declNs.top, name, obj )
+
+ # LOG print( 'making declarator ', name, ' an id\n' )
+ }
+ }
+ }
+ }
+
+ declaratorData.push( construct declarator_data (
+ qualObj: qualObj, lookupObj: lookupNs.top ) [] )
+
+ # If the declarator is qualified, push the qualification to the lookup
+ # stack. Also save it in the declarator data so it can be passed to a
+ # function body if needed.
+ if qualObj {
+ lookupNs.push( qualObj )
+ declaratorData.top.lookupObj = qualObj
+ }
+
+ # LOG print( 'reduced declarator_id: ', name, '\n' )
+ }
+
+# Undoes the setup done by declarator_id and pdc_start.
+def declarator_end
+ ptr lang_object lookupObj
+
+ []
+ {
+ # Get the lookupObject from the scope and pass it up. If we are about to
+ # parse a function body it will be needed.
+ lhs.lookupObj = declaratorData.top.lookupObj
+
+ ptr lang_object pdcScope = declaratorData.top.pdcScope
+ ptr lang_object qualObj = declaratorData.top.qualObj
+
+ declaratorData.pop()
+
+ if pdcScope {
+ # LOG print( 'closing <pdc_scope>\n' )
+ lookupNs.pop()
+ declNs.pop()
+ }
+
+ if qualObj {
+ # LOG print( 'popping lookupNs\n' )
+ lookupNs.pop()
+ }
+ }
+
+def declarator_id_forms
+ lookup_id lookupId
+
+ [id_expression]
+ {
+ lhs.lookupId = r1.lookupId
+ }
+
+| [root_qual_opt nested_name_specifier_opt type_name]
+ {
+ lhs.lookupId = r3.lookupId
+ }
+
+| [root_qual_opt nested_name_specifier_opt '~' class_id]
+ {
+ lhs.lookupId = lookup_id in r4
+ }
+
+| [root_qual_opt nested_name_specifier_opt '~' templ_class_id]
+ {
+ lhs.lookupId = lookup_id in r4
+ }
+
+def type_id
+ lookup_id lookupId
+
+ [type_specifier_seq abstract_declarator_opt]
+ {
+ lhs.lookupId = r1.lookupId
+ }
+
+def abstract_declarator_opt
+ [abstract_declarator]
+| []
+
+def abstract_declarator
+ [ptr_operator_seq abstract_noid abstract_decl_array_or_param_seq_opt declarator_end]
+| [ptr_operator_seq '(' sub_abstract_declarator ')'
+ abstract_decl_array_or_param_seq_opt declarator_end]
+| [abstract_noid abstract_decl_array_or_param_seq declarator_end]
+| ['(' sub_abstract_declarator ')' abstract_decl_array_or_param_seq_opt declarator_end]
+
+def sub_abstract_declarator
+ [ptr_operator_seq abstract_noid abstract_decl_array_or_param_seq_opt]
+
+| [ptr_operator_seq '(' sub_abstract_declarator ')'
+ abstract_decl_array_or_param_seq_opt]
+
+| ['(' sub_abstract_declarator ')' abstract_decl_array_or_param_seq_opt]
+
+def abstract_noid
+ []
+ {
+ # Make scope for declarator.
+ declaratorData.push( construct declarator_data [] )
+ }
+
+def abstract_decl_array_or_param_seq_opt
+ [abstract_decl_array_or_param_seq_opt abstract_decl_array_or_param]
+| []
+
+def abstract_decl_array_or_param_seq
+ [abstract_decl_array_or_param_seq abstract_decl_array_or_param]
+| [abstract_decl_array_or_param]
+
+def abstract_decl_array_or_param
+ ['[' constant_expression_opt ']']
+| ['(' parameter_declaration_clause ')' cv_qualifier_rep
+ exception_specification_opt]
+
+def parameter_declaration_clause
+ [pdc_start parameter_declaration_list]
+| [pdc_start parameter_declaration_list '...']
+| [pdc_start parameter_declaration_list ',' '...']
+| [pdc_start '...']
+| [pdc_start]
+
+def pdc_start
+ []
+ {
+ if !declaratorData.top.pdcScope {
+ # We are going to need a scope for the declarator.
+ ptr lang_object pdcScope = createLangObject( 0, '<pdc_scope>', lookupNs.top )
+ lookupNs.push( pdcScope )
+ declNs.push( pdcScope )
+
+ declaratorData.top.pdcScope = pdcScope
+ declaratorData.top.lookupObj = pdcScope
+ # LOG print( 'opening <pdc_scope>\n' )
+ }
+ }
+
+def parameter_declaration_list
+ [parameter_declaration_list ',' parameter_declaration]
+| [parameter_declaration]
+
+def parameter_declaration
+ [declaration_start parameter_declaration_forms declaration_end]
+
+# Ordering the productions such that decl_specifier_sing is tried first is good
+# for performance.
+def parameter_declaration_forms
+ [decl_specifier_mult_seq_opt decl_specifier_sing decl_specifier_mult_seq_opt
+ param_maybe_declarator maybe_parameter_init]
+
+| [decl_specifier_mult_seq param_maybe_declarator maybe_parameter_init]
+
+def param_maybe_declarator
+ [abstract_declarator]
+| [declarator]
+| []
+
+def maybe_parameter_init
+ ['=' constant_expression]
+| []
+
+def ptr_operator
+ ['&']
+| [root_qual_opt nested_name_specifier_opt '*' cv_qualifier_rep]
+
+def ptr_operator_seq
+ [ptr_operator_seq ptr_operator]
+| [ptr_operator]
+
+def ptr_operator_seq_opt
+ [ptr_operator_seq_opt ptr_operator]
+| []
+
+#
+# Functions
+#
+
+def function_definition
+ [function_def_declaration ctor_initializer_opt function_body function_def_end]
+
+def function_def_declaration
+ [declaration_start function_def_declaration_forms declaration_end]
+
+def function_def_declaration_forms
+ [decl_specifier_mult_seq_opt decl_specifier_sing
+ decl_specifier_mult_seq_opt function_def_declarator]
+| [decl_specifier_mult_seq function_def_declarator]
+| [function_def_declarator]
+
+def function_def_declarator
+ [declarator]
+ {
+ # The lookupObj from the declarator is the deepest lookup object found
+ # while parsing the declarator. Make it visible in the function body.
+ # This could be the args, the qualObj, or the parent to the function.
+ lookupNs.push( r1.lookupObj )
+ }
+
+def function_def_end
+ []
+ {
+ # Pop the lookup object.
+ lookupNs.pop()
+ }
+
+def function_body
+ [function_body_begin '{' statement_rep function_body_end '}']
+
+def function_body_begin
+ []
+ {
+ ptr lang_object newFunctionBody = createLangObject( 0,
+ '<function_body_begin>', lookupNs.top )
+ lookupNs.push( newFunctionBody )
+ declNs.push( newFunctionBody )
+ templDecl.push( 0 )
+ # LOG print( 'opening <function_body>\n' )
+ }
+
+def function_body_end
+ []
+ {
+ # First undoes the function body begin work. Then undoes the setup in
+ # function_def_declarator.
+ declNs.pop()
+ lookupNs.pop()
+ templDecl.pop()
+ # LOG print( 'closing <function_body>\n' )
+ }
+
+
+
+#
+# Classes
+#
+
+int declaredClassType()
+{
+ if declarationData.top.isTemplate {
+ return TemplateClassType
+ } else {
+ return ClassType
+ }
+}
+
+def class_specifier
+ [class_head base_clause_opt '{' class_member_rep class_body_end '}']
+ {
+# FIXME: reparse not implemented yet
+# # Visit class function bodies, but skip nested classes.
+# for CFB: class_function_body in lhs {
+# skipping class_specifier
+#
+# # Reparse the text of the class function body as a function body
+# function_body FB = reparse function_body( CFB )
+#
+# # Replace the class function body with the parsed function body.
+# CFB = construct class_function_body
+# [FB]
+# }
+ }
+
+def class_head
+ [class_key]
+ {
+ int nsType = declaredClassType()
+
+ # LOG print( 'creating new anonymous class\n' )
+ ptr lang_object newClass = createLangObject( nsType,
+ '<anon_class>', lookupNs.top )
+ lookupNs.push( newClass )
+ declNs.push( newClass )
+ }
+
+| [class_key nested_name_specifier_opt class_head_name]
+ {
+ lookup_id Id = lookup_id in r3
+ str name = Id.data
+
+ # Get the ns the class is declared in.
+ ptr lang_object parentObj = declNs.top
+ if Id.qualObj
+ parentObj = Id.qualObj
+
+ # Look for the class in the given scope.
+ ptr lang_object declaredClass = findClass( parentObj, name )
+ if !declaredClass
+ declaredClass = findTemplateClass( parentObj, name )
+
+ if !declaredClass {
+ # LOG print( 'creating new class: ', name, '\n' )
+
+ # Class does not exist in the parent scope, create it.
+ int nsType = declaredClassType()
+
+ declaredClass = createLangObject( nsType, name, lookupNs.top )
+
+ # FIXME: handle friends. Make the class visible only if we are NOT
+ # in a friend declaration. The new class object is necessary to
+ # properly process the body of the class.
+ if declarationData.top.isFriend == 0
+ insertObject( parentObj, name, declaredClass )
+ }
+
+ # Push the found/new class.
+ lookupNs.push( declaredClass )
+ declNs.push( declaredClass )
+ }
+
+| [class_key nested_name_specifier_opt templ_class_id
+ templ_arg_open template_argument_list_opt templ_arg_close]
+ {
+ match r3 [Id: lookup_id]
+ str id = Id.data
+ ptr lang_object classObj = Id.obj
+
+ # TODO: Try to find the specialization in the template class object.
+ # TypeList typeList;
+ # makeTypeList( typeList, $6->last );
+
+ ptr lang_object declaredClass
+ #declaredClass = classObj->findSpecExact( typeList );
+ if !declaredClass {
+ # LOG print( 'making new template specialization\n' )
+ int nsType = declaredClassType()
+ declaredClass = createLangObject( nsType, id, lookupNs.top )
+ # LOG print( 'declaredClass: ', declaredClass, '\n' )
+ declaredClass->specializationOf = classObj
+ # $$->typeListMapEl = classObj->typeListMap.insert( typeList, declaredClass );
+ }
+
+ # Push the found/new class.
+ lookupNs.push( declaredClass )
+ declNs.push( declaredClass )
+ }
+
+def class_body_end
+ []
+ {
+ # Pop the class ns.
+ lookupNs.pop()
+ declNs.pop()
+
+ # LOG print( 'closing off class\n' )
+ }
+
+def class_head_name
+ [class_id]
+| [templ_class_id]
+| [namespace_id]
+| [typedef_id]
+| [enum_id]
+| [unknown_id]
+| [identifier]
+| [template_id]
+
+def class_key
+ ['class']
+| ['struct']
+| ['union']
+
+def class_member_rep
+ [class_member_rep class_member]
+| []
+
+def class_member
+ [member_declaration]
+| [access_specifier ':']
+
+def member_declaration
+ [declaration_start member_declaration_forms declaration_end ';']
+| [class_function_definition]
+| [using_declaration]
+| [template_declaration]
+
+def class_function_definition
+ [function_def_declaration ctor_initializer_opt class_function_body function_def_end]
+
+lex cfb_conts
+{
+ token cfb_open /'{'/
+ token cfb_close /'}'/
+ token cfb_string /
+ "'" ( [^'\\\n] | '\\' any )* "'" |
+ '"' ( [^"\\\n] | '\\' any )* '"'/
+ token cfb_comment /
+ ( '/*' (any | '\n')* :>> '*/' ) |
+ ( '//' any* :> '\n' )/
+ token cfb_data /[^{}'"/]+ | '/'/
+}
+
+def cfb_item
+ [cfb_data]
+| [cfb_string]
+| [cfb_comment]
+| [cfb_open cfb_item* cfb_close]
+
+def cfb_conts
+ [cfb_item* cfb_close]
+
+
+
+def class_function_body
+# ['{' cfb_conts]
+#| [function_body]
+ [function_body]
+
+# Get better performance if the form with decl_specifier_sing comes first.
+def member_declaration_forms
+ [decl_specifier_mult_seq_opt decl_specifier_sing
+ decl_specifier_mult_seq_opt member_declarator_list_opt]
+
+def member_declaration_forms
+ [decl_specifier_mult_seq_opt member_declarator_list_opt]
+
+def member_declarator_list_opt
+ [member_declarator_list]
+| []
+
+def member_declarator_list
+ [member_declarator_list ',' member_declarator]
+| [member_declarator]
+
+def member_declarator
+ [declarator]
+| [declarator '=' constant_expression]
+| [declarator ':' constant_expression]
+| [':' constant_expression]
+
+def access_specifier
+ ['private']
+| ['protected']
+| ['public']
+
+def access_specifier_opt
+ [access_specifier]
+| []
+
+def using_declaration
+ ['using' id_expression ';']
+ {
+ ptr lang_object obj = r2.lookupId.obj
+ if obj
+ insertObject( declNs.top, obj->name, obj )
+ }
+
+| ['using' type_id ';']
+ {
+ ptr lang_object obj = r2.lookupId.obj
+ if obj
+ insertObject( declNs.top, obj->name, obj )
+ }
+
+def using_directive
+ ['using' 'namespace' root_qual_opt nested_name_specifier_opt
+ namespace_id ';']
+ {
+ # This uses a simple, incomplete guard against cycles in the graph of
+ # using namespaces. A more sophisticated and complete guard would look
+ # for longer cycles as well. Note that even gcc 3.3.5 does not bother.
+ match r5 [Id: lookup_id]
+ ptr lang_object usingObject = Id.obj
+ ptr lang_object inObject = declNs.top
+ if usingObject != inObject
+ inObject->inherited.append( usingObject )
+ }
+
+
+#
+# Derived classes
+#
+
+def base_clause_opt
+ [base_clause]
+| []
+
+def base_clause
+ [':' base_specifier_list]
+
+def base_specifier_list
+ [base_specifier_list ',' base_specifier]
+| [base_specifier]
+
+int addBaseSpecifier( ptr lang_object inObject, ptr lang_object inheritedObject )
+{
+ # Resolve typedefs.
+ if inheritedObject->typeId == TypedefType
+ inheritedObject = inheritedObject->typedefOf
+
+ inObject->inherited.append( inheritedObject )
+}
+
+def base_specifier
+ [root_qual_opt nested_name_specifier_opt type_name]
+ {
+ addBaseSpecifier( declNs.top, r3.lookupId.obj )
+ }
+
+| ['virtual' access_specifier_opt root_qual_opt nested_name_specifier_opt type_name]
+ {
+ addBaseSpecifier( declNs.top, r5.lookupId.obj )
+ }
+
+| [access_specifier virtual_opt root_qual_opt nested_name_specifier_opt type_name]
+ {
+ addBaseSpecifier( declNs.top, r5.lookupId.obj )
+ }
+
+def virtual_opt
+ ['virtual']
+| []
+
+#
+# Special member functions
+#
+
+def conversion_function_id
+ ['operator' conversion_type_id]
+
+def conversion_type_id
+ [necs_type_specifier_seq ptr_operator_seq_opt]
+
+def ctor_initializer_opt
+ [ctor_initializer]
+| []
+
+def ctor_initializer
+ [':' mem_initializer_list]
+
+def mem_initializer_list
+ [mem_initializer_list ',' mem_initializer]
+| [mem_initializer]
+
+def mem_initializer
+ [mem_initializer_id '(' expression_opt ')']
+
+def mem_initializer_id
+ [root_qual_opt nested_name_specifier_opt unknown_id]
+| [root_qual_opt nested_name_specifier_opt identifier]
+| [root_qual_opt nested_name_specifier_opt type_name]
+| [root_qual_opt nested_name_specifier_opt template_name]
+
+
+#
+# Overloading
+#
+def operator_function_id
+ ['operator' operator]
+
+def operator
+ ['+'] | ['-'] | ['*'] | ['/'] | ['='] | ['<'] | ['>'] | ['&'] | ['|'] |
+ ['^'] | ['%'] | ['~'] | ['!'] | ['(' ')'] | ['[' ']'] | ['new'] |
+ ['delete'] | ['->'] | ['++'] | ['--'] | ['*='] | ['/='] | ['%='] |
+ ['+='] | ['-='] | ['>>='] | ['<<='] | ['&='] | ['^='] | ['|='] | ['=='] |
+ ['!='] | ['&&'] | ['||'] | [lt_eq] | [gt_eq] | [shift_left] | [shift_right]
+
+def lt_eq
+ ['<' '=']
+# try {
+# if ( $2->leader != 0 ) {
+# #ifdef LOG_REDUCE
+# cerr << "rejecting less-than equals-to" << endl;
+# #endif
+# reject();
+# }
+# };
+
+def gt_eq
+ ['>' '=']
+# try {
+# if ( $2->leader != 0 ) {
+# #ifdef LOG_REDUCE
+# cerr << "rejecting greater-than equals-to" << endl;
+# #endif
+# reject();
+# }
+# };
+
+def shift_left
+ ['<' '<']
+# try {
+# if ( $2->leader != 0 ) {
+# #ifdef LOG_REDUCE
+# cerr << "rejecting shift left" << endl;
+# #endif
+# reject();
+# }
+# };
+
+def shift_right
+ ['>' '>']
+# try {
+# if ( $2->leader != 0 ) {
+# #ifdef LOG_REDUCE
+# cerr << "rejecting shift right" << endl;
+# #endif
+# reject();
+# }
+# };
+
+#
+# Templates
+#
+
+def template_declaration
+ [template_declaration_params declaration]
+ {
+ templDecl.pop()
+ templateParamNs.pop()
+ }
+
+def template_declaration_params
+ ['template' '<' tpl_start template_parameter_list '>']
+ {
+ templDecl.push( 1 )
+ }
+
+| ['export' 'template' '<' tpl_start template_parameter_list '>']
+ {
+ templDecl.push( 1 )
+ }
+
+def tpl_start
+ []
+ {
+ # Create a new scope for the template parameters.
+ ptr lang_object newTemplateParamScope =
+ createLangObject( 0, '<tpl_start>', lookupNs.top )
+ templateParamNs.push( newTemplateParamScope )
+ }
+
+def template_parameter_list
+ [template_parameter_list ',' template_parameter]
+| [template_parameter]
+
+def template_parameter
+ [type_parameter]
+| [template_parameter_declaration]
+
+def template_parameter_declaration
+ [declaration_start template_parameter_declaration_forms declaration_end]
+
+def template_parameter_declaration_forms
+ [decl_specifier_mult_seq param_maybe_declarator maybe_parameter_init]
+
+| [temp_param_decl_specifier_sing decl_specifier_mult_seq_opt
+ param_maybe_declarator maybe_parameter_init]
+
+| [decl_specifier_mult_seq temp_param_decl_specifier_sing
+ decl_specifier_mult_seq_opt param_maybe_declarator maybe_parameter_init]
+
+def temp_param_decl_specifier_sing
+ [temp_param_type_specifier_sing]
+
+# Template parameters cannot support an elaborated type specifier or a class specifier.
+def temp_param_type_specifier_sing
+ [templ_simple_type_specifier]
+| [enum_specifier]
+
+def templ_simple_type_specifier
+ [simple_type_specifier_name]
+| [simple_type_specifier_kw_seq]
+
+def type_parameter
+ ['class' type_param_id type_param_init_opt]
+ {
+ lookup_id Id = lookup_id in r2
+ if Id {
+ # The lookup ns should be a template param scope.
+ ptr lang_object newClass =
+ createLangObject( ClassType, Id.data, lookupNs.top )
+ insertObject( templateParamNs.top, Id.data, newClass )
+ }
+ }
+
+| ['typename' type_param_id type_param_init_opt]
+ {
+ lookup_id Id = lookup_id in r2
+ if Id {
+ # The lookup ns should be a template param scope.
+ ptr lang_object newClass =
+ createLangObject( ClassType, Id.data, lookupNs.top )
+ insertObject( templateParamNs.top, Id.data, newClass )
+ }
+ }
+
+| ['template' '<' tpl_start template_parameter_list '>'
+ 'class' type_param_id templ_type_param_init_opt]
+ {
+ lookup_id Id = lookup_id in r7
+ if Id {
+ ptr lang_object newClass =
+ createLangObject( TemplateClassType, Id.data, lookupNs.top )
+ insertObject( templateParamNs.top, Id.data, newClass )
+ }
+ }
+
+def templ_type_param_init_opt
+ ['=' id_expression]
+| []
+
+def type_param_init_opt
+ ['=' type_id]
+| []
+
+def type_param_id
+ [namespace_id]
+| [typedef_id]
+| [enum_id]
+| [class_id]
+| [templ_class_id]
+| [identifier]
+| [template_id]
+| [unknown_id]
+| []
+
+def template_argument_list_opt
+ [template_argument_list]
+| []
+
+def template_argument_list
+ [template_argument_list ',' template_argument]
+| [template_argument]
+
+def template_argument
+ [type_id]
+| [assignment_expression]
+
+def explicit_instantiation
+ ['template' declaration]
+| [declaration_start decl_specifier_mult_seq 'template' declaration declaration_end]
+
+def explicit_specialization
+ ['template' '<' '>' declaration]
+
+## Not sure what this one is about?
+#explicit_specialization:
+# declaration_start decl_specifier_mult_seq KW_Template '<' '>'
+# declaration declaration_end;
+
+
+#
+# Original namespace definition
+#
+
+def original_namespace_definition
+ [orig_namespace_def_name '{' declaration* namespace_end '}']
+
+def orig_namespace_def_name ['namespace' unknown_id]
+ {
+ match r2 [Id: lookup_id]
+ ptr lang_object nspace = createLangObject(
+ NamespaceType, Id.data, lookupNs.top )
+
+ # Insert the new object into the dictionary of the parent.
+ insertObject( curNamespace.top, Id.data, nspace )
+
+ # Push the namespace
+ curNamespace.push( nspace )
+ declNs.push( nspace )
+ lookupNs.push( nspace )
+
+ # LOG print( 'created original namespace: ', Id.data, '\n' )
+ }
+
+def namespace_end []
+ {
+ # Pop the namespace.
+ curNamespace.pop()
+ declNs.pop()
+ lookupNs.pop()
+
+ # LOG print( 'closed namespace\n' )
+ }
+
+#
+# Extension namespace definition
+#
+
+def extension_namespace_definition
+ [ext_namespace_def_name '{' declaration* namespace_end '}']
+
+def ext_namespace_def_name ['namespace' namespace_id]
+ {
+ match r2 [Id: lookup_id]
+ ptr lang_object nspace = Id.obj
+
+ # Push the namespace
+ curNamespace.push( nspace )
+ declNs.push( nspace )
+ lookupNs.push( nspace )
+
+ # LOG print( 'found extended namespace: ', Id.data, '\n' )
+ }
+
+#
+# Unnamed namespace definition
+#
+def unnamed_namespace_definition
+ [unnamed_namespace_def_name '{' declaration* namespace_end '}']
+
+def unnamed_namespace_def_name ['namespace']
+ {
+ ptr lang_object nspace = createLangObject(
+ NamespaceType, '<unnamed_namespace>',
+ lookupNs.top )
+
+ # Push the namespace
+ curNamespace.push( nspace )
+ declNs.push( nspace )
+ lookupNs.push( nspace )
+
+ # LOG print( 'parsed unnamed namespace\n' )
+ }
+
+#
+# linkage_specification
+#
+def linkage_specification
+ ['extern' TK_DoubleLit '{' declaration* '}']
+| ['extern' TK_DoubleLit declaration]
+
+#
+# Exception Handling.
+#
+
+def try_block
+ ['try' compound_statement handler_seq]
+
+def handler_seq
+ [handler_seq handler]
+| [handler]
+
+def handler
+ ['catch' '(' exception_declaration ')' compound_statement]
+
+def exception_declaration
+ [type_specifier_seq declarator]
+| [type_specifier_seq abstract_declarator]
+| [type_specifier_seq]
+
+def exception_declaration
+ ['...']
+
+def throw_expression
+ ['throw' assignment_expression]
+| ['throw']
+
+def exception_specification_opt
+ [exception_specification]
+| []
+
+def exception_specification
+ ['throw' '(' type_id_list_opt ')']
+
+def type_id_list_opt
+ [type_id_list]
+| []
+
+def type_id_list
+ [type_id_list ',' type_id]
+| [type_id]
+
+def start
+ [declaration*]
+
+#
+# Grammar done.
+#
+
+int printObject( str indent, ptr lang_object obj )
+{
+ print( indent, obj->name )
+
+ if obj->objectMap.length > 0
+ print( ' {\n' )
+
+ object_map ChildNames = obj->objectMap
+ for MapEl: object_list in child( ChildNames ) {
+ for Obj: ptr lang_object in MapEl
+ printObject( indent + ' ', Obj )
+ }
+
+ if obj->objectMap.length > 0
+ print( indent, '}' )
+
+ print( '\n' )
+}
+
+int printNamespace()
+{
+ printObject( '', rootNamespace )
+}
+
+start S = parse start( stdin )
+#printNamespace()
+#print( '***** SUCCESS *****\n' )
+#print_xml( S )
diff --git a/test/cxx/input01.cpp b/test/cxx/input01.cpp
new file mode 100644
index 00000000..e2b125f4
--- /dev/null
+++ b/test/cxx/input01.cpp
@@ -0,0 +1,17 @@
+namespace ns1
+{
+ namespace sub1 { class A {}; }
+ namespace sub2 { class B {}; }
+}
+
+namespace ns2
+{
+// int i = b;
+ class C
+ {
+ };
+
+ using namespace ns1;
+}
+
+ns2::sub1::A a;
diff --git a/test/cxx/input02.cpp b/test/cxx/input02.cpp
new file mode 100644
index 00000000..65ebe0db
--- /dev/null
+++ b/test/cxx/input02.cpp
@@ -0,0 +1,16 @@
+
+struct A
+{
+ struct B {};
+};
+
+struct C
+{
+ struct D : virtual public A {};
+};
+
+C::D::A d;
+
+C c;
+
+
diff --git a/test/cxx/input03.cpp b/test/cxx/input03.cpp
new file mode 100644
index 00000000..bccfbf0e
--- /dev/null
+++ b/test/cxx/input03.cpp
@@ -0,0 +1,19 @@
+
+struct C
+{
+
+};
+
+enum E
+{
+ C,
+ b
+};
+
+E e;
+
+enum E
+{
+ C,
+ b
+};
diff --git a/test/cxx/input04.cpp b/test/cxx/input04.cpp
new file mode 100644
index 00000000..9741a82b
--- /dev/null
+++ b/test/cxx/input04.cpp
@@ -0,0 +1,17 @@
+
+
+int i;
+class C
+{
+ int j;
+};
+
+class D
+{
+ int ~D();
+};
+
+int C::k;
+int C::~C;
+
+typedef int Int;
diff --git a/test/cxx/input05.cpp b/test/cxx/input05.cpp
new file mode 100644
index 00000000..7936ce09
--- /dev/null
+++ b/test/cxx/input05.cpp
@@ -0,0 +1,8 @@
+
+class C {};
+void ~C( );
+void C::operator +( int i );
+
+int i;
+
+//void operator C( void k );
diff --git a/test/cxx/input06.cpp b/test/cxx/input06.cpp
new file mode 100644
index 00000000..008edd4b
--- /dev/null
+++ b/test/cxx/input06.cpp
@@ -0,0 +1,7 @@
+
+class C
+{
+
+};
+
+int C::f( int i, int j( void v ) );
diff --git a/test/cxx/input07.cpp b/test/cxx/input07.cpp
new file mode 100644
index 00000000..474ba9a1
--- /dev/null
+++ b/test/cxx/input07.cpp
@@ -0,0 +1,18 @@
+class C
+{
+ class D {};
+
+ typedef C I;
+
+ I::D i;
+};
+
+C c;
+
+void function( int i, int j );
+{
+ asdlkf
+}
+
+
+
diff --git a/test/cxx/input08.cpp b/test/cxx/input08.cpp
new file mode 100644
index 00000000..2e9ec7a0
--- /dev/null
+++ b/test/cxx/input08.cpp
@@ -0,0 +1,13 @@
+class B { class Find {}; };
+
+typedef B T;
+
+class C : public T
+{
+ Find find;
+};
+
+
+
+
+
diff --git a/test/cxx/input09.cpp b/test/cxx/input09.cpp
new file mode 100644
index 00000000..373f4373
--- /dev/null
+++ b/test/cxx/input09.cpp
@@ -0,0 +1,7 @@
+template <class X> struct C
+{
+ X t;
+ void f();
+};
+
+template <class X> void C<X>::f();
diff --git a/test/cxx/input10.cpp b/test/cxx/input10.cpp
new file mode 100644
index 00000000..98b6a96a
--- /dev/null
+++ b/test/cxx/input10.cpp
@@ -0,0 +1,11 @@
+template <class X> struct C
+{
+ class Y {};
+};
+
+class C<int>
+{
+ int i;
+};
+
+//void f( class C<int> i, int j );
diff --git a/test/cxx/input11.cpp b/test/cxx/input11.cpp
new file mode 100644
index 00000000..d9714571
--- /dev/null
+++ b/test/cxx/input11.cpp
@@ -0,0 +1,2 @@
+
+int f( int (*) [](), void );
diff --git a/test/cxx/input12.cpp b/test/cxx/input12.cpp
new file mode 100644
index 00000000..c135c8da
--- /dev/null
+++ b/test/cxx/input12.cpp
@@ -0,0 +1,8 @@
+void f();
+class C
+{
+ class D {};
+ void g();
+};
+
+typename C c;
diff --git a/test/cxx/input13.cpp b/test/cxx/input13.cpp
new file mode 100644
index 00000000..67fd42fd
--- /dev/null
+++ b/test/cxx/input13.cpp
@@ -0,0 +1,14 @@
+
+class C
+{
+ class D {};
+ int f();
+};
+
+int f()
+{
+}
+int C::f()
+{
+ D d;
+}
diff --git a/test/cxx/preproc b/test/cxx/preproc
new file mode 100755
index 00000000..14e8fe6b
--- /dev/null
+++ b/test/cxx/preproc
@@ -0,0 +1,4 @@
+#!/bin/bash
+#
+
+g++ -E -undef -D'__attribute__(xyz)=' "$@" | sed 's/#.*$//'
diff --git a/test/diff/Makefile b/test/diff/Makefile
new file mode 100644
index 00000000..25a0b6d3
--- /dev/null
+++ b/test/diff/Makefile
@@ -0,0 +1,34 @@
+#
+# Copyright 2002-2006 Adrian Thurston <thurston@cs.queensu.ca>
+#
+
+# This file is part of Ragel.
+#
+# Ragel is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# Ragel is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ragel; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+
+SRC = $(wildcard *.lm)
+BIN = $(SRC:%.lm=%.bin)
+COLM = ../../colm/colm
+
+all: $(BIN)
+
+$(BIN): $(COLM)
+
+$(BIN): %.bin: %.lm
+ $(COLM) $<
+
+clean:
+ rm -f *.cpp *.bin
diff --git a/test/diff/diff.lm b/test/diff/diff.lm
new file mode 100644
index 00000000..37232025
--- /dev/null
+++ b/test/diff/diff.lm
@@ -0,0 +1,84 @@
+
+
+token newline / '\n' /
+token index / 'Index:' [ \t]* /
+token consume_line / [^\n]* /
+
+
+def index_stmt [index consume_line newline]
+
+token separator_line / '='+ '\n' /
+
+# Whitespace separated word list
+lex word_list
+{
+ token word /[^\t \n]+/
+ ignore /[\t ]+/
+
+ def word_list
+ [word word_list]
+ | []
+}
+
+token old_file_start / '---' [\t ]+ /
+token new_file_start / '+++' [\t ]+ /
+
+def old_file
+ [old_file_start word_list newline]
+
+def new_file
+ [new_file_start word_list newline]
+
+def file_header
+ [index_stmt separator_line old_file new_file]
+
+token hunk_header / '@@' any* :>> '@@' '\n' /
+token hunk_line / ( ' ' | '-' | '+' ) [^\n]* '\n' /
+
+def hunk_body
+ [hunk_line*]
+
+def hunk
+ [hunk_header hunk_body]
+
+# diff of a single file: header followed by a hunk list.
+def file_diff
+ [file_header hunk*]
+
+def start
+ [file_diff*]
+
+
+start S = parse start( stdin )
+
+for OF:old_file in S {
+ print( 'old file: ', OF )
+ # Get the first word and check if it is
+ # the file we are interested in.
+ if match OF [
+ "--- fsmrun.cpp"
+ Rest: word_list
+ "\n"
+ ]
+ {
+ OF = construct old_file
+ ["--- newfilename.cpp" Rest "\n"]
+ print_xml( OF )
+ }
+}
+
+print( S )
+
+# for Header: file_header in lhs {
+# old_file OF = old_file in Header
+# if match OF
+# [old_file_start "lmparse.kl" word_list newline]
+# {
+# Header = construct file_header
+# ~Index: rewritten
+# ~===================================================================
+# ~--- this is the file (asldkfj)
+# ~+++ this is the file (ewir)
+# }
+# }
+
diff --git a/test/diff/input1.diff b/test/diff/input1.diff
new file mode 100644
index 00000000..b0021f67
--- /dev/null
+++ b/test/diff/input1.diff
@@ -0,0 +1,86 @@
+Index: fsmrun.cpp
+===================================================================
+--- fsmrun.cpp (revision 4555)
++++ fsmrun.cpp (working copy)
+@@ -150,7 +150,7 @@
+ peof = 0;
+ if ( parser != 0 ) {
+ region = parser->getNextRegion();
+- cs = getStateFromNextRegion();
++ cs = tables->entryByRegion[region];
+ }
+ else {
+ region = 0;
+@@ -189,7 +189,7 @@
+
+ tokstart = 0;
+ region = parser->getNextRegion();
+- cs = getStateFromNextRegion();
++ cs = tables->entryByRegion[region];
+ }
+
+ void FsmRun::sendToken( int id )
+@@ -222,7 +222,7 @@
+ parser = newParser;
+
+ region = parser->getNextRegion();
+- cs = getStateFromNextRegion();
++ cs = tables->entryByRegion[region];
+ }
+ else {
+ #ifdef LOG_ACTIONS
+@@ -355,7 +355,7 @@
+
+ /* Set the current state from the next region. */
+ region = parser->getNextRegion();
+- cs = getStateFromNextRegion();
++ cs = tables->entryByRegion[region];
+ }
+ }
+
+@@ -452,7 +452,7 @@
+ /* First thing check for error. */
+ if ( cs == tables->errorState ) {
+ if ( parser != 0 ) {
+- if ( getStateFromNextRegion( 1 ) != 0 ) {
++ if ( parser->getNextRegion( 1 ) != 0 ) {
+ #ifdef LOG_BACKTRACK
+ cerr << "scanner failed, trying next region" << endl;
+ #endif
+@@ -462,7 +462,7 @@
+
+ parser->nextRegionInd += 1;
+ region = parser->getNextRegion();
+- cs = getStateFromNextRegion();
++ cs = tables->entryByRegion[region];
+ cerr << "new token region: " <<
+ parser->tables->gbl->regionInfo[region].name << endl;
+ continue;
+@@ -495,7 +495,7 @@
+ }
+ else {
+ region = parser->getNextRegion();
+- cs = getStateFromNextRegion();
++ cs = tables->entryByRegion[region];
+ cerr << "new token region: " <<
+ parser->tables->gbl->regionInfo[region].name << endl;
+ continue;
+Index: junk.cpp
+===================================================================
+---
++++ junk.cpp (working copy)
+Index: fsmrun.h
+===================================================================
+--- fsmrun.h (revision 4557)
++++ fsmrun.h (working copy)
+@@ -197,10 +197,6 @@
+ void runOnInputStream( PdaRun *parser, InputStream &in );
+ void execute();
+
+- /* Offset can be used to look at the next nextRegionInd. */
+- int getStateFromNextRegion( int offset = 0 )
+- { return tables->entryByRegion[parser->getNextRegion(offset)]; }
+-
+ FsmTables *tables;
+ PdaRun *parser;
+ InputStream *inputStream;
diff --git a/test/dns/Makefile b/test/dns/Makefile
new file mode 100644
index 00000000..27b04782
--- /dev/null
+++ b/test/dns/Makefile
@@ -0,0 +1,20 @@
+#
+# Copyright 2007 Adrian Thurston <thurston@cs.queensu.ca>
+#
+
+SRC = $(wildcard *.lm)
+BIN = $(SRC:%.lm=%.bin)
+COLM = ../../colm/colm
+
+all: $(BIN)
+
+extract: extract.c
+ gcc -Wall -o extract extract.c -lpcap
+
+$(BIN): $(COLM)
+
+$(BIN): %.bin: %.lm
+ $(COLM) $<
+
+clean:
+ rm -f dns.cpp dns.bin extract
diff --git a/test/dns/dns.lm b/test/dns/dns.lm
new file mode 100644
index 00000000..29f0cdbe
--- /dev/null
+++ b/test/dns/dns.lm
@@ -0,0 +1,488 @@
+
+# Used for most of the grammar.
+token octet /any/
+
+# Filled in while parsing resource records. They determine what RR_UNKNOWN
+# translates to.
+global int rr_type_value
+global int rr_class_value
+
+# Tokens generated from RR_UNKNOWN. Used to pick the kind
+# of resource record to attempt to parse.
+token RR_A // # 1 a host address
+token RR_NS // # 2 an authoritative name server
+token RR_MD // # 3 a mail destination (Obsolete - use MX)
+token RR_MF // # 4 a mail forwarder (Obsolete - use MX)
+token RR_CNAME // # 5 the canonical name for an alias
+token RR_SOA // # 6 marks the start of a zone of authority
+token RR_MB // # 7 a mailbox domain name (EXPERIMENTAL)
+token RR_MG // # 8 a mail group member (EXPERIMENTAL)
+token RR_MR // # 9 a mail rename domain name (EXPERIMENTAL)
+token RR_NULL // # 10 a null RR (EXPERIMENTAL)
+token RR_WKS // # 11 a well known service description
+token RR_PTR // # 12 a domain name pointer
+token RR_HINFO // # 13 host information
+token RR_MINFO // # 14 mailbox or mail list information
+token RR_MX // # 15 mail exchange
+token RR_TXT // # 16 text strings
+
+token RR_UNKNOWN
+ /''/
+ {
+ int id = typeid RR_UNKNOWN
+ if rr_type_value == 1
+ id = typeid RR_A
+ elsif rr_type_value == 2
+ id = typeid RR_NS
+ elsif rr_type_value == 5
+ id = typeid RR_CNAME
+ elsif rr_type_value == 12
+ id = typeid RR_PTR
+ elsif rr_type_value == 15
+ id = typeid RR_MX
+ elsif rr_type_value == 16
+ id = typeid RR_TXT
+
+ send( make_token( id, '' ) )
+ }
+
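A note on the dispatch above: the numeric TYPE value picks which resource-record form the grammar will attempt, and any unlisted value stays RR_UNKNOWN. A minimal stand-in in plain Python, not Colm (all names here are illustrative, not part of the test):

RR_NAMES = {1: 'A', 2: 'NS', 5: 'CNAME', 12: 'PTR', 15: 'MX', 16: 'TXT'}

def rr_token(rr_type_value):
    # Fall back to the generic form when the type code is not handled.
    return RR_NAMES.get(rr_type_value, 'UNKNOWN')

print(rr_token(15), rr_token(99))   # MX UNKNOWN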
+# Convert two octets in network order into an unsigned 16 bit value.
+int network_uord16( octet o1, octet o2 )
+{
+ return o1.data.uord8() * 256 + o2.data.uord8()
+}
+
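A quick sanity check of the arithmetic above, in plain Python (illustrative only): two octets in network byte order combine as high * 256 + low.

o1, o2 = 0x01, 0x2C
assert o1 * 256 + o2 == int.from_bytes(bytes([o1, o2]), 'big') == 300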
+
+def message
+ [header questions answers authorities additionals]
+
+global int question_count
+global int answer_count
+global int authority_count
+global int additional_count
+
+# Message Header
+#
+# 1 1 1 1 1 1
+# 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
+# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
+# | ID |
+# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
+# |QR| Opcode |AA|TC|RD|RA| Z | RCODE |
+# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
+# | QDCOUNT |
+# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
+# | ANCOUNT |
+# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
+# | NSCOUNT |
+# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
+# | ARCOUNT |
+# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
+def header
+ [header_id header_fields count count count count]
+ {
+ question_count = r3.count
+ answer_count = r4.count
+ authority_count = r5.count
+ additional_count = r6.count
+ }
+
+def header_id
+ [octet octet]
+
+def header_fields
+ [octet octet]
+
+def count
+ int count
+
+ [octet octet]
+ {
+ lhs.count = network_uord16( r1, r2 )
+ }
+
+#
+# Counting Primitives
+#
+# Uses a global stack of counts. Using a stack allows counted lists to be
+# nested. As a list is consumed, its count is brought down to zero. To use it,
+# push a new count onto the stack and include it in a right-recursive list
+# like so:
+#
+# def LIST
+#     [count_inc ITEM LIST]
+# |   [count_end]
+# (see question_list, answer_list, etc. below)
+#
+
+list count_stack [int]
+global count_stack CL = construct count_stack []
+
+int start_list( int count )
+{
+ CL.push( count )
+}
+
+def count_inc
+ []
+ {
+ if CL.top == 0 {
+ reject
+ } else {
+ CL.top = CL.top - 1
+ }
+ }
+
+def count_end
+ []
+ {
+ if CL.top != 0 {
+ reject
+ } else {
+ CL.pop()
+ }
+ }
+
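The counted-list idiom above can be hard to see inside the grammar, so here is a rough equivalent in plain Python, assuming the same stack-of-counts discipline (names and structure are illustrative, not part of the test):

count_stack = []

def start_list(count):
    count_stack.append(count)

def count_inc():
    # Succeeds (and consumes one slot) only while the current count is nonzero.
    if count_stack[-1] == 0:
        return False
    count_stack[-1] -= 1
    return True

def count_end():
    # Succeeds only once the current count has reached zero.
    if count_stack[-1] != 0:
        return False
    count_stack.pop()
    return True

def parse_counted(items, parse_item):
    out = []
    while count_inc():
        out.append(parse_item(items))
    assert count_end()
    return out

# Example: an outer list of two entries, each entry an inner counted list of ints.
data = iter([3, 10, 20, 30, 1, 40])

def parse_entry(it):
    start_list(next(it))             # the inner count precedes the inner items
    return parse_counted(it, lambda i: next(i))

start_list(2)                        # the outer count
print(parse_counted(data, parse_entry))   # [[10, 20, 30], [40]]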
+#
+# Octet List
+#
+
+# General octet list. A count must be pushed (via start_list) before using this.
+def octet_list
+ [count_inc octet octet_list]
+| [count_end]
+
+
+#
+# Names
+#
+
+def name
+ [name_part* name_end]
+
+# Name part lists are terminated by a zero length or a pointer.
+def name_end
+ # Zero length ending
+ [octet]
+ {
+ int val = r1.data.uord8()
+ if val != 0 {
+ reject
+ }
+ }
+
+ # Pointer ending
+ # +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
+ # | 1 1| OFFSET |
+ # +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
+| [octet octet]
+ {
+ int val = r1.data.uord8()
+ if val < 64 {
+ reject
+ }
+ }
+
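For reference, the pointer form ends a name with a 14-bit offset back into the message, and print_name further down uses the same arithmetic. A small check in plain Python (illustrative values only):

def pointer_offset(o1, o2):
    # Both high bits of the first octet are set (0b11xxxxxx), i.e. o1 >= 192.
    assert o1 >= 192
    return (o1 - 192) * 256 + o2     # equivalent to ((o1 & 0x3F) << 8) | o2

print(pointer_offset(0xC0, 0x0C))    # 12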
+#
+# Get some number of bytes.
+#
+
+# How many to get
+global int nbytes
+
+# We use this token to eliminate the lookahead that would otherwise be needed
+# to cause a reduce of part_len. It forces whatever comes before nbytes to be
+# reduced before the nbytes_data token is fetched from the scanner. This is
+# needed because nbytes_data depends on the nbytes global, which must be set first.
+token nb_empty /''/
+
+# Fetch nbytes bytes.
+token nbytes_data
+ /''/
+ {
+ send( make_token( typeid nbytes_data, pull(stdin, nbytes) ) )
+ }
+
+def nbytes
+ [nb_empty nbytes_data]
+
+def name_part
+ [part_len nbytes]
+
+
+def part_len
+ [octet]
+ {
+ # A name part list is terminated either by a zero length or a pointer,
+ # which must have the two high bits set.
+ int count = r1.data.uord8()
+ if count == 0 || count >= 64 {
+ reject
+ } else {
+ # Set the number of bytes to get for the name part.
+ nbytes = count
+ }
+ }
+
+#
+# Resource Records
+#
+
+# 1 1 1 1 1 1
+# 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
+# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
+# | |
+# / /
+# / NAME /
+# | |
+# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
+# | TYPE |
+# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
+# | CLASS |
+# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
+# | TTL |
+# | |
+# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
+# | RDLENGTH |
+# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--|
+# / RDATA /
+# / /
+# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
+
+def resource_record
+ [name rr_type rr_class ttl rdlength rdata]
+
+def rr_type
+ [octet octet]
+ {
+ rr_type_value = network_uord16( r1, r2 )
+ }
+
+def rr_class
+ int value
+ [octet octet]
+ {
+ rr_class_value = network_uord16( r1, r2 )
+ }
+
+def ttl
+ [octet octet octet octet]
+
+token rdata_bytes
+ /''/
+ {
+ send( make_token( typeid rdata_bytes, pull(stdin, rdata_length) ) )
+ }
+
+def rdlength
+ [octet octet]
+ {
+ rdata_length = network_uord16( r1, r2 )
+ }
+
+global int rdata_length
+
+def rdata
+ [RR_UNKNOWN rdata_bytes]
+| [RR_A address]
+| [RR_NS name]
+| [RR_CNAME name]
+| [RR_PTR name]
+| [RR_MX octet octet name]
+| [RR_TXT rdata_bytes]
+
+
+#
+# Address
+#
+def address [octet octet octet octet]
+
+#
+# List of Questions
+#
+
+def questions
+ [load_question_count question_list]
+
+def load_question_count
+ []
+ {
+ start_list( question_count )
+ }
+
+def question_list
+ [count_inc question question_list]
+| [count_end]
+
+#
+# Question
+#
+
+# 1 1 1 1 1 1
+# 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
+# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
+# | |
+# / QNAME /
+# / /
+# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
+# | QTYPE |
+# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
+# | QCLASS |
+# +--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+--+
+
+def question
+ [name qtype qclass]
+
+def qtype
+ [octet octet]
+
+def qclass
+ [octet octet]
+
+#
+# List of Answers
+#
+
+def answers
+ [load_answer_count answer_list]
+
+def load_answer_count
+ []
+ {
+ start_list( answer_count )
+ }
+
+def answer_list
+ [count_inc answer answer_list]
+| [count_end]
+
+#
+# Answer
+#
+
+def answer
+ [resource_record]
+
+#
+# List of Authorities
+#
+
+def authorities
+ [load_authority_count authority_list]
+
+def load_authority_count
+ []
+ {
+ start_list( authority_count )
+ }
+
+def authority_list
+ [count_inc authority authority_list]
+| [count_end]
+
+#
+# Authority
+#
+
+def authority
+ [resource_record]
+
+#
+# List of Additionals
+#
+
+def additionals
+ [load_additional_count additional_list]
+
+def load_additional_count
+ []
+ {
+ start_list( additional_count )
+ }
+
+def additional_list
+ [count_inc additional additional_list]
+| [count_end]
+
+#
+# Additional
+#
+
+def additional
+ [resource_record]
+
+
+def start
+ [message*]
+
+#
+# Grammar End.
+#
+
+int print_RR_UNKNOWN( start s )
+{
+ for I:rdata in s {
+ if match I [u:RR_UNKNOWN rdata_bytes] {
+ print( 'UNKNOWN TYPE\n' )
+ }
+ }
+}
+
+int print_RR_A( start s )
+{
+ for I:rdata in s {
+ if match I [RR_A o1:octet o2:octet o3:octet o4:octet] {
+ print( 'RR_A: ', o1.data.uord8(), '.', o2.data.uord8(), '.',
+ o3.data.uord8(), '.', o4.data.uord8(), '\n' )
+ }
+ }
+}
+
+map name_map [int name]
+
+int print_name( name n, name_map m )
+{
+ for P: name_part in n {
+ match P [part_len D:nbytes]
+ print( D, '.' )
+ }
+
+ for E:name_end in n {
+ if match E [o1:octet o2:octet] {
+ int val = (o1.data.uord8() - 192) * 256 + o2.data.uord8()
+ print( '[', val, ']' )
+ name nameInMap = m.find( val )
+ print_name( nameInMap, m )
+ }
+ }
+}
+
+int print_all_names( start s )
+{
+ for M:message in s {
+ name_map m = construct name_map []
+
+ octet O = octet in M
+
+ for N:name in M {
+ match N [name_part* E:name_end]
+
+ for NP: name_part* in N {
+ if match NP [L:octet nbytes name_part*] {
+ int messageOffset = L.pos - O.pos
+ name n = construct name [NP E]
+ m.insert( messageOffset, n )
+ }
+ }
+ }
+
+ for I:name in M {
+ print_name( I, m )
+ print( '\n' )
+ }
+ }
+}
+
+start S = parse start( stdin )
+print_all_names( S )
+print( '*** SUCCESS ***\n' )
diff --git a/test/dns/dumpdns b/test/dns/dumpdns
new file mode 100644
index 00000000..3409afee
--- /dev/null
+++ b/test/dns/dumpdns
@@ -0,0 +1,11 @@
+#!/bin/bash
+#
+
+# Use this script to capture dns packets to a dump file. Then use extract to
+# break up the packets and strip the headers, leaving just dns packets.
+#
+# usage: bash dumpdns <dumpfile>
+
+[ -z "$1" ] && exit
+
+tcpdump -s 0 -w $1 udp port 53
diff --git a/test/dns/extract.c b/test/dns/extract.c
new file mode 100644
index 00000000..8af026a3
--- /dev/null
+++ b/test/dns/extract.c
@@ -0,0 +1,48 @@
+#include <pcap.h>
+
+/*
+ * Break up a dump file and strip headers, leaving just
+ * the dns portion of packets.
+ */
+
+char outname[1024];
+char errbuf[PCAP_ERRBUF_SIZE];
+
+int main( int argc, char **argv )
+{
+ int packet;
+ pcap_t *p;
+ if ( argc != 3 ) {
+ fprintf( stderr, "usage: get <dumpfile> <rootname>\n" );
+ return 1;
+ }
+
+ p = pcap_open_offline( argv[1], errbuf );
+
+ for ( packet = 0; ; packet++ ) {
+ FILE *file;
+ unsigned long len;
+ struct pcap_pkthdr h;
+ const u_char *data;
+
+ data = pcap_next( p, &h );
+ if ( data == 0 )
+ break;
+
+ if ( h.caplen < h.len )
+ fprintf( stderr, "warning: packet number %02d is short\n", packet );
+
+		/* The magic number is the size of the headers we want to strip
+		 * (42 bytes: 14 Ethernet + 20 IPv4 + 8 UDP). */
+ data += 42;
+ len = h.caplen - 42;
+
+ sprintf( outname, "%s-%04d", argv[2], packet );
+ file = fopen( outname, "wb" );
+ fwrite( data, 1, len, file );
+ fclose( file );
+ }
+
+ pcap_close( p );
+
+ return 0;
+}
diff --git a/test/heredoc.in b/test/heredoc.in
new file mode 100644
index 00000000..c9638ca9
--- /dev/null
+++ b/test/heredoc.in
@@ -0,0 +1,3 @@
+hello
+random 9392af j9 stuff
+hello
diff --git a/test/heredoc.lm b/test/heredoc.lm
new file mode 100644
index 00000000..54dcc33d
--- /dev/null
+++ b/test/heredoc.lm
@@ -0,0 +1,45 @@
+rl ident_char /[a-zA-Z_]/
+
+lex start
+{
+ # Tokens
+ token other /(^(ident_char|0|'\n'))+/
+
+ token here_close //
+ token id
+ /ident_char+/
+ {
+ if HereId && HereId == match_text {
+ send( make_token(
+ typeid here_close,
+ pull(stdin, match_length - 1) ) )
+ } else {
+ send( make_token( typeid id, pull(stdin, match_length) ) )
+ }
+ }
+
+ token nl /'\n'/
+}
+
+def here_name
+ [id]
+ {
+ HereId = r1.data
+ }
+
+global str HereId
+
+def here_data
+ [here_data_item*]
+
+def here_data_item
+ [id]
+| [other]
+| [nl]
+
+def heredoc
+ [here_name here_data here_close id nl]
+
+
+heredoc S = parse heredoc( stdin )
+print_xml(S)
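The heredoc test above hinges on one trick: remember the heredoc delimiter (the first identifier), then have the scanner turn a later identical identifier into a close token instead of an ordinary id. A simplified sketch of that scanning decision in plain Python (it ignores the match-length fix-up the test performs; all names are illustrative):

import re

def scan_heredoc(text):
    tokens = []
    here_id = None
    for word in re.findall(r'[A-Za-z_]+|\n|[^A-Za-z_\n]+', text):
        if re.fullmatch(r'[A-Za-z_]+', word):
            if here_id is None:
                here_id = word                      # first id names the heredoc
                tokens.append(('here_name', word))
            elif word == here_id:
                tokens.append(('here_close', word)) # same id later closes it
            else:
                tokens.append(('id', word))
        elif word == '\n':
            tokens.append(('nl', word))
        else:
            tokens.append(('other', word))
    return tokens

print(scan_heredoc('hello\nrandom 9392af j9 stuff\nhello\n'))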
diff --git a/test/html/Makefile b/test/html/Makefile
new file mode 100644
index 00000000..25a0b6d3
--- /dev/null
+++ b/test/html/Makefile
@@ -0,0 +1,34 @@
+#
+# Copyright 2002-2006 Adrian Thurston <thurston@cs.queensu.ca>
+#
+
+# This file is part of Ragel.
+#
+# Ragel is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# Ragel is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ragel; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+
+SRC = $(wildcard *.lm)
+BIN = $(SRC:%.lm=%.bin)
+COLM = ../../colm/colm
+
+all: $(BIN)
+
+$(BIN): $(COLM)
+
+$(BIN): %.bin: %.lm
+ $(COLM) $<
+
+clean:
+ rm -f *.cpp *.bin
diff --git a/test/html/html-lextag.lm b/test/html/html-lextag.lm
new file mode 100644
index 00000000..0869538c
--- /dev/null
+++ b/test/html/html-lextag.lm
@@ -0,0 +1,324 @@
+#
+# Regular Definitions
+#
+rl def_name_char /[\-A-Za-z0-9._:?]/
+rl def_name /[A-Za-z_:] def_name_char*/
+rl def_system_literal /'"' [^"]* '"' | "'" [^']* "'"/
+
+#
+# Scanner for tag names.
+#
+lex TAG_NAME
+{
+ ignore /space+/
+ token tag_id /def_name/
+}
+
+#
+# Scanner for attribute names
+#
+lex ATTR_NAME
+{
+ ignore /space+/
+ token attr_name /def_name_char+/
+ literal '='
+}
+
+# Scanner for attribute values.
+lex ATTR_VAL
+{
+ ignore /space+/
+ literal '>', '/>'
+ token dquote_val /'"' ([^"] | '\\' any)* '"'/
+ token squote_val /"'" ([^'] | '\\' any)* "'"/
+ token unq_val /[^ \t\r\n<>"'] [^ \t\r\n<>]*/
+}
+
+#
+# Tokens
+#
+
+lex START
+{
+ ignore /space+/
+
+ literal '<', '</', '<!DOCTYPE'
+ token close_tag
+ /'</' [\t ]* id: [a-zA-Z]+ '>'/
+
+ token doc_data /[^<]+/
+ token comment /'<!--' any* :>> '-->'/
+}
+
+#
+# Tags
+#
+
+bool inTagStack( str id )
+{
+ tag_stack LocalTagStack = TagStack
+ for Tag:tag_id in LocalTagStack {
+ if id == Tag.data
+ return true
+ }
+ return false
+}
+
+# This scanner is just for the id in close tags. The id needs to be looked up
+# in the tag stack so we can determine if it is a stray.
+lex close_id
+{
+ # Ignore whitespace.
+ ignore /space+/
+
+ token stray_close_id //
+ token missing_close_id //
+
+ token close_id /def_name/
+ {
+ # If it is in the tag stack then it is a close_id. If not then it's a
+ # stray_close_id.
+ int send_id = typeid stray_close_id
+
+ if ( inTagStack( match_text ) ) {
+ print( 'CLOSE \'', match_text, '\' IN TAG STACK\n' )
+
+ # The tag is in the stack, send missing close tags until we get to it.
+ match TagStack [Top:tag_id Rest:tag_stack]
+ TagStack = Rest
+ while ( Top.data != match_text ) {
+ print( 'SENDING missing close\n' )
+ send( make_token( typeid missing_close_id, '' ) )
+ match TagStack [Top2:tag_id Rest2:tag_stack]
+ Top = Top2
+ TagStack = Rest2
+ }
+
+ print( 'SENDING close\n' )
+ send( make_token( typeid close_id, pull( stdin, match_length ) ) )
+ }
+ else {
+ print( 'CLOSE \'', match_text, '\' NOT IN TAG STACK\n' )
+ # The tag is not in the tag stack so send the id as a stray close.
+			send( make_token( typeid stray_close_id, pull( stdin, match_length ) ) )
+ }
+ }
+}
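The close_id action above encodes a repair policy: a closer that is nowhere on the tag stack is a stray, while a closer found deeper on the stack implies missing closers for every tag opened after it. A condensed sketch of the same policy in plain Python (illustrative names; the innermost tag is kept at the end of the list):

def classify_closer(tag_stack, name):
    if name not in tag_stack:
        return ['stray_close']
    events = []
    while tag_stack[-1] != name:
        tag_stack.pop()                  # every deeper tag is left unclosed
        events.append('missing_close')
    tag_stack.pop()                      # the matching open tag itself
    events.append('close')
    return events

stack = ['t1', 't2', 't3']               # t3 is the innermost open tag
print(classify_closer(stack, 'b'))        # ['stray_close']
print(classify_closer(stack, 't1'))       # ['missing_close', 'missing_close', 'close']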
+
+#
+# Tag Stack
+#
+
+def tag_stack
+ [tag_id tag_stack]
+| []
+
+global tag_stack TagStack = construct tag_stack []
+
+#
+# Document Type
+#
+# This scanner handles inside DOCTYPE tags (except keywords).
+lex DOCTYPE
+{
+ ignore /space+/
+ token dt_name /def_name/
+ token dt_literal /def_system_literal/
+ token dt_bl /"[" [^\]]* "]"/
+ token dt_close /'>'/
+}
+
+# Using a separate scanner for the keywords in DOCTYPE prevents them from
+# covering dt_name
+lex DOCTYPE_KW
+{
+ ignore /space+/
+ literal 'SYSTEM', 'PUBLIC'
+}
+
+def DOCTYPE ['<!DOCTYPE' dt_name external_id dt_bl? dt_close]
+
+def external_id
+ ['SYSTEM' dt_literal?]
+| ['PUBLIC' dt_literal dt_literal?]
+
+#
+# Tags, with optional close.
+#
+
+def tag
+ [open_tag item* close_tag]
+
+def unclosed_tag
+ [open_tag item* missing_close_id]
+
+def open_tag
+ ['<' tag_id attr* '>']
+ {
+ TagStack = construct tag_stack
+ [r2 TagStack]
+ }
+
+#
+# Empty tags
+#
+def empty_tag
+ ['<' tag_id attr* '/>']
+
+#
+# Stray close tags
+#
+def stray_close
+ [close_tag]
+
+
+#
+# Attributes
+#
+
+def attr
+ [attr_name eql_attr_val?]
+
+def eql_attr_val ['=' attr_val]
+
+def attr_val
+ [squote_val]
+| [dquote_val]
+| [unq_val]
+| []
+
+#
+# Items
+#
+
+def item
+ [DOCTYPE]
+| [tag]
+| [unclosed_tag]
+| [empty_tag]
+| [stray_close]
+| [doc_data]
+| [comment]
+
+
+token trailing /any*/
+
+def start
+ [item* trailing]
+
+#
+# END GRAMMAR
+#
+
+int addDefaultAltTags( ref start Start )
+{
+ for T: open_tag in Start {
+ require T
+ ["<img" AttrList: attr* '>']
+
+ bool haveAlt = false
+ for A: attr in T {
+ if match A ["alt=" attr_val]
+ haveAlt = true
+ }
+
+ if !haveAlt {
+ for AL: attr* in T {
+ if match AL [] {
+ AL = construct attr*
+ [" alt=\"default alt\""]
+ break
+ }
+ }
+ }
+ }
+}
+
+int printLinks( start Start )
+{
+ for A:tag in Start {
+ require A
+ ["<a" AttrList: attr* ">" I: item* "</a>"]
+
+ for Attr: attr in AttrList {
+ if match Attr ["href = " AttrVal: attr_val]
+ print( 'link: ', I, '\ntarget: ', AttrVal, '\n\n' )
+ }
+ }
+}
+
+
+bool should_close( tag_id TI )
+{
+ return true
+}
+
+bool should_flatten( tag_id TI )
+{
+ return true
+}
+
+# Finds unclosed tags and puts the content after the tag. Afterwards
+# all unclosed tags will be empty 'inside'.
+#int flatten( ref start Start )
+#{
+# for TL: item* in Start {
+# require TL
+# [OT: open_tag Inside: item* Trailing: item*]
+#
+# match OT
+# ['<' TagId: tag_id attr* '>']
+#
+# if should_flatten( TagId )
+# {
+# require Inside
+# [item item*]
+#
+# # Put Trailing at the end of inside.
+# for END: item* in Inside {
+# if match END [] {
+# END = Trailing
+# break
+# }
+# }
+#
+# str empty = ''
+# missing_close_id Missing = construct missing_close_id [empty]
+# opt_close_tag EmptyCloseTag =
+# construct opt_close_tag [Missing]
+#
+# # Close the tag and put inside after it.
+# TL = construct item*
+# [OT EmptyCloseTag Inside]
+# }
+# }
+#}
+#
+#int close( ref start Start )
+#{
+# for TL: item in Start {
+# require TL
+# [OpenTag: open_tag Inside: item*]
+#
+# match OpenTag
+# ['<' TagId: tag_id attr* '>']
+#
+# if should_close( TagId )
+# {
+# close_id CloseId = construct close_id
+# [TagId.data]
+#
+# opt_close_tag CloseTag =
+# construct opt_close_tag ['</' CloseId '>']
+#
+# # Close the tag and put inside after it.
+# TL = construct item
+# [OpenTag Inside CloseTag]
+# }
+# }
+#}
+
+start HTML = parse start( stdin )
+print_xml( HTML )
+for C: close_tag in HTML
+ print( C.id, '\n' )
diff --git a/test/html/html.lm b/test/html/html.lm
new file mode 100644
index 00000000..98573f2e
--- /dev/null
+++ b/test/html/html.lm
@@ -0,0 +1,307 @@
+#
+# Regular Definitions
+#
+rl def_name_char /[\-A-Za-z0-9._:?]/
+rl def_name /[A-Za-z_:] def_name_char*/
+rl def_system_literal /'"' [^"]* '"' | "'" [^']* "'"/
+
+#
+# Scanner for tag names.
+#
+lex TAG_NAME
+{
+ ignore /space+/
+ token tag_id /def_name/
+}
+
+#
+# Scanner for attribute names
+#
+lex ATTR_NAME
+{
+ ignore /space+/
+ token attr_name /def_name_char+/
+ literal '='
+}
+
+# Scanner for attribute values.
+lex ATTR_VAL
+{
+ ignore /space+/
+ literal '>', '/>'
+ token dquote_val /'"' ([^"] | '\\' any)* '"'/
+ token squote_val /"'" ([^'] | '\\' any)* "'"/
+ token unq_val /[^ \t\r\n<>"'] [^ \t\r\n<>]*/
+}
+
+#
+# Tokens
+#
+
+lex START
+{
+ ignore /space+/
+ literal '<', '</', '<!DOCTYPE'
+ token doc_data /[^<]+/
+ token comment /"<!--" any* :>> "-->"/
+}
+
+#
+# Tags
+#
+
+# This scanner is just for the id in close tags. The id needs to be looked up
+# in the tag stack so we can determine if it is a stray.
+lex close_id
+{
+ # Ignore whitespace.
+ ignore /space+/
+
+ token stray_close_id //
+ token close_id /def_name/
+ {
+ # If it is in the tag stack then it is a close_id. If not then it's a
+ # stray_close_id.
+ int send_id = typeid stray_close_id
+
+ tag_stack LocalTagStack = TagStack
+ for Tag:tag_id in LocalTagStack {
+ tag_id T = Tag
+ if match_text == T.data {
+ send_id = typeid close_id
+ break
+ }
+ }
+
+ send( make_token( send_id, pull(stdin, match_length) ) )
+ }
+}
+
+#
+# Tag Stack
+#
+
+def tag_stack
+ [tag_id tag_stack]
+| []
+
+global tag_stack TagStack = construct tag_stack []
+
+#
+# Document Type
+#
+# This scanner handles inside DOCTYPE tags (except keywords).
+lex DOCTYPE
+{
+ ignore /space+/
+ token dt_name /def_name/
+ token dt_literal /def_system_literal/
+ token dt_bl /"[" [^\]]* "]"/
+ token dt_close /'>'/
+}
+
+# Using a separate scanner for the keywords in DOCTYPE prevents them from
+# covering dt_name
+lex DOCTYPE_KW
+{
+ ignore /space+/
+ literal 'SYSTEM', 'PUBLIC'
+}
+
+def DOCTYPE ['<!DOCTYPE' dt_name external_id dt_bl? dt_close]
+
+def external_id
+ ['SYSTEM' dt_literal?]
+| ['PUBLIC' dt_literal dt_literal?]
+
+#
+# Tags, with optional close.
+#
+
+def tag
+ [open_tag item* opt_close_tag]
+
+def open_tag
+ ['<' tag_id attr* '>']
+ {
+ TagStack = construct tag_stack
+ [r2 TagStack]
+ }
+
+def opt_close_tag
+ ['</' close_id '>']
+ {
+ match TagStack [Top:tag_id Rest:tag_stack]
+ if r2.data == Top.data
+ TagStack = Rest
+ else
+ reject
+ }
+
+| []
+ {
+ match TagStack [Top:tag_id Rest:tag_stack]
+ TagStack = Rest
+ }
+
+#
+# Empty tags
+#
+def empty_tag
+ ['<' tag_id attr* '/>']
+
+#
+# Stray close tags
+#
+def stray_close
+ ['</' stray_close_id '>']
+
+
+#
+# Attributes
+#
+
+def attr
+ [attr_name eql_attr_val?]
+
+def eql_attr_val ['=' attr_val]
+
+def attr_val
+ [squote_val]
+| [dquote_val]
+| [unq_val]
+| []
+
+#
+# Items
+#
+
+def item
+ [DOCTYPE]
+| [tag]
+| [empty_tag]
+| [stray_close]
+| [doc_data]
+| [comment]
+
+
+token trailing /any*/
+
+def start
+ [item* trailing]
+
+#
+# END GRAMMAR
+#
+
+int addDefaultAltTags( ref start Start )
+{
+ for T: open_tag in Start {
+ require T
+ ["<img" AttrList: attr* '>']
+
+ bool haveAlt = false
+ for A: attr in T {
+ if match A ["alt=" attr_val]
+ haveAlt = true
+ }
+
+ if !haveAlt {
+ for AL: attr* in T {
+ if match AL [] {
+ AL = construct attr*
+ [" alt=\"default alt\""]
+ break
+ }
+ }
+ }
+ }
+}
+
+int printLinks( start Start )
+{
+ for A:tag in Start {
+ require A
+ ["<a" AttrList: attr* ">" I: item* "</a>"]
+
+ for Attr: attr in AttrList {
+ if match Attr ["href = " AttrVal: attr_val]
+ print( 'link: ', I, '\ntarget: ', AttrVal, '\n\n' )
+ }
+ }
+}
+
+
+bool should_close( tag_id TI )
+{
+ return true
+}
+
+bool should_flatten( tag_id TI )
+{
+ return true
+}
+
+# Finds unclosed tags and puts the content after the tag. Afterwards
+# all unclosed tags will be empty 'inside'.
+int flatten( ref start Start )
+{
+ for TL: item* in Start {
+ require TL
+ [OT: open_tag Inside: item* Trailing: item*]
+
+ match OT
+ ['<' TagId: tag_id attr* '>']
+
+ if should_flatten( TagId )
+ {
+ require Inside
+ [item item*]
+
+ # Put Trailing at the end of inside.
+ for END: item* in Inside {
+ if match END [] {
+ END = Trailing
+ break
+ }
+ }
+
+ opt_close_tag EmptyCloseTag =
+ construct opt_close_tag []
+
+ # Close the tag and put inside after it.
+ TL = construct item*
+ [OT EmptyCloseTag Inside]
+ }
+ }
+}
+
+int close( ref start Start )
+{
+ for TL: item in Start {
+ require TL
+ [OpenTag: open_tag Inside: item*]
+
+ match OpenTag
+ ['<' TagId: tag_id attr* '>']
+
+ if should_close( TagId )
+ {
+ close_id CloseId = construct close_id
+ [TagId.data]
+
+ opt_close_tag CloseTag =
+ construct opt_close_tag ['</' CloseId '>']
+
+ # Close the tag and put inside after it.
+ TL = construct item
+ [OpenTag Inside CloseTag]
+ }
+ }
+}
+
+start HTML = parse start( stdin )
+flatten( HTML )
+#print_xml( HTML )
+printLinks( HTML )
+
diff --git a/test/html/input01.html b/test/html/input01.html
new file mode 100644
index 00000000..cf783d63
--- /dev/null
+++ b/test/html/input01.html
@@ -0,0 +1,8 @@
+<t1>
+
+ <t2>
+ <a href="foo">FOO</a>
+ <t3>
+ </t3>
+
+</t1>
diff --git a/test/http/Makefile b/test/http/Makefile
new file mode 100644
index 00000000..25a0b6d3
--- /dev/null
+++ b/test/http/Makefile
@@ -0,0 +1,34 @@
+#
+# Copyright 2002-2006 Adrian Thurston <thurston@cs.queensu.ca>
+#
+
+# This file is part of Ragel.
+#
+# Ragel is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# Ragel is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ragel; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+
+SRC = $(wildcard *.lm)
+BIN = $(SRC:%.lm=%.bin)
+COLM = ../../colm/colm
+
+all: $(BIN)
+
+$(BIN): $(COLM)
+
+$(BIN): %.bin: %.lm
+ $(COLM) $<
+
+clean:
+ rm -f *.cpp *.bin
diff --git a/test/http/http.lm b/test/http/http.lm
new file mode 100644
index 00000000..d914ab6f
--- /dev/null
+++ b/test/http/http.lm
@@ -0,0 +1,68 @@
+#
+# Character classes
+#
+rl CTL /0..31 | 127/
+rl CR /13/
+rl LF /10/
+rl SP /32/
+rl HT /9/
+rl CHAR /0..127/
+
+rl separators / '(' | ')' | '<' | '>'
+ | '@' | ',' | ';' | ':' | '\\'
+ | '"' | '/' | '[' | ']' | '?'
+ | '=' | '{' | '}' | SP | HT /
+
+rl token_char /CHAR - CTL - separators/
+
+#
+# Literal tokens
+#
+
+literal 'HTTP/', ' ', ':'
+token CRLF /CR LF/
+
+#
+# Request Line
+#
+
+token method /token_char+/
+
+token request_uri /(^SP)+/
+
+token http_number /digit+ '.' digit+/
+
+def http_version
+ [ 'HTTP/' http_number ]
+
+def request_line
+ [method ' ' request_uri
+ ' ' http_version CRLF]
+
+#
+# Header
+#
+
+token field_name /token_char+/
+
+token field_value
+ /(^(CR|LF) | CR LF (SP|HT))* CR LF/
+
+def header
+ [field_name ':' field_value]
+
+#
+# Request
+#
+
+def request
+ [request_line header* CRLF]
+
+request R = parse_stop request( stdin )
+
+print( 'HTTP/1.0 200 OK\r\n' )
+print( 'Date: Fri, 31 Dec 1999 23:59:59 GMT\r\n' )
+print( 'Content-Type: text/plain\r\n' )
+print( '\r\n' )
+print_xml( R )
+
diff --git a/test/http/input1 b/test/http/input1
new file mode 100644
index 00000000..c1416f84
--- /dev/null
+++ b/test/http/input1
@@ -0,0 +1,2 @@
+GET /hi/there/ HTTP/1.1
+
diff --git a/test/http/input2 b/test/http/input2
new file mode 100644
index 00000000..076222d5
--- /dev/null
+++ b/test/http/input2
@@ -0,0 +1,13 @@
+GET /hithere/ HTTP/1.1
+Host: localhost:3535
+User-Agent: Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.8.1.12) Gecko/20080207 Ubuntu/7.10 (gutsy) Firefox/2.0.0.12
+Accept: text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5
+Accept-Language: en-us,en;q=0.5
+Accept-Encoding: gzip,deflate
+Accept-Charset: ISO-8859-1,utf-8;q=0.7,*;q=0.7
+Keep-Alive: 300
+Connection: keep-alive
+Cache-Control: max-age=0
+
+adslfkj
+alkfj
diff --git a/test/http/input3 b/test/http/input3
new file mode 100644
index 00000000..16b817f1
--- /dev/null
+++ b/test/http/input3
@@ -0,0 +1,8 @@
+GET foo HTTP/1.1
+hello: foo
+hi: there
+ my
+ friend
+
+ from outter space
+
diff --git a/test/http/xinetd.conf b/test/http/xinetd.conf
new file mode 100644
index 00000000..5c95545c
--- /dev/null
+++ b/test/http/xinetd.conf
@@ -0,0 +1,10 @@
+service colm_http
+{
+ type = unlisted
+ socket_type = stream
+ protocol = tcp
+ port = 3535
+ wait = no
+ user = thurston
+ server = /home/thurston/devel/colm/test/http/http.bin
+}
diff --git a/test/island.in b/test/island.in
new file mode 100644
index 00000000..d34467bb
--- /dev/null
+++ b/test/island.in
@@ -0,0 +1,19 @@
+class
+{
+ 1;
+ "string";
+ foo;
+ func()
+ {
+ func()
+ {
+ 1+{2}
+ }
+ }
+}
+
+func()
+{
+ "data"
+ {a}
+}
diff --git a/test/island.lm b/test/island.lm
new file mode 100644
index 00000000..c407aa28
--- /dev/null
+++ b/test/island.lm
@@ -0,0 +1,57 @@
+
+lex function_body
+{
+ token func_chr /[^{}]+/
+ token func_open /'{'/
+ token func_close /'}'/
+}
+
+def func_item
+ [func_chr]
+| [func_open func_body func_close]
+
+def func_body
+ [func_item*]
+
+def func
+ [ident '(' ')' '{' func_body func_close ]
+
+lex start
+{
+ token ident /[a-zA-Z_]+/
+ token number /[0-9]+/
+
+ rl s_string / "'" ([^'\\\n] | '\\' any )* "'" /
+ rl d_string / '"' ([^"\\\n] | '\\' any )* '"' /
+ token string /s_string | d_string/
+
+ literal '+', '*', ';', '(', ')', '{', '}'
+
+ ignore wp / [ \t\n]+ /
+}
+
+def class_item
+ [func]
+| [class]
+| [ident ';']
+| [number ';']
+| [string ';']
+
+def class_body
+ [class_item*]
+
+def class
+ [ident '{' class_body '}' ]
+
+def top_item
+ [func]
+| [class]
+
+def start
+ [top_item*]
+ {
+ print_xml(lhs)
+ }
+
+#pattern start
+# ~class { func() { func() { 1+{2}} } } func() {{a}}
diff --git a/test/liftattrs.in b/test/liftattrs.in
new file mode 100644
index 00000000..5a50f377
--- /dev/null
+++ b/test/liftattrs.in
@@ -0,0 +1,3 @@
+<t1 a=b foo=bar1 c=d>
+ <t2 foo=bar2 e=f></t2>
+</t1>
diff --git a/test/liftattrs.lm b/test/liftattrs.lm
new file mode 100644
index 00000000..305a805b
--- /dev/null
+++ b/test/liftattrs.lm
@@ -0,0 +1,74 @@
+
+#
+# Regular Definitions
+#
+rl rl_ws /[ \t\n\r\v]+/
+rl rl_id /[a-zA-Z_][a-zA-Z0-9_]*/
+
+#
+# Tokens
+#
+
+lex start
+{
+ literal '=', '<', '>', '/'
+
+ # Ignore whitespace.
+ ignore /rl_ws/
+
+ # Open and close id
+ token id /rl_id/
+}
+
+#
+# Productions
+#
+
+def attr [id '=' id]
+
+def attr_list
+ [attr_list attr]
+| []
+
+def open_tag
+ ['<' id attr_list '>']
+
+def close_tag
+ ['<' '/' id '>']
+
+def tag
+ [open_tag item_list close_tag]
+
+def item_list
+ [item_list tag]
+| []
+
+item_list IL = parse item_list(stdin)
+
+# Get the item list
+match IL [RootItemList: item_list]
+
+# List for collecting the attrs we pull out.
+attr_list CollectedAttrs = construct attr_list []
+
+# Iterate through all attributes
+for AttrListIter:attr_list in RootItemList {
+ # If the name of the attr is foo, remove it.
+ if match AttrListIter
+ [SubAttrList:attr_list "foo=" Val:id]
+ {
+ # Remove the attribute
+ AttrListIter = construct attr_list
+ [SubAttrList]
+
+		# Add it to the collection
+ CollectedAttrs = construct attr_list
+ [CollectedAttrs " foo=" Val]
+ }
+}
+
+# Reconstruct the item list, wrapping it in a tag that carries the collected attributes.
+IL = construct item_list
+ ["<wrapper" CollectedAttrs ">" RootItemList "</wrapper>"]
+
+print( IL, '\n' )
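The transformation above removes every foo=... attribute from the parsed tree and re-attaches the collected attributes to a single wrapper tag. A toy version of the same lifting in plain Python, on a simplified attribute representation (all names are illustrative, not part of the test):

def lift_foo(tags):
    collected = []
    stripped = []
    for name, attrs in tags:
        # Keep everything except foo=..., and collect the removed pairs.
        kept = [(k, v) for k, v in attrs if k != 'foo']
        collected += [(k, v) for k, v in attrs if k == 'foo']
        stripped.append((name, kept))
    return ('wrapper', collected, stripped)

doc = [('t1', [('a', 'b'), ('foo', 'bar1'), ('c', 'd')]),
       ('t2', [('foo', 'bar2'), ('e', 'f')])]
print(lift_foo(doc))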
diff --git a/test/mailbox.in b/test/mailbox.in
new file mode 100644
index 00000000..412f8bed
--- /dev/null
+++ b/test/mailbox.in
@@ -0,0 +1,29 @@
+From thurston Tue Jan 2 21:16:50 2007
+Return-Path: <unknown>
+X-Spam-Level: *
+Received: from [109.111.71.111] (helo=twfmtr)
+ by zifreax with smtp (Exim 4.43)
+ id 1H1vfs-0005LN-HW; Tue, 2 Jan 2007 21:16:16 -0500
+Message-ID: <459B113F.8050903@immoarthabitatge.com>
+X-Keywords:
+X-UID: 1
+
+Content-Type: text/html; charset=ISO-8859-1
+</body>
+</html>
+
+From thurston Wed Jan 3 02:35:48 2007
+Return-Path: <unknown>
+X-Spam-Checker-Version: SpamAssassin 3.1.1 (2006-03-10) on mambo.cs.queensu.ca
+X-Spam-Level: **
+X-Spam-Status: No, score=2.9 required=5.0 tests=BAYES_20,EXTRA_MPART_TYPE,
+ HTML_40_50,HTML_IMAGE_ONLY_16,HTML_MESSAGE,RCVD_IN_BL_SPAMCOP_NET
+ autolearn=no version=3.1.1
+X-Bogosity: Unsure, tests=bogofilter, spamicity=0.971708, version=1.0.2
+Status: RO
+X-UID: 2
+
+------=_NextPart_000_0010_01C72F11.F137BD60
+ charset="windows-1252"
+Content-Transfer-Encoding: quoted-printable
+
diff --git a/test/mailbox.lm b/test/mailbox.lm
new file mode 100644
index 00000000..3387fcff
--- /dev/null
+++ b/test/mailbox.lm
@@ -0,0 +1,44 @@
+
+# lines, and fromlines
+lex lines
+{
+ rl day /[A-Z][a-z][a-z]/
+ rl month /[A-Z][a-z][a-z]/
+ rl year /[0-9][0-9][0-9][0-9]/
+ rl time /[0-9][0-9] ':' [0-9][0-9] ( ':' [0-9][0-9] )? /
+ rl letterZone /[A-Z][A-Z][A-Z]/
+ rl numZone /[+\-][0-9][0-9][0-9][0-9]/
+ rl zone / letterZone | numZone/
+ rl dayNum /[0-9 ][0-9]/
+
+ # These are the different formats of the date minus an obscure
+ # type that has a funny string 'remote from xxx' on the end. Taken
+ # from c-client in the imap-2000 distribution.
+ rl date / day ' ' month ' ' dayNum ' ' time ' '
+ ( year | year ' ' zone | zone ' ' year ) /
+
+	# From lines separate messages. A from_line is excluded from the message
+	# body lines, so the scanner stays on simple_line until an entirely
+	# correct From line is matched.
+ token from_line / 'From ' (any-'\n')* ' ' date '\n' /
+ token simple_line / [^\n]* '\n' /
+}
+
+rl hchar /print - [ :]/
+token header_name /hchar+/
+
+token colon /':' ' '*/
+token header_content / ([^\n] | '\n' [ \t])* '\n'/
+token blank_line / '\n' /
+
+def header
+ [header_name colon header_content]
+
+def message
+ [from_line header* blank_line simple_line*]
+
+def start
+ [message*]
+ {
+ print_xml( lhs )
+ }
diff --git a/test/matchex.in b/test/matchex.in
new file mode 100644
index 00000000..f458f2ad
--- /dev/null
+++ b/test/matchex.in
@@ -0,0 +1,3 @@
+<person name=adrian hometown=kingston>
+ <t1 foo=bar2 e=f></t2>
+</person> \ No newline at end of file
diff --git a/test/matchex.lm b/test/matchex.lm
new file mode 100644
index 00000000..67b69238
--- /dev/null
+++ b/test/matchex.lm
@@ -0,0 +1,34 @@
+lex start
+{
+ token id /[a-zA-Z_][a-zA-Z0-9_]*/
+ literal '=', '<', '>', '/'
+ ignore /[ \t\n\r\v]+/
+}
+
+def attr
+ [id '=' id]
+
+def open_tag
+ ['<' id attr* '>']
+
+def close_tag
+ ['<' '/' id '>']
+
+def tag
+ [open_tag item* close_tag]
+
+def item
+ [tag]
+| [id]
+
+tag Tag = parse tag( stdin )
+
+# Style: List of literal text and types.
+match Tag ["<person name=" Val1:id attr*">" item* "</person>"]
+
+# Style: Literal text with embedded lists of types.
+match Tag "<person name=[Val2:id attr*]>[item*]</person>"
+
+print( Val1, '\n' )
+print( Val2, '\n' )
+
diff --git a/test/maxlen.lm b/test/maxlen.lm
new file mode 100644
index 00000000..19869634
--- /dev/null
+++ b/test/maxlen.lm
@@ -0,0 +1,44 @@
+
+#
+# Regular Definitions
+#
+rl rl_ws /[ \t\n\r\v]+/
+rl rl_id /[a-zA-Z_][a-zA-Z0-9_]*/
+
+#
+# Tokens
+#
+
+lex start
+{
+ ignore /rl_ws/
+ token id /rl_id/
+}
+
+global int num
+global int allow = 3
+
+def item
+ [id]
+ {
+ num = num + 1
+ int toomuch = allow+1
+ if num == toomuch {
+ reject
+ }
+ }
+
+def open
+ []
+ {
+ num = 0
+ }
+
+def close []
+
+def restricted_list
+ [open item*]
+
+def start
+ [restricted_list id*]
+
diff --git a/test/nestedcomm.in b/test/nestedcomm.in
new file mode 100644
index 00000000..11789576
--- /dev/null
+++ b/test/nestedcomm.in
@@ -0,0 +1 @@
+hello there ( (this is a nested comment /*sdf;asd_++_stuff) ) and this is not
diff --git a/test/nestedcomm.lm b/test/nestedcomm.lm
new file mode 100644
index 00000000..cc28726e
--- /dev/null
+++ b/test/nestedcomm.lm
@@ -0,0 +1,41 @@
+#
+# Tokens
+#
+
+# Any single character can be a literal
+lex start
+{
+ # Ignore whitespace.
+ ignore /[ \t\n\r\v]+/
+
+ # Open and close id
+ token id /[a-zA-Z_][a-zA-Z0-9_]*/
+
+ token open_paren /'('/
+ {
+ send_ignore( parse_stop nested_comment( stdin ) )
+ }
+}
+
+#
+# Token translation
+#
+
+lex nc_scan
+{
+ literal '(', ')'
+ token nc_data /[^()]+/
+}
+
+def nc_item
+ [nc_data]
+| [nested_comment]
+
+def nested_comment
+ ['(' nc_item* ')']
+
+def nested [id*]
+
+nested P = parse nested( stdin )
+print_xml( P )
+print( P, '\n' )
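The open_paren action above pauses the main scanner, parses the whole nested comment with parse_stop, and sends it as ignore, so identifiers inside comments never reach the grammar. A small stand-alone sketch of the same control flow in plain Python (all names are illustrative):

def skip_nested_comment(text, i):
    # text[i] is the opening '('; return the index just past its matching ')'.
    assert text[i] == '('
    depth = 1
    i += 1
    while depth > 0:
        if text[i] == '(':
            depth += 1
        elif text[i] == ')':
            depth -= 1
        i += 1
    return i

def scan_ids(text):
    ids, i = [], 0
    while i < len(text):
        c = text[i]
        if c == '(':
            i = skip_nested_comment(text, i)   # comment content is discarded
        elif c.isalpha() or c == '_':
            j = i
            while j < len(text) and (text[j].isalnum() or text[j] == '_'):
                j += 1
            ids.append(text[i:j])
            i = j
        else:
            i += 1
    return ids

print(scan_ids('hello there ( (nested /*stuff) ) and this is not'))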
diff --git a/test/python/Makefile b/test/python/Makefile
new file mode 100644
index 00000000..eeab1e53
--- /dev/null
+++ b/test/python/Makefile
@@ -0,0 +1,18 @@
+#
+# Copyright 2007 Adrian Thurston <thurston@cs.queensu.ca>
+#
+
+
+SRC = $(wildcard *.lm)
+BIN = $(SRC:%.lm=%.bin)
+COLM = ../../colm/colm
+
+all: $(BIN)
+
+$(BIN): $(COLM)
+
+$(BIN): %.bin: %.lm
+ $(COLM) $<
+
+clean:
+ rm -f *.cpp *.bin
diff --git a/test/python/input1.py b/test/python/input1.py
new file mode 100644
index 00000000..22ffd2e1
--- /dev/null
+++ b/test/python/input1.py
@@ -0,0 +1,18 @@
+
+# dude, this is a comment
+ # some more
+hello
+def dude():
+ yes
+ awesome;
+
+ # Here we have a comment
+ def realy_awesome(): # hi there
+ in_more
+
+ same_level
+ def one_liner(): first; second # both inside one_liner
+
+ back_down
+
+last_statement
diff --git a/test/python/input2.py b/test/python/input2.py
new file mode 100644
index 00000000..063825e1
--- /dev/null
+++ b/test/python/input2.py
@@ -0,0 +1,20 @@
+
+# dude, this is a comment
+ # some more
+hello
+if 1:
+ yes
+ awesome;
+
+ # Here we have a comment
+ if ('hello'): # hi there
+ in_more
+
+ same_level
+ if ['dude', 'dudess'].horsie(): first; second # both inside one_liner
+ 1
+
+ back_down
+
+last_statement
+
diff --git a/test/python/input3.py b/test/python/input3.py
new file mode 100644
index 00000000..90ecf3f9
--- /dev/null
+++ b/test/python/input3.py
@@ -0,0 +1 @@
+hello = 1.1(20);
diff --git a/test/python/input4.py b/test/python/input4.py
new file mode 100644
index 00000000..1a281c46
--- /dev/null
+++ b/test/python/input4.py
@@ -0,0 +1,10 @@
+
+# subscription
+a[1] = b[2];
+
+# simple slicing
+a[1:1] = b[2:2];
+
+# simple slicing
+a[1:1, 2:2] = b[3:3, 4:4];
+
diff --git a/test/python/python.lm b/test/python/python.lm
new file mode 100644
index 00000000..7d579955
--- /dev/null
+++ b/test/python/python.lm
@@ -0,0 +1,726 @@
+# Regular definitions
+rl ident_char /[a-zA-Z_]/
+
+# List used as a stack of indentations.
+list indent_stack [int]
+global indent_stack IndentStack = construct indent_stack []
+IndentStack.push( 0 )
+
+# Has a newline been sent for this '\n' .. whitespace match.
+global int newline_sent = 0
+
+# Tokens.
+lex start
+{
+ # Python keywords.
+ literal 'and', 'del', 'from', 'not', 'while', 'as', 'elif', 'global', 'or',
+ 'with', 'assert', 'else', 'if', 'pass', 'yield', 'break', 'except',
+ 'import', 'print', 'class', 'exec', 'in', 'raise', 'continue',
+ 'finally', 'is', 'return', 'def', 'for', 'lambda', 'try'
+
+ # Identifiers
+ rl lowercase /'a'..'z'/
+ rl uppercase /'A'..'Z'/
+ rl letter /lowercase | uppercase/
+ token identifier /(letter|'_') (letter | digit | '_')*/
+
+ # Literals
+ rl escapeseq /'\\' any /
+ rl longstringchar /[^\\]/
+ rl shortstringchar_s /[^\\\n']/
+ rl shortstringchar_d /[^\\\n"]/
+ rl longstringitem /longstringchar | escapeseq/
+ rl shortstringitem_s /shortstringchar_s | escapeseq/
+ rl shortstringitem_d /shortstringchar_d | escapeseq/
+ rl longstring /"'''" longstringitem* :>> "'''" | '"""' longstringitem* :>> '"""'/
+ rl shortstring /"'" shortstringitem_s* "'" | '"' shortstringitem_d* '"'/
+ rl stringprefix /"r" | "u" | "ur" | "R" | "U" | "UR" | "Ur" | "uR"/
+ token stringliteral /stringprefix? (shortstring | longstring)/
+
+ # Integers
+ rl hexdigit /digit | 'a'..'f' | 'A'..'F'/
+ rl octdigit /'0'..'7'/
+ rl nonzerodigit /'1'..'9'/
+ rl hexinteger /'0' ('x' | 'X') hexdigit+/
+ rl octinteger /'0' octdigit+/
+ rl decimalinteger /nonzerodigit digit* | '0'/
+ token integer /decimalinteger | octinteger | hexinteger/
+ token longinteger /integer ('l' | 'L')/
+
+ # Floats.
+ rl exponent /('e' | 'E') ('+' | '-')? digit+/
+ rl fraction /'.' digit+/
+ rl intpart /digit+/
+ rl pointfloat /intpart? fraction | intpart '.'/
+ rl exponentfloat /(intpart | pointfloat) exponent/
+ token floatnumber /pointfloat | exponentfloat/
+
+ # Imaginaries.
+ token imagnumber /(floatnumber | intpart) ("j" | "J")/
+
+ # Operators.
+ literal '+', '-', '*', '**', '/', '//', '%', '<<', '>>', '&', '|', '^',
+ '~', '<', '>', '<=', '>=', '==', '!=', '<>'
+
+ # Delimiters
+ literal '(', ')', '[', ']', '{', '}', '@', ',', ':', '.', '`', '=', ';',
+ '+=', '-=', '*=', '/=', '//=', '%=', '&=', '|=', '^=', '>>=', '<<=',
+ '**='
+
+ literal '...'
+
+ # In general whitespace is ignored.
+ ignore WS /' '+/
+
+ # Find and ignore entire blank lines.
+ token BLANK_LINE
+ / '\n' [ \t]* ('#' [^\n]*)? '\n' /
+ {
+ # Need to shorten to take off the newline.
+ # Turn it into ignore.
+ send_ignore( make_token( typeid WS, pull(stdin, match_length - 1) ) )
+ }
+
+ # Find and ignore comments.
+ token COMMENT
+ / '#' [^\n]* '\n' /
+ {
+ # Need to shorten to take off the newline. Turn it into ignore.
+ send_ignore( make_token( typeid WS, pull(stdin, match_length - 1) ) )
+ }
+
+	# These tokens are generated by the INDENTATION action below.
+ token INDENT //
+ token DEDENT //
+ token NEWLINE //
+ ignore IND_WS //
+
+ token INDENTATION
+ /'\n' [ \t]*/
+ {
+ # First the newline.
+ send( make_token( typeid NEWLINE, '' ) )
+
+		# The newline has already been sent; now compute the indentation level.
+ int data_length = match_length - 1
+
+ if data_length > IndentStack.top {
+ # The indentation level is more than the level on the top
+ # of the stack. This is an indent event. Send as an INDENT.
+ send( make_token( typeid INDENT, '' ) )
+
+			# Push to the stack, as per the Python language reference.
+ IndentStack.push( data_length )
+ } else {
+ while data_length < IndentStack.top {
+ # The indentation level is less than the level on the top of
+ # the stack. Pop the level and send one dedent. This flow of
+ # control will execute until we find the right indentation level
+ # to match up with.
+ IndentStack.pop()
+
+ # Send as a DEDENT
+ send( make_token( typeid DEDENT, '' ) )
+ }
+ }
+
+		# FIXME: if data_length is now > top of stack then error. This
+		# means the dedent does not match any enclosing indentation level.
+
+ # We have squared up INDENTs and DEDENTs. Ignore the entire match.
+ send_ignore( make_token( typeid WS, pull(stdin, match_length) ) )
+ }
+}
+
+# Blank lines or comment lines at the beginning of the file.
+token LEADER / ( [ \t]* ('#' [^\n]*)? '\n' )* /
+
+int print_target_subscriptions_and_slicings( start Start )
+{
+ for TI: target_ext in Start {
+ if match TI [subscription] {
+ print( 'TARGET SUBSCRIPTION: ', TI, '\n' )
+ }
+
+ if match TI [simple_slicing] {
+ print( 'TARGET SIMPLE SLICING: ', TI, '\n' )
+ }
+
+ if match TI [extended_slicing] {
+ print( 'TARGET EXTENDED SLICING: ', TI, '\n' )
+ }
+ }
+
+}
+
+int print_primary_subscriptions_and_slicings( start Start )
+{
+ for PI:primary_ext in Start {
+ if match PI [subscription] {
+ print( 'PRIMARY SUBSCRIPTION: ', PI, '\n' )
+ }
+
+ if match PI [simple_slicing] {
+ print( 'PRIMARY SIMPLE SLICING: ', PI, '\n' )
+ }
+
+ if match PI [extended_slicing] {
+ print( 'PRIMARY EXTENDED SLICING: ', PI, '\n' )
+ }
+ }
+}
+
+def start
+ [file_input]
+
+def file_input
+ [file_input_forms*]
+
+def file_input_forms
+ [statement]
+| [NEWLINE]
+
+def statement
+ [stmt_list NEWLINE]
+| [compound_stmt]
+
+def stmt_list
+ [simple_stmt another_stmt* opt_semi]
+
+def another_stmt
+ [';' simple_stmt]
+
+def opt_semi
+ [';']
+| []
+
+def suite
+ [stmt_list NEWLINE]
+| [NEWLINE INDENT statement_seq DEDENT]
+
+def statement_seq
+ [statement_seq statement]
+| [statement]
+
+def compound_stmt
+ [if_stmt]
+| [while_stmt]
+| [for_stmt]
+| [try_stmt]
+| [with_stmt]
+| [funcdef]
+| [classdef]
+
+def if_stmt
+ ['if' expression ':' suite elif_part* opt_else_part]
+
+def elif_part
+ ['elif' expression ':' suite]
+
+def opt_else_part
+ ['else' ':' suite]
+| []
+
+def while_stmt
+ ['while' expression ':' suite opt_else_part]
+
+def for_stmt
+ ['for' target_list 'in' expression_list ':' suite opt_else_part]
+
+def try_stmt
+ ['try' ':' suite except_list opt_else_part opt_finally_part]
+| ['try' ':' suite 'finally' ':' suite]
+
+def except_list
+ [except_list except_part]
+| [except_part]
+
+def except_part
+ ['except' ':' suite]
+| ['except' expression ':' suite]
+| ['except' expression ',' target ':' suite]
+
+def opt_finally_part
+ ['finally' ':' suite]
+| []
+
+def with_stmt
+ ['with' expression ':' suite]
+| ['with' expression 'as' target ':' suite]
+
+def funcdef
+ [decorators 'def' funcname '(' opt_parameter_list ')' ':' suite]
+
+def funcname
+ [identifier]
+
+def decorators
+ [decorators decorator]
+| []
+
+def decorator
+ ['@' dotted_name opt_decorator_pal NEWLINE]
+
+def opt_decorator_pal
+ []
+| ['(' ')']
+| ['(' argument_list ')']
+| ['(' argument_list ',' ')']
+
+def dotted_name
+ [dotted_name '.' identifier]
+| [identifier]
+
+def opt_parameter_list
+ [parameter_list]
+| []
+
+def parameter_list
+ [defparameter_list defparameter opt_comma]
+| [defparameter_list '*' identifier]
+| [defparameter_list '*' identifier '**' identifier]
+| [defparameter_list '**' identifier]
+
+def defparameter_list
+ [defparameter_list defparameter ',']
+| []
+
+def defparameter
+ [parameter]
+| [parameter '=' expression]
+
+def sublist
+ [parameter_list opt_comma]
+
+def parameter_list
+ [parameter_list ',' parameter]
+| [parameter]
+
+def parameter
+ [identifier]
+| ['(' sublist ')']
+
+def classdef
+ ['class' classname opt_inheritance ':' suite]
+
+def classname
+ [identifier]
+
+def opt_inheritance
+ ['(' ')']
+| ['(' expression_list ')']
+| []
+
+def simple_stmt
+ [expression_stmt]
+| [assert_stmt]
+| [assignment_stmt]
+| [augmented_assignment_stmt]
+| [pass_stmt]
+| [del_stmt]
+| [print_stmt]
+| [return_stmt]
+| [yield_stmt]
+| [raise_stmt]
+| [break_stmt]
+| [continue_stmt]
+| [import_stmt]
+| [global_stmt]
+| [exec_stmt]
+
+def expression_stmt
+ [expression_list]
+
+def assert_stmt
+ ['assert' expression_list_core]
+
+def assignment_stmt
+ [target_equals_list expression_list]
+
+def target_equals_list
+ [target_equals_list target_equals]
+| [target_equals]
+
+def target_equals
+ [target_list '=']
+
+def target_list
+ [target_list_core opt_comma]
+
+def target_list_core
+ [target_list_core ',' target]
+| [target]
+
+def target
+ [target_atom target_ext_rep]
+
+def target_atom
+ [identifier]
+| ['(' target_list ')']
+| ['[' target_list ']']
+
+def target_ext_rep [target_ext target_ext_rep]
+def target_ext_rep []
+
+def target_ext
+ [attributeref]
+| [subscription]
+| [slicing]
+
+def augmented_assignment_stmt
+ [target augop expression_list]
+
+def augop
+ ['+='] | ['-='] | ['*='] | ['/=']
+| ['\%='] | ['**='] | ['>>='] | ['<<='] | ['\&=']
+| ['^'] | ['|=']
+
+def pass_stmt
+ ['pass']
+
+def del_stmt
+ ['del' target_list]
+
+def print_stmt
+ ['print' opt_expression_list]
+| ['print' '>>' expression_list]
+
+def return_stmt
+ ['return' opt_expression_list]
+
+def yield_stmt
+ ['yield' expression_list]
+
+def raise_stmt
+ ['raise']
+| ['raise' expression]
+| ['raise' expression ',' expression]
+| ['raise' expression ',' expression ',' expression]
+
+def break_stmt
+ ['break']
+
+def continue_stmt
+ ['continue']
+
+def import_stmt
+ ['import' module opt_as_name more_imports]
+| ['from' module 'import' identifier opt_as_name more_imports]
+| ['from' module 'import' '(' identifier opt_as_name more_imports opt_comma ')']
+| ['from' module 'import' '*']
+
+def more_imports
+ [more_imports ',' identifier opt_as_name]
+| []
+
+def module
+ [module '.' identifier]
+| [identifier]
+
+def opt_as_name
+ ['as' identifier]
+| []
+
+def global_stmt
+ ['global' identifer_list]
+
+def identifer_list
+ [identifer_list ',' identifier]
+| [identifier]
+
+def exec_stmt
+ ['exec' expression]
+| ['exec' expression 'in' expression]
+| ['exec' expression 'in' expression ',' expression]
+
+def opt_expression_list
+ [expression_list]
+| []
+
+def expression_list
+ [expression_list_core opt_comma]
+
+def expression_list_core
+ [expression_list_core ',' expression]
+| [expression]
+
+def opt_comma
+ [',']
+| []
+
+def expression
+ [or_test 'if' or_test 'else' test]
+| [or_test]
+| [lambda_form]
+
+def or_test
+ [or_test 'or' and_test]
+| [and_test]
+
+def and_test
+ [and_test 'and' not_test]
+| [not_test]
+
+def not_test
+ [comparison]
+| ['not' not_test]
+
+def lambda_form
+ ['lambda' opt_parameter_list ':' expression]
+
+def test
+ [or_test]
+| [lambda_form]
+
+def comparison
+ [or_expr comparison_part*]
+
+def comparison_part
+ [comp_operator or_expr]
+
+def comp_operator
+ ['<'] | ['>'] | ['=='] | ['>='] | ['<='] | ['<>'] | ['!='] | ['is'] |
+ ['is' 'not'] | ['in'] | ['not' 'in']
+
+def or_expr
+ [or_expr '|' xor_expr]
+| [xor_expr]
+
+def xor_expr
+ [xor_expr '^' and_expr]
+| [and_expr]
+
+def and_expr
+ [and_expr '&' shift_expr]
+| [shift_expr]
+
+def shift_expr
+ [shift_expr '<<' a_expr]
+| [shift_expr '>>' a_expr]
+| [a_expr]
+
+def a_expr
+ [a_expr '+' m_expr]
+| [a_expr '-' m_expr]
+| [m_expr]
+
+def m_expr
+ [m_expr '*' u_expr]
+| [m_expr '//' u_expr]
+| [m_expr '/' u_expr]
+| [m_expr '\%' u_expr]
+| [u_expr]
+
+def u_expr
+ [power]
+| ['-' u_expr]
+| ['+' u_expr]
+| ['\~' u_expr]
+
+def power
+ [primary '**' u_expr]
+| [primary]
+
+def primary
+ [atom primary_ext_rep]
+
+def atom
+ [identifier]
+| [pyliteral]
+| [enclosure]
+
+def primary_ext_rep
+ [primary_ext primary_ext_rep]
+| []
+
+def primary_ext
+ [attributeref]
+| [subscription]
+| [slicing]
+| [call]
+
+def pyliteral
+ [stringliteral]
+| [integer]
+| [longinteger]
+| [floatnumber]
+| [imagnumber]
+
+def enclosure
+ [parenth_form]
+| [list_display]
+| [generator_expression]
+| [dict_display]
+| [string_conversion]
+
+def parenth_form
+ ['(' opt_expression_list ')']
+
+def list_display
+ ['[' opt_listmaker ']']
+
+def opt_listmaker
+ [listmaker]
+| []
+
+def listmaker
+ [expression list_for]
+| [expression listmaker_ext* opt_comma]
+
+def listmaker_ext
+ [',' expression]
+
+def opt_list_iter
+ [list_iter]
+| []
+
+def list_iter
+ [list_for]
+| [list_if]
+
+def list_if
+ ['if' test opt_list_iter]
+
+def list_for
+ ['for' expression_list 'in' testlist opt_list_iter]
+
+def testlist
+ [test testlist_ext* opt_comma]
+
+def testlist_ext
+ [',' test ]
+
+def generator_expression
+ ['(' test genexpr_for ')']
+
+def genexpr_for
+ ['for' expression_list 'in' test opt_genexpr_iter]
+
+def opt_genexpr_iter
+ [genexpr_iter]
+| []
+
+def genexpr_iter
+ [genexpr_for]
+| [genexpr_if]
+
+def genexpr_if
+ ['if' test opt_genexpr_iter]
+
+def dict_display
+ ['\{' opt_key_datum_list '\}']
+
+def opt_key_datum_list
+ [key_datum_list]
+| []
+
+def key_datum_list
+ [key_datum key_datum_list_ext* opt_comma]
+
+def key_datum_list_ext
+ [',' key_datum]
+
+def key_datum
+ [expression ':' expression]
+
+def string_conversion
+ ['`' expression_list '`']
+
+def attributeref
+ ['.' identifier]
+
+def subscription
+ ['[' expression_list ']']
+
+# The natural ordered choice does not suffice here. Must force it.
+
+def slicing
+ [simple_slicing]
+| [extended_slicing]
+
+def simple_slicing
+ ['[' short_slice ']']
+
+def extended_slicing
+ ['[' slice_list ']']
+
+def slice_list
+ [slice_item slice_list_ext* opt_comma]
+
+def slice_list_ext
+ [',' slice_item]
+
+def slice_item
+ [expression]
+| [proper_slice]
+| [ellipsis]
+
+def proper_slice
+ [short_slice]
+| [long_slice]
+
+def short_slice
+ [':']
+| [':' upper_bound]
+| [lower_bound ':']
+| [lower_bound ':' upper_bound]
+
+def long_slice
+ [short_slice ':' stride]
+| [short_slice ':']
+
+def lower_bound
+ [expression]
+
+def upper_bound
+ [expression]
+
+def stride
+ [expression]
+
+def ellipsis
+ ['...']
+
+def call
+ ['(' opt_argument_list ')']
+
+def opt_argument_list
+ [argument_list opt_comma]
+| []
+
+def argument_list
+ [positional_arguments opt_comma_keyword_arguments
+ opt_comma_star_expr opt_comma_dstar_expr]
+| [keyword_arguments opt_comma_star_expr opt_comma_dstar_expr]
+| ['*' expression opt_comma_dstar_expr]
+| ['**' expression]
+
+def opt_comma_star_expr
+ [',' '*' expression]
+| []
+
+def opt_comma_dstar_expr
+ [',' '**' expression]
+| []
+
+def positional_arguments
+ [positional_arguments ',' expression]
+| [expression]
+
+def opt_comma_keyword_arguments
+ [',' keyword_arguments]
+| []
+
+def keyword_arguments
+ [keyword_arguments ',' keyword_item]
+| [keyword_item]
+
+def keyword_item
+ [identifier '=' expression]
+
+
+start S = parse start( stdin )
+#print_xml( S )
+print_target_subscriptions_and_slicings( S )
+print_primary_subscriptions_and_slicings( S )
+print( '*** SUCCESS ***\n' )
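The INDENTATION action in python.lm above is the standard indent-stack scheme: emit a NEWLINE, then push and emit an INDENT when the new line is deeper than the top of the stack, or pop and emit one DEDENT per abandoned level while it is shallower. A minimal Python sketch of the same bookkeeping, working from already-computed indentation widths rather than raw '\n'-plus-whitespace matches (names here are illustrative, not part of the grammar):

    def indentation_events(widths):
        """widths: leading-whitespace width of each logical line, in order.
        Yields the NEWLINE/INDENT/DEDENT stream the grammar above consumes."""
        stack = [0]
        for w in widths:
            yield 'NEWLINE'              # the newline that ended the previous line
            if w > stack[-1]:
                stack.append(w)          # deeper: push and emit a single INDENT
                yield 'INDENT'
            else:
                while w < stack[-1]:     # shallower: pop and emit one DEDENT per level
                    stack.pop()
                    yield 'DEDENT'
                # if w still exceeds stack[-1] here, the dedent lines up with no
                # enclosing level -- the error case flagged by the FIXME above.

    # list(indentation_events([0, 4, 8, 4, 0]))
    # -> ['NEWLINE', 'NEWLINE', 'INDENT', 'NEWLINE', 'INDENT',
    #     'NEWLINE', 'DEDENT', 'NEWLINE', 'DEDENT']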
diff --git a/test/ragelambig.in b/test/ragelambig.in
new file mode 100644
index 00000000..0b4439e5
--- /dev/null
+++ b/test/ragelambig.in
@@ -0,0 +1 @@
+1 - 1
diff --git a/test/ragelambig1.lm b/test/ragelambig1.lm
new file mode 100644
index 00000000..1c292fd1
--- /dev/null
+++ b/test/ragelambig1.lm
@@ -0,0 +1,65 @@
+lex start
+{
+ ignore /[\t\n ]+/
+ literal '^', '|', '-', ',', ':', '!', '?', '.'
+ literal '(', ')', '{', '}', '*', '&', '+'
+
+ literal '--', ':>', ':>>', '<:', '->', '**'
+
+ token word /[a-zA-Z_][a-zA-Z0-9_]*/
+ token uint /[0-9]+/
+}
+
+
+def start
+ [expression]
+ {
+ print_xml( lhs )
+ }
+
+def expression
+ [expression '|' term]
+| [expression '&' term]
+| [expression '-' term]
+| [expression '--' term]
+| [term]
+
+def term
+ [term factor_with_rep]
+ {
+ if match lhs [term '-' uint] {
+ reject
+ }
+ }
+| [term '.' factor_with_rep]
+| [term ':>' factor_with_rep]
+| [term ':>>' factor_with_rep]
+| [term '<:' factor_with_rep]
+| [factor_with_rep]
+
+def factor_with_rep
+ [factor_with_rep '*']
+| [factor_with_rep '**']
+| [factor_with_rep '?']
+| [factor_with_rep '+']
+| [factor_with_rep '{' factor_rep_num '}']
+| [factor_with_rep '{' ',' factor_rep_num '}']
+| [factor_with_rep '{' factor_rep_num ',' '}']
+| [factor_with_rep '{' factor_rep_num ',' factor_rep_num '}']
+| [factor_with_neg]
+
+def factor_rep_num [uint]
+
+def factor_with_neg
+ ['!' factor_with_neg]
+| ['^' factor_with_neg]
+| [factor]
+
+def factor
+ [alphabet_num]
+| [word]
+| ['(' expression ')']
+
+def alphabet_num
+ [uint]
+| ['-' uint]
diff --git a/test/ragelambig2.lm b/test/ragelambig2.lm
new file mode 100644
index 00000000..70e97c66
--- /dev/null
+++ b/test/ragelambig2.lm
@@ -0,0 +1,65 @@
+lex start
+{
+ ignore /[\t\n ]+/
+ literal '^', '|', '-', ',', ':', '!', '?', '.'
+ literal '(', ')', '{', '}', '*', '&', '+'
+
+ literal '--', ':>', ':>>', '<:', '->', '**'
+
+ token word /[a-zA-Z_][a-zA-Z0-9_]*/
+ token uint /[0-9]+/
+}
+
+
+def start
+ [expression]
+ {
+ print_xml( lhs )
+ }
+
+def expression
+ [expression '|' term]
+| [expression '&' term]
+| [expression '-' term]
+| [expression '--' term]
+| [term]
+
+def term
+ [factor_with_rep more_term]
+
+# Can resolve the ambiguity by making more_term shortest match.
+def more_term
+ []
+| [factor_with_rep more_term]
+| ['.' factor_with_rep more_term]
+| [':>' factor_with_rep more_term]
+| [':>>' factor_with_rep more_term]
+| ['<:' factor_with_rep more_term]
+
+def factor_with_rep
+ [factor_with_rep '*']
+| [factor_with_rep '**']
+| [factor_with_rep '?']
+| [factor_with_rep '+']
+| [factor_with_rep '{' factor_rep_num '}']
+| [factor_with_rep '{' ',' factor_rep_num '}']
+| [factor_with_rep '{' factor_rep_num ',' '}']
+| [factor_with_rep '{' factor_rep_num ',' factor_rep_num '}']
+| [factor_with_neg]
+
+def factor_rep_num
+ [uint]
+
+def factor_with_neg
+ ['!' factor_with_neg]
+| ['^' factor_with_neg]
+| [factor]
+
+def factor
+ [alphabet_num]
+| [word]
+| ['(' expression ')']
+
+def alphabet_num
+ [uint]
+| ['-' uint]
diff --git a/test/ragelambig3.lm b/test/ragelambig3.lm
new file mode 100644
index 00000000..649038e5
--- /dev/null
+++ b/test/ragelambig3.lm
@@ -0,0 +1,64 @@
+lex start
+{
+ ignore /[\t\n ]+/
+ literal '^', '|', '-', ',', ':', '!', '?', '.'
+ literal '(', ')', '{', '}', '*', '&', '+'
+
+ literal '--', ':>', ':>>', '<:', '->', '**'
+
+ token word /[a-zA-Z_][a-zA-Z0-9_]*/
+ token uint /[0-9]+/
+}
+
+
+def start
+ [expression]
+ {
+ print_xml( lhs )
+ }
+
+def expression
+ [expression '|' term_short]
+| [expression '&' term_short]
+| [expression '-' term_short]
+| [expression '--' term_short]
+| [term_short]
+
+def term_short
+ reducefirst
+ [term]
+
+def term
+ [term factor_with_rep]
+| [term '.' factor_with_rep]
+| [term ':>' factor_with_rep]
+| [term ':>>' factor_with_rep]
+| [term '<:' factor_with_rep]
+| [factor_with_rep]
+
+def factor_with_rep
+ [factor_with_rep '*']
+| [factor_with_rep '**']
+| [factor_with_rep '?']
+| [factor_with_rep '+']
+| [factor_with_rep '{' factor_rep_num '}']
+| [factor_with_rep '{' ',' factor_rep_num '}']
+| [factor_with_rep '{' factor_rep_num ',' '}']
+| [factor_with_rep '{' factor_rep_num ',' factor_rep_num '}']
+| [factor_with_neg]
+
+def factor_rep_num [uint]
+
+def factor_with_neg
+ ['!' factor_with_neg]
+| ['^' factor_with_neg]
+| [factor]
+
+def factor
+ [alphabet_num]
+| [word]
+| ['(' expression ')']
+
+def alphabet_num
+ [uint]
+| ['-' uint]
diff --git a/test/ragelambig4.lm b/test/ragelambig4.lm
new file mode 100644
index 00000000..d489bca3
--- /dev/null
+++ b/test/ragelambig4.lm
@@ -0,0 +1,69 @@
+lex start
+{
+ ignore /[\t\n ]+/
+ literal '^', '|', '-', ',', ':', '!', '?', '.'
+ literal '(', ')', '{', '}', '*', '&', '+'
+
+ literal '--', ':>', ':>>', '<:', '->', '**'
+
+ token word /[a-zA-Z_][a-zA-Z0-9_]*/
+ token uint /[0-9]+/
+}
+
+
+def start
+ [expression]
+ {
+ print_xml( lhs )
+ }
+
+def expression [term expression_op*]
+
+def expression_op
+ ['|' term]
+| ['&' term]
+| ['-' term]
+| ['--' term]
+
+def term [factor_rep term_op_list_short]
+
+# This list is done manually to get shortest match.
+def term_op_list_short
+ []
+| [term_op term_op_list_short]
+
+def term_op
+ [factor_rep]
+| ['.' factor_rep]
+| [':>' factor_rep]
+| [':>>' factor_rep]
+| ['<:' factor_rep]
+
+def factor_rep
+ [factor_neg factor_rep_op*]
+
+def factor_rep_op
+ ['*']
+| ['**']
+| ['?']
+| ['+']
+| ['{' factor_rep_num '}']
+| ['{' ',' factor_rep_num '}']
+| ['{' factor_rep_num ',' '}']
+| ['{' factor_rep_num ',' factor_rep_num '}']
+
+def factor_rep_num [uint]
+
+def factor_neg
+ ['!' factor_neg]
+| ['^' factor_neg]
+| [factor]
+
+def factor
+ [alphabet_num]
+| [word]
+| ['(' expression ')']
+
+def alphabet_num
+ [uint]
+| ['-' uint]
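The four ragelambig grammars resolve the same ambiguity in the input '1 - 1': either the '-' begins a negative-literal factor inside a single term, or it is the expression-level difference operator between two terms. Which parse wins depends on whether the factor list inside a term matches longest or shortest. A deliberately simplified recursive-descent sketch of that choice over the hand-tokenized input (it leaves out Colm's ordered-choice backtracking, which this particular input does not need; all names are illustrative):

    TOKS = ['1', '-', '1']

    def factor(pos):
        # factor: uint, or '-' uint (a negative literal)
        if pos < len(TOKS) and TOKS[pos].isdigit():
            return ('num', TOKS[pos]), pos + 1
        if pos + 1 < len(TOKS) and TOKS[pos] == '-' and TOKS[pos + 1].isdigit():
            return ('neg', TOKS[pos + 1]), pos + 2
        return None

    def term(pos, shortest):
        # term: a factor followed by a list of further factors. Taking the empty
        # alternative first stops the list as soon as possible; taking the
        # recursive alternative first swallows '-1' as a negative-literal factor.
        node, pos = factor(pos)
        items = [node]
        while not shortest:
            nxt = factor(pos)
            if nxt is None:
                break
            node, pos = nxt
            items.append(node)
        return ('term', items), pos

    def expression(pos, shortest):
        node, pos = term(pos, shortest)
        ops = []
        while pos < len(TOKS) and TOKS[pos] in ('|', '&', '-', '--'):
            rhs, new_pos = term(pos + 1, shortest)
            ops.append((TOKS[pos], rhs))
            pos = new_pos
        return ('expr', node, ops), pos

    greedy_tree, _ = expression(0, shortest=False)
    short_tree, _ = expression(0, shortest=True)
    print(greedy_tree)  # one term holding the factors 1 and -1
    print(short_tree)   # two terms joined by the '-' operator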
diff --git a/test/rediv.in b/test/rediv.in
new file mode 100644
index 00000000..f1ef2a38
--- /dev/null
+++ b/test/rediv.in
@@ -0,0 +1 @@
+2 / /[^gu-zy].*o[\d-xa]*/;
diff --git a/test/rediv.lm b/test/rediv.lm
new file mode 100644
index 00000000..c3750351
--- /dev/null
+++ b/test/rediv.lm
@@ -0,0 +1,92 @@
+# Or-literal scanner
+lex orlit
+{
+ token orlit_dash /'-' /
+ token orlit_close /']'/
+
+ rl orlit_specials /[\-\]]/
+ token orlit_chr /^orlit_specials | '\\' any/
+}
+
+def orlit_item
+ [orlit_chr]
+| [orlit_chr orlit_dash orlit_chr]
+
+def orlit
+ [orlit_item*]
+
+# Regex scanner
+lex regex
+{
+ token orlit_open /'['/
+ token orlit_neg_open /'[^'/
+ token regex_dot /'.'/
+ token regex_star /'*'/
+ token regex_close /'/'/
+
+ rl regex_specials /[\[\.\*\/\\]/
+ token regex_chr /(^regex_specials)* | '\\' any/
+}
+
+def regex_rep
+ [regex_star]
+| []
+
+def regex_base
+ [regex_chr]
+| [regex_dot]
+| [orlit_open orlit orlit_close]
+| [orlit_neg_open orlit orlit_close]
+
+def regex_item
+ [regex_base regex_rep]
+
+def regex_body
+ [regex_item*]
+
+rl s_string /"'" ([^'\\\n] | '\\' any )* "'"/
+rl d_string /'"' ([^"\\\n] | '\\' any )* '"'/
+
+# Root scanner
+lex start
+{
+ token ident /[a-zA-Z_]+/
+ token number /[0-9]+/
+ token string /s_string | d_string/
+
+ literal '+', '-', '*', ';', '/'
+ token slash /'/'/
+ token semi /';'/
+
+ ignore wp /[ \t\n]+/
+}
+
+def factor
+ [ident]
+| [number]
+| [string]
+| ['/' regex_body regex_close]
+
+def term
+ [term '*' factor]
+| [term '/' factor]
+| [factor]
+
+def expr
+ [expr '+' term]
+| [expr '-' term]
+| [term]
+
+def statement
+ [expr ';']
+
+def start
+ [statement*]
+ {
+ for I:orlit_item in lhs {
+ if match I [orlit_chr] {
+ print( I, '\n' )
+ }
+ }
+ print_xml( lhs )
+ }
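rediv.lm leaves '/' ambiguous at the token level and lets the parser decide between the division operator and the start of a regex literal, switching into the regex and or-literal scanners as it goes. Hand-written lexers usually approximate the same distinction with a one-token lookbehind: after something that can end a value, '/' is division; anywhere else it opens a regex. A sketch of that flag-based approximation (not the Colm mechanism; names are illustrative):

    import re

    VALUE_END = {'ident', 'number', 'string', 'regex', ')'}

    def lex(src):
        """'/' after a value-ending token is division; anywhere else it starts a
        regex literal in which '\\' escapes the next character."""
        tokens, i = [], 0
        while i < len(src):
            c = src[i]
            if c.isspace():
                i += 1
            elif c == '/' and tokens and tokens[-1][0] in VALUE_END:
                tokens.append(('/', '/')); i += 1
            elif c == '/':
                j = i + 1
                while j < len(src) and src[j] != '/':
                    j += 2 if src[j] == '\\' else 1
                tokens.append(('regex', src[i:j + 1])); i = j + 1
            elif c.isdigit():
                m = re.match(r'[0-9]+', src[i:])
                tokens.append(('number', m.group())); i += m.end()
            elif c.isalpha() or c == '_':
                m = re.match(r'[a-zA-Z_]+', src[i:])
                tokens.append(('ident', m.group())); i += m.end()
            else:
                tokens.append((c, c)); i += 1
        return tokens

    # lex("2 / /[^gu-zy].*o[\\d-xa]*/;") yields the first '/' as division and the
    # second as a single regex literal, matching what rediv.lm parses from rediv.in.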
diff --git a/test/ruby/Makefile b/test/ruby/Makefile
new file mode 100644
index 00000000..22647b42
--- /dev/null
+++ b/test/ruby/Makefile
@@ -0,0 +1,34 @@
+#
+# Copyright 2008 Adrian Thurston <thurston@cs.queensu.ca>
+#
+
+# This file is part of Ragel.
+#
+# Ragel is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# Ragel is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ragel; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+
+SRC = $(wildcard *.lm)
+BIN = $(SRC:%.lm=%.bin)
+COLM = ../../colm/colm
+
+all: $(BIN)
+
+$(BIN): $(COLM)
+
+$(BIN): %.bin: %.lm
+ $(COLM) $<
+
+clean:
+ rm -f *.cpp *.bin
diff --git a/test/ruby/ruby.lm b/test/ruby/ruby.lm
new file mode 100644
index 00000000..b9505515
--- /dev/null
+++ b/test/ruby/ruby.lm
@@ -0,0 +1,627 @@
+#
+# Grammar
+#
+
+# The items in this scanner may have newline in front of them.
+lex start
+{
+ # Reserved Words.
+ literal '__LINE__', '__FILE__', '__ENCODING__', 'BEGIN', 'END', 'alias',
+ 'and', 'begin', 'break', 'case', 'class', 'def', 'defined?', 'do',
+ 'else', 'elsif', 'end', 'ensure', 'false', 'for', 'in', 'module',
+ 'next', 'nil', 'not', 'or', 'redo', 'rescue', 'retry', 'return',
+ 'self', 'super', 'then', 'true', 'undef', 'when', 'yield', 'if',
+ 'unless', 'while', 'until'
+
+ token tNTH_REF /'$' [0-9]+/
+ token tBACK_REF /'$' ( '&' | '`' | '\'' | '+' ) /
+
+ literal ')', ',', ']'
+ literal '{', '}', ':'
+ literal '.', '::'
+ literal '->'
+
+ # Unary operators.
+ literal '!', '~'
+ token tUPLUS /'+'/
+ token tUMINUS /'-'/
+
+ token tLBRACK /'['/
+ token tLPAREN /'('/
+ token tSTAR /'*'/
+ token tBAR /'|'/
+ token tAMPER /'&'/
+
+ token tIDENTIFIER /[a-z][a-zA-Z_]*/
+ token tFID /[a-z][a-zA-Z_]* ('!'|'?')/
+ token tCONSTANT /[A-Z][a-zA-Z_]*/
+ token tGVAR /'$' [a-zA-Z_]+/
+ token tIVAR /'@' [a-zA-Z_]+/
+ token tCVAR /'@@' [a-zA-Z_]+/
+
+ token tINTEGER /[0-9]+/
+ token tFLOAT /[0-9]+ '.' [0-9]+/
+
+ token tDSTRING_BEG /'"'/
+ token tSSTRING_BEG /'\''/
+ token tXSTRING_BEG /'`'/
+
+ ignore /[ \t\n]+/
+ ignore comment /'#' [^\n]* '\n'/
+}
+
+# These items cannot appear at the beginning of a line (except maybe the first).
+lex expr_cont_ops
+{
+ ignore /[\t ]+/
+
+ literal '+', '-', '*', '**', '/', '%', '^'
+ literal '|', '&', '||', '&&'
+ literal '[', '('
+ literal '='
+ literal '<<', '>>'
+ literal '?'
+ literal '<=>'
+ literal '=>'
+ literal '[]', '[]='
+ literal '=~', '!~'
+ literal '<', '>', '>=', '<='
+ literal '!=', '==', '==='
+ literal '..', '...'
+}
+lex terms
+{
+ ignore /[\t ]+/
+ ignore /'#' [^\n]*/
+ literal ';'
+ literal '\n'
+}
+
+
+lex dstring_contents
+{
+ token dstring_contents /[^"]+/
+ token tDSTRING_END /'"'/
+}
+
+lex sstring_contents
+{
+ token sstring_contents /[^']+/
+ token tSSTRING_END /'\''/
+}
+
+lex xstring_contents
+{
+ token xstring_contents /[^`]+/
+ token tXSTRING_END /'`'/
+}
+
+def ruby
+ [compstmt]
+
+def compstmt
+ [stmts opt_terms]
+
+def bodystmt
+ [compstmt opt_rescue opt_else opt_ensure]
+
+def opt_rescue
+# ['rescue' exc_list exc_var then compstmt opt_rescue] |
+ []
+
+def then
+ [term]
+| ['then']
+| [term 'then']
+
+def do
+ [term]
+| ['do']
+
+def if_tail
+ [opt_else]
+| ['elsif' expr_value then compstmt if_tail]
+
+def opt_else
+ ['else' compstmt]
+| []
+
+def opt_ensure
+ ['ensure' compstmt]
+| []
+
+def stmts
+ [stmts terms stmt]
+| [stmt]
+| []
+
+def opt_terms
+ [terms]
+| []
+
+def terms
+ [term]
+| [terms ';']
+
+def term
+ [';']
+| ['\n']
+
+def stmt
+ ['alias' fitem fitem]
+| ['undef' undef_list]
+| [stmt 'if' expr_value]
+| [stmt 'unless' expr_value]
+| [stmt 'while' expr_value]
+| [stmt 'until' expr_value]
+| [stmt 'rescue' stmt]
+| ['BEGIN' '{' compstmt '}']
+| ['END' '{' compstmt '}']
+| [lhs '=' mrhs]
+| [mlhs '=' arg_value]
+| [mlhs '=' mrhs]
+| [expr]
+
+def mlhs
+ [mlhs_basic]
+| [tLPAREN mlhs ')']
+
+def mlhs_basic
+ [mlhs_head]
+
+def mlhs_head
+ [mlhs_item ',' mlhs_head]
+| [mlhs_item]
+
+def mlhs_item
+ [variable]
+| ['*' mlhs_item]
+| ['*']
+| [primary_value '[' opt_call_args ']']
+| [primary_value '.' tIDENTIFIER]
+| [primary_value '.' tCONSTANT]
+| [primary_value '::' tIDENTIFIER]
+| [primary_value '::' tCONSTANT]
+| ['::' tCONSTANT]
+| [backref]
+| [tLPAREN mlhs ')']
+
+def lhs
+ [variable]
+| [primary_value '[' opt_call_args ']']
+| [primary_value '.' tIDENTIFIER]
+| [primary_value '.' tCONSTANT]
+| [primary_value '::' tIDENTIFIER]
+| [primary_value '::' tCONSTANT]
+| ['::' tCONSTANT]
+| [backref]
+
+def mrhs
+ [args ',' arg_value]
+| [args ',' '*' arg_value]
+| ['*' arg_value]
+
+def expr
+ [expr 'and' expr]
+| [expr 'or' expr]
+| ['not' expr]
+| [arg]
+
+def expr_value
+ [expr]
+
+def opt_brace_block
+ [brace_block]
+| []
+
+def block_param_def
+ [tBAR opt_bv_decl tBAR]
+| [tBAR block_param opt_bv_decl tBAR]
+
+def block_param
+ [block_arg_list]
+| []
+
+def block_arg_list
+ [block_arg_list ',' block_arg_item]
+| [block_arg_item]
+
+def block_arg_item
+ [f_norm_arg]
+| [f_rest_arg]
+| [f_block_arg]
+| ['(' f_args ')']
+
+def opt_bv_decl
+ [';' bv_decls]
+| []
+
+def bv_decls
+ [bvar]
+| [bv_decls ',' bvar]
+
+def bvar
+ [tIDENTIFIER]
+
+def opt_block_param
+ [block_param_def]
+| []
+
+def operation
+ [tIDENTIFIER]
+| [tCONSTANT]
+| [tFID]
+
+def operation2
+ [tIDENTIFIER]
+| [tCONSTANT]
+| [tFID]
+| [op]
+
+def operation3
+ [tIDENTIFIER]
+| [tFID]
+| [op]
+
+def op
+ ['|'] | ['^'] | ['&'] | ['<=>'] | ['=='] | ['==='] | ['=~'] | ['!~'] |
+ ['>'] | ['>='] | ['<'] | ['<='] | ['!='] | ['<<'] | ['>>'] | ['+'] |
+ ['-'] | ['*'] | ['/'] | ['%'] | ['**'] | ['!'] | ['~'] | ['[]'] | ['[]='] |
+ [tXSTRING_BEG]
+
+def opt_call_args
+ [call_args]
+| []
+
+def call_args
+ [args opt_block_arg]
+| [assocs opt_block_arg]
+| [args ',' assocs opt_block_arg]
+| [block_arg]
+
+def args
+ [arg_value]
+| ['*' arg_value]
+| [args ',' arg_value]
+| [args ',' '*' arg_value]
+
+def arg_value
+ [arg]
+
+def opt_block_arg
+ [',' block_arg]
+| []
+
+def block_arg
+ [tAMPER arg_value]
+
+def arg
+ ['defined?' arg]
+| [arg_assign]
+
+def arg_assign
+ [lhs '=' arg_assign]
+| [lhs '=' arg_assign 'rescue' arg]
+| [arg_sel]
+
+def arg_sel
+ [arg_dot '?' arg_sel ':' arg_sel]
+| [arg_dot]
+
+def arg_dot
+ [arg_logical '..' arg_dot]
+| [arg_logical '...' arg_dot]
+| [arg_logical]
+
+def arg_logical
+ [arg_eq '&&' arg_logical]
+| [arg_eq '||' arg_logical]
+| [arg_eq]
+
+def arg_eq
+ [arg_cmp '<=>' arg_eq]
+| [arg_cmp '==' arg_eq]
+| [arg_cmp '===' arg_eq]
+| [arg_cmp '!=' arg_eq]
+| [arg_cmp '=~' arg_eq]
+| [arg_cmp '!~' arg_eq]
+| [arg_cmp]
+
+def arg_cmp
+ [arg_bitor '>=' arg_cmp]
+| [arg_bitor '<=' arg_cmp]
+| [arg_bitor '>' arg_cmp]
+| [arg_bitor '<' arg_cmp]
+| [arg_bitor]
+
+def arg_bitor
+ [arg_bitand '|' arg_bitor]
+| [arg_bitand '^' arg_bitor]
+| [arg_bitand]
+
+def arg_bitand
+ [arg_shift '&' arg_bitand]
+| [arg_shift]
+
+def arg_shift
+ [arg_add '<<' arg_shift]
+| [arg_add '>>' arg_shift]
+| [arg_add]
+
+def arg_add
+ [arg_mult '+' arg_add]
+| [arg_mult '-' arg_add]
+| [arg_mult]
+
+def arg_mult
+ [arg_pow '*' arg_mult]
+| [arg_pow '/' arg_mult]
+| [arg_pow '%' arg_mult]
+| [arg_pow]
+
+def arg_pow
+ [arg_unary '**' arg_pow]
+| [arg_unary]
+
+def arg_unary
+ ['!' primary]
+| ['~' primary]
+| [tUPLUS primary]
+| [tUMINUS primary]
+| [primary]
+
+def primary_value
+ [primary]
+
+def primary
+ [pliteral]
+| [strings]
+| [xstring]
+#| [regexp]
+#| [words]
+#| [qwords]
+| [var_ref]
+| [backref]
+| [tFID]
+| ['begin' bodystmt 'end']
+| [tLPAREN compstmt ')']
+| [primary_value '::' tCONSTANT]
+| ['::' tCONSTANT]
+| [tLBRACK aref_args ']']
+| ['{' assoc_list '}']
+| ['defined?' '(' expr ')']
+| [operation brace_block]
+| [method_call]
+| [method_call brace_block]
+| ['->' lambda]
+| ['if' expr_value then compstmt if_tail 'end']
+| ['unless' expr_value then compstmt opt_else 'end']
+| ['while' expr_value do compstmt 'end']
+| ['until' expr_value do compstmt 'end']
+#| ['case' expr_value opt_terms case_body 'end']
+#| ['case' opt_terms case_body 'end']
+| ['for' for_var 'in' expr_value do compstmt 'end']
+| ['class' cpath superclass bodystmt 'end']
+| ['class' '<<' expr term bodystmt 'end']
+| ['module' cpath bodystmt 'end']
+| ['def' fname f_arglist bodystmt 'end']
+| ['def' singleton dot_or_colon fname f_arglist bodystmt 'end']
+| ['break']
+| ['next']
+| ['redo']
+| ['retry']
+
+def for_var
+ [lhs]
+| [mlhs]
+
+def lambda
+ [f_larglist lambda_body]
+
+def f_larglist
+ ['(' f_args opt_bv_decl ')']
+| [f_args opt_bv_decl]
+
+def lambda_body
+ ['{' compstmt '}']
+| ['do' compstmt 'end']
+
+def assoc_list
+ [assocs trailer]
+| []
+
+def assocs
+ [assocs ',' assoc]
+| [assoc]
+
+def assoc
+ [arg_value '=>' arg_value]
+| [':' arg_value]
+
+def singleton
+ [var_ref]
+| ['(' expr ')']
+
+def dot_or_colon
+ ['.']
+| ['::']
+
+def aref_args
+ [args trailer]
+| [args ',' assocs trailer]
+| [assocs trailer]
+| []
+
+def trailer
+ [',']
+| []
+
+def brace_block
+ ['{' opt_block_param compstmt '}']
+| ['do' opt_block_param compstmt 'end']
+
+def f_arglist
+ ['(' f_args ')']
+| [f_args term]
+
+def f_args
+ [f_arg_list]
+| []
+
+def f_arg_list
+ [f_arg_list ',' f_arg_item]
+| [f_arg_item]
+
+def f_arg_item
+ [f_norm_arg]
+| [f_opt]
+| [f_rest_arg]
+| [f_block_arg]
+| ['(' f_args ')']
+
+def f_opt
+ [tIDENTIFIER '=' arg_value]
+
+def f_rest_arg
+ ['*' tIDENTIFIER]
+| ['*']
+
+def f_block_arg
+ [tAMPER tIDENTIFIER]
+
+def f_norm_arg
+ [tIDENTIFIER]
+
+def backref
+ [tNTH_REF] | [tBACK_REF]
+
+def superclass
+ [term]
+| ['<' expr_value term]
+
+def cpath
+ ['::' cname]
+| [cname]
+| [primary_value '::' cname]
+
+def fname
+ [tIDENTIFIER]
+| [tCONSTANT]
+| [tFID]
+| [op]
+| [reswords]
+
+def reswords
+ ['__LINE__'] | ['__FILE__'] | ['__ENCODING__'] | ['BEGIN'] | ['END'] |
+ ['alias'] | ['and'] | ['begin'] | ['break'] | ['case'] | ['class'] |
+ ['def'] | ['defined?'] | ['do'] | ['else'] | ['elsif'] | ['end'] |
+ ['ensure'] | ['false'] | ['for'] | ['in'] | ['module'] |
+ ['next'] | ['nil'] | ['not'] | ['or'] | ['redo'] | ['rescue'] |
+ ['retry'] | ['return'] | ['self'] | ['super'] | ['then'] | ['true'] |
+ ['undef'] | ['when'] | ['yield'] | ['if'] | ['unless'] | ['while'] |
+ ['until']
+
+def cname
+ [tIDENTIFIER]
+| [tCONSTANT]
+
+def pliteral
+ [numeric]
+| [symbol]
+#| [dsym]
+
+def strings
+ [string]
+
+def string
+# [tCHAR]
+ [string1]
+| [string string1]
+
+def string1
+ [tSSTRING_BEG sstring_contents? tSSTRING_END]
+| [tDSTRING_BEG dstring_contents? tDSTRING_END]
+
+def xstring
+ [tXSTRING_BEG xstring_contents? tXSTRING_END]
+
+def numeric
+ [tINTEGER]
+| [tFLOAT]
+
+def symbol
+ [':' sym]
+
+def sym
+ [fname]
+| [tIVAR]
+| [tGVAR]
+| [tCVAR]
+
+def fitem
+ [fsym]
+#| [dsym]
+
+def undef_list
+ [fitem]
+| [undef_list ',' fitem]
+
+def fsym
+ [fname]
+| [symbol]
+
+#def dsym
+# [':' xstring_contents tDSTRING_END]
+
+def var_ref
+ [variable]
+
+def variable
+ [tIDENTIFIER] | [tIVAR] | [tGVAR] | [tCONSTANT] | [tCVAR] | ['nil'] |
+ ['self'] | ['true'] | ['false'] | ['__FILE__'] | ['__LINE__'] |
+ ['__ENCODING__']
+
+
+# Required whitespace, but newline is not allowed.
+token ws_no_nl
+ /[ \t]+[^ \t\n]/
+ {
+ send( make_token( typeid ws_no_nl, pull(stdin, match_length-1) ) )
+ }
+
+def method_call
+ [operation paren_args]
+| [operation ws_no_nl call_args]
+| [primary_value '.' operation2 opt_paren_args]
+| [primary_value '.' operation2 ws_no_nl call_args]
+| [primary_value '::' operation2 opt_paren_args]
+| [primary_value '::' operation2 ws_no_nl call_args]
+| [primary_value '.' paren_args]
+| [primary_value '::' paren_args]
+| ['super' paren_args]
+| ['super' ws_no_nl call_args]
+| ['super']
+| ['yield' paren_args]
+| ['yield' ws_no_nl call_args]
+| ['yield']
+| ['return' call_args]
+| ['return']
+| [primary_value '[' opt_call_args ']']
+
+def opt_paren_args
+ [paren_args]
+| []
+
+def paren_args
+ ['(' opt_call_args ')']
+
+#
+# Grammar finished
+#
+
+ruby R = parse ruby(stdin)
+
+print_xml( R )
+
+#for T: primary in R
+# print_xml( T, '\n\n' )
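The ws_no_nl token in ruby.lm matches a run of blanks plus one following non-newline character, then puts that character back by pulling only match_length-1 from the input; in effect it is required whitespace with a one-character lookahead. A regex lookahead expresses the same thing directly (a sketch, not the Colm mechanism; names are illustrative):

    import re

    WS_NO_NL = re.compile(r'[ \t]+(?=[^ \t\n])')   # blanks not followed by a newline

    def split_ws_no_nl(src, pos):
        """Return (token_text, new_pos), or None; the looked-at character stays unconsumed."""
        m = WS_NO_NL.match(src, pos)
        if m is None:
            return None
        return m.group(), m.end()

    # split_ws_no_nl('foo   bar', 3) -> ('   ', 6)
    # split_ws_no_nl('foo   \n', 3)  -> None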
diff --git a/test/rubyhere.in b/test/rubyhere.in
new file mode 100644
index 00000000..a23dfead
--- /dev/null
+++ b/test/rubyhere.in
@@ -0,0 +1,8 @@
+print( <<DATA1, more, <<DATA2, 99 )
+"&^#(@ almost
+!arbitrary text!
+DATA1
+hello
+world
+DATA2
+. error here
diff --git a/test/rubyhere.lm b/test/rubyhere.lm
new file mode 100644
index 00000000..33fff4b7
--- /dev/null
+++ b/test/rubyhere.lm
@@ -0,0 +1,89 @@
+rl ident_pattern /[a-zA-Z_][a-zA-Z_0-9]*/
+rl number_pattern /[0-9]+/
+
+lex start
+{
+ ignore /[ \t\n]+/
+ token id /ident_pattern/
+ token number /number_pattern/
+ literal '<<', '*', ',', '(', ')'
+}
+
+global str HereId
+
+token rest_of_line /[^\n]*'\n'/
+
+lex here_start
+{
+ ignore /[ \t\n]+/
+ token here_id
+ here_data HereData
+ /ident_pattern/
+ {
+ # Take the text of the here_id from the input stream.
+ HereId = pull( stdin, match_length )
+
+		# Grab the remainder of the current line; it is pushed back after the heredoc data.
+ rest_of_line ROL = parse_stop rest_of_line( stdin )
+
+ # Parse the heredoc data.
+ here_data HereData = parse_stop here_data( stdin )
+
+ # Push the rest-of-line data back to the input stream.
+ push( stdin, ROL )
+
+ # Send the here_id token. Attach the heredoc data as an attribute.
+ send( make_token( typeid here_id, HereId, HereData ) )
+ }
+}
+
+lex here_data
+{
+ token here_close_id
+ / ident_pattern '\n' /
+ {
+ if match_text == HereId + '\n' {
+ send( make_token(
+ typeid here_close_id,
+ pull(stdin, match_length) ) )
+ }
+ else
+ send( make_token( typeid here_line, pull(stdin, match_length) ) )
+ }
+
+ token here_line
+ / [^\n]* '\n' /
+}
+
+def here_data
+ [here_line* here_close_id]
+
+def heredoc
+ ['<<' here_id]
+
+def primary
+ [id]
+| [number]
+| [heredoc]
+
+def arglist
+ [primary arglist_more*]
+
+def arglist_more
+ [',' primary]
+
+def call
+ [id '(' arglist? ')']
+
+def statement
+ [primary]
+| [call]
+
+token foobar /any*/
+
+def start
+ [statement*]
+| [foobar]
+
+start S = parse start( stdin )
+print_xml(S)
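rubyhere.lm handles the heredoc by grabbing the identifier, setting aside the remainder of the current line, consuming the following lines as heredoc data until the closing identifier, and only then pushing the saved remainder back into the input. A line-oriented Python sketch of the same order of operations (it stores bodies in a dict instead of attaching them to a token; names are illustrative):

    import re

    HERE = re.compile(r'<<(\w+)')

    def split_heredocs(lines):
        """Remove heredoc bodies from a list of source lines. Every '<<ID' marker
        queues a body; bodies are read, in marker order, from the lines that
        follow, each one ending at a line equal to its ID."""
        code, heredocs, pending = [], {}, []
        i = 0
        while i < len(lines):
            code.append(lines[i])
            pending += HERE.findall(lines[i])   # every <<ID on this line queues a body
            i += 1
            while pending:
                ident, body = pending.pop(0), []
                while i < len(lines) and lines[i].rstrip('\n') != ident:
                    body.append(lines[i])
                    i += 1
                i += 1                          # skip the closing identifier line
                heredocs[ident] = ''.join(body)
        return code, heredocs

    # On rubyhere.in this leaves only the print(...) line and '. error here' as code,
    # with the DATA1 and DATA2 bodies collected separately.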
diff --git a/test/string.in b/test/string.in
new file mode 100644
index 00000000..8aef536f
--- /dev/null
+++ b/test/string.in
@@ -0,0 +1,2 @@
+a + "%{{"; 1 * 2;
+
diff --git a/test/string.lm b/test/string.lm
new file mode 100644
index 00000000..7da88215
--- /dev/null
+++ b/test/string.lm
@@ -0,0 +1,54 @@
+lex string
+{
+ token str_escape /'\\' any/
+ token str_chr /[^\\"]+/
+}
+
+def str_item
+ [str_escape]
+| [str_chr]
+
+def string
+ ['"' str_item* '"']
+
+lex start
+{
+ token ident /[a-zA-Z_]+/
+ token number /[0-9]+/
+
+ literal '+', '*', ';', '"', '\'', '(', ')'
+ literal '+=', '-=', '*='
+
+ ignore wp /[ \t\n]+/
+}
+
+def expr
+ [expr '+' term]
+| [term]
+
+def term
+ [term '*' primary]
+| [primary]
+
+def primary
+ [number]
+| [ident]
+| [string]
+| ['(' expr ')']
+
+def expr_list
+ [expr_list expr ';']
+| []
+
+def start
+ [expr_list]
+ {
+ if match lhs
+ ~a + "%{{"; 1 * 2;
+ {
+ print( 'yes\n' )
+ }
+ }
+
+start S = parse start(stdin)
+print_xml( S )
diff --git a/test/superid.in b/test/superid.in
new file mode 100644
index 00000000..4002630f
--- /dev/null
+++ b/test/superid.in
@@ -0,0 +1 @@
+!a b b a;
diff --git a/test/superid.lm b/test/superid.lm
new file mode 100644
index 00000000..3a3eef59
--- /dev/null
+++ b/test/superid.lm
@@ -0,0 +1,59 @@
+
+lex start
+{
+ literal '!', 'a', ';\n'
+
+ token id /'a'|'b'/
+ {
+ #tok.id = trans_id_to
+ }
+
+ token super_id //
+ token foo //
+
+ ignore ws / [ \n\t]+ /
+}
+
+global int trans_id_to
+
+def e1
+ []
+ {
+ print( 'old_id = ', trans_id_to, '\n' )
+ #trans_id_to = type_id foo
+ print( 'new_id = ', trans_id_to, '\n' )
+ }
+
+def item1
+ str msg
+
+ [ e1 '!' 'a' super_id super_id 'a']
+ {
+ lhs.msg = 'this is item1\n'
+ }
+
+def e2
+ []
+ {
+ print( 'old_id = ', trans_id_to, '\n' )
+ #trans_id_to = type_id super_id
+ print( 'new_id = ', trans_id_to, '\n' )
+ }
+
+def item2
+ str msg
+
+ [ e2 '!' 'a' super_id super_id 'a']
+ {
+ lhs.msg = 'this is item2\n'
+ }
+
+
+def start
+ [item1 ';\n']
+| [item2 ';\n']
+ {
+ match lhs [Item2:item2 ';\n']
+ print( Item2.msg )
+ }
+
diff --git a/test/tags.in b/test/tags.in
new file mode 100644
index 00000000..939f9b48
--- /dev/null
+++ b/test/tags.in
@@ -0,0 +1 @@
+a b b a;
diff --git a/test/tags.lm b/test/tags.lm
new file mode 100644
index 00000000..9e13ddd8
--- /dev/null
+++ b/test/tags.lm
@@ -0,0 +1,82 @@
+# Open and close tags by rewriting to generic close tags. This won't work if
+# you are interested in unclosed tags, because a token can start out as not a
+# close_id but then become a close_id during the course of parsing.
+
+#
+# Regular Definitions
+#
+rl rl_ws /[ \t\n\r\v]+/
+rl rl_id /[a-zA-Z_][a-zA-Z0-9_]*/
+
+#
+# Tokens
+#
+
+# Any single character can be a literal
+lex start
+{
+ literal '!\n', ';\n'
+
+ # Ignore whitespace.
+ ignore /rl_ws/
+
+ # Open and close id
+ token id /rl_id/
+}
+
+#
+# Global Data
+#
+
+def tag_stack
+ [id tag_stack]
+| []
+
+global tag_stack TS = construct tag_stack ["sentinal"]
+
+#
+# Productions
+#
+
+def open_tag
+ [id]
+ {
+ match lhs [Id:id]
+ match TS [Top:id Rest:tag_stack]
+ if Id.data == Top.data {
+ reject
+ } else {
+ TS = construct tag_stack [Id TS]
+ }
+ }
+
+def close_tag
+ [id]
+ {
+ match lhs [Id:id]
+ match TS [Top:id Rest:tag_stack]
+
+ if Id.data == Top.data {
+ TS = construct tag_stack [Rest]
+ } else {
+ reject
+ }
+ }
+
+def tag
+ [open_tag tag* close_tag]
+
+def start
+ [tag* ';\n']
+ {
+ print_xml( TS )
+ print_xml( lhs )
+ print( 'got structure\n' )
+ }
+
+| [id* ';\n']
+ {
+ print_xml( TS )
+ print_xml( lhs )
+ print( 'failed\n' )
+ }
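The open_tag/close_tag productions above decide each id against a global tag stack: an id equal to the top of the stack can only be a close tag (open_tag rejects it), anything else can only be an open tag and is pushed. The same decision rule on a plain Python list (a sketch of the rule only, without the parser-driven reject and backtrack machinery):

    def classify_tags(ids):
        """Label each id 'open' or 'close' using a stack: an id matching the top
        of the stack closes it, any other id opens a new level."""
        stack, events = [], []
        for name in ids:
            if stack and stack[-1] == name:
                stack.pop()
                events.append(('close', name))
            else:
                stack.append(name)
                events.append(('open', name))
        return events, stack        # a non-empty stack means unclosed tags

    # classify_tags('a b b a'.split()) -> ([('open', 'a'), ('open', 'b'),
    #                                       ('close', 'b'), ('close', 'a')], [])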
diff --git a/test/til.in b/test/til.in
new file mode 100644
index 00000000..19b7bb19
--- /dev/null
+++ b/test/til.in
@@ -0,0 +1,14 @@
+
+var a;
+a := 1;
+
+head:
+
+a := a + 1;
+c := d;
+
+if a = 10 then
+ goto head;
+end
+
+hi := there;
diff --git a/test/til.lm b/test/til.lm
new file mode 100644
index 00000000..346fc50f
--- /dev/null
+++ b/test/til.lm
@@ -0,0 +1,124 @@
+lex start
+{
+ literal 'var', 'if', 'then', 'else', 'while', 'do', 'for', 'read', 'write',
+ 'end', 'to', 'goto'
+ literal ':=', '!=', ';', '+', '-', '*', '/', '=', '(', ')', ':'
+
+ ignore /'//' [^\n]* '\n'/
+ ignore /[\n\t ]+/
+ token id /[a-zA-Z_]+/
+ token integernumber /[0-9]+/
+ token stringlit /'"' [^"]* '"'/
+}
+
+def program
+ [statement*]
+
+def statement
+ [declaration]
+| [assignment_statement]
+| [if_statement]
+| [while_statement]
+| [do_statement]
+| [for_statement]
+| [read_statement]
+| [write_statement]
+| [labelled_statement]
+| [goto_statement]
+
+def declaration
+ ['var' id ';']
+
+def assignment_statement
+ [id ':=' expression ';']
+
+def if_statement
+ ['if' expression 'then' statement* opt_else_statement 'end']
+
+def opt_else_statement
+ ['else' statement*]
+| []
+
+def while_statement
+ ['while' expression 'do' statement* 'end']
+
+def do_statement
+ ['do' statement* 'while' expression ';']
+
+def for_statement
+ ['for' id ':=' expression 'to' expression 'do' statement* 'end']
+
+def read_statement
+ ['read' id ';']
+
+def write_statement
+ ['write' expression ';']
+
+def expression
+ [term]
+| [expression eqop term]
+
+def eqop ['='] | ['!=']
+
+def term
+ [factor]
+| [term addop factor]
+
+def addop ['+'] | ['-']
+
+def factor
+ [primary]
+| [factor mulop primary]
+
+def mulop ['*'] | ['/']
+
+def primary
+ [id]
+| [lit]
+| ['(' expression ')']
+
+def lit
+ [integernumber]
+| [stringlit]
+
+def labelled_statement
+ [id ':' statement]
+
+def goto_statement
+ ['goto' id ';']
+
+program P = parse program(stdin)
+
+for S:statement* in P
+{
+ if match S [L0: id ':'
+ First: statement
+ Rest: statement*]
+ {
+ for Check: statement* in Rest
+ {
+ if match Check
+ ['if' E: expression 'then'
+ 'goto' Targ: id ';'
+ 'end'
+ T: statement*] &&
+ Targ == L0
+ {
+ # This truncates Rest
+ Check = construct statement* []
+
+ # Replace the labeled statement through to the goto with a
+ # do ... while.
+ S = construct statement*
+ ['do'
+ First
+ Rest
+ 'while' E ';'
+ T]
+ break
+ }
+ }
+ }
+}
+
+print(P, '\n')
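The traversal at the end of til.lm looks for a labelled statement whose following statements end in 'if ... then goto label end' and folds the whole stretch into a do ... while loop, truncating the consumed tail. The same shape of rewrite over a flat Python list of statements, where a labelled statement is represented as a ('label', name) marker followed by its statements (the tuple layout is illustrative, not the Colm tree):

    def fold_goto_loops(stmts):
        """stmts: tuples of the form ('label', name), ('if_goto', cond, target)
        or ('stmt', text). Folds label ... if_goto-back-to-label into a do-while."""
        out, i = [], 0
        while i < len(stmts):
            s = stmts[i]
            if s[0] == 'label':
                for j in range(i + 1, len(stmts)):
                    t = stmts[j]
                    if t[0] == 'if_goto' and t[2] == s[1]:
                        body = stmts[i + 1:j]           # everything between label and goto
                        out.append(('do_while', body, t[1]))
                        i = j + 1                       # the folded stretch is consumed
                        break
                else:
                    out.append(s)
                    i += 1
            else:
                out.append(s)
                i += 1
        return out

    # On til.in's shape: [('stmt', 'a := 1'), ('label', 'head'), ('stmt', 'a := a + 1'),
    #                     ('stmt', 'c := d'), ('if_goto', 'a = 10', 'head'),
    #                     ('stmt', 'hi := there')]
    # the label/goto pair becomes ('do_while', [two statements], 'a = 10').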
diff --git a/test/travs1.in b/test/travs1.in
new file mode 100644
index 00000000..e6cf5020
--- /dev/null
+++ b/test/travs1.in
@@ -0,0 +1 @@
+1 | 2 3
diff --git a/test/travs1.lm b/test/travs1.lm
new file mode 100644
index 00000000..e5820d8f
--- /dev/null
+++ b/test/travs1.lm
@@ -0,0 +1,144 @@
+lex start
+{
+ ignore /[\t\n ]+/
+ literal '^', '|', '-', ',', ':', '!', '?', '.'
+ literal '(', ')', '{', '}', '*', '&', '+'
+
+ literal '--', ':>', ':>>', '<:', '->', '**'
+
+ token word /[a-zA-Z_][a-zA-Z0-9_]*/
+ token uint /[0-9]+/
+}
+
+
+def start [expression]
+
+def expression [term expression_op*]
+
+def expression_op
+ ['|' term]
+| ['&' term]
+| ['-' term]
+| ['--' term]
+
+def term [factor_rep term_rest]
+
+# This list is done manually to get shortest match.
+def term_rest
+ []
+| [term_op term_rest]
+
+def term_op
+ [factor_rep]
+| ['.' factor_rep]
+| [':>' factor_rep]
+| [':>>' factor_rep]
+| ['<:' factor_rep]
+
+def factor_rep
+ [factor_neg factor_rep_op*]
+
+def factor_rep_op
+ ['*']
+| ['**']
+| ['?']
+| ['+']
+| ['{' factor_rep_num '}']
+| ['{' ',' factor_rep_num '}']
+| ['{' factor_rep_num ',' '}']
+| ['{' factor_rep_num ',' factor_rep_num '}']
+
+def factor_rep_num [uint]
+
+def factor_neg
+ ['!' factor_neg]
+| ['^' factor_neg]
+| [factor]
+
+def factor
+ [alphabet_num]
+| [word]
+| ['(' expression ')']
+
+def alphabet_num
+ [uint]
+
+start S = parse start(stdin)
+
+#
+# Top-Down, Left-Right
+#
+
+int do_topdown_leftright( ref any T )
+{
+ for C:any in child(T) {
+ yield C
+ do_topdown_leftright( C )
+ }
+}
+
+iter topdown_leftright( ref any T )
+{
+ do_topdown_leftright( T )
+}
+
+#
+# Bottom-Up, Left-Right
+#
+
+int do_bottomup_leftright( ref any T )
+{
+ for C:any in child(T) {
+ do_bottomup_leftright( C )
+ yield C
+ }
+}
+
+iter bottomup_leftright( ref any T )
+{
+ do_bottomup_leftright( T )
+}
+
+
+#
+# Top-Down, Right-Left
+#
+
+int do_topdown_rightleft( ref any T )
+{
+ for C:any in rev_child(T) {
+ yield C
+ do_topdown_rightleft( C )
+ }
+}
+
+iter topdown_rightleft( ref any T )
+{
+ do_topdown_rightleft( T )
+}
+
+#
+# Bottom-Up, Right-Left
+#
+
+int do_bottomup_rightleft( ref any T )
+{
+ for C:any in rev_child(T) {
+ do_bottomup_rightleft( C )
+ yield C
+ }
+}
+
+iter bottomup_rightleft( ref any T )
+{
+ do_bottomup_rightleft( T )
+}
+
+#
+# Testing
+#
+
+for C: expression in bottomup_leftright( S )
+{
+ print_xml( C )
+}
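The four iterators above differ only in whether a node is yielded before or after its children and in which direction the child list is walked; like the Colm versions, they yield the children of the argument, not the argument itself. The same four orders as Python generators over nested tuples (the tree shape and names are illustrative):

    def children(node):
        return node[1:]                      # a node is ('name', child, child, ...)

    def topdown_leftright(node):
        for c in children(node):
            yield c
            yield from topdown_leftright(c)

    def bottomup_leftright(node):
        for c in children(node):
            yield from bottomup_leftright(c)
            yield c

    def topdown_rightleft(node):
        for c in reversed(children(node)):
            yield c
            yield from topdown_rightleft(c)

    def bottomup_rightleft(node):
        for c in reversed(children(node)):
            yield from bottomup_rightleft(c)
            yield c

    tree = ('expr', ('term', ('uint',)), ('op', ('|',), ('term', ('uint',))))
    print([n[0] for n in bottomup_leftright(tree)])
    # ['uint', 'term', '|', 'uint', 'term', 'op']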
diff --git a/test/travs2.in b/test/travs2.in
new file mode 100644
index 00000000..81d197f3
--- /dev/null
+++ b/test/travs2.in
@@ -0,0 +1 @@
+1 2 | 3 4
diff --git a/test/travs2.lm b/test/travs2.lm
new file mode 100644
index 00000000..06facf3b
--- /dev/null
+++ b/test/travs2.lm
@@ -0,0 +1,93 @@
+lex start
+{
+ ignore /[\t\n ]+/
+ literal '^', '|', '-', ',', ':', '!', '?', '.'
+ literal '(', ')', '{', '}', '*', '&', '+'
+
+ literal '--', ':>', ':>>', '<:', '->', '**'
+
+ token word /[a-zA-Z_][a-zA-Z0-9_]*/
+ token uint /[0-9]+/
+}
+
+
+def start
+ [expression]
+
+def expression [term expression_op*]
+
+def expression_op
+ ['|' term]
+| ['&' term]
+| ['-' term]
+| ['--' term]
+
+def term [factor_rep term_rest]
+
+# This list is done manually to get shortest match.
+def term_rest
+ []
+| [term_op term_rest]
+
+def term_op
+ [factor_rep]
+| ['.' factor_rep]
+| [':>' factor_rep]
+| [':>>' factor_rep]
+| ['<:' factor_rep]
+
+def factor_rep
+ [factor_neg factor_rep_op*]
+
+def factor_rep_op
+ ['*']
+| ['**']
+| ['?']
+| ['+']
+| ['{' factor_rep_num '}']
+| ['{' ',' factor_rep_num '}']
+| ['{' factor_rep_num ',' '}']
+| ['{' factor_rep_num ',' factor_rep_num '}']
+
+def factor_rep_num [uint]
+
+def factor_neg
+ ['!' factor_neg]
+| ['^' factor_neg]
+| [factor]
+
+def factor
+ [alphabet_num]
+| [word]
+| ['(' expression ')']
+
+def alphabet_num
+ [uint]
+
+start S = parse start(stdin)
+
+#
+# Fixed point iteration
+#
+
+bool this_iter_modified()
+ { return true }
+
+iter fixed_point( ref any T )
+{
+ bool modified = true
+ while modified {
+ modified = false
+ for S:any in T {
+ yield S
+
+ if this_iter_modified() {
+ modified = true
+ break
+ }
+ }
+ }
+}
+
+print( S, '\n' )
+
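The fixed_point iterator above re-walks the whole tree after every modification and stops only when a complete pass reports no change; breaking out of the inner loop matters because an iterator built over the old tree may be walking nodes that no longer exist. A Python sketch of the same loop over mutable list-based nodes (the rename pass exists only to exercise the restart logic; names are illustrative):

    def preorder(node):
        for c in node[1:]:
            yield c
            yield from preorder(c)

    def fixed_point(root, rewrite):
        """Re-run the traversal from the root until a whole pass changes nothing.
        rewrite(node) mutates the node in place and returns True if it did."""
        modified = True
        while modified:
            modified = False
            for node in preorder(root):
                if rewrite(node):
                    modified = True
                    break               # restart: the running iterator is now stale

    def rename_one(node):               # rewrite one 'uint' node per call
        if node[0] == 'uint':
            node[0] = 'number'
            return True
        return False

    tree = ['start', ['uint'], ['op', ['uint']]]
    fixed_point(tree, rename_one)
    print(tree)                         # ['start', ['number'], ['op', ['number']]]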
diff --git a/test/xml/Makefile b/test/xml/Makefile
new file mode 100644
index 00000000..25a0b6d3
--- /dev/null
+++ b/test/xml/Makefile
@@ -0,0 +1,34 @@
+#
+# Copyright 2002-2006 Adrian Thurston <thurston@cs.queensu.ca>
+#
+
+# This file is part of Ragel.
+#
+# Ragel is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# Ragel is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ragel; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+
+SRC = $(wildcard *.lm)
+BIN = $(SRC:%.lm=%.bin)
+COLM = ../../colm/colm
+
+all: $(BIN)
+
+$(BIN): $(COLM)
+
+$(BIN): %.bin: %.lm
+ $(COLM) $<
+
+clean:
+ rm -f *.cpp *.bin
diff --git a/test/xml/xml.in b/test/xml/xml.in
new file mode 100644
index 00000000..3c024f80
--- /dev/null
+++ b/test/xml/xml.in
@@ -0,0 +1,3962 @@
+<ragel version="5.24" filename="../colm/lmscan.rl" lang="C">
+<ragel_def name="rlscan">
+ <alphtype>char</alphtype>
+ <machine>
+ <action_list length="166">
+ <action id="0" name="inc_nl" line="217" col="16"><text>
+ lastnl = p;
+ column = 0;
+ line++;
+ </text></action>
+ <action id="1" name="initts" line="1" col="1"><init_tokstart></init_tokstart></action>
+ <action id="2" name="tokstart" line="1" col="1"><set_tokstart></set_tokstart></action>
+ <action id="3" name="tokend" line="1" col="1"><set_tokend>1</set_tokend></action>
+ <action id="4" name="last1" line="238" col="12"><set_tokend>1</set_tokend><sub_action><text> token( RE_Char, '\0' ); </text></sub_action></action>
+ <action id="5" name="last2" line="239" col="12"><set_tokend>1</set_tokend><sub_action><text> token( RE_Char, '\a' ); </text></sub_action></action>
+ <action id="6" name="last3" line="240" col="12"><set_tokend>1</set_tokend><sub_action><text> token( RE_Char, '\b' ); </text></sub_action></action>
+ <action id="7" name="last4" line="241" col="12"><set_tokend>1</set_tokend><sub_action><text> token( RE_Char, '\t' ); </text></sub_action></action>
+ <action id="8" name="last5" line="242" col="12"><set_tokend>1</set_tokend><sub_action><text> token( RE_Char, '\n' ); </text></sub_action></action>
+ <action id="9" name="last6" line="243" col="12"><set_tokend>1</set_tokend><sub_action><text> token( RE_Char, '\v' ); </text></sub_action></action>
+ <action id="10" name="last7" line="244" col="12"><set_tokend>1</set_tokend><sub_action><text> token( RE_Char, '\f' ); </text></sub_action></action>
+ <action id="11" name="last8" line="245" col="12"><set_tokend>1</set_tokend><sub_action><text> token( RE_Char, '\r' ); </text></sub_action></action>
+ <action id="12" name="last9" line="246" col="13"><set_tokend>1</set_tokend><sub_action><text> updateCol(); </text></sub_action></action>
+ <action id="13" name="last10" line="247" col="15"><set_tokend>1</set_tokend><sub_action><text> token( RE_Char, tokstart+1, tokend ); </text></sub_action></action>
+ <action id="14" name="last11" line="250" col="10"><set_tokend>1</set_tokend><sub_action><text> token( RE_Dash, 0, 0 ); </text></sub_action></action>
+ <action id="15" name="last12" line="253" col="10"><set_tokend>1</set_tokend><sub_action><text> token( RE_SqClose ); </text><ret></ret><text> </text></sub_action></action>
+ <action id="16" name="last13" line="255" col="10"><set_tokend>1</set_tokend><sub_action><text>
+ scan_error() &lt;&lt; "unterminated OR literal" &lt;&lt; endl;
+ </text></sub_action></action>
+ <action id="17" name="last14" line="260" col="12"><set_tokend>1</set_tokend><sub_action><text> token( RE_Char, tokstart, tokend ); </text></sub_action></action>
+ <action id="18" name="store15" line="265" col="13"><set_act>15</set_act></action>
+ <action id="19" name="store16" line="266" col="12"><set_act>16</set_act></action>
+ <action id="20" name="store17" line="267" col="12"><set_act>17</set_act></action>
+ <action id="21" name="store18" line="268" col="13"><set_act>18</set_act></action>
+ <action id="22" name="store19" line="269" col="11"><set_act>19</set_act></action>
+ <action id="23" name="store20" line="270" col="13"><set_act>20</set_act></action>
+ <action id="24" name="store21" line="273" col="12"><set_act>21</set_act></action>
+ <action id="25" name="last24" line="281" col="7"><set_tokend>1</set_tokend><sub_action><text> token( TK_Literal, tokstart, tokend ); </text></sub_action></action>
+ <action id="26" name="last26" line="284" col="11"><set_tokend>1</set_tokend><sub_action><text> token( RE_SqOpenNeg ); </text><call>166</call><text> </text></sub_action></action>
+ <action id="27" name="last27" line="286" col="10"><set_tokend>1</set_tokend><sub_action><text> token( '/'); </text><ret></ret><text> </text></sub_action></action>
+ <action id="28" name="last28" line="289" col="20"><set_tokend>1</set_tokend><sub_action><text> updateCol(); </text></sub_action></action>
+ <action id="29" name="last29" line="291" col="11"><set_tokend>1</set_tokend><sub_action><text> token( TK_ColonEquals ); </text></sub_action></action>
+ <action id="30" name="last30" line="294" col="11"><set_tokend>1</set_tokend><sub_action><text> token( TK_StartToState ); </text></sub_action></action>
+ <action id="31" name="last31" line="295" col="11"><set_tokend>1</set_tokend><sub_action><text> token( TK_AllToState ); </text></sub_action></action>
+ <action id="32" name="last32" line="296" col="11"><set_tokend>1</set_tokend><sub_action><text> token( TK_FinalToState ); </text></sub_action></action>
+ <action id="33" name="last33" line="297" col="11"><set_tokend>1</set_tokend><sub_action><text> token( TK_NotStartToState ); </text></sub_action></action>
+ <action id="34" name="last34" line="298" col="11"><set_tokend>1</set_tokend><sub_action><text> token( TK_NotFinalToState ); </text></sub_action></action>
+ <action id="35" name="last35" line="299" col="12"><set_tokend>1</set_tokend><sub_action><text> token( TK_MiddleToState ); </text></sub_action></action>
+ <action id="36" name="last36" line="302" col="11"><set_tokend>1</set_tokend><sub_action><text> token( TK_StartFromState ); </text></sub_action></action>
+ <action id="37" name="last37" line="303" col="11"><set_tokend>1</set_tokend><sub_action><text> token( TK_AllFromState ); </text></sub_action></action>
+ <action id="38" name="last38" line="304" col="11"><set_tokend>1</set_tokend><sub_action><text> token( TK_FinalFromState ); </text></sub_action></action>
+ <action id="39" name="last39" line="305" col="11"><set_tokend>1</set_tokend><sub_action><text> token( TK_NotStartFromState ); </text></sub_action></action>
+ <action id="40" name="last40" line="306" col="11"><set_tokend>1</set_tokend><sub_action><text> token( TK_NotFinalFromState ); </text></sub_action></action>
+ <action id="41" name="last41" line="307" col="12"><set_tokend>1</set_tokend><sub_action><text> token( TK_MiddleFromState ); </text></sub_action></action>
+ <action id="42" name="last42" line="310" col="11"><set_tokend>1</set_tokend><sub_action><text> token( TK_StartEOF ); </text></sub_action></action>
+ <action id="43" name="last43" line="311" col="11"><set_tokend>1</set_tokend><sub_action><text> token( TK_AllEOF ); </text></sub_action></action>
+ <action id="44" name="last44" line="312" col="11"><set_tokend>1</set_tokend><sub_action><text> token( TK_FinalEOF ); </text></sub_action></action>
+ <action id="45" name="last45" line="313" col="11"><set_tokend>1</set_tokend><sub_action><text> token( TK_NotStartEOF ); </text></sub_action></action>
+ <action id="46" name="last46" line="314" col="11"><set_tokend>1</set_tokend><sub_action><text> token( TK_NotFinalEOF ); </text></sub_action></action>
+ <action id="47" name="last47" line="315" col="12"><set_tokend>1</set_tokend><sub_action><text> token( TK_MiddleEOF ); </text></sub_action></action>
+ <action id="48" name="last48" line="318" col="11"><set_tokend>1</set_tokend><sub_action><text> token( TK_StartGblError ); </text></sub_action></action>
+ <action id="49" name="last49" line="319" col="11"><set_tokend>1</set_tokend><sub_action><text> token( TK_AllGblError ); </text></sub_action></action>
+ <action id="50" name="last50" line="320" col="11"><set_tokend>1</set_tokend><sub_action><text> token( TK_FinalGblError ); </text></sub_action></action>
+ <action id="51" name="last51" line="321" col="11"><set_tokend>1</set_tokend><sub_action><text> token( TK_NotStartGblError ); </text></sub_action></action>
+ <action id="52" name="last52" line="322" col="11"><set_tokend>1</set_tokend><sub_action><text> token( TK_NotFinalGblError ); </text></sub_action></action>
+ <action id="53" name="last53" line="323" col="12"><set_tokend>1</set_tokend><sub_action><text> token( TK_MiddleGblError ); </text></sub_action></action>
+ <action id="54" name="last54" line="326" col="11"><set_tokend>1</set_tokend><sub_action><text> token( TK_StartLocalError ); </text></sub_action></action>
+ <action id="55" name="last55" line="327" col="11"><set_tokend>1</set_tokend><sub_action><text> token( TK_AllLocalError ); </text></sub_action></action>
+ <action id="56" name="last56" line="328" col="11"><set_tokend>1</set_tokend><sub_action><text> token( TK_FinalLocalError ); </text></sub_action></action>
+ <action id="57" name="last57" line="329" col="11"><set_tokend>1</set_tokend><sub_action><text> token( TK_NotStartLocalError ); </text></sub_action></action>
+ <action id="58" name="last58" line="330" col="11"><set_tokend>1</set_tokend><sub_action><text> token( TK_NotFinalLocalError ); </text></sub_action></action>
+ <action id="59" name="last59" line="331" col="12"><set_tokend>1</set_tokend><sub_action><text> token( TK_MiddleLocalError ); </text></sub_action></action>
+ <action id="60" name="last61" line="337" col="11"><set_tokend>1</set_tokend><sub_action><text> token( TK_StartCond ); </text></sub_action></action>
+ <action id="61" name="last62" line="338" col="11"><set_tokend>1</set_tokend><sub_action><text> token( TK_AllCond ); </text></sub_action></action>
+ <action id="62" name="last63" line="339" col="11"><set_tokend>1</set_tokend><sub_action><text> token( TK_LeavingCond ); </text></sub_action></action>
+ <action id="63" name="last64" line="341" col="11"><set_tokend>1</set_tokend><sub_action><text> token( TK_DotDot ); </text></sub_action></action>
+ <action id="64" name="last65" line="342" col="11"><set_tokend>1</set_tokend><sub_action><text> token( TK_StarStar ); </text></sub_action></action>
+ <action id="65" name="last66" line="343" col="11"><set_tokend>1</set_tokend><sub_action><text> token( TK_DashDash ); </text></sub_action></action>
+ <action id="66" name="last67" line="344" col="11"><set_tokend>1</set_tokend><sub_action><text> token( TK_Arrow ); </text></sub_action></action>
+ <action id="67" name="last69" line="347" col="12"><set_tokend>1</set_tokend><sub_action><text> token( TK_ColonGtGt ); </text></sub_action></action>
+ <action id="68" name="last70" line="348" col="12"><set_tokend>1</set_tokend><sub_action><text> token( TK_LtColon ); </text></sub_action></action>
+ <action id="69" name="last72" line="354" col="9"><set_tokend>1</set_tokend><sub_action><text> updateCol(); </text></sub_action></action>
+ <action id="70" name="last73" line="357" col="6"><set_tokend>1</set_tokend></action>
+ <action id="71" name="last74" line="359" col="10"><set_tokend>1</set_tokend><sub_action><text> token( *tokstart ); </text></sub_action></action>
+ <action id="72" name="next21" line="273" col="12"><set_tokend>0</set_tokend><hold></hold><sub_action><text> token( TK_Word, tokstart, tokend ); </text></sub_action></action>
+ <action id="73" name="next22" line="276" col="13"><set_tokend>0</set_tokend><hold></hold><sub_action><text> token( TK_UInt, tokstart, tokend ); </text></sub_action></action>
+ <action id="74" name="next23" line="277" col="17"><set_tokend>0</set_tokend><hold></hold><sub_action><text> token( TK_Hex, tokstart, tokend ); </text></sub_action></action>
+ <action id="75" name="next24" line="281" col="7"><set_tokend>0</set_tokend><hold></hold><sub_action><text> token( TK_Literal, tokstart, tokend ); </text></sub_action></action>
+ <action id="76" name="next25" line="283" col="10"><set_tokend>0</set_tokend><hold></hold><sub_action><text> token( RE_SqOpen ); </text><call>166</call><text> </text></sub_action></action>
+ <action id="77" name="next60" line="334" col="11"><set_tokend>0</set_tokend><hold></hold><sub_action><text> token( TK_Middle ); </text></sub_action></action>
+ <action id="78" name="next68" line="346" col="12"><set_tokend>0</set_tokend><hold></hold><sub_action><text> token( TK_ColonGt ); </text></sub_action></action>
+ <action id="79" name="next71" line="351" col="15"><set_tokend>0</set_tokend><hold></hold><sub_action><text> updateCol(); </text></sub_action></action>
+ <action id="80" name="next74" line="359" col="10"><set_tokend>0</set_tokend><hold></hold><sub_action><text> token( *tokstart ); </text></sub_action></action>
+ <action id="81" name="lag22" line="276" col="13"><exec><get_tokend></get_tokend></exec><sub_action><text> token( TK_UInt, tokstart, tokend ); </text></sub_action></action>
+ <action id="82" name="switch" line="1" col="1"><lm_switch>
+ <sub_action id="15"><exec><get_tokend></get_tokend></exec><text> token( KW_When ); </text></sub_action>
+ <sub_action id="16"><exec><get_tokend></get_tokend></exec><text> token( KW_Eof ); </text></sub_action>
+ <sub_action id="17"><exec><get_tokend></get_tokend></exec><text> token( KW_Err ); </text></sub_action>
+ <sub_action id="18"><exec><get_tokend></get_tokend></exec><text> token( KW_Lerr ); </text></sub_action>
+ <sub_action id="19"><exec><get_tokend></get_tokend></exec><text> token( KW_To ); </text></sub_action>
+ <sub_action id="20"><exec><get_tokend></get_tokend></exec><text> token( KW_From ); </text></sub_action>
+ <sub_action id="21"><exec><get_tokend></get_tokend></exec><text> token( TK_Word, tokstart, tokend ); </text></sub_action>
+ </lm_switch></action>
+ <action id="83" name="last75" line="363" col="12"><set_tokend>1</set_tokend><sub_action><text> litBuf.append( '\a' ); </text></sub_action></action>
+ <action id="84" name="last76" line="364" col="12"><set_tokend>1</set_tokend><sub_action><text> litBuf.append( '\b' ); </text></sub_action></action>
+ <action id="85" name="last77" line="365" col="12"><set_tokend>1</set_tokend><sub_action><text> litBuf.append( '\t' ); </text></sub_action></action>
+ <action id="86" name="last78" line="366" col="12"><set_tokend>1</set_tokend><sub_action><text> litBuf.append( '\n' ); </text></sub_action></action>
+ <action id="87" name="last79" line="367" col="12"><set_tokend>1</set_tokend><sub_action><text> litBuf.append( '\v' ); </text></sub_action></action>
+ <action id="88" name="last80" line="368" col="12"><set_tokend>1</set_tokend><sub_action><text> litBuf.append( '\f' ); </text></sub_action></action>
+ <action id="89" name="last81" line="369" col="12"><set_tokend>1</set_tokend><sub_action><text> litBuf.append( '\r' ); </text></sub_action></action>
+ <action id="90" name="last82" line="371" col="12"><set_tokend>1</set_tokend><sub_action><text>
+ litBuf.append( tokstart[1] );
+ </text></sub_action></action>
+ <action id="91" name="last83" line="374" col="10"><set_tokend>1</set_tokend><sub_action><text>
+ if ( litBuf.length &gt; 0 ) {
+ token( TK_LitPat, litBuf.data, litBuf.data+litBuf.length );
+ litBuf.clear();
+ }
+ token( '"' );
+ </text><ret></ret><text>
+ </text></sub_action></action>
+ <action id="92" name="last84" line="382" col="9"><set_tokend>1</set_tokend><sub_action><text>
+ if ( litBuf.length &gt; 0 ) {
+ litBuf.append( '\n' );
+ token( TK_LitPat, litBuf.data, litBuf.data+litBuf.length );
+ litBuf.clear();
+ }
+ token( '"' );
+ </text><ret></ret><text>
+ </text></sub_action></action>
+ <action id="93" name="last85" line="391" col="10"><set_tokend>1</set_tokend><sub_action><text>
+ if ( litBuf.length &gt; 0 ) {
+ token( TK_LitPat, litBuf.data, litBuf.data+litBuf.length );
+ litBuf.clear();
+ }
+ token( '[' );
+ </text><call>10</call><text>
+ </text></sub_action></action>
+ <action id="94" name="last86" line="399" col="10"><set_tokend>1</set_tokend><sub_action><text>
+ litBuf.append( *tokstart );
+ </text></sub_action></action>
+ <action id="95" name="store87" line="406" col="12"><set_act>87</set_act></action>
+ <action id="96" name="store88" line="407" col="15"><set_act>88</set_act></action>
+ <action id="97" name="store89" line="408" col="17"><set_act>89</set_act></action>
+ <action id="98" name="store90" line="409" col="15"><set_act>90</set_act></action>
+ <action id="99" name="store91" line="410" col="13"><set_act>91</set_act></action>
+ <action id="100" name="store92" line="411" col="14"><set_act>92</set_act></action>
+ <action id="101" name="store93" line="412" col="18"><set_act>93</set_act></action>
+ <action id="102" name="store94" line="413" col="14"><set_act>94</set_act></action>
+ <action id="103" name="store95" line="414" col="16"><set_act>95</set_act></action>
+ <action id="104" name="store96" line="415" col="16"><set_act>96</set_act></action>
+ <action id="105" name="store97" line="416" col="13"><set_act>97</set_act></action>
+ <action id="106" name="store98" line="417" col="15"><set_act>98</set_act></action>
+ <action id="107" name="store99" line="418" col="16"><set_act>99</set_act></action>
+ <action id="108" name="store101" line="420" col="14"><set_act>101</set_act></action>
+ <action id="109" name="store102" line="421" col="12"><set_act>102</set_act></action>
+ <action id="110" name="store103" line="422" col="12"><set_act>103</set_act></action>
+ <action id="111" name="store104" line="424" col="11"><set_act>104</set_act></action>
+ <action id="112" name="store105" line="425" col="12"><set_act>105</set_act></action>
+ <action id="113" name="store106" line="426" col="15"><set_act>106</set_act></action>
+ <action id="114" name="store107" line="427" col="12"><set_act>107</set_act></action>
+ <action id="115" name="store108" line="428" col="16"><set_act>108</set_act></action>
+ <action id="116" name="store109" line="429" col="18"><set_act>109</set_act></action>
+ <action id="117" name="store110" line="430" col="12"><set_act>110</set_act></action>
+ <action id="118" name="store112" line="432" col="16"><set_act>112</set_act></action>
+ <action id="119" name="store113" line="433" col="17"><set_act>113</set_act></action>
+ <action id="120" name="store114" line="434" col="11"><set_act>114</set_act></action>
+ <action id="121" name="store115" line="435" col="13"><set_act>115</set_act></action>
+ <action id="122" name="store116" line="436" col="15"><set_act>116</set_act></action>
+ <action id="123" name="store117" line="437" col="14"><set_act>117</set_act></action>
+ <action id="124" name="store118" line="438" col="13"><set_act>118</set_act></action>
+ <action id="125" name="store119" line="439" col="18"><set_act>119</set_act></action>
+ <action id="126" name="store120" line="440" col="13"><set_act>120</set_act></action>
+ <action id="127" name="store121" line="441" col="14"><set_act>121</set_act></action>
+ <action id="128" name="store122" line="442" col="12"><set_act>122</set_act></action>
+ <action id="129" name="store123" line="443" col="13"><set_act>123</set_act></action>
+ <action id="130" name="store124" line="444" col="13"><set_act>124</set_act></action>
+ <action id="131" name="store125" line="445" col="13"><set_act>125</set_act></action>
+ <action id="132" name="store126" line="446" col="18"><set_act>126</set_act></action>
+ <action id="133" name="store127" line="447" col="13"><set_act>127</set_act></action>
+ <action id="134" name="store128" line="448" col="11"><set_act>128</set_act></action>
+ <action id="135" name="store129" line="449" col="18"><set_act>129</set_act></action>
+ <action id="136" name="store130" line="450" col="16"><set_act>130</set_act></action>
+ <action id="137" name="store131" line="453" col="12"><set_act>131</set_act></action>
+ <action id="138" name="last133" line="457" col="10"><set_tokend>1</set_tokend><sub_action><text>
+ token( '/' );
+ </text><call>168</call><text>
+ </text></sub_action></action>
+ <action id="139" name="last134" line="462" col="20"><set_tokend>1</set_tokend><sub_action><text>
+ token( '"' );
+ token( TK_LitPat, tokstart+1, tokend );
+ token( '"' );
+ </text></sub_action></action>
+ <action id="140" name="last135" line="468" col="16"><set_tokend>1</set_tokend><sub_action><text>
+ token( TK_Literal, tokstart, tokend );
+ </text></sub_action></action>
+ <action id="141" name="last136" line="472" col="10"><set_tokend>1</set_tokend><sub_action><text>
+ token( '"' );
+ litBuf.clear();
+ </text><call>203</call><text>
+ </text></sub_action></action>
+ <action id="142" name="last137" line="477" col="10"><set_tokend>1</set_tokend><sub_action><text>
+ token( '[' );
+ </text><call>10</call><text>
+ </text></sub_action></action>
+ <action id="143" name="last138" line="482" col="10"><set_tokend>1</set_tokend><sub_action><text>
+ token( ']' );
+ if ( top &gt; 0 )
+ </text><ret></ret><text>
+ </text></sub_action></action>
+ <action id="144" name="last139" line="489" col="20"><set_tokend>1</set_tokend><sub_action><text> updateCol(); </text></sub_action></action>
+ <action id="145" name="last140" line="491" col="11"><set_tokend>1</set_tokend><sub_action><text> token( TK_ColonEquals ); </text></sub_action></action>
+ <action id="146" name="last141" line="492" col="11"><set_tokend>1</set_tokend><sub_action><text> token( TK_DoubleArrow ); </text></sub_action></action>
+ <action id="147" name="last142" line="493" col="11"><set_tokend>1</set_tokend><sub_action><text> token( TK_DoubleEquals ); </text></sub_action></action>
+ <action id="148" name="last143" line="494" col="11"><set_tokend>1</set_tokend><sub_action><text> token( TK_NotEquals ); </text></sub_action></action>
+ <action id="149" name="last144" line="495" col="11"><set_tokend>1</set_tokend><sub_action><text> token( TK_DoubleColon ); </text></sub_action></action>
+ <action id="150" name="last145" line="496" col="11"><set_tokend>1</set_tokend><sub_action><text> token( TK_LessEquals ); </text></sub_action></action>
+ <action id="151" name="last146" line="497" col="11"><set_tokend>1</set_tokend><sub_action><text> token( TK_GreaterEquals ); </text></sub_action></action>
+ <action id="152" name="last147" line="498" col="11"><set_tokend>1</set_tokend><sub_action><text> token( TK_LeftArrow ); </text></sub_action></action>
+ <action id="153" name="last148" line="499" col="11"><set_tokend>1</set_tokend><sub_action><text> token( TK_AmpAmp ); </text></sub_action></action>
+ <action id="154" name="last149" line="500" col="11"><set_tokend>1</set_tokend><sub_action><text> token( TK_BarBar ); </text></sub_action></action>
+ <action id="155" name="last150" line="502" col="43"><set_tokend>1</set_tokend><sub_action><text> token( *tokstart ); </text></sub_action></action>
+ <action id="156" name="last152" line="509" col="9"><set_tokend>1</set_tokend><sub_action><text> updateCol(); </text></sub_action></action>
+ <action id="157" name="last153" line="512" col="6"><set_tokend>1</set_tokend></action>
+ <action id="158" name="last154" line="514" col="10"><set_tokend>1</set_tokend><sub_action><text> token( *tokstart ); </text></sub_action></action>
+ <action id="159" name="next100" line="419" col="12"><set_tokend>0</set_tokend><hold></hold><sub_action><text> token( KW_Pri ); </text></sub_action></action>
+ <action id="160" name="next111" line="431" col="14"><set_tokend>0</set_tokend><hold></hold><sub_action><text> token( KW_Print ); </text></sub_action></action>
+ <action id="161" name="next131" line="453" col="12"><set_tokend>0</set_tokend><hold></hold><sub_action><text> token( TK_Word, tokstart, tokend ); </text></sub_action></action>
+ <action id="162" name="next132" line="455" col="13"><set_tokend>0</set_tokend><hold></hold><sub_action><text> token( TK_Number, tokstart, tokend ); </text></sub_action></action>
+ <action id="163" name="next151" line="506" col="15"><set_tokend>0</set_tokend><hold></hold><sub_action><text> updateCol(); </text></sub_action></action>
+ <action id="164" name="next154" line="514" col="10"><set_tokend>0</set_tokend><hold></hold><sub_action><text> token( *tokstart ); </text></sub_action></action>
+ <action id="165" name="switch" line="1" col="1"><lm_switch>
+ <sub_action id="87"><exec><get_tokend></get_tokend></exec><text> token( KW_Lex ); </text></sub_action>
+ <sub_action id="88"><exec><get_tokend></get_tokend></exec><text> token( KW_Action ); </text></sub_action>
+ <sub_action id="89"><exec><get_tokend></get_tokend></exec><text> token( KW_AlphType ); </text></sub_action>
+ <sub_action id="90"><exec><get_tokend></get_tokend></exec><text> token( KW_Commit ); </text></sub_action>
+ <sub_action id="91"><exec><get_tokend></get_tokend></exec><text> token( KW_Undo ); </text></sub_action>
+ <sub_action id="92"><exec><get_tokend></get_tokend></exec><text> token( KW_Final ); </text></sub_action>
+ <sub_action id="93"><exec><get_tokend></get_tokend></exec><text> token( KW_Translate ); </text></sub_action>
+ <sub_action id="94"><exec><get_tokend></get_tokend></exec><text> token( KW_Token ); </text></sub_action>
+ <sub_action id="95"><exec><get_tokend></get_tokend></exec><text> token( KW_Literal ); </text></sub_action>
+ <sub_action id="96"><exec><get_tokend></get_tokend></exec><text> token( KW_NonTerm ); </text></sub_action>
+ <sub_action id="97"><exec><get_tokend></get_tokend></exec><text> token( KW_Uses ); </text></sub_action>
+ <sub_action id="98"><exec><get_tokend></get_tokend></exec><text> token( KW_Parser ); </text></sub_action>
+ <sub_action id="99"><exec><get_tokend></get_tokend></exec><text> token( KW_Include ); </text></sub_action>
+ <sub_action id="101"><exec><get_tokend></get_tokend></exec><text> token( KW_Write ); </text></sub_action>
+ <sub_action id="102"><exec><get_tokend></get_tokend></exec><text> token( KW_Nfa ); </text></sub_action>
+ <sub_action id="103"><exec><get_tokend></get_tokend></exec><text> token( KW_Pda ); </text></sub_action>
+ <sub_action id="104"><exec><get_tokend></get_tokend></exec><text> token( KW_Rl ); </text></sub_action>
+ <sub_action id="105"><exec><get_tokend></get_tokend></exec><text> token( KW_Cfl ); </text></sub_action>
+ <sub_action id="106"><exec><get_tokend></get_tokend></exec><text> token( KW_Ignore ); </text></sub_action>
+ <sub_action id="107"><exec><get_tokend></get_tokend></exec><text> token( KW_End ); </text></sub_action>
+ <sub_action id="108"><exec><get_tokend></get_tokend></exec><text> token( KW_Pattern ); </text></sub_action>
+ <sub_action id="109"><exec><get_tokend></get_tokend></exec><text> token( KW_Construct ); </text></sub_action>
+ <sub_action id="110"><exec><get_tokend></get_tokend></exec><text> token( KW_Red ); </text></sub_action>
+ <sub_action id="112"><exec><get_tokend></get_tokend></exec><text> token( KW_TypeId ); </text></sub_action>
+ <sub_action id="113"><exec><get_tokend></get_tokend></exec><text> token( KW_TypeDef ); </text></sub_action>
+ <sub_action id="114"><exec><get_tokend></get_tokend></exec><text> token( KW_If ); </text></sub_action>
+ <sub_action id="115"><exec><get_tokend></get_tokend></exec><text> token( KW_Init ); </text></sub_action>
+ <sub_action id="116"><exec><get_tokend></get_tokend></exec><text> token( KW_Reject ); </text></sub_action>
+ <sub_action id="117"><exec><get_tokend></get_tokend></exec><text> token( KW_While ); </text></sub_action>
+ <sub_action id="118"><exec><get_tokend></get_tokend></exec><text> token( KW_Else ); </text></sub_action>
+ <sub_action id="119"><exec><get_tokend></get_tokend></exec><text> token( KW_SubParser ); </text></sub_action>
+ <sub_action id="120"><exec><get_tokend></get_tokend></exec><text> token( KW_Next ); </text></sub_action>
+ <sub_action id="121"><exec><get_tokend></get_tokend></exec><text> token( KW_Match ); </text></sub_action>
+ <sub_action id="122"><exec><get_tokend></get_tokend></exec><text> token( KW_For ); </text></sub_action>
+ <sub_action id="123"><exec><get_tokend></get_tokend></exec><text> token( KW_Iter ); </text></sub_action>
+ <sub_action id="124"><exec><get_tokend></get_tokend></exec><text> token( KW_Find ); </text></sub_action>
+ <sub_action id="125"><exec><get_tokend></get_tokend></exec><text> token( KW_Root ); </text></sub_action>
+ <sub_action id="126"><exec><get_tokend></get_tokend></exec><text> token( KW_PrintXML ); </text></sub_action>
+ <sub_action id="127"><exec><get_tokend></get_tokend></exec><text> token( KW_Then ); </text></sub_action>
+ <sub_action id="128"><exec><get_tokend></get_tokend></exec><text> token( KW_Do ); </text></sub_action>
+ <sub_action id="129"><exec><get_tokend></get_tokend></exec><text> token( KW_Namespace ); </text></sub_action>
+ <sub_action id="130"><exec><get_tokend></get_tokend></exec><text> token( KW_Scanner ); </text></sub_action>
+ <sub_action id="131"><exec><get_tokend></get_tokend></exec><text> token( TK_Word, tokstart, tokend ); </text></sub_action>
+ </lm_switch></action>
+ </action_list>
+ <action_table_list length="166">
+ <action_table id="0" length="2">0 144</action_table>
+ <action_table id="1" length="1">0</action_table>
+ <action_table id="2" length="1">140</action_table>
+ <action_table id="3" length="2">0 139</action_table>
+ <action_table id="4" length="2">0 28</action_table>
+ <action_table id="5" length="1">81</action_table>
+ <action_table id="6" length="1">1</action_table>
+ <action_table id="7" length="1">2</action_table>
+ <action_table id="8" length="1">158</action_table>
+ <action_table id="9" length="1">157</action_table>
+ <action_table id="10" length="2">0 156</action_table>
+ <action_table id="11" length="1">141</action_table>
+ <action_table id="12" length="1">3</action_table>
+ <action_table id="13" length="1">155</action_table>
+ <action_table id="14" length="1">138</action_table>
+ <action_table id="15" length="2">3 137</action_table>
+ <action_table id="16" length="1">142</action_table>
+ <action_table id="17" length="1">143</action_table>
+ <action_table id="18" length="1">163</action_table>
+ <action_table id="19" length="1">164</action_table>
+ <action_table id="20" length="1">148</action_table>
+ <action_table id="21" length="1">153</action_table>
+ <action_table id="22" length="1">162</action_table>
+ <action_table id="23" length="1">149</action_table>
+ <action_table id="24" length="1">145</action_table>
+ <action_table id="25" length="1">152</action_table>
+ <action_table id="26" length="1">150</action_table>
+ <action_table id="27" length="1">147</action_table>
+ <action_table id="28" length="1">146</action_table>
+ <action_table id="29" length="1">151</action_table>
+ <action_table id="30" length="1">165</action_table>
+ <action_table id="31" length="1">161</action_table>
+ <action_table id="32" length="2">3 96</action_table>
+ <action_table id="33" length="2">3 97</action_table>
+ <action_table id="34" length="2">3 112</action_table>
+ <action_table id="35" length="2">3 98</action_table>
+ <action_table id="36" length="2">3 116</action_table>
+ <action_table id="37" length="2">3 134</action_table>
+ <action_table id="38" length="2">3 124</action_table>
+ <action_table id="39" length="2">3 114</action_table>
+ <action_table id="40" length="2">3 130</action_table>
+ <action_table id="41" length="2">3 100</action_table>
+ <action_table id="42" length="2">3 128</action_table>
+ <action_table id="43" length="2">3 120</action_table>
+ <action_table id="44" length="2">3 113</action_table>
+ <action_table id="45" length="2">3 107</action_table>
+ <action_table id="46" length="2">3 121</action_table>
+ <action_table id="47" length="2">3 129</action_table>
+ <action_table id="48" length="2">3 95</action_table>
+ <action_table id="49" length="2">3 103</action_table>
+ <action_table id="50" length="2">3 127</action_table>
+ <action_table id="51" length="2">3 135</action_table>
+ <action_table id="52" length="2">3 126</action_table>
+ <action_table id="53" length="2">3 109</action_table>
+ <action_table id="54" length="2">3 104</action_table>
+ <action_table id="55" length="2">3 106</action_table>
+ <action_table id="56" length="2">3 115</action_table>
+ <action_table id="57" length="2">3 110</action_table>
+ <action_table id="58" length="1">159</action_table>
+ <action_table id="59" length="1">160</action_table>
+ <action_table id="60" length="2">3 132</action_table>
+ <action_table id="61" length="2">3 111</action_table>
+ <action_table id="62" length="2">3 117</action_table>
+ <action_table id="63" length="2">3 122</action_table>
+ <action_table id="64" length="2">3 131</action_table>
+ <action_table id="65" length="2">3 136</action_table>
+ <action_table id="66" length="2">3 125</action_table>
+ <action_table id="67" length="2">3 133</action_table>
+ <action_table id="68" length="2">3 102</action_table>
+ <action_table id="69" length="2">3 101</action_table>
+ <action_table id="70" length="2">3 119</action_table>
+ <action_table id="71" length="2">3 118</action_table>
+ <action_table id="72" length="2">3 99</action_table>
+ <action_table id="73" length="2">3 105</action_table>
+ <action_table id="74" length="2">3 123</action_table>
+ <action_table id="75" length="2">3 108</action_table>
+ <action_table id="76" length="1">154</action_table>
+ <action_table id="77" length="1">17</action_table>
+ <action_table id="78" length="1">16</action_table>
+ <action_table id="79" length="1">14</action_table>
+ <action_table id="80" length="1">15</action_table>
+ <action_table id="81" length="1">13</action_table>
+ <action_table id="82" length="1">12</action_table>
+ <action_table id="83" length="1">4</action_table>
+ <action_table id="84" length="1">5</action_table>
+ <action_table id="85" length="1">6</action_table>
+ <action_table id="86" length="1">10</action_table>
+ <action_table id="87" length="1">8</action_table>
+ <action_table id="88" length="1">11</action_table>
+ <action_table id="89" length="1">7</action_table>
+ <action_table id="90" length="1">9</action_table>
+ <action_table id="91" length="1">71</action_table>
+ <action_table id="92" length="1">70</action_table>
+ <action_table id="93" length="2">0 69</action_table>
+ <action_table id="94" length="1">27</action_table>
+ <action_table id="95" length="2">3 24</action_table>
+ <action_table id="96" length="1">79</action_table>
+ <action_table id="97" length="1">75</action_table>
+ <action_table id="98" length="1">25</action_table>
+ <action_table id="99" length="1">80</action_table>
+ <action_table id="100" length="1">49</action_table>
+ <action_table id="101" length="1">37</action_table>
+ <action_table id="102" length="1">43</action_table>
+ <action_table id="103" length="1">61</action_table>
+ <action_table id="104" length="1">55</action_table>
+ <action_table id="105" length="1">31</action_table>
+ <action_table id="106" length="1">50</action_table>
+ <action_table id="107" length="1">38</action_table>
+ <action_table id="108" length="1">44</action_table>
+ <action_table id="109" length="1">62</action_table>
+ <action_table id="110" length="1">56</action_table>
+ <action_table id="111" length="1">32</action_table>
+ <action_table id="112" length="1">64</action_table>
+ <action_table id="113" length="1">65</action_table>
+ <action_table id="114" length="1">66</action_table>
+ <action_table id="115" length="1">63</action_table>
+ <action_table id="116" length="1">73</action_table>
+ <action_table id="117" length="1">74</action_table>
+ <action_table id="118" length="1">29</action_table>
+ <action_table id="119" length="1">78</action_table>
+ <action_table id="120" length="1">67</action_table>
+ <action_table id="121" length="1">51</action_table>
+ <action_table id="122" length="1">39</action_table>
+ <action_table id="123" length="1">45</action_table>
+ <action_table id="124" length="1">68</action_table>
+ <action_table id="125" length="1">57</action_table>
+ <action_table id="126" length="1">33</action_table>
+ <action_table id="127" length="1">77</action_table>
+ <action_table id="128" length="1">53</action_table>
+ <action_table id="129" length="1">41</action_table>
+ <action_table id="130" length="1">47</action_table>
+ <action_table id="131" length="1">59</action_table>
+ <action_table id="132" length="1">35</action_table>
+ <action_table id="133" length="1">48</action_table>
+ <action_table id="134" length="1">36</action_table>
+ <action_table id="135" length="1">42</action_table>
+ <action_table id="136" length="1">60</action_table>
+ <action_table id="137" length="1">54</action_table>
+ <action_table id="138" length="1">30</action_table>
+ <action_table id="139" length="1">52</action_table>
+ <action_table id="140" length="1">40</action_table>
+ <action_table id="141" length="1">46</action_table>
+ <action_table id="142" length="1">58</action_table>
+ <action_table id="143" length="1">34</action_table>
+ <action_table id="144" length="1">82</action_table>
+ <action_table id="145" length="1">76</action_table>
+ <action_table id="146" length="1">26</action_table>
+ <action_table id="147" length="1">72</action_table>
+ <action_table id="148" length="2">3 19</action_table>
+ <action_table id="149" length="2">3 20</action_table>
+ <action_table id="150" length="2">3 23</action_table>
+ <action_table id="151" length="2">3 21</action_table>
+ <action_table id="152" length="2">3 22</action_table>
+ <action_table id="153" length="2">3 18</action_table>
+ <action_table id="154" length="1">94</action_table>
+ <action_table id="155" length="2">0 92</action_table>
+ <action_table id="156" length="1">91</action_table>
+ <action_table id="157" length="1">93</action_table>
+ <action_table id="158" length="1">90</action_table>
+ <action_table id="159" length="1">83</action_table>
+ <action_table id="160" length="1">84</action_table>
+ <action_table id="161" length="1">88</action_table>
+ <action_table id="162" length="1">86</action_table>
+ <action_table id="163" length="1">89</action_table>
+ <action_table id="164" length="1">85</action_table>
+ <action_table id="165" length="1">87</action_table>
+ </action_table_list>
+ <start_state>10</start_state>
+ <entry_points>
+ <entry name="or_literal">166</entry>
+ <entry name="regular_type">168</entry>
+ <entry name="literal_pattern">203</entry>
+ <entry name="main">10</entry>
+ </entry_points>
+ <state_list length="205">
+ <state id="0">
+ <trans_list length="3">
+ <t>-128 9 0 x</t>
+ <t>10 10 10 0</t>
+ <t>11 127 0 x</t>
+ </trans_list>
+ </state>
+
+ <state id="1">
+ <trans_list length="7">
+ <t>-128 9 1 x</t>
+ <t>10 10 1 1</t>
+ <t>11 38 1 x</t>
+ <t>39 39 10 2</t>
+ <t>40 91 1 x</t>
+ <t>92 92 2 x</t>
+ <t>93 127 1 x</t>
+ </trans_list>
+ </state>
+
+ <state id="2">
+ <trans_list length="3">
+ <t>-128 9 1 x</t>
+ <t>10 10 1 1</t>
+ <t>11 127 1 x</t>
+ </trans_list>
+ </state>
+
+ <state id="3">
+ <trans_list length="3">
+ <t>-128 9 3 x</t>
+ <t>10 10 10 3</t>
+ <t>11 127 3 x</t>
+ </trans_list>
+ </state>
+
+ <state id="4">
+ <trans_list length="7">
+ <t>-128 9 4 x</t>
+ <t>10 10 4 1</t>
+ <t>11 33 4 x</t>
+ <t>34 34 171 x</t>
+ <t>35 91 4 x</t>
+ <t>92 92 5 x</t>
+ <t>93 127 4 x</t>
+ </trans_list>
+ </state>
+
+ <state id="5">
+ <trans_list length="3">
+ <t>-128 9 4 x</t>
+ <t>10 10 4 1</t>
+ <t>11 127 4 x</t>
+ </trans_list>
+ </state>
+
+ <state id="6">
+ <trans_list length="3">
+ <t>-128 9 6 x</t>
+ <t>10 10 168 4</t>
+ <t>11 127 6 x</t>
+ </trans_list>
+ </state>
+
+ <state id="7">
+ <trans_list length="7">
+ <t>-128 9 7 x</t>
+ <t>10 10 7 1</t>
+ <t>11 38 7 x</t>
+ <t>39 39 171 x</t>
+ <t>40 91 7 x</t>
+ <t>92 92 8 x</t>
+ <t>93 127 7 x</t>
+ </trans_list>
+ </state>
+
+ <state id="8">
+ <trans_list length="3">
+ <t>-128 9 7 x</t>
+ <t>10 10 7 1</t>
+ <t>11 127 7 x</t>
+ </trans_list>
+ </state>
+
+ <state id="9">
+ <trans_list length="7">
+ <t>-128 47 168 5</t>
+ <t>48 57 181 x</t>
+ <t>58 64 168 5</t>
+ <t>65 70 181 x</t>
+ <t>71 96 168 5</t>
+ <t>97 102 181 x</t>
+ <t>103 127 168 5</t>
+ </trans_list>
+ </state>
+
+ <state id="10" final="t">
+ <state_actions>6 7 x</state_actions>
+ <trans_list length="61">
+ <t>-128 -1 10 8</t>
+ <t>0 0 10 9</t>
+ <t>1 8 10 8</t>
+ <t>9 9 11 x</t>
+ <t>10 10 10 10</t>
+ <t>11 12 10 8</t>
+ <t>13 13 11 x</t>
+ <t>14 31 10 8</t>
+ <t>32 32 11 x</t>
+ <t>33 33 12 x</t>
+ <t>34 34 10 11</t>
+ <t>35 35 13 12</t>
+ <t>36 37 10 8</t>
+ <t>38 38 14 x</t>
+ <t>39 39 15 12</t>
+ <t>40 43 10 13</t>
+ <t>44 44 10 8</t>
+ <t>45 45 10 13</t>
+ <t>46 46 10 8</t>
+ <t>47 47 10 14</t>
+ <t>48 57 16 x</t>
+ <t>58 58 17 x</t>
+ <t>59 59 10 8</t>
+ <t>60 60 18 x</t>
+ <t>61 61 19 x</t>
+ <t>62 62 20 x</t>
+ <t>63 64 10 8</t>
+ <t>65 90 21 15</t>
+ <t>91 91 10 16</t>
+ <t>92 92 10 8</t>
+ <t>93 93 10 17</t>
+ <t>94 94 10 8</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 8</t>
+ <t>97 97 22 x</t>
+ <t>98 98 21 15</t>
+ <t>99 99 33 x</t>
+ <t>100 100 45 x</t>
+ <t>101 101 46 x</t>
+ <t>102 102 50 x</t>
+ <t>103 104 21 15</t>
+ <t>105 105 55 x</t>
+ <t>106 107 21 15</t>
+ <t>108 108 68 x</t>
+ <t>109 109 75 x</t>
+ <t>110 110 79 x</t>
+ <t>111 111 21 15</t>
+ <t>112 112 95 x</t>
+ <t>113 113 21 15</t>
+ <t>114 114 112 x</t>
+ <t>115 115 119 x</t>
+ <t>116 116 132 x</t>
+ <t>117 117 152 x</t>
+ <t>118 118 21 15</t>
+ <t>119 119 157 x</t>
+ <t>120 122 21 15</t>
+ <t>123 123 10 8</t>
+ <t>124 124 164 x</t>
+ <t>125 125 10 8</t>
+ <t>126 126 165 12</t>
+ <t>127 127 10 8</t>
+ </trans_list>
+ </state>
+
+ <state id="11" final="t">
+ <trans_list length="7">
+ <t>-128 8 10 18</t>
+ <t>9 9 11 x</t>
+ <t>10 12 10 18</t>
+ <t>13 13 11 x</t>
+ <t>14 31 10 18</t>
+ <t>32 32 11 x</t>
+ <t>33 127 10 18</t>
+ </trans_list>
+ </state>
+
+ <state id="12" final="t">
+ <trans_list length="3">
+ <t>-128 60 10 19</t>
+ <t>61 61 10 20</t>
+ <t>62 127 10 19</t>
+ </trans_list>
+ </state>
+
+ <state id="13" final="t">
+ <trans_list length="3">
+ <t>-128 9 0 x</t>
+ <t>10 10 10 0</t>
+ <t>11 127 0 x</t>
+ </trans_list>
+ </state>
+
+ <state id="14" final="t">
+ <trans_list length="3">
+ <t>-128 37 10 19</t>
+ <t>38 38 10 21</t>
+ <t>39 127 10 19</t>
+ </trans_list>
+ </state>
+
+ <state id="15" final="t">
+ <trans_list length="7">
+ <t>-128 9 1 x</t>
+ <t>10 10 1 1</t>
+ <t>11 38 1 x</t>
+ <t>39 39 10 2</t>
+ <t>40 91 1 x</t>
+ <t>92 92 2 x</t>
+ <t>93 127 1 x</t>
+ </trans_list>
+ </state>
+
+ <state id="16" final="t">
+ <trans_list length="3">
+ <t>-128 47 10 22</t>
+ <t>48 57 16 x</t>
+ <t>58 127 10 22</t>
+ </trans_list>
+ </state>
+
+ <state id="17" final="t">
+ <trans_list length="5">
+ <t>-128 57 10 19</t>
+ <t>58 58 10 23</t>
+ <t>59 60 10 19</t>
+ <t>61 61 10 24</t>
+ <t>62 127 10 19</t>
+ </trans_list>
+ </state>
+
+ <state id="18" final="t">
+ <trans_list length="5">
+ <t>-128 44 10 19</t>
+ <t>45 45 10 25</t>
+ <t>46 60 10 19</t>
+ <t>61 61 10 26</t>
+ <t>62 127 10 19</t>
+ </trans_list>
+ </state>
+
+ <state id="19" final="t">
+ <trans_list length="4">
+ <t>-128 60 10 19</t>
+ <t>61 61 10 27</t>
+ <t>62 62 10 28</t>
+ <t>63 127 10 19</t>
+ </trans_list>
+ </state>
+
+ <state id="20" final="t">
+ <trans_list length="3">
+ <t>-128 60 10 19</t>
+ <t>61 61 10 29</t>
+ <t>62 127 10 19</t>
+ </trans_list>
+ </state>
+
+ <state id="21" final="t">
+ <trans_list length="9">
+ <t>-128 47 10 30</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 30</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 30</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 30</t>
+ <t>97 122 21 15</t>
+ <t>123 127 10 30</t>
+ </trans_list>
+ </state>
+
+ <state id="22" final="t">
+ <trans_list length="13">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 98 21 15</t>
+ <t>99 99 23 x</t>
+ <t>100 107 21 15</t>
+ <t>108 108 27 x</t>
+ <t>109 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="23" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 115 21 15</t>
+ <t>116 116 24 x</t>
+ <t>117 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="24" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 104 21 15</t>
+ <t>105 105 25 x</t>
+ <t>106 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="25" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 110 21 15</t>
+ <t>111 111 26 x</t>
+ <t>112 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="26" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 109 21 15</t>
+ <t>110 110 21 32</t>
+ <t>111 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="27" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 111 21 15</t>
+ <t>112 112 28 x</t>
+ <t>113 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="28" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 103 21 15</t>
+ <t>104 104 29 x</t>
+ <t>105 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="29" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 115 21 15</t>
+ <t>116 116 30 x</t>
+ <t>117 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="30" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 120 21 15</t>
+ <t>121 121 31 x</t>
+ <t>122 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="31" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 111 21 15</t>
+ <t>112 112 32 x</t>
+ <t>113 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="32" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 100 21 15</t>
+ <t>101 101 21 33</t>
+ <t>102 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="33" final="t">
+ <trans_list length="13">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 101 21 15</t>
+ <t>102 102 34 x</t>
+ <t>103 110 21 15</t>
+ <t>111 111 35 x</t>
+ <t>112 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="34" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 107 21 15</t>
+ <t>108 108 21 34</t>
+ <t>109 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="35" final="t">
+ <trans_list length="12">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 108 21 15</t>
+ <t>109 109 36 x</t>
+ <t>110 110 39 x</t>
+ <t>111 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="36" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 108 21 15</t>
+ <t>109 109 37 x</t>
+ <t>110 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="37" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 104 21 15</t>
+ <t>105 105 38 x</t>
+ <t>106 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="38" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 115 21 15</t>
+ <t>116 116 21 35</t>
+ <t>117 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="39" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 114 21 15</t>
+ <t>115 115 40 x</t>
+ <t>116 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="40" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 115 21 15</t>
+ <t>116 116 41 x</t>
+ <t>117 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="41" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 113 21 15</t>
+ <t>114 114 42 x</t>
+ <t>115 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="42" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 116 21 15</t>
+ <t>117 117 43 x</t>
+ <t>118 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="43" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 98 21 15</t>
+ <t>99 99 44 x</t>
+ <t>100 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="44" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 115 21 15</t>
+ <t>116 116 21 36</t>
+ <t>117 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="45" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 110 21 15</t>
+ <t>111 111 21 37</t>
+ <t>112 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="46" final="t">
+ <trans_list length="13">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 107 21 15</t>
+ <t>108 108 47 x</t>
+ <t>109 109 21 15</t>
+ <t>110 110 49 x</t>
+ <t>111 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="47" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 114 21 15</t>
+ <t>115 115 48 x</t>
+ <t>116 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="48" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 100 21 15</t>
+ <t>101 101 21 38</t>
+ <t>102 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="49" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 99 21 15</t>
+ <t>100 100 21 39</t>
+ <t>101 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="50" final="t">
+ <trans_list length="13">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 104 21 15</t>
+ <t>105 105 51 x</t>
+ <t>106 110 21 15</t>
+ <t>111 111 54 x</t>
+ <t>112 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="51" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 109 21 15</t>
+ <t>110 110 52 x</t>
+ <t>111 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="52" final="t">
+ <trans_list length="12">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 97 53 x</t>
+ <t>98 99 21 15</t>
+ <t>100 100 21 40</t>
+ <t>101 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="53" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 107 21 15</t>
+ <t>108 108 21 41</t>
+ <t>109 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="54" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 113 21 15</t>
+ <t>114 114 21 42</t>
+ <t>115 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="55" final="t">
+ <trans_list length="16">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 101 21 15</t>
+ <t>102 102 21 43</t>
+ <t>103 103 56 x</t>
+ <t>104 109 21 15</t>
+ <t>110 110 60 x</t>
+ <t>111 115 21 15</t>
+ <t>116 116 66 x</t>
+ <t>117 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="56" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 109 21 15</t>
+ <t>110 110 57 x</t>
+ <t>111 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="57" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 110 21 15</t>
+ <t>111 111 58 x</t>
+ <t>112 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="58" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 113 21 15</t>
+ <t>114 114 59 x</t>
+ <t>115 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="59" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 100 21 15</t>
+ <t>101 101 21 44</t>
+ <t>102 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="60" final="t">
+ <trans_list length="13">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 98 21 15</t>
+ <t>99 99 61 x</t>
+ <t>100 104 21 15</t>
+ <t>105 105 65 x</t>
+ <t>106 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="61" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 107 21 15</t>
+ <t>108 108 62 x</t>
+ <t>109 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="62" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 116 21 15</t>
+ <t>117 117 63 x</t>
+ <t>118 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="63" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 99 21 15</t>
+ <t>100 100 64 x</t>
+ <t>101 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="64" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 100 21 15</t>
+ <t>101 101 21 45</t>
+ <t>102 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="65" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 115 21 15</t>
+ <t>116 116 21 46</t>
+ <t>117 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="66" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 100 21 15</t>
+ <t>101 101 67 x</t>
+ <t>102 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="67" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 113 21 15</t>
+ <t>114 114 21 47</t>
+ <t>115 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="68" final="t">
+ <trans_list length="13">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 100 21 15</t>
+ <t>101 101 69 x</t>
+ <t>102 104 21 15</t>
+ <t>105 105 70 x</t>
+ <t>106 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="69" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 119 21 15</t>
+ <t>120 120 21 48</t>
+ <t>121 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="70" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 115 21 15</t>
+ <t>116 116 71 x</t>
+ <t>117 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="71" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 100 21 15</t>
+ <t>101 101 72 x</t>
+ <t>102 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="72" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 113 21 15</t>
+ <t>114 114 73 x</t>
+ <t>115 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="73" final="t">
+ <trans_list length="10">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 97 74 x</t>
+ <t>98 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="74" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 107 21 15</t>
+ <t>108 108 21 49</t>
+ <t>109 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="75" final="t">
+ <trans_list length="10">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 97 76 x</t>
+ <t>98 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="76" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 115 21 15</t>
+ <t>116 116 77 x</t>
+ <t>117 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="77" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 98 21 15</t>
+ <t>99 99 78 x</t>
+ <t>100 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="78" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 103 21 15</t>
+ <t>104 104 21 50</t>
+ <t>105 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="79" final="t">
+ <trans_list length="15">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 97 80 x</t>
+ <t>98 100 21 15</t>
+ <t>101 101 87 x</t>
+ <t>102 102 89 x</t>
+ <t>103 110 21 15</t>
+ <t>111 111 90 x</t>
+ <t>112 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="80" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 108 21 15</t>
+ <t>109 109 81 x</t>
+ <t>110 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="81" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 100 21 15</t>
+ <t>101 101 82 x</t>
+ <t>102 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="82" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 114 21 15</t>
+ <t>115 115 83 x</t>
+ <t>116 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="83" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 111 21 15</t>
+ <t>112 112 84 x</t>
+ <t>113 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="84" final="t">
+ <trans_list length="10">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 97 85 x</t>
+ <t>98 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="85" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 98 21 15</t>
+ <t>99 99 86 x</t>
+ <t>100 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="86" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 100 21 15</t>
+ <t>101 101 21 51</t>
+ <t>102 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="87" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 119 21 15</t>
+ <t>120 120 88 x</t>
+ <t>121 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="88" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 115 21 15</t>
+ <t>116 116 21 52</t>
+ <t>117 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="89" final="t">
+ <trans_list length="10">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 97 21 53</t>
+ <t>98 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="90" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 109 21 15</t>
+ <t>110 110 91 x</t>
+ <t>111 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="91" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 115 21 15</t>
+ <t>116 116 92 x</t>
+ <t>117 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="92" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 100 21 15</t>
+ <t>101 101 93 x</t>
+ <t>102 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="93" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 113 21 15</t>
+ <t>114 114 94 x</t>
+ <t>115 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="94" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 108 21 15</t>
+ <t>109 109 21 54</t>
+ <t>110 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="95" final="t">
+ <trans_list length="14">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 97 96 x</t>
+ <t>98 99 21 15</t>
+ <t>100 100 104 x</t>
+ <t>101 113 21 15</t>
+ <t>114 114 105 x</t>
+ <t>115 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="96" final="t">
+ <trans_list length="13">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 113 21 15</t>
+ <t>114 114 97 x</t>
+ <t>115 115 21 15</t>
+ <t>116 116 100 x</t>
+ <t>117 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="97" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 114 21 15</t>
+ <t>115 115 98 x</t>
+ <t>116 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="98" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 100 21 15</t>
+ <t>101 101 99 x</t>
+ <t>102 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="99" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 113 21 15</t>
+ <t>114 114 21 55</t>
+ <t>115 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="100" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 115 21 15</t>
+ <t>116 116 101 x</t>
+ <t>117 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="101" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 100 21 15</t>
+ <t>101 101 102 x</t>
+ <t>102 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="102" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 113 21 15</t>
+ <t>114 114 103 x</t>
+ <t>115 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="103" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 109 21 15</t>
+ <t>110 110 21 56</t>
+ <t>111 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="104" final="t">
+ <trans_list length="10">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 97 21 57</t>
+ <t>98 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="105" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 104 21 15</t>
+ <t>105 105 106 x</t>
+ <t>106 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="106" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 58</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 58</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 58</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 58</t>
+ <t>97 109 21 15</t>
+ <t>110 110 107 x</t>
+ <t>111 122 21 15</t>
+ <t>123 127 10 58</t>
+ </trans_list>
+ </state>
+
+ <state id="107" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 115 21 15</t>
+ <t>116 116 108 x</t>
+ <t>117 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="108" final="t">
+ <trans_list length="9">
+ <t>-128 47 10 59</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 59</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 59</t>
+ <t>95 95 109 x</t>
+ <t>96 96 10 59</t>
+ <t>97 122 21 15</t>
+ <t>123 127 10 59</t>
+ </trans_list>
+ </state>
+
+ <state id="109" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 119 21 15</t>
+ <t>120 120 110 x</t>
+ <t>121 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="110" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 108 21 15</t>
+ <t>109 109 111 x</t>
+ <t>110 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="111" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 107 21 15</t>
+ <t>108 108 21 60</t>
+ <t>109 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="112" final="t">
+ <trans_list length="15">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 100 21 15</t>
+ <t>101 101 113 x</t>
+ <t>102 107 21 15</t>
+ <t>108 108 21 61</t>
+ <t>109 110 21 15</t>
+ <t>111 111 117 x</t>
+ <t>112 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="113" final="t">
+ <trans_list length="13">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 99 21 15</t>
+ <t>100 100 21 62</t>
+ <t>101 105 21 15</t>
+ <t>106 106 114 x</t>
+ <t>107 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="114" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 100 21 15</t>
+ <t>101 101 115 x</t>
+ <t>102 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="115" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 98 21 15</t>
+ <t>99 99 116 x</t>
+ <t>100 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="116" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 115 21 15</t>
+ <t>116 116 21 63</t>
+ <t>117 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="117" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 110 21 15</t>
+ <t>111 111 118 x</t>
+ <t>112 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="118" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 115 21 15</t>
+ <t>116 116 21 64</t>
+ <t>117 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="119" final="t">
+ <trans_list length="13">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 98 21 15</t>
+ <t>99 99 120 x</t>
+ <t>100 116 21 15</t>
+ <t>117 117 125 x</t>
+ <t>118 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="120" final="t">
+ <trans_list length="10">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 97 121 x</t>
+ <t>98 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="121" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 109 21 15</t>
+ <t>110 110 122 x</t>
+ <t>111 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="122" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 109 21 15</t>
+ <t>110 110 123 x</t>
+ <t>111 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="123" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 100 21 15</t>
+ <t>101 101 124 x</t>
+ <t>102 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="124" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 113 21 15</t>
+ <t>114 114 21 65</t>
+ <t>115 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="125" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 97 21 15</t>
+ <t>98 98 126 x</t>
+ <t>99 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="126" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 111 21 15</t>
+ <t>112 112 127 x</t>
+ <t>113 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="127" final="t">
+ <trans_list length="10">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 97 128 x</t>
+ <t>98 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="128" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 113 21 15</t>
+ <t>114 114 129 x</t>
+ <t>115 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="129" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 114 21 15</t>
+ <t>115 115 130 x</t>
+ <t>116 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="130" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 100 21 15</t>
+ <t>101 101 131 x</t>
+ <t>102 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="131" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 113 21 15</t>
+ <t>114 114 21 66</t>
+ <t>115 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="132" final="t">
+ <trans_list length="17">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 103 21 15</t>
+ <t>104 104 133 x</t>
+ <t>105 110 21 15</t>
+ <t>111 111 135 x</t>
+ <t>112 113 21 15</t>
+ <t>114 114 138 x</t>
+ <t>115 120 21 15</t>
+ <t>121 121 145 x</t>
+ <t>122 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="133" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 100 21 15</t>
+ <t>101 101 134 x</t>
+ <t>102 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="134" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 109 21 15</t>
+ <t>110 110 21 67</t>
+ <t>111 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="135" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 106 21 15</t>
+ <t>107 107 136 x</t>
+ <t>108 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="136" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 100 21 15</t>
+ <t>101 101 137 x</t>
+ <t>102 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="137" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 109 21 15</t>
+ <t>110 110 21 68</t>
+ <t>111 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="138" final="t">
+ <trans_list length="10">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 97 139 x</t>
+ <t>98 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="139" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 109 21 15</t>
+ <t>110 110 140 x</t>
+ <t>111 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="140" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 114 21 15</t>
+ <t>115 115 141 x</t>
+ <t>116 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="141" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 107 21 15</t>
+ <t>108 108 142 x</t>
+ <t>109 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="142" final="t">
+ <trans_list length="10">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 97 143 x</t>
+ <t>98 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="143" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 115 21 15</t>
+ <t>116 116 144 x</t>
+ <t>117 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="144" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 100 21 15</t>
+ <t>101 101 21 69</t>
+ <t>102 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="145" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 111 21 15</t>
+ <t>112 112 146 x</t>
+ <t>113 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="146" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 100 21 15</t>
+ <t>101 101 147 x</t>
+ <t>102 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="147" final="t">
+ <trans_list length="9">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 148 x</t>
+ <t>96 96 10 31</t>
+ <t>97 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="148" final="t">
+ <trans_list length="13">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 99 21 15</t>
+ <t>100 100 149 x</t>
+ <t>101 104 21 15</t>
+ <t>105 105 151 x</t>
+ <t>106 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="149" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 100 21 15</t>
+ <t>101 101 150 x</t>
+ <t>102 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="150" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 101 21 15</t>
+ <t>102 102 21 70</t>
+ <t>103 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="151" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 99 21 15</t>
+ <t>100 100 21 71</t>
+ <t>101 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="152" final="t">
+ <trans_list length="13">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 109 21 15</t>
+ <t>110 110 153 x</t>
+ <t>111 114 21 15</t>
+ <t>115 115 155 x</t>
+ <t>116 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="153" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 99 21 15</t>
+ <t>100 100 154 x</t>
+ <t>101 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="154" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 110 21 15</t>
+ <t>111 111 21 72</t>
+ <t>112 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="155" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 100 21 15</t>
+ <t>101 101 156 x</t>
+ <t>102 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="156" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 114 21 15</t>
+ <t>115 115 21 73</t>
+ <t>116 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="157" final="t">
+ <trans_list length="13">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 103 21 15</t>
+ <t>104 104 158 x</t>
+ <t>105 113 21 15</t>
+ <t>114 114 161 x</t>
+ <t>115 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="158" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 104 21 15</t>
+ <t>105 105 159 x</t>
+ <t>106 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="159" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 107 21 15</t>
+ <t>108 108 160 x</t>
+ <t>109 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="160" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 100 21 15</t>
+ <t>101 101 21 74</t>
+ <t>102 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="161" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 104 21 15</t>
+ <t>105 105 162 x</t>
+ <t>106 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="162" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 115 21 15</t>
+ <t>116 116 163 x</t>
+ <t>117 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="163" final="t">
+ <trans_list length="11">
+ <t>-128 47 10 31</t>
+ <t>48 57 21 15</t>
+ <t>58 64 10 31</t>
+ <t>65 90 21 15</t>
+ <t>91 94 10 31</t>
+ <t>95 95 21 15</t>
+ <t>96 96 10 31</t>
+ <t>97 100 21 15</t>
+ <t>101 101 21 75</t>
+ <t>102 122 21 15</t>
+ <t>123 127 10 31</t>
+ </trans_list>
+ </state>
+
+ <state id="164" final="t">
+ <trans_list length="3">
+ <t>-128 123 10 19</t>
+ <t>124 124 10 76</t>
+ <t>125 127 10 19</t>
+ </trans_list>
+ </state>
+
+ <state id="165" final="t">
+ <trans_list length="3">
+ <t>-128 9 3 x</t>
+ <t>10 10 10 3</t>
+ <t>11 127 3 x</t>
+ </trans_list>
+ </state>
+
+ <state id="166" final="t">
+ <state_actions>6 7 x</state_actions>
+ <trans_list length="8">
+ <t>-128 -1 166 77</t>
+ <t>0 0 166 78</t>
+ <t>1 44 166 77</t>
+ <t>45 45 166 79</t>
+ <t>46 91 166 77</t>
+ <t>92 92 167 x</t>
+ <t>93 93 166 80</t>
+ <t>94 127 166 77</t>
+ </trans_list>
+ </state>
+
+ <state id="167" final="t">
+ <trans_list length="18">
+ <t>-128 9 166 81</t>
+ <t>10 10 166 82</t>
+ <t>11 47 166 81</t>
+ <t>48 48 166 83</t>
+ <t>49 96 166 81</t>
+ <t>97 97 166 84</t>
+ <t>98 98 166 85</t>
+ <t>99 101 166 81</t>
+ <t>102 102 166 86</t>
+ <t>103 109 166 81</t>
+ <t>110 110 166 87</t>
+ <t>111 113 166 81</t>
+ <t>114 114 166 88</t>
+ <t>115 115 166 81</t>
+ <t>116 116 166 89</t>
+ <t>117 117 166 81</t>
+ <t>118 118 166 90</t>
+ <t>119 127 166 81</t>
+ </trans_list>
+ </state>
+
+ <state id="168" final="t">
+ <state_actions>6 7 x</state_actions>
+ <trans_list length="47">
+ <t>-128 -1 168 91</t>
+ <t>0 0 168 92</t>
+ <t>1 8 168 91</t>
+ <t>9 9 169 x</t>
+ <t>10 10 168 93</t>
+ <t>11 12 168 91</t>
+ <t>13 13 169 x</t>
+ <t>14 31 168 91</t>
+ <t>32 32 169 x</t>
+ <t>33 33 168 91</t>
+ <t>34 34 170 12</t>
+ <t>35 35 172 12</t>
+ <t>36 36 173 x</t>
+ <t>37 37 174 x</t>
+ <t>38 38 168 91</t>
+ <t>39 39 175 12</t>
+ <t>40 41 168 91</t>
+ <t>42 42 176 x</t>
+ <t>43 44 168 91</t>
+ <t>45 45 177 x</t>
+ <t>46 46 178 x</t>
+ <t>47 47 168 94</t>
+ <t>48 48 179 12</t>
+ <t>49 57 180 x</t>
+ <t>58 58 182 x</t>
+ <t>59 59 168 91</t>
+ <t>60 60 184 x</t>
+ <t>61 61 168 91</t>
+ <t>62 62 186 x</t>
+ <t>63 63 168 91</t>
+ <t>64 64 187 x</t>
+ <t>65 90 188 95</t>
+ <t>91 91 189 x</t>
+ <t>92 94 168 91</t>
+ <t>95 95 188 95</t>
+ <t>96 96 168 91</t>
+ <t>97 100 188 95</t>
+ <t>101 101 190 x</t>
+ <t>102 102 193 x</t>
+ <t>103 107 188 95</t>
+ <t>108 108 196 x</t>
+ <t>109 115 188 95</t>
+ <t>116 116 199 x</t>
+ <t>117 118 188 95</t>
+ <t>119 119 200 x</t>
+ <t>120 122 188 95</t>
+ <t>123 127 168 91</t>
+ </trans_list>
+ </state>
+
+ <state id="169" final="t">
+ <trans_list length="7">
+ <t>-128 8 168 96</t>
+ <t>9 9 169 x</t>
+ <t>10 12 168 96</t>
+ <t>13 13 169 x</t>
+ <t>14 31 168 96</t>
+ <t>32 32 169 x</t>
+ <t>33 127 168 96</t>
+ </trans_list>
+ </state>
+
+ <state id="170" final="t">
+ <trans_list length="7">
+ <t>-128 9 4 x</t>
+ <t>10 10 4 1</t>
+ <t>11 33 4 x</t>
+ <t>34 34 171 x</t>
+ <t>35 91 4 x</t>
+ <t>92 92 5 x</t>
+ <t>93 127 4 x</t>
+ </trans_list>
+ </state>
+
+ <state id="171" final="t">
+ <trans_list length="3">
+ <t>-128 104 168 97</t>
+ <t>105 105 168 98</t>
+ <t>106 127 168 97</t>
+ </trans_list>
+ </state>
+
+ <state id="172" final="t">
+ <trans_list length="3">
+ <t>-128 9 6 x</t>
+ <t>10 10 168 4</t>
+ <t>11 127 6 x</t>
+ </trans_list>
+ </state>
+
+ <state id="173" final="t">
+ <trans_list length="13">
+ <t>-128 32 168 99</t>
+ <t>33 33 168 100</t>
+ <t>34 41 168 99</t>
+ <t>42 42 168 101</t>
+ <t>43 46 168 99</t>
+ <t>47 47 168 102</t>
+ <t>48 62 168 99</t>
+ <t>63 63 168 103</t>
+ <t>64 93 168 99</t>
+ <t>94 94 168 104</t>
+ <t>95 125 168 99</t>
+ <t>126 126 168 105</t>
+ <t>127 127 168 99</t>
+ </trans_list>
+ </state>
+
+ <state id="174" final="t">
+ <trans_list length="13">
+ <t>-128 32 168 99</t>
+ <t>33 33 168 106</t>
+ <t>34 41 168 99</t>
+ <t>42 42 168 107</t>
+ <t>43 46 168 99</t>
+ <t>47 47 168 108</t>
+ <t>48 62 168 99</t>
+ <t>63 63 168 109</t>
+ <t>64 93 168 99</t>
+ <t>94 94 168 110</t>
+ <t>95 125 168 99</t>
+ <t>126 126 168 111</t>
+ <t>127 127 168 99</t>
+ </trans_list>
+ </state>
+
+ <state id="175" final="t">
+ <trans_list length="7">
+ <t>-128 9 7 x</t>
+ <t>10 10 7 1</t>
+ <t>11 38 7 x</t>
+ <t>39 39 171 x</t>
+ <t>40 91 7 x</t>
+ <t>92 92 8 x</t>
+ <t>93 127 7 x</t>
+ </trans_list>
+ </state>
+
+ <state id="176" final="t">
+ <trans_list length="3">
+ <t>-128 41 168 99</t>
+ <t>42 42 168 112</t>
+ <t>43 127 168 99</t>
+ </trans_list>
+ </state>
+
+ <state id="177" final="t">
+ <trans_list length="5">
+ <t>-128 44 168 99</t>
+ <t>45 45 168 113</t>
+ <t>46 61 168 99</t>
+ <t>62 62 168 114</t>
+ <t>63 127 168 99</t>
+ </trans_list>
+ </state>
+
+ <state id="178" final="t">
+ <trans_list length="3">
+ <t>-128 45 168 99</t>
+ <t>46 46 168 115</t>
+ <t>47 127 168 99</t>
+ </trans_list>
+ </state>
+
+ <state id="179" final="t">
+ <trans_list length="5">
+ <t>-128 47 168 116</t>
+ <t>48 57 180 x</t>
+ <t>58 119 168 116</t>
+ <t>120 120 9 x</t>
+ <t>121 127 168 116</t>
+ </trans_list>
+ </state>
+
+ <state id="180" final="t">
+ <trans_list length="3">
+ <t>-128 47 168 116</t>
+ <t>48 57 180 x</t>
+ <t>58 127 168 116</t>
+ </trans_list>
+ </state>
+
+ <state id="181" final="t">
+ <trans_list length="7">
+ <t>-128 47 168 117</t>
+ <t>48 57 181 x</t>
+ <t>58 64 168 117</t>
+ <t>65 70 181 x</t>
+ <t>71 96 168 117</t>
+ <t>97 102 181 x</t>
+ <t>103 127 168 117</t>
+ </trans_list>
+ </state>
+
+ <state id="182" final="t">
+ <trans_list length="4">
+ <t>-128 60 168 99</t>
+ <t>61 61 168 118</t>
+ <t>62 62 183 x</t>
+ <t>63 127 168 99</t>
+ </trans_list>
+ </state>
+
+ <state id="183" final="t">
+ <trans_list length="3">
+ <t>-128 61 168 119</t>
+ <t>62 62 168 120</t>
+ <t>63 127 168 119</t>
+ </trans_list>
+ </state>
+
+ <state id="184" final="t">
+ <trans_list length="15">
+ <t>-128 32 168 99</t>
+ <t>33 33 168 121</t>
+ <t>34 41 168 99</t>
+ <t>42 42 168 122</t>
+ <t>43 46 168 99</t>
+ <t>47 47 168 123</t>
+ <t>48 57 168 99</t>
+ <t>58 58 168 124</t>
+ <t>59 61 168 99</t>
+ <t>62 62 185 x</t>
+ <t>63 93 168 99</t>
+ <t>94 94 168 125</t>
+ <t>95 125 168 99</t>
+ <t>126 126 168 126</t>
+ <t>127 127 168 99</t>
+ </trans_list>
+ </state>
+
+ <state id="185" final="t">
+ <trans_list length="11">
+ <t>-128 32 168 127</t>
+ <t>33 33 168 128</t>
+ <t>34 41 168 127</t>
+ <t>42 42 168 129</t>
+ <t>43 46 168 127</t>
+ <t>47 47 168 130</t>
+ <t>48 93 168 127</t>
+ <t>94 94 168 131</t>
+ <t>95 125 168 127</t>
+ <t>126 126 168 132</t>
+ <t>127 127 168 127</t>
+ </trans_list>
+ </state>
+
+ <state id="186" final="t">
+ <trans_list length="13">
+ <t>-128 32 168 99</t>
+ <t>33 33 168 133</t>
+ <t>34 41 168 99</t>
+ <t>42 42 168 134</t>
+ <t>43 46 168 99</t>
+ <t>47 47 168 135</t>
+ <t>48 62 168 99</t>
+ <t>63 63 168 136</t>
+ <t>64 93 168 99</t>
+ <t>94 94 168 137</t>
+ <t>95 125 168 99</t>
+ <t>126 126 168 138</t>
+ <t>127 127 168 99</t>
+ </trans_list>
+ </state>
+
+ <state id="187" final="t">
+ <trans_list length="11">
+ <t>-128 32 168 99</t>
+ <t>33 33 168 139</t>
+ <t>34 41 168 99</t>
+ <t>42 42 168 140</t>
+ <t>43 46 168 99</t>
+ <t>47 47 168 141</t>
+ <t>48 93 168 99</t>
+ <t>94 94 168 142</t>
+ <t>95 125 168 99</t>
+ <t>126 126 168 143</t>
+ <t>127 127 168 99</t>
+ </trans_list>
+ </state>
+
+ <state id="188" final="t">
+ <trans_list length="9">
+ <t>-128 47 168 144</t>
+ <t>48 57 188 95</t>
+ <t>58 64 168 144</t>
+ <t>65 90 188 95</t>
+ <t>91 94 168 144</t>
+ <t>95 95 188 95</t>
+ <t>96 96 168 144</t>
+ <t>97 122 188 95</t>
+ <t>123 127 168 144</t>
+ </trans_list>
+ </state>
+
+ <state id="189" final="t">
+ <trans_list length="3">
+ <t>-128 93 168 145</t>
+ <t>94 94 168 146</t>
+ <t>95 127 168 145</t>
+ </trans_list>
+ </state>
+
+ <state id="190" final="t">
+ <trans_list length="13">
+ <t>-128 47 168 147</t>
+ <t>48 57 188 95</t>
+ <t>58 64 168 147</t>
+ <t>65 90 188 95</t>
+ <t>91 94 168 147</t>
+ <t>95 95 188 95</t>
+ <t>96 96 168 147</t>
+ <t>97 110 188 95</t>
+ <t>111 111 191 x</t>
+ <t>112 113 188 95</t>
+ <t>114 114 192 x</t>
+ <t>115 122 188 95</t>
+ <t>123 127 168 147</t>
+ </trans_list>
+ </state>
+
+ <state id="191" final="t">
+ <trans_list length="11">
+ <t>-128 47 168 147</t>
+ <t>48 57 188 95</t>
+ <t>58 64 168 147</t>
+ <t>65 90 188 95</t>
+ <t>91 94 168 147</t>
+ <t>95 95 188 95</t>
+ <t>96 96 168 147</t>
+ <t>97 101 188 95</t>
+ <t>102 102 188 148</t>
+ <t>103 122 188 95</t>
+ <t>123 127 168 147</t>
+ </trans_list>
+ </state>
+
+ <state id="192" final="t">
+ <trans_list length="11">
+ <t>-128 47 168 147</t>
+ <t>48 57 188 95</t>
+ <t>58 64 168 147</t>
+ <t>65 90 188 95</t>
+ <t>91 94 168 147</t>
+ <t>95 95 188 95</t>
+ <t>96 96 168 147</t>
+ <t>97 113 188 95</t>
+ <t>114 114 188 149</t>
+ <t>115 122 188 95</t>
+ <t>123 127 168 147</t>
+ </trans_list>
+ </state>
+
+ <state id="193" final="t">
+ <trans_list length="11">
+ <t>-128 47 168 147</t>
+ <t>48 57 188 95</t>
+ <t>58 64 168 147</t>
+ <t>65 90 188 95</t>
+ <t>91 94 168 147</t>
+ <t>95 95 188 95</t>
+ <t>96 96 168 147</t>
+ <t>97 113 188 95</t>
+ <t>114 114 194 x</t>
+ <t>115 122 188 95</t>
+ <t>123 127 168 147</t>
+ </trans_list>
+ </state>
+
+ <state id="194" final="t">
+ <trans_list length="11">
+ <t>-128 47 168 147</t>
+ <t>48 57 188 95</t>
+ <t>58 64 168 147</t>
+ <t>65 90 188 95</t>
+ <t>91 94 168 147</t>
+ <t>95 95 188 95</t>
+ <t>96 96 168 147</t>
+ <t>97 110 188 95</t>
+ <t>111 111 195 x</t>
+ <t>112 122 188 95</t>
+ <t>123 127 168 147</t>
+ </trans_list>
+ </state>
+
+ <state id="195" final="t">
+ <trans_list length="11">
+ <t>-128 47 168 147</t>
+ <t>48 57 188 95</t>
+ <t>58 64 168 147</t>
+ <t>65 90 188 95</t>
+ <t>91 94 168 147</t>
+ <t>95 95 188 95</t>
+ <t>96 96 168 147</t>
+ <t>97 108 188 95</t>
+ <t>109 109 188 150</t>
+ <t>110 122 188 95</t>
+ <t>123 127 168 147</t>
+ </trans_list>
+ </state>
+
+ <state id="196" final="t">
+ <trans_list length="11">
+ <t>-128 47 168 147</t>
+ <t>48 57 188 95</t>
+ <t>58 64 168 147</t>
+ <t>65 90 188 95</t>
+ <t>91 94 168 147</t>
+ <t>95 95 188 95</t>
+ <t>96 96 168 147</t>
+ <t>97 100 188 95</t>
+ <t>101 101 197 x</t>
+ <t>102 122 188 95</t>
+ <t>123 127 168 147</t>
+ </trans_list>
+ </state>
+
+ <state id="197" final="t">
+ <trans_list length="11">
+ <t>-128 47 168 147</t>
+ <t>48 57 188 95</t>
+ <t>58 64 168 147</t>
+ <t>65 90 188 95</t>
+ <t>91 94 168 147</t>
+ <t>95 95 188 95</t>
+ <t>96 96 168 147</t>
+ <t>97 113 188 95</t>
+ <t>114 114 198 x</t>
+ <t>115 122 188 95</t>
+ <t>123 127 168 147</t>
+ </trans_list>
+ </state>
+
+ <state id="198" final="t">
+ <trans_list length="11">
+ <t>-128 47 168 147</t>
+ <t>48 57 188 95</t>
+ <t>58 64 168 147</t>
+ <t>65 90 188 95</t>
+ <t>91 94 168 147</t>
+ <t>95 95 188 95</t>
+ <t>96 96 168 147</t>
+ <t>97 113 188 95</t>
+ <t>114 114 188 151</t>
+ <t>115 122 188 95</t>
+ <t>123 127 168 147</t>
+ </trans_list>
+ </state>
+
+ <state id="199" final="t">
+ <trans_list length="11">
+ <t>-128 47 168 147</t>
+ <t>48 57 188 95</t>
+ <t>58 64 168 147</t>
+ <t>65 90 188 95</t>
+ <t>91 94 168 147</t>
+ <t>95 95 188 95</t>
+ <t>96 96 168 147</t>
+ <t>97 110 188 95</t>
+ <t>111 111 188 152</t>
+ <t>112 122 188 95</t>
+ <t>123 127 168 147</t>
+ </trans_list>
+ </state>
+
+ <state id="200" final="t">
+ <trans_list length="11">
+ <t>-128 47 168 147</t>
+ <t>48 57 188 95</t>
+ <t>58 64 168 147</t>
+ <t>65 90 188 95</t>
+ <t>91 94 168 147</t>
+ <t>95 95 188 95</t>
+ <t>96 96 168 147</t>
+ <t>97 103 188 95</t>
+ <t>104 104 201 x</t>
+ <t>105 122 188 95</t>
+ <t>123 127 168 147</t>
+ </trans_list>
+ </state>
+
+ <state id="201" final="t">
+ <trans_list length="11">
+ <t>-128 47 168 147</t>
+ <t>48 57 188 95</t>
+ <t>58 64 168 147</t>
+ <t>65 90 188 95</t>
+ <t>91 94 168 147</t>
+ <t>95 95 188 95</t>
+ <t>96 96 168 147</t>
+ <t>97 100 188 95</t>
+ <t>101 101 202 x</t>
+ <t>102 122 188 95</t>
+ <t>123 127 168 147</t>
+ </trans_list>
+ </state>
+
+ <state id="202" final="t">
+ <trans_list length="11">
+ <t>-128 47 168 147</t>
+ <t>48 57 188 95</t>
+ <t>58 64 168 147</t>
+ <t>65 90 188 95</t>
+ <t>91 94 168 147</t>
+ <t>95 95 188 95</t>
+ <t>96 96 168 147</t>
+ <t>97 109 188 95</t>
+ <t>110 110 188 153</t>
+ <t>111 122 188 95</t>
+ <t>123 127 168 147</t>
+ </trans_list>
+ </state>
+
+ <state id="203" final="t">
+ <state_actions>6 7 x</state_actions>
+ <trans_list length="8">
+ <t>-128 9 203 154</t>
+ <t>10 10 203 155</t>
+ <t>11 33 203 154</t>
+ <t>34 34 203 156</t>
+ <t>35 90 203 154</t>
+ <t>91 91 203 157</t>
+ <t>92 92 204 x</t>
+ <t>93 127 203 154</t>
+ </trans_list>
+ </state>
+
+ <state id="204" final="t">
+ <trans_list length="14">
+ <t>-128 96 203 158</t>
+ <t>97 97 203 159</t>
+ <t>98 98 203 160</t>
+ <t>99 101 203 158</t>
+ <t>102 102 203 161</t>
+ <t>103 109 203 158</t>
+ <t>110 110 203 162</t>
+ <t>111 113 203 158</t>
+ <t>114 114 203 163</t>
+ <t>115 115 203 158</t>
+ <t>116 116 203 164</t>
+ <t>117 117 203 158</t>
+ <t>118 118 203 165</t>
+ <t>119 127 203 158</t>
+ </trans_list>
+ </state>
+ </state_list>
+ </machine>
+</ragel_def>
+<ragel_def name="section_parse">
+ <alphtype>int</alphtype>
+ <machine>
+ <action_list length="5">
+ <action id="0" name="clear_words" line="97" col="21"><text> word = lit = 0; word_len = lit_len = 0; </text></action>
+ <action id="1" name="store_lit" line="98" col="19"><text> lit = tokdata; lit_len = toklen; </text></action>
+ <action id="2" name="incl_err" line="101" col="18"><text> scan_error() &lt;&lt; "bad include statement" &lt;&lt; endl; </text></action>
+ <action id="3" name="handle_include" line="105" col="2"><text>
+ #if 0
+ char *inclSectionName = word;
+ char *inclFileName = 0;
+
+ /* Implement defaults for the input file and section name. */
+ if ( inclSectionName == 0 )
+ inclSectionName = parser-&gt;sectionName;
+
+ if ( lit != 0 )
+ inclFileName = prepareFileName( lit, lit_len );
+ else
+ inclFileName = fileName;
+
+ /* Check for a recursive include structure. Add the current file/section
+ * name then check if what we are including is already in the stack. */
+ includeStack.append( IncludeStackItem( fileName, parser-&gt;sectionName ) );
+
+ if ( recursiveInclude( inclFileName, inclSectionName ) )
+ scan_error() &lt;&lt; "include: this is a recursive include operation" &lt;&lt; endl;
+ else {
+ /* Open the input file for reading. */
+ ifstream *inFile = new ifstream( inclFileName );
+ if ( ! inFile-&gt;is_open() ) {
+ scan_error() &lt;&lt; "include: could not open " &lt;&lt;
+ inclFileName &lt;&lt; " for reading" &lt;&lt; endl;
+ }
+
+ Scanner scanner( inclFileName, *inFile, output, parser,
+ inclSectionName, includeDepth+1 );
+ scanner.do_scan( );
+ delete inFile;
+ }
+
+ /* Remove the last element (len-1) */
+ includeStack.remove( -1 );
+ #endif
+ </text></action>
+ <action id="4" name="handle_token" line="152" col="2"><text>
+ InputLoc loc;
+
+ #ifdef PRINT_TOKENS
+ cerr &lt;&lt; "scanner:" &lt;&lt; line &lt;&lt; ":" &lt;&lt; column &lt;&lt;
+ ": sending token to the parser " &lt;&lt; Parser_lelNames[*p];
+ cerr &lt;&lt; " " &lt;&lt; toklen;
+ if ( tokdata != 0 )
+ cerr &lt;&lt; " " &lt;&lt; tokdata;
+ cerr &lt;&lt; endl;
+ #endif
+
+ loc.fileName = fileName;
+ loc.line = line;
+ loc.col = column;
+
+ parser-&gt;token( loc, type, tokdata, toklen );
+ </text></action>
+ </action_list>
+ <action_table_list length="4">
+ <action_table id="0" length="1">2</action_table>
+ <action_table id="1" length="2">0 1</action_table>
+ <action_table id="2" length="1">3</action_table>
+ <action_table id="3" length="1">4</action_table>
+ </action_table_list>
+ <start_state>3</start_state>
+ <error_state>0</error_state>
+ <entry_points>
+ <entry name="main">3</entry>
+ </entry_points>
+ <state_list length="4">
+ <state id="0">
+ <trans_list length="0">
+ </trans_list>
+ </state>
+
+ <state id="1">
+ <state_actions>x x 0</state_actions>
+ <trans_list length="3">
+ <t>-2147483648 131 x 0</t>
+ <t>132 132 2 1</t>
+ <t>133 2147483647 x 0</t>
+ </trans_list>
+ </state>
+
+ <state id="2">
+ <state_actions>x x 0</state_actions>
+ <trans_list length="3">
+ <t>-2147483648 58 x 0</t>
+ <t>59 59 3 2</t>
+ <t>60 2147483647 x 0</t>
+ </trans_list>
+ </state>
+
+ <state id="3" final="t">
+ <trans_list length="3">
+ <t>-2147483648 128 3 3</t>
+ <t>129 129 1 x</t>
+ <t>130 2147483647 3 3</t>
+ </trans_list>
+ </state>
+ </state_list>
+ </machine>
+</ragel_def>
+<host line="1" col="1">/*
+ * Copyright 2006-2007 Adrian Thurston &lt;thurston@cs.queensu.ca&gt;
+ */
+
+/* This file is part of Ragel.
+ *
+ * Ragel is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or
+ * (at your option) any later version.
+ *
+ * Ragel is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with Ragel; if not, write to the Free Software
+ * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+ */
+
+#include &lt;iostream&gt;
+#include &lt;fstream&gt;
+#include &lt;string.h&gt;
+
+#include "colm.h"
+#include "lmscan.h"
+#include "lmparse.h"
+#include "parsedata.h"
+#include "avltree.h"
+#include "vector.h"
+
+//#define PRINT_TOKENS
+
+using std::ifstream;
+using std::istream;
+using std::ostream;
+using std::cout;
+using std::cerr;
+using std::endl;
+
+</host>
+<write def_name="section_parse" line="45" col="2"><arg>data</arg></write>
+<host line="46">
+
+void Scanner::sectionParseInit()
+{
+ </host>
+<write def_name="section_parse" line="50" col="5"><arg>init</arg></write>
+<host line="51">}
+
+ostream &amp;Scanner::scan_error()
+{
+ /* Maintain the error count. */
+ gblErrorCount += 1;
+ cerr &lt;&lt; fileName &lt;&lt; ":" &lt;&lt; line &lt;&lt; ":" &lt;&lt; column &lt;&lt; ": ";
+ return cerr;
+}
+
+bool Scanner::recursiveInclude( char *inclFileName, char *inclSectionName )
+{
+ for ( IncludeStack::Iter si = includeStack; si.lte(); si++ ) {
+ if ( strcmp( si-&gt;fileName, inclFileName ) == 0 &amp;&amp;
+ strcmp( si-&gt;sectionName, inclSectionName ) == 0 )
+ {
+ return true;
+ }
+ }
+ return false;
+}
+
+void Scanner::updateCol()
+{
+ char *from = lastnl;
+ if ( from == 0 )
+ from = tokstart;
+ //cerr &lt;&lt; "adding " &lt;&lt; tokend - from &lt;&lt; " to column" &lt;&lt; endl;
+ column += tokend - from;
+ lastnl = 0;
+}
+
+void Scanner::token( int type, char c )
+{
+ token( type, &amp;c, &amp;c + 1 );
+}
+
+void Scanner::token( int type )
+{
+ token( type, 0, 0 );
+}
+
+</host>
+<host line="178">
+
+void Scanner::token( int type, char *start, char *end )
+{
+ char *tokdata = 0;
+ int toklen = 0;
+ int *p = &amp;type;
+ int *pe = &amp;type + 1;
+
+ if ( start != 0 ) {
+ toklen = end-start;
+ tokdata = new char[toklen+1];
+ memcpy( tokdata, start, toklen );
+ tokdata[toklen] = 0;
+ }
+
+ </host>
+<write def_name="section_parse" line="196" col="3"><arg>exec</arg></write>
+<host line="197">
+
+ updateCol();
+}
+
+void Scanner::endSection( )
+{
+ /* Execute the eof actions for the section parser. */
+ </host>
+<write def_name="section_parse" line="207" col="3"><arg>eof</arg></write>
+<host line="208">
+}
+
+</host>
+<host line="516">
+
+</host>
+<write def_name="rlscan" line="518" col="4"><arg>data</arg></write>
+<host line="519">
+void Scanner::do_scan()
+{
+ int bufsize = 8;
+ char *buf = new char[bufsize];
+ const char last_char = 0;
+ int cs, act, have = 0;
+ int top, stack[32];
+ bool execute = true;
+
+ sectionParseInit();
+ </host>
+<write def_name="rlscan" line="530" col="5"><arg>init</arg></write>
+<host line="531">
+ while ( execute ) {
+ char *p = buf + have;
+ int space = bufsize - have;
+
+ if ( space == 0 ) {
+ /* We filled up the buffer trying to scan a token. Grow it. */
+ bufsize = bufsize * 2;
+ char *newbuf = new char[bufsize];
+
+ /* Recompute p and space. */
+ p = newbuf + have;
+ space = bufsize - have;
+
+ /* Patch up pointers possibly in use. */
+ if ( tokstart != 0 )
+ tokstart = newbuf + ( tokstart - buf );
+ tokend = newbuf + ( tokend - buf );
+
+ /* Copy the new buffer in. */
+ memcpy( newbuf, buf, have );
+ delete[] buf;
+ buf = newbuf;
+ }
+
+ input.read( p, space );
+ int len = input.gcount();
+
+ /* If we see eof then append the EOF char. */
+ if ( len == 0 ) {
+ p[0] = last_char, len = 1;
+ execute = false;
+ }
+
+ char *pe = p + len;
+ </host>
+<write def_name="rlscan" line="566" col="6"><arg>exec</arg></write>
+<host line="567">
+ /* Check if we failed. */
+ if ( cs == rlscan_error ) {
+ /* Machine failed before finding a token. I'm not yet sure if this
+ * is reachable. */
+ scan_error() &lt;&lt; "scanner error" &lt;&lt; endl;
+ exit(1);
+ }
+
+ /* Decide if we need to preserve anything. */
+ char *preserve = tokstart;
+
+ /* Now set up the prefix. */
+ if ( preserve == 0 )
+ have = 0;
+ else {
+ /* There is data that needs to be shifted over. */
+ have = pe - preserve;
+ memmove( buf, preserve, have );
+ unsigned int shiftback = preserve - buf;
+ if ( tokstart != 0 )
+ tokstart -= shiftback;
+ tokend -= shiftback;
+
+ preserve = buf;
+ }
+ }
+ delete[] buf;
+
+ InputLoc loc;
+ loc.fileName = "&lt;EOF&gt;";
+ loc.line = line;
+ loc.col = 1;
+ parser-&gt;token( loc, _eof, 0, 0 );
+}
+
+void scan( char *fileName, istream &amp;input, ostream &amp;output )
+{
+ Scanner scanner( fileName, input, output, 0, 0, 0 );
+}
+</host>
+</ragel>
diff --git a/test/xml/xml.lm b/test/xml/xml.lm
new file mode 100644
index 00000000..c8749bab
--- /dev/null
+++ b/test/xml/xml.lm
@@ -0,0 +1,167 @@
+#
+# Definitions
+#
+
+rl xml_digit / (0x30..0x39) /
+
+rl base_char / 0x41..0x5A | 0x61..0x7A /
+
+rl char / 0x9 | 0xA | 0xD | 0x20..0x7f /
+
+rl letter / base_char /
+
+rl name_char / letter | digit | '.' | '-' | '_' | ':' | 0xb7 /
+
+rl name / (letter | '_' | ':') name_char* /
+
+#
+# Reference definitions. These appear in the
+# top level and also in strings.
+#
+
+rl entity_ref_pat / '&' name ';' /
+
+rl char_ref_pat / '&#' [0-9]+ ';' | '&#x' [0-9a-fA-F]+ ';' /
+
+#
+# Single quotes.
+#
+lex sq
+{
+ token sq_close /'\''/
+
+ # References in single quotes
+ token sq_entity_ref /entity_ref_pat/
+ token sq_char_ref /char_ref_pat/
+
+ token sq_data / [^<&']+ /
+
+ def sq_item
+ [ sq_data ]
+ | [ sq_entity_ref ]
+ | [ sq_char_ref ]
+
+ # The opening quote belongs to the tag region.
+ def sq_string
+ [ '\'' sq_item* sq_close ]
+}
+
+#
+# Double quotes.
+#
+lex dq
+{
+ token dq_close /'"'/
+
+ # References in double quotes
+ token dq_entity_ref /entity_ref_pat/
+ token dq_char_ref /char_ref_pat/
+
+ token dq_data / [^<&"]+ /
+
+ def dq_item
+ [ dq_data ]
+ | [ dq_entity_ref ]
+ | [ dq_char_ref ]
+
+ # The opening quote belongs to the tag region.
+ def dq_string
+ [ '"' dq_item* dq_close ]
+}
+
+#
+# Tag elements.
+#
+lex tag
+{
+ literal '\'', '\"', '=', '\/', '>'
+
+ # Within this region whitespace is not significant.
+ ignore xml_space / (0x20 | 0x9 | 0xD | 0xA)+ /
+
+ #
+ # Attributes
+ #
+ token attr_name / name /
+}
+
+#
+# Top Level
+#
+lex start
+{
+ #
+ # Comments
+ #
+
+ # Cannot contain '--'
+ rl char_no_dash / char - '-' /
+ token comment / '<!--' ( char_no_dash | '-' char_no_dash )* '-->' /
+
+
+ # Opening a tag.
+ literal '<'
+
+ #
+ # Character Data
+ #
+
+ token cdata / '<![CDATA[' char* :> ']]>'/
+ token char_data / [^<&]+ /
+ token entity_ref /entity_ref_pat/
+ token char_ref /char_ref_pat/
+}
+
+
+def attribute_value
+ [ sq_string ]
+| [ dq_string ]
+
+def attribute
+ [ attr_name '=' attribute_value ]
+
+def empty_tag
+ [ '<' attr_name attribute* '/' '>' ]
+
+def close_tag
+ [ '<' '/' attr_name '>' ]
+
+def open_tag
+ [ '<' attr_name attribute* '>' ]
+
+def tag
+ [open_tag content close_tag]
+
+def content_item
+ [tag]
+| [empty_tag]
+| [char_data]
+| [entity_ref]
+| [char_ref]
+| [cdata]
+| [comment]
+
+def content
+ [content_item*]
+
+def document
+ [content]
+
+def start
+ [document]
+ {
+ for Switch:tag in lhs {
+ if match Switch
+ ["<lm_switch>" SwitchContent:content "</lm_switch>"]
+ {
+ print( 'SWITCH\n' )
+ for Text:tag in SwitchContent {
+ if match Text
+ ["<text>" TextContent:content "</text>"]
+ {
+ print( ' ', TextContent, '\n' )
+ }
+ }
+ }
+ }
+ }