Diffstat (limited to 'test')
-rw-r--r--  test/calclex.py  2
-rwxr-xr-x  test/cleanup.sh  2
-rw-r--r--  test/lex_closure.py  54
-rw-r--r--  test/lex_doc1.exp  1
-rw-r--r--  test/lex_doc1.py  8
-rw-r--r--  test/lex_dup1.exp  2
-rw-r--r--  test/lex_dup1.py  6
-rw-r--r--  test/lex_dup2.exp  2
-rw-r--r--  test/lex_dup2.py  6
-rw-r--r--  test/lex_dup3.exp  2
-rw-r--r--  test/lex_dup3.py  6
-rw-r--r--  test/lex_empty.exp  1
-rw-r--r--  test/lex_empty.py  6
-rw-r--r--  test/lex_error1.exp  1
-rw-r--r--  test/lex_error1.py  6
-rw-r--r--  test/lex_error2.exp  1
-rw-r--r--  test/lex_error2.py  6
-rw-r--r--  test/lex_error3.exp  2
-rw-r--r--  test/lex_error3.py  6
-rw-r--r--  test/lex_error4.exp  2
-rw-r--r--  test/lex_error4.py  6
-rw-r--r--  test/lex_hedit.exp  3
-rw-r--r--  test/lex_hedit.py  2
-rw-r--r--  test/lex_ignore.exp  6
-rw-r--r--  test/lex_ignore.py  4
-rw-r--r--  test/lex_ignore2.exp  1
-rw-r--r--  test/lex_ignore2.py  6
-rw-r--r--  test/lex_literal1.py  25
-rw-r--r--  test/lex_literal2.py  25
-rw-r--r--  test/lex_many_tokens.py  24
-rw-r--r--  test/lex_module.py  10
-rw-r--r--  test/lex_module_import.py  42
-rw-r--r--  test/lex_nowarn.exp  0
-rw-r--r--  test/lex_nowarn.py  6
-rw-r--r--  test/lex_object.py  55
-rw-r--r--  test/lex_opt_alias.py  54
-rw-r--r--  test/lex_optimize.py  50
-rw-r--r--  test/lex_optimize3.py  52
-rw-r--r--  test/lex_re1.exp  6
-rw-r--r--  test/lex_re1.py  6
-rw-r--r--  test/lex_re2.exp  6
-rw-r--r--  test/lex_re2.py  6
-rw-r--r--  test/lex_re3.exp  7
-rw-r--r--  test/lex_re3.py  6
-rw-r--r--  test/lex_rule1.exp  2
-rw-r--r--  test/lex_rule1.py  8
-rw-r--r--  test/lex_rule2.py  29
-rw-r--r--  test/lex_rule3.py  27
-rw-r--r--  test/lex_state1.exp  6
-rw-r--r--  test/lex_state1.py  4
-rw-r--r--  test/lex_state2.exp  7
-rw-r--r--  test/lex_state2.py  4
-rw-r--r--  test/lex_state3.exp  7
-rw-r--r--  test/lex_state3.py  6
-rw-r--r--  test/lex_state4.exp  6
-rw-r--r--  test/lex_state4.py  8
-rw-r--r--  test/lex_state5.exp  6
-rw-r--r--  test/lex_state5.py  6
-rw-r--r--  test/lex_state_noerror.exp  1
-rw-r--r--  test/lex_state_noerror.py  6
-rw-r--r--  test/lex_state_norule.exp  6
-rw-r--r--  test/lex_state_norule.py  4
-rw-r--r--  test/lex_state_try.exp  7
-rw-r--r--  test/lex_state_try.py  7
-rw-r--r--  test/lex_token1.exp  1
-rw-r--r--  test/lex_token1.py  6
-rw-r--r--  test/lex_token2.exp  1
-rw-r--r--  test/lex_token2.py  5
-rw-r--r--  test/lex_token3.exp  2
-rw-r--r--  test/lex_token3.py  7
-rw-r--r--  test/lex_token4.exp  2
-rw-r--r--  test/lex_token4.py  6
-rw-r--r--  test/lex_token5.exp  1
-rw-r--r--  test/lex_token5.py  6
-rw-r--r--  test/lex_token_dup.py  29
-rw-r--r--  test/rununit.py  62
-rwxr-xr-x  test/testlex.py  612
-rw-r--r--  test/testyacc.py  323
-rw-r--r--  test/yacc_badargs.exp  3
-rw-r--r--  test/yacc_badargs.py  2
-rw-r--r--  test/yacc_badid.py  77
-rw-r--r--  test/yacc_badprec.exp  1
-rw-r--r--  test/yacc_badprec.py  5
-rw-r--r--  test/yacc_badprec2.exp  3
-rw-r--r--  test/yacc_badprec2.py  5
-rw-r--r--  test/yacc_badprec3.py  68
-rw-r--r--  test/yacc_badrule.exp  5
-rw-r--r--  test/yacc_badrule.py  5
-rw-r--r--  test/yacc_badtok.exp  1
-rw-r--r--  test/yacc_badtok.py  6
-rw-r--r--  test/yacc_dup.exp  4
-rw-r--r--  test/yacc_dup.py  5
-rw-r--r--  test/yacc_error1.exp  1
-rw-r--r--  test/yacc_error1.py  5
-rw-r--r--  test/yacc_error2.exp  1
-rw-r--r--  test/yacc_error2.py  7
-rw-r--r--  test/yacc_error3.exp  1
-rw-r--r--  test/yacc_error3.py  7
-rw-r--r--  test/yacc_error4.py  72
-rw-r--r--  test/yacc_inf.exp  5
-rw-r--r--  test/yacc_inf.py  5
-rw-r--r--  test/yacc_literal.py  69
-rw-r--r--  test/yacc_misplaced.py  68
-rw-r--r--  test/yacc_missing1.exp  2
-rw-r--r--  test/yacc_missing1.py  5
-rw-r--r--  test/yacc_nodoc.exp  2
-rw-r--r--  test/yacc_nodoc.py  5
-rw-r--r--  test/yacc_noerror.exp  2
-rw-r--r--  test/yacc_noerror.py  5
-rw-r--r--  test/yacc_nop.exp  2
-rw-r--r--  test/yacc_nop.py  5
-rw-r--r--  test/yacc_notfunc.exp  4
-rw-r--r--  test/yacc_notfunc.py  5
-rw-r--r--  test/yacc_notok.exp  1
-rw-r--r--  test/yacc_notok.py  5
-rw-r--r--  test/yacc_prec1.py  68
-rw-r--r--  test/yacc_rr.exp  2
-rw-r--r--  test/yacc_rr.py  5
-rw-r--r--  test/yacc_simple.exp  1
-rw-r--r--  test/yacc_simple.py  5
-rw-r--r--  test/yacc_sr.exp  2
-rw-r--r--  test/yacc_sr.py  5
-rw-r--r--  test/yacc_term1.exp  2
-rw-r--r--  test/yacc_term1.py  5
-rw-r--r--  test/yacc_unused.exp  4
-rw-r--r--  test/yacc_unused.py  5
-rw-r--r--  test/yacc_unused_rule.py  72
-rw-r--r--  test/yacc_uprec.exp  2
-rw-r--r--  test/yacc_uprec.py  5
-rw-r--r--  test/yacc_uprec2.py  63
130 files changed, 1986 insertions, 484 deletions
diff --git a/test/calclex.py b/test/calclex.py
index 2550734..6ee9cb4 100644
--- a/test/calclex.py
+++ b/test/calclex.py
@@ -3,7 +3,7 @@
# -----------------------------------------------------------------------------
import sys
-sys.path.append("..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.lex as lex
tokens = (
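
The path-setup change above recurs in nearly every file of this patch: the unconditional
sys.path.append("..") or sys.path.insert(0,"..") becomes a guarded insert. A minimal sketch of the
idiom, assuming the tests run from the test/ directory with the in-tree ply/ package one level up:

    import sys
    # Put the in-tree package ahead of any installed copy, but only once, so the
    # repeated in-process imports done by the unittest drivers later in this patch
    # do not pile up duplicate ".." entries on sys.path.
    if ".." not in sys.path:
        sys.path.insert(0, "..")
    import ply.lex as lex
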
diff --git a/test/cleanup.sh b/test/cleanup.sh
index d7d99b6..9db9368 100755
--- a/test/cleanup.sh
+++ b/test/cleanup.sh
@@ -1,4 +1,4 @@
#!/bin/sh
-rm -f *~ *.pyc *.dif *.out
+rm -f *~ *.pyc *.pyo *.dif *.out
diff --git a/test/lex_closure.py b/test/lex_closure.py
new file mode 100644
index 0000000..c2d8cdb
--- /dev/null
+++ b/test/lex_closure.py
@@ -0,0 +1,54 @@
+# -----------------------------------------------------------------------------
+# lex_closure.py
+# -----------------------------------------------------------------------------
+import sys
+
+if ".." not in sys.path: sys.path.insert(0,"..")
+import ply.lex as lex
+
+tokens = (
+ 'NAME','NUMBER',
+ 'PLUS','MINUS','TIMES','DIVIDE','EQUALS',
+ 'LPAREN','RPAREN',
+ )
+
+def make_calc():
+
+ # Tokens
+
+ t_PLUS = r'\+'
+ t_MINUS = r'-'
+ t_TIMES = r'\*'
+ t_DIVIDE = r'/'
+ t_EQUALS = r'='
+ t_LPAREN = r'\('
+ t_RPAREN = r'\)'
+ t_NAME = r'[a-zA-Z_][a-zA-Z0-9_]*'
+
+ def t_NUMBER(t):
+ r'\d+'
+ try:
+ t.value = int(t.value)
+ except ValueError:
+ print "Integer value too large", t.value
+ t.value = 0
+ return t
+
+ t_ignore = " \t"
+
+ def t_newline(t):
+ r'\n+'
+ t.lineno += t.value.count("\n")
+
+ def t_error(t):
+ print "Illegal character '%s'" % t.value[0]
+ t.lexer.skip(1)
+
+ # Build the lexer
+ return lex.lex()
+
+make_calc()
+lex.runmain(data="3+4")
+
+
+
diff --git a/test/lex_doc1.exp b/test/lex_doc1.exp
deleted file mode 100644
index 5b63c1e..0000000
--- a/test/lex_doc1.exp
+++ /dev/null
@@ -1 +0,0 @@
-./lex_doc1.py:18: No regular expression defined for rule 't_NUMBER'
diff --git a/test/lex_doc1.py b/test/lex_doc1.py
index 3951b5c..8a2bfcc 100644
--- a/test/lex_doc1.py
+++ b/test/lex_doc1.py
@@ -1,9 +1,9 @@
-# lex_token.py
+# lex_doc1.py
#
# Missing documentation string
import sys
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.lex as lex
@@ -21,10 +21,6 @@ def t_NUMBER(t):
def t_error(t):
pass
-
-import sys
-sys.tracebacklimit = 0
-
lex.lex()
diff --git a/test/lex_dup1.exp b/test/lex_dup1.exp
deleted file mode 100644
index 2098a40..0000000
--- a/test/lex_dup1.exp
+++ /dev/null
@@ -1,2 +0,0 @@
-./lex_dup1.py:20: Rule t_NUMBER redefined. Previously defined on line 18
-SyntaxError: lex: Unable to build lexer.
diff --git a/test/lex_dup1.py b/test/lex_dup1.py
index 68f8092..fd04cdb 100644
--- a/test/lex_dup1.py
+++ b/test/lex_dup1.py
@@ -1,9 +1,9 @@
-# lex_token.py
+# lex_dup1.py
#
# Duplicated rule specifiers
import sys
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.lex as lex
@@ -22,7 +22,7 @@ t_NUMBER = r'\d+'
def t_error(t):
pass
-sys.tracebacklimit = 0
+
lex.lex()
diff --git a/test/lex_dup2.exp b/test/lex_dup2.exp
deleted file mode 100644
index d327cfe..0000000
--- a/test/lex_dup2.exp
+++ /dev/null
@@ -1,2 +0,0 @@
-./lex_dup2.py:22: Rule t_NUMBER redefined. Previously defined on line 18
-SyntaxError: lex: Unable to build lexer.
diff --git a/test/lex_dup2.py b/test/lex_dup2.py
index f4d346e..870e5e7 100644
--- a/test/lex_dup2.py
+++ b/test/lex_dup2.py
@@ -1,9 +1,9 @@
-# lex_token.py
+# lex_dup2.py
#
# Duplicated rule specifiers
import sys
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.lex as lex
@@ -26,7 +26,7 @@ def t_NUMBER(t):
def t_error(t):
pass
-sys.tracebacklimit = 0
+
lex.lex()
diff --git a/test/lex_dup3.exp b/test/lex_dup3.exp
deleted file mode 100644
index ec0680c..0000000
--- a/test/lex_dup3.exp
+++ /dev/null
@@ -1,2 +0,0 @@
-./lex_dup3.py:20: Rule t_NUMBER redefined. Previously defined on line 18
-SyntaxError: lex: Unable to build lexer.
diff --git a/test/lex_dup3.py b/test/lex_dup3.py
index e17b520..94b5592 100644
--- a/test/lex_dup3.py
+++ b/test/lex_dup3.py
@@ -1,9 +1,9 @@
-# lex_token.py
+# lex_dup3.py
#
# Duplicated rule specifiers
import sys
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.lex as lex
@@ -24,7 +24,7 @@ def t_NUMBER(t):
def t_error(t):
pass
-sys.tracebacklimit = 0
+
lex.lex()
diff --git a/test/lex_empty.exp b/test/lex_empty.exp
deleted file mode 100644
index af38602..0000000
--- a/test/lex_empty.exp
+++ /dev/null
@@ -1 +0,0 @@
-SyntaxError: lex: no rules of the form t_rulename are defined.
diff --git a/test/lex_empty.py b/test/lex_empty.py
index 96625f7..e0368bf 100644
--- a/test/lex_empty.py
+++ b/test/lex_empty.py
@@ -1,9 +1,9 @@
-# lex_token.py
+# lex_empty.py
#
# No rules defined
import sys
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.lex as lex
@@ -13,7 +13,7 @@ tokens = [
"NUMBER",
]
-sys.tracebacklimit = 0
+
lex.lex()
diff --git a/test/lex_error1.exp b/test/lex_error1.exp
deleted file mode 100644
index baa19e5..0000000
--- a/test/lex_error1.exp
+++ /dev/null
@@ -1 +0,0 @@
-lex: Warning. no t_error rule is defined.
diff --git a/test/lex_error1.py b/test/lex_error1.py
index a99d9be..4508a80 100644
--- a/test/lex_error1.py
+++ b/test/lex_error1.py
@@ -1,9 +1,9 @@
-# lex_token.py
+# lex_error1.py
#
# Missing t_error() rule
import sys
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.lex as lex
@@ -17,7 +17,7 @@ t_PLUS = r'\+'
t_MINUS = r'-'
t_NUMBER = r'\d+'
-sys.tracebacklimit = 0
+
lex.lex()
diff --git a/test/lex_error2.exp b/test/lex_error2.exp
deleted file mode 100644
index fb1b55c..0000000
--- a/test/lex_error2.exp
+++ /dev/null
@@ -1 +0,0 @@
-SyntaxError: lex: Rule 't_error' must be defined as a function
diff --git a/test/lex_error2.py b/test/lex_error2.py
index a59c8d4..8040d39 100644
--- a/test/lex_error2.py
+++ b/test/lex_error2.py
@@ -1,9 +1,9 @@
-# lex_token.py
+# lex_error2.py
#
# t_error defined, but not function
import sys
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.lex as lex
@@ -19,7 +19,7 @@ t_NUMBER = r'\d+'
t_error = "foo"
-sys.tracebacklimit = 0
+
lex.lex()
diff --git a/test/lex_error3.exp b/test/lex_error3.exp
deleted file mode 100644
index 1b482bf..0000000
--- a/test/lex_error3.exp
+++ /dev/null
@@ -1,2 +0,0 @@
-./lex_error3.py:20: Rule 't_error' requires an argument.
-SyntaxError: lex: Unable to build lexer.
diff --git a/test/lex_error3.py b/test/lex_error3.py
index 584600f..1feefb6 100644
--- a/test/lex_error3.py
+++ b/test/lex_error3.py
@@ -1,9 +1,9 @@
-# lex_token.py
+# lex_error3.py
#
# t_error defined as function, but with wrong # args
import sys
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.lex as lex
@@ -20,7 +20,7 @@ t_NUMBER = r'\d+'
def t_error():
pass
-sys.tracebacklimit = 0
+
lex.lex()
diff --git a/test/lex_error4.exp b/test/lex_error4.exp
deleted file mode 100644
index 98505a2..0000000
--- a/test/lex_error4.exp
+++ /dev/null
@@ -1,2 +0,0 @@
-./lex_error4.py:20: Rule 't_error' has too many arguments.
-SyntaxError: lex: Unable to build lexer.
diff --git a/test/lex_error4.py b/test/lex_error4.py
index d05de74..f4f48db 100644
--- a/test/lex_error4.py
+++ b/test/lex_error4.py
@@ -1,9 +1,9 @@
-# lex_token.py
+# lex_error4.py
#
# t_error defined as function, but too many args
import sys
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.lex as lex
@@ -20,7 +20,7 @@ t_NUMBER = r'\d+'
def t_error(t,s):
pass
-sys.tracebacklimit = 0
+
lex.lex()
diff --git a/test/lex_hedit.exp b/test/lex_hedit.exp
deleted file mode 100644
index 7b27dcb..0000000
--- a/test/lex_hedit.exp
+++ /dev/null
@@ -1,3 +0,0 @@
-(H_EDIT_DESCRIPTOR,'abc',1,0)
-(H_EDIT_DESCRIPTOR,'abcdefghij',1,6)
-(H_EDIT_DESCRIPTOR,'xy',1,20)
diff --git a/test/lex_hedit.py b/test/lex_hedit.py
index 0f87423..7d558c2 100644
--- a/test/lex_hedit.py
+++ b/test/lex_hedit.py
@@ -14,7 +14,7 @@
# such tokens
# -----------------------------------------------------------------------------
import sys
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.lex as lex
diff --git a/test/lex_ignore.exp b/test/lex_ignore.exp
deleted file mode 100644
index f7bdfdd..0000000
--- a/test/lex_ignore.exp
+++ /dev/null
@@ -1,6 +0,0 @@
-./lex_ignore.py:20: Rule 't_ignore' must be defined as a string.
-Traceback (most recent call last):
- File "./lex_ignore.py", line 29, in <module>
- lex.lex()
- File "../../ply/lex.py", line 783, in lex
-SyntaxError: lex: Unable to build lexer.
diff --git a/test/lex_ignore.py b/test/lex_ignore.py
index 94b0266..6c43b4c 100644
--- a/test/lex_ignore.py
+++ b/test/lex_ignore.py
@@ -1,9 +1,9 @@
-# lex_token.py
+# lex_ignore.py
#
# Improperly specific ignore declaration
import sys
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.lex as lex
diff --git a/test/lex_ignore2.exp b/test/lex_ignore2.exp
deleted file mode 100644
index 0eb6bf2..0000000
--- a/test/lex_ignore2.exp
+++ /dev/null
@@ -1 +0,0 @@
-lex: Warning. t_ignore contains a literal backslash '\'
diff --git a/test/lex_ignore2.py b/test/lex_ignore2.py
index fc95bd1..f60987a 100644
--- a/test/lex_ignore2.py
+++ b/test/lex_ignore2.py
@@ -1,9 +1,9 @@
-# lex_token.py
+# lex_ignore2.py
#
# ignore declaration as a raw string
import sys
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.lex as lex
@@ -22,7 +22,7 @@ t_ignore = r' \t'
def t_error(t):
pass
-import sys
+
lex.lex()
diff --git a/test/lex_literal1.py b/test/lex_literal1.py
new file mode 100644
index 0000000..db389c3
--- /dev/null
+++ b/test/lex_literal1.py
@@ -0,0 +1,25 @@
+# lex_literal1.py
+#
+# Bad literal specification
+
+import sys
+if ".." not in sys.path: sys.path.insert(0,"..")
+
+import ply.lex as lex
+
+tokens = [
+ "NUMBER",
+ ]
+
+literals = ["+","-","**"]
+
+def t_NUMBER(t):
+ r'\d+'
+ return t
+
+def t_error(t):
+ pass
+
+lex.lex()
+
+
diff --git a/test/lex_literal2.py b/test/lex_literal2.py
new file mode 100644
index 0000000..b50b92c
--- /dev/null
+++ b/test/lex_literal2.py
@@ -0,0 +1,25 @@
+# lex_literal2.py
+#
+# Bad literal specification
+
+import sys
+if ".." not in sys.path: sys.path.insert(0,"..")
+
+import ply.lex as lex
+
+tokens = [
+ "NUMBER",
+ ]
+
+literals = 23
+
+def t_NUMBER(t):
+ r'\d+'
+ return t
+
+def t_error(t):
+ pass
+
+lex.lex()
+
+
diff --git a/test/lex_many_tokens.py b/test/lex_many_tokens.py
new file mode 100644
index 0000000..3558b1a
--- /dev/null
+++ b/test/lex_many_tokens.py
@@ -0,0 +1,24 @@
+# lex_many_tokens.py
+#
+# Test lex's ability to handle a large number of tokens (beyond the
+# 100-group limit of the re module)
+
+import sys
+if ".." not in sys.path: sys.path.insert(0,"..")
+
+import ply.lex as lex
+
+tokens = ["TOK%d" % i for i in range(1000)]
+
+for tok in tokens:
+ exec "t_%s = '%s:'" % (tok,tok)
+
+t_ignore = " \t"
+
+def t_error(t):
+ pass
+
+lex.lex(optimize=1,lextab="manytab")
+lex.runmain(data="TOK34: TOK143: TOK269: TOK372: TOK452: TOK561: TOK999:")
+
+
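
lex_many_tokens.py above generates 1000 token rules to push past the 100-group limit of the re
module mentioned in its header. A self-contained illustration of that limit (behaviour depends on
the Python version; newer interpreters have lifted it):

    import re
    # Build one alternation with 200 named groups, the same shape as lex's
    # master regular expression for the generated TOKn rules.
    pattern = "|".join("(?P<TOK%d>TOK%d:)" % (i, i) for i in range(200))
    try:
        re.compile(pattern)
        print("this interpreter accepts the 200-group pattern")
    except (AssertionError, re.error) as exc:   # older re implementations refuse it
        print("re refused the pattern: %s" % exc)
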
diff --git a/test/lex_module.py b/test/lex_module.py
new file mode 100644
index 0000000..8bdd3ed
--- /dev/null
+++ b/test/lex_module.py
@@ -0,0 +1,10 @@
+# lex_module.py
+#
+
+import sys
+if ".." not in sys.path: sys.path.insert(0,"..")
+
+import ply.lex as lex
+import lex_module_import
+lex.lex(module=lex_module_import)
+lex.runmain(data="3+4")
diff --git a/test/lex_module_import.py b/test/lex_module_import.py
new file mode 100644
index 0000000..81eacdb
--- /dev/null
+++ b/test/lex_module_import.py
@@ -0,0 +1,42 @@
+# -----------------------------------------------------------------------------
+# lex_module_import.py
+#
+# A lexer defined in a module, but built in lex_module.py
+# -----------------------------------------------------------------------------
+
+tokens = (
+ 'NAME','NUMBER',
+ 'PLUS','MINUS','TIMES','DIVIDE','EQUALS',
+ 'LPAREN','RPAREN',
+ )
+
+# Tokens
+
+t_PLUS = r'\+'
+t_MINUS = r'-'
+t_TIMES = r'\*'
+t_DIVIDE = r'/'
+t_EQUALS = r'='
+t_LPAREN = r'\('
+t_RPAREN = r'\)'
+t_NAME = r'[a-zA-Z_][a-zA-Z0-9_]*'
+
+def t_NUMBER(t):
+ r'\d+'
+ try:
+ t.value = int(t.value)
+ except ValueError:
+ print "Integer value too large", t.value
+ t.value = 0
+ return t
+
+t_ignore = " \t"
+
+def t_newline(t):
+ r'\n+'
+ t.lineno += t.value.count("\n")
+
+def t_error(t):
+ print "Illegal character '%s'" % t.value[0]
+ t.lexer.skip(1)
+
diff --git a/test/lex_nowarn.exp b/test/lex_nowarn.exp
deleted file mode 100644
index e69de29..0000000
--- a/test/lex_nowarn.exp
+++ /dev/null
diff --git a/test/lex_nowarn.py b/test/lex_nowarn.py
index d60d31c..3e58bde 100644
--- a/test/lex_nowarn.py
+++ b/test/lex_nowarn.py
@@ -1,9 +1,9 @@
-# lex_token.py
+# lex_nowarn.py
#
# Missing t_error() rule
import sys
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.lex as lex
@@ -23,7 +23,7 @@ t_NUMBER = r'\d+'
t_foo_NUMBER = r'\d+'
-sys.tracebacklimit = 0
+
lex.lex(nowarn=1)
diff --git a/test/lex_object.py b/test/lex_object.py
new file mode 100644
index 0000000..e10c95b
--- /dev/null
+++ b/test/lex_object.py
@@ -0,0 +1,55 @@
+# -----------------------------------------------------------------------------
+# lex_object.py
+# -----------------------------------------------------------------------------
+import sys
+
+if ".." not in sys.path: sys.path.insert(0,"..")
+import ply.lex as lex
+
+class CalcLexer:
+ tokens = (
+ 'NAME','NUMBER',
+ 'PLUS','MINUS','TIMES','DIVIDE','EQUALS',
+ 'LPAREN','RPAREN',
+ )
+
+ # Tokens
+
+ t_PLUS = r'\+'
+ t_MINUS = r'-'
+ t_TIMES = r'\*'
+ t_DIVIDE = r'/'
+ t_EQUALS = r'='
+ t_LPAREN = r'\('
+ t_RPAREN = r'\)'
+ t_NAME = r'[a-zA-Z_][a-zA-Z0-9_]*'
+
+ def t_NUMBER(self,t):
+ r'\d+'
+ try:
+ t.value = int(t.value)
+ except ValueError:
+ print "Integer value too large", t.value
+ t.value = 0
+ return t
+
+ t_ignore = " \t"
+
+ def t_newline(self,t):
+ r'\n+'
+ t.lineno += t.value.count("\n")
+
+ def t_error(self,t):
+ print "Illegal character '%s'" % t.value[0]
+ t.lexer.skip(1)
+
+
+calc = CalcLexer()
+
+# Build the lexer
+lex.lex(object=calc)
+lex.runmain(data="3+4")
+
+
+
+
diff --git a/test/lex_opt_alias.py b/test/lex_opt_alias.py
new file mode 100644
index 0000000..dd6e288
--- /dev/null
+++ b/test/lex_opt_alias.py
@@ -0,0 +1,54 @@
+# -----------------------------------------------------------------------------
+# lex_opt_alias.py
+#
+# Tests ability to match up functions with states, aliases, and
+# lexing tables.
+# -----------------------------------------------------------------------------
+
+import sys
+if ".." not in sys.path: sys.path.insert(0,"..")
+
+tokens = (
+ 'NAME','NUMBER',
+ )
+
+states = (('instdef','inclusive'),('spam','exclusive'))
+
+literals = ['=','+','-','*','/', '(',')']
+
+# Tokens
+
+def t_instdef_spam_BITS(t):
+ r'[01-]+'
+ return t
+
+t_NAME = r'[a-zA-Z_][a-zA-Z0-9_]*'
+
+def NUMBER(t):
+ r'\d+'
+ try:
+ t.value = int(t.value)
+ except ValueError:
+ print "Integer value too large", t.value
+ t.value = 0
+ return t
+
+t_ANY_NUMBER = NUMBER
+
+t_ignore = " \t"
+t_spam_ignore = t_ignore
+
+def t_newline(t):
+ r'\n+'
+ t.lexer.lineno += t.value.count("\n")
+
+def t_error(t):
+ print "Illegal character '%s'" % t.value[0]
+ t.lexer.skip(1)
+
+t_spam_error = t_error
+
+# Build the lexer
+import ply.lex as lex
+lex.lex(optimize=1,lextab="aliastab")
+lex.runmain(data="3+4")
diff --git a/test/lex_optimize.py b/test/lex_optimize.py
new file mode 100644
index 0000000..e8e44e1
--- /dev/null
+++ b/test/lex_optimize.py
@@ -0,0 +1,50 @@
+# -----------------------------------------------------------------------------
+# lex_optimize.py
+# -----------------------------------------------------------------------------
+import sys
+
+if ".." not in sys.path: sys.path.insert(0,"..")
+import ply.lex as lex
+
+tokens = (
+ 'NAME','NUMBER',
+ 'PLUS','MINUS','TIMES','DIVIDE','EQUALS',
+ 'LPAREN','RPAREN',
+ )
+
+# Tokens
+
+t_PLUS = r'\+'
+t_MINUS = r'-'
+t_TIMES = r'\*'
+t_DIVIDE = r'/'
+t_EQUALS = r'='
+t_LPAREN = r'\('
+t_RPAREN = r'\)'
+t_NAME = r'[a-zA-Z_][a-zA-Z0-9_]*'
+
+def t_NUMBER(t):
+ r'\d+'
+ try:
+ t.value = int(t.value)
+ except ValueError:
+ print "Integer value too large", t.value
+ t.value = 0
+ return t
+
+t_ignore = " \t"
+
+def t_newline(t):
+ r'\n+'
+ t.lineno += t.value.count("\n")
+
+def t_error(t):
+ print "Illegal character '%s'" % t.value[0]
+ t.lexer.skip(1)
+
+# Build the lexer
+lex.lex(optimize=1)
+lex.runmain(data="3+4")
+
+
+
diff --git a/test/lex_optimize3.py b/test/lex_optimize3.py
new file mode 100644
index 0000000..e20df83
--- /dev/null
+++ b/test/lex_optimize3.py
@@ -0,0 +1,52 @@
+# -----------------------------------------------------------------------------
+# lex_optimize3.py
+#
+# Writes table in a subdirectory structure.
+# -----------------------------------------------------------------------------
+import sys
+
+if ".." not in sys.path: sys.path.insert(0,"..")
+import ply.lex as lex
+
+tokens = (
+ 'NAME','NUMBER',
+ 'PLUS','MINUS','TIMES','DIVIDE','EQUALS',
+ 'LPAREN','RPAREN',
+ )
+
+# Tokens
+
+t_PLUS = r'\+'
+t_MINUS = r'-'
+t_TIMES = r'\*'
+t_DIVIDE = r'/'
+t_EQUALS = r'='
+t_LPAREN = r'\('
+t_RPAREN = r'\)'
+t_NAME = r'[a-zA-Z_][a-zA-Z0-9_]*'
+
+def t_NUMBER(t):
+ r'\d+'
+ try:
+ t.value = int(t.value)
+ except ValueError:
+ print "Integer value too large", t.value
+ t.value = 0
+ return t
+
+t_ignore = " \t"
+
+def t_newline(t):
+ r'\n+'
+ t.lineno += t.value.count("\n")
+
+def t_error(t):
+ print "Illegal character '%s'" % t.value[0]
+ t.lexer.skip(1)
+
+# Build the lexer
+lex.lex(optimize=1,lextab="lexdir.sub.calctab",outputdir="lexdir/sub")
+lex.runmain(data="3+4")
+
+
+
diff --git a/test/lex_re1.exp b/test/lex_re1.exp
deleted file mode 100644
index 968454d..0000000
--- a/test/lex_re1.exp
+++ /dev/null
@@ -1,6 +0,0 @@
-lex: Invalid regular expression for rule 't_NUMBER'. unbalanced parenthesis
-Traceback (most recent call last):
- File "./lex_re1.py", line 25, in <module>
- lex.lex()
- File "../../ply/lex.py", line 783, in lex
-SyntaxError: lex: Unable to build lexer.
diff --git a/test/lex_re1.py b/test/lex_re1.py
index 9e544fe..5be7aef 100644
--- a/test/lex_re1.py
+++ b/test/lex_re1.py
@@ -1,9 +1,9 @@
-# lex_token.py
+# lex_re1.py
#
# Bad regular expression in a string
import sys
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.lex as lex
@@ -20,7 +20,7 @@ t_NUMBER = r'(\d+'
def t_error(t):
pass
-import sys
+
lex.lex()
diff --git a/test/lex_re2.exp b/test/lex_re2.exp
deleted file mode 100644
index cceb7a6..0000000
--- a/test/lex_re2.exp
+++ /dev/null
@@ -1,6 +0,0 @@
-lex: Regular expression for rule 't_PLUS' matches empty string.
-Traceback (most recent call last):
- File "./lex_re2.py", line 25, in <module>
- lex.lex()
- File "../../ply/lex.py", line 783, in lex
-SyntaxError: lex: Unable to build lexer.
diff --git a/test/lex_re2.py b/test/lex_re2.py
index 522b415..8dfb8e3 100644
--- a/test/lex_re2.py
+++ b/test/lex_re2.py
@@ -1,9 +1,9 @@
-# lex_token.py
+# lex_re2.py
#
# Regular expression rule matches empty string
import sys
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.lex as lex
@@ -20,7 +20,7 @@ t_NUMBER = r'(\d+)'
def t_error(t):
pass
-import sys
+
lex.lex()
diff --git a/test/lex_re3.exp b/test/lex_re3.exp
deleted file mode 100644
index 38ce8ca..0000000
--- a/test/lex_re3.exp
+++ /dev/null
@@ -1,7 +0,0 @@
-lex: Invalid regular expression for rule 't_POUND'. unbalanced parenthesis
-lex: Make sure '#' in rule 't_POUND' is escaped with '\#'.
-Traceback (most recent call last):
- File "./lex_re3.py", line 27, in <module>
- lex.lex()
- File "../../ply/lex.py", line 783, in lex
-SyntaxError: lex: Unable to build lexer.
diff --git a/test/lex_re3.py b/test/lex_re3.py
index 099e156..e179925 100644
--- a/test/lex_re3.py
+++ b/test/lex_re3.py
@@ -1,9 +1,9 @@
-# lex_token.py
+# lex_re3.py
#
# Regular expression rule matches empty string
import sys
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.lex as lex
@@ -22,7 +22,7 @@ t_POUND = r'#'
def t_error(t):
pass
-import sys
+
lex.lex()
diff --git a/test/lex_rule1.exp b/test/lex_rule1.exp
deleted file mode 100644
index 0c23ca2..0000000
--- a/test/lex_rule1.exp
+++ /dev/null
@@ -1,2 +0,0 @@
-lex: t_NUMBER not defined as a function or string
-SyntaxError: lex: Unable to build lexer.
diff --git a/test/lex_rule1.py b/test/lex_rule1.py
index e49a15b..0406c6f 100644
--- a/test/lex_rule1.py
+++ b/test/lex_rule1.py
@@ -1,9 +1,9 @@
-# lex_token.py
+# lex_rule1.py
#
-# Rule defined as some other type
+# Rule function with incorrect number of arguments
import sys
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.lex as lex
@@ -20,7 +20,7 @@ t_NUMBER = 1
def t_error(t):
pass
-sys.tracebacklimit = 0
+
lex.lex()
diff --git a/test/lex_rule2.py b/test/lex_rule2.py
new file mode 100644
index 0000000..1c29d87
--- /dev/null
+++ b/test/lex_rule2.py
@@ -0,0 +1,29 @@
+# lex_rule2.py
+#
+# Rule function with incorrect number of arguments
+
+import sys
+if ".." not in sys.path: sys.path.insert(0,"..")
+
+import ply.lex as lex
+
+tokens = [
+ "PLUS",
+ "MINUS",
+ "NUMBER",
+ ]
+
+t_PLUS = r'\+'
+t_MINUS = r'-'
+def t_NUMBER():
+ r'\d+'
+ return t
+
+def t_error(t):
+ pass
+
+
+
+lex.lex()
+
+
diff --git a/test/lex_rule3.py b/test/lex_rule3.py
new file mode 100644
index 0000000..9ea94da
--- /dev/null
+++ b/test/lex_rule3.py
@@ -0,0 +1,27 @@
+# lex_rule3.py
+#
+# Rule function with incorrect number of arguments
+
+import sys
+if ".." not in sys.path: sys.path.insert(0,"..")
+
+import ply.lex as lex
+
+tokens = [
+ "PLUS",
+ "MINUS",
+ "NUMBER",
+ ]
+
+t_PLUS = r'\+'
+t_MINUS = r'-'
+def t_NUMBER(t,s):
+ r'\d+'
+ return t
+
+def t_error(t):
+ pass
+
+lex.lex()
+
+
diff --git a/test/lex_state1.exp b/test/lex_state1.exp
deleted file mode 100644
index b89f8ad..0000000
--- a/test/lex_state1.exp
+++ /dev/null
@@ -1,6 +0,0 @@
-lex: states must be defined as a tuple or list.
-Traceback (most recent call last):
- File "./lex_state1.py", line 38, in <module>
- lex.lex()
- File "../../ply/lex.py", line 783, in lex
-SyntaxError: lex: Unable to build lexer.
diff --git a/test/lex_state1.py b/test/lex_state1.py
index 7eb2976..881fec9 100644
--- a/test/lex_state1.py
+++ b/test/lex_state1.py
@@ -3,7 +3,7 @@
# Bad state declaration
import sys
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.lex as lex
@@ -33,7 +33,7 @@ def t_comment_body_part(t):
def t_error(t):
pass
-import sys
+
lex.lex()
diff --git a/test/lex_state2.exp b/test/lex_state2.exp
deleted file mode 100644
index d7458f3..0000000
--- a/test/lex_state2.exp
+++ /dev/null
@@ -1,7 +0,0 @@
-lex: invalid state specifier 'comment'. Must be a tuple (statename,'exclusive|inclusive')
-lex: invalid state specifier 'example'. Must be a tuple (statename,'exclusive|inclusive')
-Traceback (most recent call last):
- File "./lex_state2.py", line 38, in <module>
- lex.lex()
- File "../../ply/lex.py", line 783, in lex
-SyntaxError: lex: Unable to build lexer.
diff --git a/test/lex_state2.py b/test/lex_state2.py
index b76b0db..49dd66d 100644
--- a/test/lex_state2.py
+++ b/test/lex_state2.py
@@ -3,7 +3,7 @@
# Bad state declaration
import sys
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.lex as lex
@@ -33,7 +33,7 @@ def t_comment_body_part(t):
def t_error(t):
pass
-import sys
+
lex.lex()
diff --git a/test/lex_state3.exp b/test/lex_state3.exp
deleted file mode 100644
index 054ec8d..0000000
--- a/test/lex_state3.exp
+++ /dev/null
@@ -1,7 +0,0 @@
-lex: state name 1 must be a string
-lex: No rules defined for state 'example'
-Traceback (most recent call last):
- File "./lex_state3.py", line 40, in <module>
- lex.lex()
- File "../../ply/lex.py", line 783, in lex
-SyntaxError: lex: Unable to build lexer.
diff --git a/test/lex_state3.py b/test/lex_state3.py
index fb4ce6c..d9c9c78 100644
--- a/test/lex_state3.py
+++ b/test/lex_state3.py
@@ -1,9 +1,9 @@
-# lex_state2.py
+# lex_state3.py
#
# Bad state declaration
import sys
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.lex as lex
@@ -35,7 +35,7 @@ def t_comment_body_part(t):
def t_error(t):
pass
-import sys
+
lex.lex()
diff --git a/test/lex_state4.exp b/test/lex_state4.exp
deleted file mode 100644
index 4c77243..0000000
--- a/test/lex_state4.exp
+++ /dev/null
@@ -1,6 +0,0 @@
-lex: state type for state comment must be 'inclusive' or 'exclusive'
-Traceback (most recent call last):
- File "./lex_state4.py", line 39, in <module>
- lex.lex()
- File "../../ply/lex.py", line 783, in lex
-SyntaxError: lex: Unable to build lexer.
diff --git a/test/lex_state4.py b/test/lex_state4.py
index 0993aa9..681f205 100644
--- a/test/lex_state4.py
+++ b/test/lex_state4.py
@@ -1,9 +1,9 @@
-# lex_state2.py
+# lex_state4.py
#
# Bad state declaration
import sys
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.lex as lex
@@ -13,7 +13,7 @@ tokens = [
"NUMBER",
]
-comment = 1
+
states = (('comment', 'exclsive'),)
t_PLUS = r'\+'
@@ -34,7 +34,7 @@ def t_comment_body_part(t):
def t_error(t):
pass
-import sys
+
lex.lex()
diff --git a/test/lex_state5.exp b/test/lex_state5.exp
deleted file mode 100644
index 301f398..0000000
--- a/test/lex_state5.exp
+++ /dev/null
@@ -1,6 +0,0 @@
-lex: state 'comment' already defined.
-Traceback (most recent call last):
- File "./lex_state5.py", line 40, in <module>
- lex.lex()
- File "../../ply/lex.py", line 783, in lex
-SyntaxError: lex: Unable to build lexer.
diff --git a/test/lex_state5.py b/test/lex_state5.py
index c3c1cbf..2ede0b6 100644
--- a/test/lex_state5.py
+++ b/test/lex_state5.py
@@ -1,9 +1,9 @@
-# lex_state2.py
+# lex_state5.py
#
# Bad state declaration
import sys
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.lex as lex
@@ -13,7 +13,6 @@ tokens = [
"NUMBER",
]
-comment = 1
states = (('comment', 'exclusive'),
('comment', 'exclusive'))
@@ -35,7 +34,6 @@ def t_comment_body_part(t):
def t_error(t):
pass
-import sys
lex.lex()
diff --git a/test/lex_state_noerror.exp b/test/lex_state_noerror.exp
deleted file mode 100644
index e14149f..0000000
--- a/test/lex_state_noerror.exp
+++ /dev/null
@@ -1 +0,0 @@
-lex: Warning. no error rule is defined for exclusive state 'comment'
diff --git a/test/lex_state_noerror.py b/test/lex_state_noerror.py
index 853b157..2cb008b 100644
--- a/test/lex_state_noerror.py
+++ b/test/lex_state_noerror.py
@@ -1,9 +1,9 @@
-# lex_state2.py
+# lex_state_noerror.py
#
# Declaration of a state for which no rules are defined
import sys
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.lex as lex
@@ -13,7 +13,6 @@ tokens = [
"NUMBER",
]
-comment = 1
states = (('comment', 'exclusive'),)
t_PLUS = r'\+'
@@ -34,7 +33,6 @@ def t_comment_body_part(t):
def t_error(t):
pass
-import sys
lex.lex()
diff --git a/test/lex_state_norule.exp b/test/lex_state_norule.exp
deleted file mode 100644
index 07f03d2..0000000
--- a/test/lex_state_norule.exp
+++ /dev/null
@@ -1,6 +0,0 @@
-lex: No rules defined for state 'example'
-Traceback (most recent call last):
- File "./lex_state_norule.py", line 40, in <module>
- lex.lex()
- File "../../ply/lex.py", line 783, in lex
-SyntaxError: lex: Unable to build lexer.
diff --git a/test/lex_state_norule.py b/test/lex_state_norule.py
index 2d15248..3037c83 100644
--- a/test/lex_state_norule.py
+++ b/test/lex_state_norule.py
@@ -3,7 +3,7 @@
# Declaration of a state for which no rules are defined
import sys
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.lex as lex
@@ -13,7 +13,6 @@ tokens = [
"NUMBER",
]
-comment = 1
states = (('comment', 'exclusive'),
('example', 'exclusive'))
@@ -35,7 +34,6 @@ def t_comment_body_part(t):
def t_error(t):
pass
-import sys
lex.lex()
diff --git a/test/lex_state_try.exp b/test/lex_state_try.exp
deleted file mode 100644
index 11768b8..0000000
--- a/test/lex_state_try.exp
+++ /dev/null
@@ -1,7 +0,0 @@
-(NUMBER,'3',1,0)
-(PLUS,'+',1,2)
-(NUMBER,'4',1,4)
-Entering comment state
-comment body LexToken(body_part,'This is a comment */',1,9)
-(PLUS,'+',1,30)
-(NUMBER,'10',1,32)
diff --git a/test/lex_state_try.py b/test/lex_state_try.py
index a16403e..8eb6762 100644
--- a/test/lex_state_try.py
+++ b/test/lex_state_try.py
@@ -1,9 +1,9 @@
-# lex_state2.py
+# lex_state_try.py
#
# Declaration of a state for which no rules are defined
import sys
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.lex as lex
@@ -13,7 +13,6 @@ tokens = [
"NUMBER",
]
-comment = 1
states = (('comment', 'exclusive'),)
t_PLUS = r'\+'
@@ -39,8 +38,6 @@ def t_error(t):
t_comment_error = t_error
t_comment_ignore = t_ignore
-import sys
-
lex.lex()
data = "3 + 4 /* This is a comment */ + 10"
diff --git a/test/lex_token1.exp b/test/lex_token1.exp
deleted file mode 100644
index 3792831..0000000
--- a/test/lex_token1.exp
+++ /dev/null
@@ -1 +0,0 @@
-SyntaxError: lex: module does not define 'tokens'
diff --git a/test/lex_token1.py b/test/lex_token1.py
index 380c31c..6fca300 100644
--- a/test/lex_token1.py
+++ b/test/lex_token1.py
@@ -1,9 +1,9 @@
-# lex_token.py
+# lex_token1.py
#
# Tests for absence of tokens variable
import sys
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.lex as lex
@@ -14,8 +14,6 @@ t_NUMBER = r'\d+'
def t_error(t):
pass
-sys.tracebacklimit = 0
-
lex.lex()
diff --git a/test/lex_token2.exp b/test/lex_token2.exp
deleted file mode 100644
index 3f98fe5..0000000
--- a/test/lex_token2.exp
+++ /dev/null
@@ -1 +0,0 @@
-SyntaxError: lex: tokens must be a list or tuple.
diff --git a/test/lex_token2.py b/test/lex_token2.py
index 87db8a0..6e65ab0 100644
--- a/test/lex_token2.py
+++ b/test/lex_token2.py
@@ -1,9 +1,9 @@
-# lex_token.py
+# lex_token2.py
#
# Tests for tokens of wrong type
import sys
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.lex as lex
@@ -16,7 +16,6 @@ t_NUMBER = r'\d+'
def t_error(t):
pass
-sys.tracebacklimit = 0
lex.lex()
diff --git a/test/lex_token3.exp b/test/lex_token3.exp
deleted file mode 100644
index d991d3c..0000000
--- a/test/lex_token3.exp
+++ /dev/null
@@ -1,2 +0,0 @@
-lex: Rule 't_MINUS' defined for an unspecified token MINUS.
-SyntaxError: lex: Unable to build lexer.
diff --git a/test/lex_token3.py b/test/lex_token3.py
index 27ce947..636452e 100644
--- a/test/lex_token3.py
+++ b/test/lex_token3.py
@@ -1,9 +1,9 @@
-# lex_token.py
+# lex_token3.py
#
# tokens is right type, but is missing a token for one rule
import sys
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.lex as lex
@@ -19,9 +19,6 @@ t_NUMBER = r'\d+'
def t_error(t):
pass
-
-sys.tracebacklimit = 0
-
lex.lex()
diff --git a/test/lex_token4.exp b/test/lex_token4.exp
deleted file mode 100644
index 3dd88e0..0000000
--- a/test/lex_token4.exp
+++ /dev/null
@@ -1,2 +0,0 @@
-lex: Bad token name '-'
-SyntaxError: lex: Unable to build lexer.
diff --git a/test/lex_token4.py b/test/lex_token4.py
index 612ff13..52947e9 100644
--- a/test/lex_token4.py
+++ b/test/lex_token4.py
@@ -1,9 +1,9 @@
-# lex_token.py
+# lex_token4.py
#
# Bad token name
import sys
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.lex as lex
@@ -21,8 +21,6 @@ t_NUMBER = r'\d+'
def t_error(t):
pass
-sys.tracebacklimit = 0
-
lex.lex()
diff --git a/test/lex_token5.exp b/test/lex_token5.exp
deleted file mode 100644
index 2f03889..0000000
--- a/test/lex_token5.exp
+++ /dev/null
@@ -1 +0,0 @@
-ply.lex.LexError: ./lex_token5.py:19: Rule 't_NUMBER' returned an unknown token type 'NUM'
diff --git a/test/lex_token5.py b/test/lex_token5.py
index 77fabde..ef7a3c5 100644
--- a/test/lex_token5.py
+++ b/test/lex_token5.py
@@ -1,9 +1,9 @@
-# lex_token.py
+# lex_token5.py
#
# Return a bad token name
import sys
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.lex as lex
@@ -24,8 +24,6 @@ def t_NUMBER(t):
def t_error(t):
pass
-sys.tracebacklimit = 0
-
lex.lex()
lex.input("1234")
t = lex.token()
diff --git a/test/lex_token_dup.py b/test/lex_token_dup.py
new file mode 100644
index 0000000..384f4e9
--- /dev/null
+++ b/test/lex_token_dup.py
@@ -0,0 +1,29 @@
+# lex_token_dup.py
+#
+# Duplicate token name in tokens
+
+import sys
+if ".." not in sys.path: sys.path.insert(0,"..")
+
+import ply.lex as lex
+
+tokens = [
+ "PLUS",
+ "MINUS",
+ "NUMBER",
+ "MINUS"
+ ]
+
+t_PLUS = r'\+'
+t_MINUS = r'-'
+
+def t_NUMBER(t):
+ r'\d+'
+ return t
+
+def t_error(t):
+ pass
+
+lex.lex()
+
+
diff --git a/test/rununit.py b/test/rununit.py
deleted file mode 100644
index d6b36fd..0000000
--- a/test/rununit.py
+++ /dev/null
@@ -1,62 +0,0 @@
-#!/usr/bin/env python
-'''Script to run all tests using python "unittest" module'''
-
-__author__ = "Miki Tebeka <miki.tebeka@zoran.com>"
-
-from unittest import TestCase, main, makeSuite, TestSuite
-from os import popen, environ, remove
-from glob import glob
-from sys import executable, argv
-from os.path import isfile, basename, splitext
-
-# Add path to lex.py and yacc.py
-environ["PYTHONPATH"] = ".."
-
-class PLYTest(TestCase):
- '''General test case for PLY test'''
- def _runtest(self, filename):
- '''Run a single test file an compare result'''
- exp_file = filename.replace(".py", ".exp")
- self.failUnless(isfile(exp_file), "can't find %s" % exp_file)
- pipe = popen("%s %s 2>&1" % (executable, filename))
- out = pipe.read().strip()
- self.failUnlessEqual(out, open(exp_file).read().strip())
-
-
-class LexText(PLYTest):
- '''Testing Lex'''
- pass
-
-class YaccTest(PLYTest):
- '''Testing Yacc'''
-
- def tearDown(self):
- '''Cleanup parsetab.py[c] file'''
- for ext in (".py", ".pyc"):
- fname = "parsetab%s" % ext
- if isfile(fname):
- remove(fname)
-
-def add_test(klass, filename):
- '''Add a test to TestCase class'''
- def t(self):
- self._runtest(filename)
- # Test name is test_FILENAME without the ./ and without the .py
- setattr(klass, "test_%s" % (splitext(basename(filename))[0]), t)
-
-# Add lex tests
-for file in glob("./lex_*.py"):
- add_test(LexText, file)
-lex_suite = makeSuite(LexText, "test_")
-
-# Add yacc tests
-for file in glob("./yacc_*.py"):
- add_test(YaccTest, file)
-yacc_suite = makeSuite(YaccTest, "test_")
-
-# All tests suite
-test_suite = TestSuite((lex_suite, yacc_suite))
-
-if __name__ == "__main__":
- main()
-
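
The deleted rununit.py and the per-test .exp files are superseded by the unittest-based drivers
that follow. Their check_expected() helper compares captured output line by line, accepting any
result line that merely ends with the expected text, which is why the old byte-exact ./lex_*.exp
files can be dropped. A standalone sketch of that comparison rule (the helper name here is
illustrative, not from the patch):

    def ends_like(result, expected):
        # Same rule as check_expected() below: equal line counts, and each result
        # line only has to end with its expected line, so messages may carry an
        # absolute path prefix.
        r, e = result.splitlines(), expected.splitlines()
        return len(r) == len(e) and all(rl.endswith(el) for rl, el in zip(r, e))

    print(ends_like(
        "/home/user/ply/test/lex_dup1.py:20: Rule t_NUMBER redefined. Previously defined on line 18",
        "lex_dup1.py:20: Rule t_NUMBER redefined. Previously defined on line 18"))   # True
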
diff --git a/test/testlex.py b/test/testlex.py
index 2dae47a..953206c 100755
--- a/test/testlex.py
+++ b/test/testlex.py
@@ -1,57 +1,565 @@
-#!/usr/local/bin
-# ----------------------------------------------------------------------
# testlex.py
-#
-# Run tests for the lexing module
-# ----------------------------------------------------------------------
-
-import sys,os,glob
-
-if len(sys.argv) < 2:
- print "Usage: python testlex.py directory"
- raise SystemExit
-
-dirname = None
-make = 0
-
-for o in sys.argv[1:]:
- if o == '-make':
- make = 1
- else:
- dirname = o
- break
-
-if not dirname:
- print "Usage: python testlex.py [-make] directory"
- raise SystemExit
-
-f = glob.glob("%s/%s" % (dirname,"lex_*.py"))
-
-print "**** Running tests for lex ****"
-
-for t in f:
- name = t[:-3]
- print "Testing %-32s" % name,
- if make:
- if not os.path.exists("%s.exp" % name):
- os.system("python %s.py >%s.exp 2>&1" % (name,name))
- passed = 1
- else:
- os.system("python %s.py >%s.out 2>&1" % (name,name))
- a = os.system("diff %s.out %s.exp >%s.dif" % (name,name,name))
- if a == 0:
- passed = 1
- else:
- passed = 0
-
- if passed:
- print "Passed"
- else:
- print "Failed. See %s.dif" % name
-
-
-
+import unittest
+import StringIO
+import sys
+sys.path.insert(0,"..")
+sys.tracebacklimit = 0
+
+import ply.lex
+
+def check_expected(result,expected):
+ resultlines = result.splitlines()
+ expectedlines = expected.splitlines()
+ if len(resultlines) != len(expectedlines):
+ return False
+ for rline,eline in zip(resultlines,expectedlines):
+ if not rline.endswith(eline):
+ return False
+ return True
+
+def run_import(module):
+ code = "import "+module
+ exec code
+ del sys.modules[module]
+# Tests related to errors and warnings when building lexers
+class LexErrorWarningTests(unittest.TestCase):
+ def setUp(self):
+ sys.stderr = StringIO.StringIO()
+ sys.stdout = StringIO.StringIO()
+ def tearDown(self):
+ sys.stderr = sys.__stderr__
+ sys.stdout = sys.__stdout__
+ def test_lex_doc1(self):
+ run_import("lex_doc1")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "lex_doc1.py:18: No regular expression defined for rule 't_NUMBER'\n"))
+ def test_lex_dup1(self):
+ self.assertRaises(SyntaxError,run_import,"lex_dup1")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "lex_dup1.py:20: Rule t_NUMBER redefined. Previously defined on line 18\n" ))
+ def test_lex_dup2(self):
+ self.assertRaises(SyntaxError,run_import,"lex_dup2")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "lex_dup2.py:22: Rule t_NUMBER redefined. Previously defined on line 18\n" ))
+
+ def test_lex_dup3(self):
+ self.assertRaises(SyntaxError,run_import,"lex_dup3")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "lex_dup3.py:20: Rule t_NUMBER redefined. Previously defined on line 18\n" ))
+
+ def test_lex_empty(self):
+ try:
+ run_import("lex_empty")
+ except SyntaxError,e:
+ pass
+ self.assertEquals(str(e),"lex: no rules of the form t_rulename are defined.")
+
+ def test_lex_error1(self):
+ run_import("lex_error1")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "lex: Warning. no t_error rule is defined.\n"))
+
+ def test_lex_error2(self):
+ try:
+ run_import("lex_error2")
+ except SyntaxError,e:
+ pass
+ self.assertEquals(str(e),"lex: Rule 't_error' must be defined as a function")
+
+ def test_lex_error3(self):
+ self.assertRaises(SyntaxError,run_import,"lex_error3")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "lex_error3.py:20: Rule 't_error' requires an argument.\n"))
+
+ def test_lex_error4(self):
+ self.assertRaises(SyntaxError,run_import,"lex_error4")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "lex_error4.py:20: Rule 't_error' has too many arguments.\n"))
+
+ def test_lex_ignore(self):
+ self.assertRaises(SyntaxError,run_import,"lex_ignore")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "lex_ignore.py:20: Rule 't_ignore' must be defined as a string.\n"))
+
+ def test_lex_ignore2(self):
+ run_import("lex_ignore2")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "lex: Warning. t_ignore contains a literal backslash '\\'\n"))
+
+ def test_lex_nowarn(self):
+ run_import("lex_nowarn")
+ self.assertEquals(sys.stderr.getvalue(),"")
+ self.assertEquals(sys.stdout.getvalue(),"")
+
+ def test_lex_re1(self):
+ self.assertRaises(SyntaxError,run_import,"lex_re1")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "lex: Invalid regular expression for rule 't_NUMBER'. unbalanced parenthesis\n"))
+
+ def test_lex_re2(self):
+ self.assertRaises(SyntaxError,run_import,"lex_re2")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "lex: Regular expression for rule 't_PLUS' matches empty string.\n"))
+
+ def test_lex_re3(self):
+ self.assertRaises(SyntaxError,run_import,"lex_re3")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "lex: Invalid regular expression for rule 't_POUND'. unbalanced parenthesis\n"
+ "lex: Make sure '#' in rule 't_POUND' is escaped with '\\#'.\n"))
+
+ def test_lex_rule1(self):
+ self.assertRaises(SyntaxError,run_import,"lex_rule1")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "lex: t_NUMBER not defined as a function or string\n"))
+
+ def test_lex_rule2(self):
+ self.assertRaises(SyntaxError,run_import,"lex_rule2")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "lex_rule2.py:18: Rule 't_NUMBER' requires an argument.\n"))
+
+ def test_lex_rule3(self):
+ self.assertRaises(SyntaxError,run_import,"lex_rule3")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "lex_rule3.py:18: Rule 't_NUMBER' has too many arguments.\n"))
+
+
+ def test_lex_state1(self):
+ self.assertRaises(SyntaxError,run_import,"lex_state1")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "lex: states must be defined as a tuple or list.\n"))
+
+ def test_lex_state2(self):
+ self.assertRaises(SyntaxError,run_import,"lex_state2")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "lex: invalid state specifier 'comment'. Must be a tuple (statename,'exclusive|inclusive')\n"
+ "lex: invalid state specifier 'example'. Must be a tuple (statename,'exclusive|inclusive')\n"))
+
+ def test_lex_state3(self):
+ self.assertRaises(SyntaxError,run_import,"lex_state3")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "lex: state name 1 must be a string\n"
+ "lex: No rules defined for state 'example'\n"))
+
+ def test_lex_state4(self):
+ self.assertRaises(SyntaxError,run_import,"lex_state4")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "lex: state type for state comment must be 'inclusive' or 'exclusive'\n"))
+
+
+ def test_lex_state5(self):
+ self.assertRaises(SyntaxError,run_import,"lex_state5")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "lex: state 'comment' already defined.\n"))
+
+ def test_lex_state_noerror(self):
+ run_import("lex_state_noerror")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "lex: Warning. no error rule is defined for exclusive state 'comment'\n"))
+
+ def test_lex_state_norule(self):
+ self.assertRaises(SyntaxError,run_import,"lex_state_norule")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "lex: No rules defined for state 'example'\n"))
+
+ def test_lex_token1(self):
+ try:
+ run_import("lex_token1")
+ except SyntaxError,e:
+ pass
+ self.assertEquals(str(e),"lex: module does not define 'tokens'")
+
+
+ def test_lex_token2(self):
+ try:
+ run_import("lex_token2")
+ except SyntaxError,e:
+ pass
+ self.assertEquals(str(e),"lex: tokens must be a list or tuple.")
+
+ def test_lex_token3(self):
+ self.assertRaises(SyntaxError,run_import,"lex_token3")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "lex: Rule 't_MINUS' defined for an unspecified token MINUS.\n"))
+
+
+ def test_lex_token4(self):
+ self.assertRaises(SyntaxError,run_import,"lex_token4")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "lex: Bad token name '-'\n"))
+
+
+ def test_lex_token5(self):
+ try:
+ run_import("lex_token5")
+ except ply.lex.LexError,e:
+ pass
+ self.assert_(check_expected(str(e),"lex_token5.py:19: Rule 't_NUMBER' returned an unknown token type 'NUM'"))
+
+ def test_lex_token_dup(self):
+ run_import("lex_token_dup")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "lex: Warning. Token 'MINUS' multiply defined.\n"))
+
+
+ def test_lex_literal1(self):
+ self.assertRaises(SyntaxError,run_import,"lex_literal1")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "lex: Invalid literal '**'. Must be a single character\n"))
+
+ def test_lex_literal2(self):
+ self.assertRaises(SyntaxError,run_import,"lex_literal2")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "lex: Invalid literals specification. literals must be a sequence of characters.\n"))
+
+import os
+import subprocess
+import shutil
+
+# Tests related to various build options associated with lexers
+class LexBuildOptionTests(unittest.TestCase):
+ def setUp(self):
+ sys.stderr = StringIO.StringIO()
+ sys.stdout = StringIO.StringIO()
+ def tearDown(self):
+ sys.stderr = sys.__stderr__
+ sys.stdout = sys.__stdout__
+ try:
+ shutil.rmtree("lexdir")
+ except OSError:
+ pass
+
+ def test_lex_module(self):
+ run_import("lex_module")
+ result = sys.stdout.getvalue()
+ self.assert_(check_expected(result,
+ "(NUMBER,3,1,0)\n"
+ "(PLUS,'+',1,1)\n"
+ "(NUMBER,4,1,2)\n"))
+
+ def test_lex_object(self):
+ run_import("lex_object")
+ result = sys.stdout.getvalue()
+ self.assert_(check_expected(result,
+ "(NUMBER,3,1,0)\n"
+ "(PLUS,'+',1,1)\n"
+ "(NUMBER,4,1,2)\n"))
+
+ def test_lex_closure(self):
+ run_import("lex_closure")
+ result = sys.stdout.getvalue()
+ self.assert_(check_expected(result,
+ "(NUMBER,3,1,0)\n"
+ "(PLUS,'+',1,1)\n"
+ "(NUMBER,4,1,2)\n"))
+ def test_lex_optimize(self):
+ try:
+ os.remove("lextab.py")
+ except OSError:
+ pass
+ try:
+ os.remove("lextab.pyc")
+ except OSError:
+ pass
+ try:
+ os.remove("lextab.pyo")
+ except OSError:
+ pass
+ run_import("lex_optimize")
+ result = sys.stdout.getvalue()
+ self.assert_(check_expected(result,
+ "(NUMBER,3,1,0)\n"
+ "(PLUS,'+',1,1)\n"
+ "(NUMBER,4,1,2)\n"))
+ self.assert_(os.path.exists("lextab.py"))
+
+ p = subprocess.Popen(['python','-O','lex_optimize.py'],
+ stdout=subprocess.PIPE)
+ result = p.stdout.read()
+ self.assert_(check_expected(result,
+ "(NUMBER,3,1,0)\n"
+ "(PLUS,'+',1,1)\n"
+ "(NUMBER,4,1,2)\n"))
+ self.assert_(os.path.exists("lextab.pyo"))
+ os.remove("lextab.pyo")
+ p = subprocess.Popen(['python','-OO','lex_optimize.py'],
+ stdout=subprocess.PIPE)
+ result = p.stdout.read()
+ self.assert_(check_expected(result,
+ "(NUMBER,3,1,0)\n"
+ "(PLUS,'+',1,1)\n"
+ "(NUMBER,4,1,2)\n"))
+ self.assert_(os.path.exists("lextab.pyo"))
+ try:
+ os.remove("lextab.py")
+ except OSError:
+ pass
+ try:
+ os.remove("lextab.pyc")
+ except OSError:
+ pass
+ try:
+ os.remove("lextab.pyo")
+ except OSError:
+ pass
+
+ def test_lex_optimize2(self):
+ try:
+ os.remove("opt2tab.py")
+ except OSError:
+ pass
+ try:
+ os.remove("opt2tab.pyc")
+ except OSError:
+ pass
+ try:
+ os.remove("opt2tab.pyo")
+ except OSError:
+ pass
+ run_import("lex_optimize2")
+ result = sys.stdout.getvalue()
+ self.assert_(check_expected(result,
+ "(NUMBER,3,1,0)\n"
+ "(PLUS,'+',1,1)\n"
+ "(NUMBER,4,1,2)\n"))
+ self.assert_(os.path.exists("opt2tab.py"))
+
+ p = subprocess.Popen(['python','-O','lex_optimize2.py'],
+ stdout=subprocess.PIPE)
+ result = p.stdout.read()
+ self.assert_(check_expected(result,
+ "(NUMBER,3,1,0)\n"
+ "(PLUS,'+',1,1)\n"
+ "(NUMBER,4,1,2)\n"))
+ self.assert_(os.path.exists("opt2tab.pyo"))
+ os.remove("opt2tab.pyo")
+ p = subprocess.Popen(['python','-OO','lex_optimize2.py'],
+ stdout=subprocess.PIPE)
+ result = p.stdout.read()
+ self.assert_(check_expected(result,
+ "(NUMBER,3,1,0)\n"
+ "(PLUS,'+',1,1)\n"
+ "(NUMBER,4,1,2)\n"))
+ self.assert_(os.path.exists("opt2tab.pyo"))
+ try:
+ os.remove("opt2tab.py")
+ except OSError:
+ pass
+ try:
+ os.remove("opt2tab.pyc")
+ except OSError:
+ pass
+ try:
+ os.remove("opt2tab.pyo")
+ except OSError:
+ pass
+
+ def test_lex_optimize3(self):
+ try:
+ shutil.rmtree("lexdir")
+ except OSError:
+ pass
+
+ os.mkdir("lexdir")
+ os.mkdir("lexdir/sub")
+ open("lexdir/__init__.py","w").write("")
+ open("lexdir/sub/__init__.py","w").write("")
+ run_import("lex_optimize3")
+ result = sys.stdout.getvalue()
+ self.assert_(check_expected(result,
+ "(NUMBER,3,1,0)\n"
+ "(PLUS,'+',1,1)\n"
+ "(NUMBER,4,1,2)\n"))
+ self.assert_(os.path.exists("lexdir/sub/calctab.py"))
+
+ p = subprocess.Popen(['python','-O','lex_optimize3.py'],
+ stdout=subprocess.PIPE)
+ result = p.stdout.read()
+ self.assert_(check_expected(result,
+ "(NUMBER,3,1,0)\n"
+ "(PLUS,'+',1,1)\n"
+ "(NUMBER,4,1,2)\n"))
+ self.assert_(os.path.exists("lexdir/sub/calctab.pyo"))
+ os.remove("lexdir/sub/calctab.pyo")
+ p = subprocess.Popen(['python','-OO','lex_optimize3.py'],
+ stdout=subprocess.PIPE)
+ result = p.stdout.read()
+ self.assert_(check_expected(result,
+ "(NUMBER,3,1,0)\n"
+ "(PLUS,'+',1,1)\n"
+ "(NUMBER,4,1,2)\n"))
+ self.assert_(os.path.exists("lexdir/sub/calctab.pyo"))
+ try:
+ shutil.rmtree("lexdir")
+ except OSError:
+ pass
+
+ def test_lex_opt_alias(self):
+ try:
+ os.remove("aliastab.py")
+ except OSError:
+ pass
+ try:
+ os.remove("aliastab.pyc")
+ except OSError:
+ pass
+ try:
+ os.remove("aliastab.pyo")
+ except OSError:
+ pass
+ run_import("lex_opt_alias")
+ result = sys.stdout.getvalue()
+ self.assert_(check_expected(result,
+ "(NUMBER,3,1,0)\n"
+ "(+,'+',1,1)\n"
+ "(NUMBER,4,1,2)\n"))
+ self.assert_(os.path.exists("aliastab.py"))
+
+ p = subprocess.Popen(['python','-O','lex_opt_alias.py'],
+ stdout=subprocess.PIPE)
+ result = p.stdout.read()
+ self.assert_(check_expected(result,
+ "(NUMBER,3,1,0)\n"
+ "(+,'+',1,1)\n"
+ "(NUMBER,4,1,2)\n"))
+ self.assert_(os.path.exists("aliastab.pyo"))
+ os.remove("aliastab.pyo")
+ p = subprocess.Popen(['python','-OO','lex_opt_alias.py'],
+ stdout=subprocess.PIPE)
+ result = p.stdout.read()
+ self.assert_(check_expected(result,
+ "(NUMBER,3,1,0)\n"
+ "(+,'+',1,1)\n"
+ "(NUMBER,4,1,2)\n"))
+ self.assert_(os.path.exists("aliastab.pyo"))
+ try:
+ os.remove("aliastab.py")
+ except OSError:
+ pass
+ try:
+ os.remove("aliastab.pyc")
+ except OSError:
+ pass
+ try:
+ os.remove("aliastab.pyo")
+ except OSError:
+ pass
+
+ def test_lex_many_tokens(self):
+ try:
+ os.remove("manytab.py")
+ except OSError:
+ pass
+ try:
+ os.remove("manytab.pyc")
+ except OSError:
+ pass
+ try:
+ os.remove("manytab.pyo")
+ except OSError:
+ pass
+ run_import("lex_many_tokens")
+ result = sys.stdout.getvalue()
+ self.assert_(check_expected(result,
+ "(TOK34,'TOK34:',1,0)\n"
+ "(TOK143,'TOK143:',1,7)\n"
+ "(TOK269,'TOK269:',1,15)\n"
+ "(TOK372,'TOK372:',1,23)\n"
+ "(TOK452,'TOK452:',1,31)\n"
+ "(TOK561,'TOK561:',1,39)\n"
+ "(TOK999,'TOK999:',1,47)\n"
+ ))
+
+ self.assert_(os.path.exists("manytab.py"))
+
+ p = subprocess.Popen(['python','-O','lex_many_tokens.py'],
+ stdout=subprocess.PIPE)
+ result = p.stdout.read()
+ self.assert_(check_expected(result,
+ "(TOK34,'TOK34:',1,0)\n"
+ "(TOK143,'TOK143:',1,7)\n"
+ "(TOK269,'TOK269:',1,15)\n"
+ "(TOK372,'TOK372:',1,23)\n"
+ "(TOK452,'TOK452:',1,31)\n"
+ "(TOK561,'TOK561:',1,39)\n"
+ "(TOK999,'TOK999:',1,47)\n"
+ ))
+
+ self.assert_(os.path.exists("manytab.pyo"))
+ os.remove("manytab.pyo")
+ try:
+ os.remove("manytab.py")
+ except OSError:
+ pass
+ try:
+ os.remove("manytab.pyc")
+ except OSError:
+ pass
+ try:
+ os.remove("manytab.pyo")
+ except OSError:
+ pass
+
+# Tests related to run-time behavior of lexers
+class LexRunTests(unittest.TestCase):
+ def setUp(self):
+ sys.stderr = StringIO.StringIO()
+ sys.stdout = StringIO.StringIO()
+ def tearDown(self):
+ sys.stderr = sys.__stderr__
+ sys.stdout = sys.__stdout__
+
+ def test_lex_hedit(self):
+ run_import("lex_hedit")
+ result = sys.stdout.getvalue()
+ self.assert_(check_expected(result,
+ "(H_EDIT_DESCRIPTOR,'abc',1,0)\n"
+ "(H_EDIT_DESCRIPTOR,'abcdefghij',1,6)\n"
+ "(H_EDIT_DESCRIPTOR,'xy',1,20)\n"))
+
+ def test_lex_state_try(self):
+ run_import("lex_state_try")
+ result = sys.stdout.getvalue()
+ self.assert_(check_expected(result,
+ "(NUMBER,'3',1,0)\n"
+ "(PLUS,'+',1,2)\n"
+ "(NUMBER,'4',1,4)\n"
+ "Entering comment state\n"
+ "comment body LexToken(body_part,'This is a comment */',1,9)\n"
+ "(PLUS,'+',1,30)\n"
+ "(NUMBER,'10',1,32)\n"
+ ))
+
+
+
+unittest.main()
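
testlex.py above ends with unittest.main(), and testyacc.py below is structured the same way, so
both are meant to be run as scripts from the test/ directory rather than imported. A convenience
runner one could add (not part of this patch):

    import subprocess, sys
    # Run each suite in its own interpreter; importing the modules instead would
    # trigger unittest.main() at import time.
    for suite in ("testlex.py", "testyacc.py"):
        subprocess.call([sys.executable, suite])
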
diff --git a/test/testyacc.py b/test/testyacc.py
index f976ff5..4793986 100644
--- a/test/testyacc.py
+++ b/test/testyacc.py
@@ -1,58 +1,275 @@
-#!/usr/local/bin
-# ----------------------------------------------------------------------
# testyacc.py
-#
-# Run tests for the yacc module
-# ----------------------------------------------------------------------
-
-import sys,os,glob
-
-if len(sys.argv) < 2:
- print "Usage: python testyacc.py directory"
- raise SystemExit
-
-dirname = None
-make = 0
-
-for o in sys.argv[1:]:
- if o == '-make':
- make = 1
- else:
- dirname = o
- break
-
-if not dirname:
- print "Usage: python testyacc.py [-make] directory"
- raise SystemExit
-
-f = glob.glob("%s/%s" % (dirname,"yacc_*.py"))
-
-print "**** Running tests for yacc ****"
-
-for t in f:
- name = t[:-3]
- print "Testing %-32s" % name,
- os.system("rm -f %s/parsetab.*" % dirname)
- if make:
- if not os.path.exists("%s.exp" % name):
- os.system("python %s.py >%s.exp 2>&1" % (name,name))
- passed = 1
- else:
- os.system("python %s.py >%s.out 2>&1" % (name,name))
- a = os.system("diff %s.out %s.exp >%s.dif" % (name,name,name))
- if a == 0:
- passed = 1
- else:
- passed = 0
-
- if passed:
- print "Passed"
- else:
- print "Failed. See %s.dif" % name
+
+import unittest
+import StringIO
+import sys
+import os
+
+sys.path.insert(0,"..")
+sys.tracebacklimit = 0
+
+import ply.yacc
+
+def check_expected(result,expected):
+ resultlines = result.splitlines()
+ expectedlines = expected.splitlines()
+ if len(resultlines) != len(expectedlines):
+ return False
+ for rline,eline in zip(resultlines,expectedlines):
+ if not rline.endswith(eline):
+ return False
+ return True
+
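+# Import the named test module (building its parser as a side effect), then drop
+# it from sys.modules so that a later test can import it again from scratch.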
+def run_import(module):
+ code = "import "+module
+ exec code
+ del sys.modules[module]
+
+# Tests related to errors and warnings when building parsers
+class YaccErrorWarningTests(unittest.TestCase):
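+ # Redirect stdout/stderr so diagnostics can be captured, and remove any cached
+ # parsetab files so each test regenerates its parsing tables.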
+ def setUp(self):
+ sys.stderr = StringIO.StringIO()
+ sys.stdout = StringIO.StringIO()
+ try:
+ os.remove("parsetab.py")
+ except OSError:
+ pass
+ try:
+ os.remove("parsetab.pyc")
+ except OSError:
+ pass
+ def tearDown(self):
+ sys.stderr = sys.__stderr__
+ sys.stdout = sys.__stdout__
+ def test_yacc_badargs(self):
+ self.assertRaises(ply.yacc.YaccError,run_import,"yacc_badargs")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "yacc_badargs.py:23: Rule 'p_statement_assign' has too many arguments.\n"
+ "yacc_badargs.py:27: Rule 'p_statement_expr' requires an argument.\n"
+ ))
+ def test_yacc_badid(self):
+ self.assertRaises(ply.yacc.YaccError,run_import,"yacc_badid")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "yacc_badid.py:32: Illegal name 'bad&rule' in rule 'statement'\n"
+ "yacc_badid.py:36: Illegal rule name 'bad&rule'\n"
+ ))
+
+ def test_yacc_badprec(self):
+ try:
+ run_import("yacc_badprec")
+ except ply.yacc.YaccError,e:
+ self.assert_(check_expected(str(e),
+ "precedence must be a list or tuple."))
+ def test_yacc_badprec2(self):
+ run_import("yacc_badprec2")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "yacc: Invalid precedence table.\n"
+ "yacc: Generating LALR parsing table...\n"
+ "yacc: 8 shift/reduce conflicts\n"
+ ))
+
+ def test_yacc_badprec3(self):
+ run_import("yacc_badprec3")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "yacc: Precedence already specified for terminal 'MINUS'\n"
+ "yacc: Generating LALR parsing table...\n"
+ ))
-
+ def test_yacc_badrule(self):
+ self.assertRaises(ply.yacc.YaccError,run_import,"yacc_badrule")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "yacc_badrule.py:24: Syntax error. Expected ':'\n"
+ "yacc_badrule.py:28: Syntax error in rule 'statement'\n"
+ "yacc_badrule.py:33: Syntax error. Expected ':'\n"
+ "yacc_badrule.py:42: Syntax error. Expected ':'\n"
+ ))
-
+ def test_yacc_badtok(self):
+ try:
+ run_import("yacc_badtok")
+ except ply.yacc.YaccError,e:
+ self.assert_(check_expected(str(e),
+ "tokens must be a list or tuple."))
+
+ def test_yacc_dup(self):
+ run_import("yacc_dup")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "yacc_dup.py:27: Function p_statement redefined. Previously defined on line 23\n"
+ "yacc: Warning. Token 'EQUALS' defined, but not used.\n"
+ "yacc: Warning. There is 1 unused token.\n"
+ "yacc: Generating LALR parsing table...\n"
+ ))
+ def test_yacc_error1(self):
+ try:
+ run_import("yacc_error1")
+ except ply.yacc.YaccError,e:
+ self.assert_(check_expected(str(e),
+ "yacc_error1.py:61: p_error() requires 1 argument."))
+
+ def test_yacc_error2(self):
+ try:
+ run_import("yacc_error2")
+ except ply.yacc.YaccError,e:
+ self.assert_(check_expected(str(e),
+ "yacc_error2.py:61: p_error() requires 1 argument."))
+
+ def test_yacc_error3(self):
+ try:
+ run_import("yacc_error3")
+ except ply.yacc.YaccError,e:
+ self.assert_(check_expected(str(e),
+ "'p_error' defined, but is not a function or method."))
+
+ def test_yacc_error4(self):
+ self.assertRaises(ply.yacc.YaccError,run_import,"yacc_error4")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "yacc_error4.py:62: Illegal rule name 'error'. Already defined as a token.\n"
+ ))
-
+ def test_yacc_inf(self):
+ self.assertRaises(ply.yacc.YaccError,run_import,"yacc_inf")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "yacc: Warning. Token 'NUMBER' defined, but not used.\n"
+ "yacc: Warning. There is 1 unused token.\n"
+ "yacc: Infinite recursion detected for symbol 'statement'.\n"
+ "yacc: Infinite recursion detected for symbol 'expression'.\n"
+ ))
+ def test_yacc_literal(self):
+ self.assertRaises(ply.yacc.YaccError,run_import,"yacc_literal")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "yacc_literal.py:36: Literal token '**' in rule 'expression' may only be a single character\n"
+ ))
+ def test_yacc_misplaced(self):
+ self.assertRaises(ply.yacc.YaccError,run_import,"yacc_misplaced")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "yacc_misplaced.py:32: Misplaced '|'.\n"
+ ))
+
+ def test_yacc_missing1(self):
+ self.assertRaises(ply.yacc.YaccError,run_import,"yacc_missing1")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "yacc_missing1.py:24: Symbol 'location' used, but not defined as a token or a rule.\n"
+ ))
+
+ def test_yacc_nodoc(self):
+ run_import("yacc_nodoc")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "yacc_nodoc.py:27: No documentation string specified in function 'p_statement_expr'\n"
+ "yacc: Generating LALR parsing table...\n"
+ ))
+
+ def test_yacc_noerror(self):
+ run_import("yacc_noerror")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "yacc: Warning. no p_error() function is defined.\n"
+ "yacc: Generating LALR parsing table...\n"
+ ))
+
+ def test_yacc_nop(self):
+ run_import("yacc_nop")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "yacc_nop.py:27: Warning. Possible grammar rule 'statement_expr' defined without p_ prefix.\n"
+ "yacc: Generating LALR parsing table...\n"
+ ))
+
+ def test_yacc_notfunc(self):
+ run_import("yacc_notfunc")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "yacc: Warning. 'p_statement_assign' not defined as a function\n"
+ "yacc: Warning. Token 'EQUALS' defined, but not used.\n"
+ "yacc: Warning. There is 1 unused token.\n"
+ "yacc: Generating LALR parsing table...\n"
+ ))
+ def test_yacc_notok(self):
+ try:
+ run_import("yacc_notok")
+ except ply.yacc.YaccError,e:
+ self.assert_(check_expected(str(e),
+ "module does not define a list 'tokens'"))
+
+ def test_yacc_rr(self):
+ run_import("yacc_rr")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "yacc: Generating LALR parsing table...\n"
+ "yacc: 1 reduce/reduce conflict\n"
+ ))
+ def test_yacc_simple(self):
+ run_import("yacc_simple")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "yacc: Generating LALR parsing table...\n"
+ ))
+ def test_yacc_sr(self):
+ run_import("yacc_sr")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "yacc: Generating LALR parsing table...\n"
+ "yacc: 20 shift/reduce conflicts\n"
+ ))
+
+ def test_yacc_term1(self):
+ self.assertRaises(ply.yacc.YaccError,run_import,"yacc_term1")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "yacc_term1.py:24: Illegal rule name 'NUMBER'. Already defined as a token.\n"
+ ))
+
+ def test_yacc_unused(self):
+ self.assertRaises(ply.yacc.YaccError,run_import,"yacc_unused")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "yacc_unused.py:62: Symbol 'COMMA' used, but not defined as a token or a rule.\n"
+ "yacc: Symbol 'COMMA' is unreachable.\n"
+ "yacc: Symbol 'exprlist' is unreachable.\n"
+ ))
+ def test_yacc_unused_rule(self):
+ run_import("yacc_unused_rule")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "yacc_unused_rule.py:62: Warning. Rule 'integer' defined, but not used.\n"
+ "yacc: Warning. There is 1 unused rule.\n"
+ "yacc: Symbol 'integer' is unreachable.\n"
+ "yacc: Generating LALR parsing table...\n"
+ ))
+
+ def test_yacc_uprec(self):
+ self.assertRaises(ply.yacc.YaccError,run_import,"yacc_uprec")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "yacc_uprec.py:37: Nothing known about the precedence of 'UMINUS'\n"
+ ))
+
+ def test_yacc_uprec2(self):
+ self.assertRaises(ply.yacc.YaccError,run_import,"yacc_uprec2")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "yacc_uprec2.py:37: Syntax error. Nothing follows %prec.\n"
+ ))
+
+ def test_yacc_prec1(self):
+ self.assertRaises(ply.yacc.YaccError,run_import,"yacc_prec1")
+ result = sys.stderr.getvalue()
+ self.assert_(check_expected(result,
+ "yacc: Precedence rule 'left' defined for unknown symbol '+'\n"
+ "yacc: Precedence rule 'left' defined for unknown symbol '*'\n"
+ "yacc: Precedence rule 'left' defined for unknown symbol '-'\n"
+ "yacc: Precedence rule 'left' defined for unknown symbol '/'\n"
+ ))
+
+
+
+unittest.main()
diff --git a/test/yacc_badargs.exp b/test/yacc_badargs.exp
deleted file mode 100644
index e994676..0000000
--- a/test/yacc_badargs.exp
+++ /dev/null
@@ -1,3 +0,0 @@
-./yacc_badargs.py:23: Rule 'p_statement_assign' has too many arguments.
-./yacc_badargs.py:27: Rule 'p_statement_expr' requires an argument.
-ply.yacc.YaccError: Unable to construct parser.
diff --git a/test/yacc_badargs.py b/test/yacc_badargs.py
index 810e529..0a6fd86 100644
--- a/test/yacc_badargs.py
+++ b/test/yacc_badargs.py
@@ -36,7 +36,7 @@ def p_expression_binop(t):
if t[2] == '+' : t[0] = t[1] + t[3]
elif t[2] == '-': t[0] = t[1] - t[3]
elif t[2] == '*': t[0] = t[1] * t[3]
- elif t[3] == '/': t[0] = t[1] / t[3]
+ elif t[2] == '/': t[0] = t[1] / t[3]
def p_expression_uminus(t):
'expression : MINUS expression %prec UMINUS'
diff --git a/test/yacc_badid.py b/test/yacc_badid.py
new file mode 100644
index 0000000..8a21932
--- /dev/null
+++ b/test/yacc_badid.py
@@ -0,0 +1,77 @@
+# -----------------------------------------------------------------------------
+# yacc_badid.py
+#
+# Attempt to define a grammar rule whose name contains an illegal character ('bad&rule')
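+# testyacc.py expects yacc to flag the bad name both where it appears in the
+# 'statement' rule and where the 'bad&rule' rule itself is defined.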
+# -----------------------------------------------------------------------------
+import sys
+
+if ".." not in sys.path: sys.path.insert(0,"..")
+import ply.yacc as yacc
+
+from calclex import tokens
+
+# Parsing rules
+precedence = (
+ ('left','PLUS','MINUS'),
+ ('left','TIMES','DIVIDE'),
+ ('right','UMINUS'),
+ )
+
+# dictionary of names
+names = { }
+
+def p_statement_assign(t):
+ 'statement : NAME EQUALS expression'
+ names[t[1]] = t[3]
+
+def p_statement_expr(t):
+ 'statement : expression'
+ print t[1]
+
+def p_statement_expr(t):
+ 'statement : bad&rule'
+ pass
+
+def p_badrule(t):
+ 'bad&rule : expression'
+ pass
+
+
+def p_expression_binop(t):
+ '''expression : expression PLUS expression
+ | expression MINUS expression
+ | expression TIMES expression
+ | expression DIVIDE expression'''
+ if t[2] == '+' : t[0] = t[1] + t[3]
+ elif t[2] == '-': t[0] = t[1] - t[3]
+ elif t[2] == '*': t[0] = t[1] * t[3]
+ elif t[2] == '/': t[0] = t[1] / t[3]
+
+def p_expression_uminus(t):
+ 'expression : MINUS expression %prec UMINUS'
+ t[0] = -t[2]
+
+def p_expression_group(t):
+ 'expression : LPAREN expression RPAREN'
+ t[0] = t[2]
+
+def p_expression_number(t):
+ 'expression : NUMBER'
+ t[0] = t[1]
+
+def p_expression_name(t):
+ 'expression : NAME'
+ try:
+ t[0] = names[t[1]]
+ except LookupError:
+ print "Undefined name '%s'" % t[1]
+ t[0] = 0
+
+def p_error(t):
+ pass
+
+yacc.yacc()
+
+
+
+
diff --git a/test/yacc_badprec.exp b/test/yacc_badprec.exp
deleted file mode 100644
index f4f574b..0000000
--- a/test/yacc_badprec.exp
+++ /dev/null
@@ -1 +0,0 @@
-ply.yacc.YaccError: precedence must be a list or tuple.
diff --git a/test/yacc_badprec.py b/test/yacc_badprec.py
index 8f64652..ed2e3db 100644
--- a/test/yacc_badprec.py
+++ b/test/yacc_badprec.py
@@ -4,9 +4,8 @@
# Bad precedence specifier
# -----------------------------------------------------------------------------
import sys
-sys.tracebacklimit = 0
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
@@ -33,7 +32,7 @@ def p_expression_binop(t):
if t[2] == '+' : t[0] = t[1] + t[3]
elif t[2] == '-': t[0] = t[1] - t[3]
elif t[2] == '*': t[0] = t[1] * t[3]
- elif t[3] == '/': t[0] = t[1] / t[3]
+ elif t[2] == '/': t[0] = t[1] / t[3]
def p_expression_uminus(t):
'expression : MINUS expression %prec UMINUS'
diff --git a/test/yacc_badprec2.exp b/test/yacc_badprec2.exp
deleted file mode 100644
index 8fac075..0000000
--- a/test/yacc_badprec2.exp
+++ /dev/null
@@ -1,3 +0,0 @@
-yacc: Invalid precedence table.
-yacc: Generating LALR parsing table...
-yacc: 8 shift/reduce conflicts
diff --git a/test/yacc_badprec2.py b/test/yacc_badprec2.py
index 206bda7..c1e2705 100644
--- a/test/yacc_badprec2.py
+++ b/test/yacc_badprec2.py
@@ -4,9 +4,8 @@
# Bad precedence
# -----------------------------------------------------------------------------
import sys
-sys.tracebacklimit = 0
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
@@ -37,7 +36,7 @@ def p_expression_binop(t):
if t[2] == '+' : t[0] = t[1] + t[3]
elif t[2] == '-': t[0] = t[1] - t[3]
elif t[2] == '*': t[0] = t[1] * t[3]
- elif t[3] == '/': t[0] = t[1] / t[3]
+ elif t[2] == '/': t[0] = t[1] / t[3]
def p_expression_uminus(t):
'expression : MINUS expression %prec UMINUS'
diff --git a/test/yacc_badprec3.py b/test/yacc_badprec3.py
new file mode 100644
index 0000000..5998dd0
--- /dev/null
+++ b/test/yacc_badprec3.py
@@ -0,0 +1,68 @@
+# -----------------------------------------------------------------------------
+# yacc_badprec3.py
+#
+# Bad precedence: the same terminal is given a precedence more than once
+# -----------------------------------------------------------------------------
+import sys
+
+if ".." not in sys.path: sys.path.insert(0,"..")
+import ply.yacc as yacc
+
+from calclex import tokens
+
+# Parsing rules
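+# 'MINUS' appears in two precedence levels below; yacc is expected to report that
+# its precedence has already been specified.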
+precedence = (
+ ('left','PLUS','MINUS'),
+ ('left','TIMES','DIVIDE','MINUS'),
+ ('right','UMINUS'),
+ )
+
+# dictionary of names
+names = { }
+
+def p_statement_assign(t):
+ 'statement : NAME EQUALS expression'
+ names[t[1]] = t[3]
+
+def p_statement_expr(t):
+ 'statement : expression'
+ print t[1]
+
+def p_expression_binop(t):
+ '''expression : expression PLUS expression
+ | expression MINUS expression
+ | expression TIMES expression
+ | expression DIVIDE expression'''
+ if t[2] == '+' : t[0] = t[1] + t[3]
+ elif t[2] == '-': t[0] = t[1] - t[3]
+ elif t[2] == '*': t[0] = t[1] * t[3]
+ elif t[2] == '/': t[0] = t[1] / t[3]
+
+def p_expression_uminus(t):
+ 'expression : MINUS expression %prec UMINUS'
+ t[0] = -t[2]
+
+def p_expression_group(t):
+ 'expression : LPAREN expression RPAREN'
+ t[0] = t[2]
+
+def p_expression_number(t):
+ 'expression : NUMBER'
+ t[0] = t[1]
+
+def p_expression_name(t):
+ 'expression : NAME'
+ try:
+ t[0] = names[t[1]]
+ except LookupError:
+ print "Undefined name '%s'" % t[1]
+ t[0] = 0
+
+def p_error(t):
+ print "Syntax error at '%s'" % t.value
+
+yacc.yacc()
+
+
+
+
diff --git a/test/yacc_badrule.exp b/test/yacc_badrule.exp
deleted file mode 100644
index a87bf7d..0000000
--- a/test/yacc_badrule.exp
+++ /dev/null
@@ -1,5 +0,0 @@
-./yacc_badrule.py:25: Syntax error. Expected ':'
-./yacc_badrule.py:29: Syntax error in rule 'statement'
-./yacc_badrule.py:34: Syntax error. Expected ':'
-./yacc_badrule.py:43: Syntax error. Expected ':'
-ply.yacc.YaccError: Unable to construct parser.
diff --git a/test/yacc_badrule.py b/test/yacc_badrule.py
index f5fef8a..b873c8c 100644
--- a/test/yacc_badrule.py
+++ b/test/yacc_badrule.py
@@ -4,9 +4,8 @@
# Syntax problems in the rule strings
# -----------------------------------------------------------------------------
import sys
-sys.tracebacklimit = 0
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
@@ -37,7 +36,7 @@ def p_expression_binop(t):
if t[2] == '+' : t[0] = t[1] + t[3]
elif t[2] == '-': t[0] = t[1] - t[3]
elif t[2] == '*': t[0] = t[1] * t[3]
- elif t[3] == '/': t[0] = t[1] / t[3]
+ elif t[2] == '/': t[0] = t[1] / t[3]
def p_expression_uminus(t):
'expression: MINUS expression %prec UMINUS'
diff --git a/test/yacc_badtok.exp b/test/yacc_badtok.exp
deleted file mode 100644
index ccdc0e7..0000000
--- a/test/yacc_badtok.exp
+++ /dev/null
@@ -1 +0,0 @@
-ply.yacc.YaccError: tokens must be a list or tuple.
diff --git a/test/yacc_badtok.py b/test/yacc_badtok.py
index 4f2af51..3b6db6b 100644
--- a/test/yacc_badtok.py
+++ b/test/yacc_badtok.py
@@ -5,9 +5,7 @@
# -----------------------------------------------------------------------------
import sys
-sys.tracebacklimit = 0
-
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
tokens = "Hello"
@@ -38,7 +36,7 @@ def p_expression_binop(t):
if t[2] == '+' : t[0] = t[1] + t[3]
elif t[2] == '-': t[0] = t[1] - t[3]
elif t[2] == '*': t[0] = t[1] * t[3]
- elif t[3] == '/': t[0] = t[1] / t[3]
+ elif t[2] == '/': t[0] = t[1] / t[3]
def p_expression_uminus(t):
'expression : MINUS expression %prec UMINUS'
diff --git a/test/yacc_dup.exp b/test/yacc_dup.exp
deleted file mode 100644
index fdfb210..0000000
--- a/test/yacc_dup.exp
+++ /dev/null
@@ -1,4 +0,0 @@
-./yacc_dup.py:28: Function p_statement redefined. Previously defined on line 24
-yacc: Warning. Token 'EQUALS' defined, but not used.
-yacc: Warning. There is 1 unused token.
-yacc: Generating LALR parsing table...
diff --git a/test/yacc_dup.py b/test/yacc_dup.py
index e0b683d..1d2fa7d 100644
--- a/test/yacc_dup.py
+++ b/test/yacc_dup.py
@@ -4,9 +4,8 @@
# Duplicated rule name
# -----------------------------------------------------------------------------
import sys
-sys.tracebacklimit = 0
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
@@ -37,7 +36,7 @@ def p_expression_binop(t):
if t[2] == '+' : t[0] = t[1] + t[3]
elif t[2] == '-': t[0] = t[1] - t[3]
elif t[2] == '*': t[0] = t[1] * t[3]
- elif t[3] == '/': t[0] = t[1] / t[3]
+ elif t[2] == '/': t[0] = t[1] / t[3]
def p_expression_uminus(t):
'expression : MINUS expression %prec UMINUS'
diff --git a/test/yacc_error1.exp b/test/yacc_error1.exp
deleted file mode 100644
index 13bed04..0000000
--- a/test/yacc_error1.exp
+++ /dev/null
@@ -1 +0,0 @@
-ply.yacc.YaccError: ./yacc_error1.py:62: p_error() requires 1 argument.
diff --git a/test/yacc_error1.py b/test/yacc_error1.py
index 2768fc1..e14c729 100644
--- a/test/yacc_error1.py
+++ b/test/yacc_error1.py
@@ -4,9 +4,8 @@
# Bad p_error() function
# -----------------------------------------------------------------------------
import sys
-sys.tracebacklimit = 0
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
@@ -37,7 +36,7 @@ def p_expression_binop(t):
if t[2] == '+' : t[0] = t[1] + t[3]
elif t[2] == '-': t[0] = t[1] - t[3]
elif t[2] == '*': t[0] = t[1] * t[3]
- elif t[3] == '/': t[0] = t[1] / t[3]
+ elif t[2] == '/': t[0] = t[1] / t[3]
def p_expression_uminus(t):
'expression : MINUS expression %prec UMINUS'
diff --git a/test/yacc_error2.exp b/test/yacc_error2.exp
deleted file mode 100644
index 4a7628d..0000000
--- a/test/yacc_error2.exp
+++ /dev/null
@@ -1 +0,0 @@
-ply.yacc.YaccError: ./yacc_error2.py:62: p_error() requires 1 argument.
diff --git a/test/yacc_error2.py b/test/yacc_error2.py
index 8f3a052..fed3e23 100644
--- a/test/yacc_error2.py
+++ b/test/yacc_error2.py
@@ -1,12 +1,11 @@
# -----------------------------------------------------------------------------
-# yacc_error1.py
+# yacc_error2.py
#
# Bad p_error() function
# -----------------------------------------------------------------------------
import sys
-sys.tracebacklimit = 0
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
@@ -37,7 +36,7 @@ def p_expression_binop(t):
if t[2] == '+' : t[0] = t[1] + t[3]
elif t[2] == '-': t[0] = t[1] - t[3]
elif t[2] == '*': t[0] = t[1] * t[3]
- elif t[3] == '/': t[0] = t[1] / t[3]
+ elif t[2] == '/': t[0] = t[1] / t[3]
def p_expression_uminus(t):
'expression : MINUS expression %prec UMINUS'
diff --git a/test/yacc_error3.exp b/test/yacc_error3.exp
deleted file mode 100644
index 7fca2fe..0000000
--- a/test/yacc_error3.exp
+++ /dev/null
@@ -1 +0,0 @@
-ply.yacc.YaccError: 'p_error' defined, but is not a function or method.
diff --git a/test/yacc_error3.py b/test/yacc_error3.py
index b387de5..c54f980 100644
--- a/test/yacc_error3.py
+++ b/test/yacc_error3.py
@@ -1,12 +1,11 @@
# -----------------------------------------------------------------------------
-# yacc_error1.py
+# yacc_error3.py
#
# Bad p_error() function
# -----------------------------------------------------------------------------
import sys
-sys.tracebacklimit = 0
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
@@ -37,7 +36,7 @@ def p_expression_binop(t):
if t[2] == '+' : t[0] = t[1] + t[3]
elif t[2] == '-': t[0] = t[1] - t[3]
elif t[2] == '*': t[0] = t[1] * t[3]
- elif t[3] == '/': t[0] = t[1] / t[3]
+ elif t[2] == '/': t[0] = t[1] / t[3]
def p_expression_uminus(t):
'expression : MINUS expression %prec UMINUS'
diff --git a/test/yacc_error4.py b/test/yacc_error4.py
new file mode 100644
index 0000000..aebb01d
--- /dev/null
+++ b/test/yacc_error4.py
@@ -0,0 +1,72 @@
+# -----------------------------------------------------------------------------
+# yacc_error4.py
+#
+# Attempt to define a rule named 'error'
+# -----------------------------------------------------------------------------
+import sys
+
+if ".." not in sys.path: sys.path.insert(0,"..")
+import ply.yacc as yacc
+
+from calclex import tokens
+
+# Parsing rules
+precedence = (
+ ('left','PLUS','MINUS'),
+ ('left','TIMES','DIVIDE'),
+ ('right','UMINUS'),
+ )
+
+# dictionary of names
+names = { }
+
+def p_statement_assign(t):
+ 'statement : NAME EQUALS expression'
+ names[t[1]] = t[3]
+
+def p_statement_expr(t):
+ 'statement : expression'
+ print t[1]
+
+def p_expression_binop(t):
+ '''expression : expression PLUS expression
+ | expression MINUS expression
+ | expression TIMES expression
+ | expression DIVIDE expression'''
+ if t[2] == '+' : t[0] = t[1] + t[3]
+ elif t[2] == '-': t[0] = t[1] - t[3]
+ elif t[2] == '*': t[0] = t[1] * t[3]
+ elif t[2] == '/': t[0] = t[1] / t[3]
+
+def p_expression_uminus(t):
+ 'expression : MINUS expression %prec UMINUS'
+ t[0] = -t[2]
+
+def p_expression_group(t):
+ 'expression : LPAREN expression RPAREN'
+ t[0] = t[2]
+
+def p_expression_number(t):
+ 'expression : NUMBER'
+ t[0] = t[1]
+
+def p_expression_name(t):
+ 'expression : NAME'
+ try:
+ t[0] = names[t[1]]
+ except LookupError:
+ print "Undefined name '%s'" % t[1]
+ t[0] = 0
+
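+# The rule below uses the reserved name 'error' on its left-hand side; yacc is
+# expected to reject it as already being defined as a token.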
+def p_error_handler(t):
+ 'error : NAME'
+ pass
+
+def p_error(t):
+ pass
+
+yacc.yacc()
+
+
+
+
diff --git a/test/yacc_inf.exp b/test/yacc_inf.exp
deleted file mode 100644
index 88cfa4a..0000000
--- a/test/yacc_inf.exp
+++ /dev/null
@@ -1,5 +0,0 @@
-yacc: Warning. Token 'NUMBER' defined, but not used.
-yacc: Warning. There is 1 unused token.
-yacc: Infinite recursion detected for symbol 'statement'.
-yacc: Infinite recursion detected for symbol 'expression'.
-ply.yacc.YaccError: Unable to construct parser.
diff --git a/test/yacc_inf.py b/test/yacc_inf.py
index 9b9aef7..7c2aa21 100644
--- a/test/yacc_inf.py
+++ b/test/yacc_inf.py
@@ -4,9 +4,8 @@
# Infinite recursion
# -----------------------------------------------------------------------------
import sys
-sys.tracebacklimit = 0
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
@@ -37,7 +36,7 @@ def p_expression_binop(t):
if t[2] == '+' : t[0] = t[1] + t[3]
elif t[2] == '-': t[0] = t[1] - t[3]
elif t[2] == '*': t[0] = t[1] * t[3]
- elif t[3] == '/': t[0] = t[1] / t[3]
+ elif t[2] == '/': t[0] = t[1] / t[3]
def p_expression_uminus(t):
'expression : MINUS expression %prec UMINUS'
diff --git a/test/yacc_literal.py b/test/yacc_literal.py
new file mode 100644
index 0000000..915dd6e
--- /dev/null
+++ b/test/yacc_literal.py
@@ -0,0 +1,69 @@
+# -----------------------------------------------------------------------------
+# yacc_literal.py
+#
+# Grammar with bad literal characters
+# -----------------------------------------------------------------------------
+import sys
+
+if ".." not in sys.path: sys.path.insert(0,"..")
+import ply.yacc as yacc
+
+from calclex import tokens
+
+# Parsing rules
+precedence = (
+ ('left','+','-'),
+ ('left','*','/'),
+ ('right','UMINUS'),
+ )
+
+# dictionary of names
+names = { }
+
+def p_statement_assign(t):
+ 'statement : NAME EQUALS expression'
+ names[t[1]] = t[3]
+
+def p_statement_expr(t):
+ 'statement : expression'
+ print t[1]
+
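+# The '**' alternative below uses a two-character literal; yacc is expected to
+# report that literal tokens may only be a single character.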
+def p_expression_binop(t):
+ '''expression : expression '+' expression
+ | expression '-' expression
+ | expression '*' expression
+ | expression '/' expression
+ | expression '**' expression '''
+ if t[2] == '+' : t[0] = t[1] + t[3]
+ elif t[2] == '-': t[0] = t[1] - t[3]
+ elif t[2] == '*': t[0] = t[1] * t[3]
+ elif t[2] == '/': t[0] = t[1] / t[3]
+
+def p_expression_uminus(t):
+ 'expression : MINUS expression %prec UMINUS'
+ t[0] = -t[2]
+
+def p_expression_group(t):
+ 'expression : LPAREN expression RPAREN'
+ t[0] = t[2]
+
+def p_expression_number(t):
+ 'expression : NUMBER'
+ t[0] = t[1]
+
+def p_expression_name(t):
+ 'expression : NAME'
+ try:
+ t[0] = names[t[1]]
+ except LookupError:
+ print "Undefined name '%s'" % t[1]
+ t[0] = 0
+
+def p_error(t):
+ print "Syntax error at '%s'" % t.value
+
+yacc.yacc()
+
+
+
+
diff --git a/test/yacc_misplaced.py b/test/yacc_misplaced.py
new file mode 100644
index 0000000..150f9c1
--- /dev/null
+++ b/test/yacc_misplaced.py
@@ -0,0 +1,68 @@
+# -----------------------------------------------------------------------------
+# yacc_misplaced.py
+#
+# A misplaced | in grammar rules
+# -----------------------------------------------------------------------------
+import sys
+
+if ".." not in sys.path: sys.path.insert(0,"..")
+import ply.yacc as yacc
+
+from calclex import tokens
+
+# Parsing rules
+precedence = (
+ ('left','PLUS','MINUS'),
+ ('left','TIMES','DIVIDE'),
+ ('right','UMINUS'),
+ )
+
+# dictionary of names
+names = { }
+
+def p_statement_assign(t):
+ 'statement : NAME EQUALS expression'
+ names[t[1]] = t[3]
+
+def p_statement_expr(t):
+ 'statement : expression'
+ print t[1]
+
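+# The first alternative below begins with '|' instead of "expression :"; yacc is
+# expected to report a misplaced '|'.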
+def p_expression_binop(t):
+ ''' | expression PLUS expression
+ | expression MINUS expression
+ | expression TIMES expression
+ | expression DIVIDE expression'''
+ if t[2] == '+' : t[0] = t[1] + t[3]
+ elif t[2] == '-': t[0] = t[1] - t[3]
+ elif t[2] == '*': t[0] = t[1] * t[3]
+ elif t[2] == '/': t[0] = t[1] / t[3]
+
+def p_expression_uminus(t):
+ 'expression : MINUS expression %prec UMINUS'
+ t[0] = -t[2]
+
+def p_expression_group(t):
+ 'expression : LPAREN expression RPAREN'
+ t[0] = t[2]
+
+def p_expression_number(t):
+ 'expression : NUMBER'
+ t[0] = t[1]
+
+def p_expression_name(t):
+ 'expression : NAME'
+ try:
+ t[0] = names[t[1]]
+ except LookupError:
+ print "Undefined name '%s'" % t[1]
+ t[0] = 0
+
+def p_error(t):
+ print "Syntax error at '%s'" % t.value
+
+yacc.yacc()
+
+
+
+
diff --git a/test/yacc_missing1.exp b/test/yacc_missing1.exp
deleted file mode 100644
index de63d4f..0000000
--- a/test/yacc_missing1.exp
+++ /dev/null
@@ -1,2 +0,0 @@
-./yacc_missing1.py:25: Symbol 'location' used, but not defined as a token or a rule.
-ply.yacc.YaccError: Unable to construct parser.
diff --git a/test/yacc_missing1.py b/test/yacc_missing1.py
index fbc54d8..2d0c9e6 100644
--- a/test/yacc_missing1.py
+++ b/test/yacc_missing1.py
@@ -4,9 +4,8 @@
# Grammar with a missing rule
# -----------------------------------------------------------------------------
import sys
-sys.tracebacklimit = 0
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
@@ -37,7 +36,7 @@ def p_expression_binop(t):
if t[2] == '+' : t[0] = t[1] + t[3]
elif t[2] == '-': t[0] = t[1] - t[3]
elif t[2] == '*': t[0] = t[1] * t[3]
- elif t[3] == '/': t[0] = t[1] / t[3]
+ elif t[2] == '/': t[0] = t[1] / t[3]
def p_expression_uminus(t):
'expression : MINUS expression %prec UMINUS'
diff --git a/test/yacc_nodoc.exp b/test/yacc_nodoc.exp
deleted file mode 100644
index 889ccfc..0000000
--- a/test/yacc_nodoc.exp
+++ /dev/null
@@ -1,2 +0,0 @@
-./yacc_nodoc.py:28: No documentation string specified in function 'p_statement_expr'
-yacc: Generating LALR parsing table...
diff --git a/test/yacc_nodoc.py b/test/yacc_nodoc.py
index 4c5ab20..6b662a5 100644
--- a/test/yacc_nodoc.py
+++ b/test/yacc_nodoc.py
@@ -4,9 +4,8 @@
# Rule with a missing doc-string
# -----------------------------------------------------------------------------
import sys
-sys.tracebacklimit = 0
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
@@ -36,7 +35,7 @@ def p_expression_binop(t):
if t[2] == '+' : t[0] = t[1] + t[3]
elif t[2] == '-': t[0] = t[1] - t[3]
elif t[2] == '*': t[0] = t[1] * t[3]
- elif t[3] == '/': t[0] = t[1] / t[3]
+ elif t[2] == '/': t[0] = t[1] / t[3]
def p_expression_uminus(t):
'expression : MINUS expression %prec UMINUS'
diff --git a/test/yacc_noerror.exp b/test/yacc_noerror.exp
deleted file mode 100644
index 3ae7712..0000000
--- a/test/yacc_noerror.exp
+++ /dev/null
@@ -1,2 +0,0 @@
-yacc: Warning. no p_error() function is defined.
-yacc: Generating LALR parsing table...
diff --git a/test/yacc_noerror.py b/test/yacc_noerror.py
index 9c11838..7da6309 100644
--- a/test/yacc_noerror.py
+++ b/test/yacc_noerror.py
@@ -4,9 +4,8 @@
# No p_error() rule defined.
# -----------------------------------------------------------------------------
import sys
-sys.tracebacklimit = 0
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
@@ -37,7 +36,7 @@ def p_expression_binop(t):
if t[2] == '+' : t[0] = t[1] + t[3]
elif t[2] == '-': t[0] = t[1] - t[3]
elif t[2] == '*': t[0] = t[1] * t[3]
- elif t[3] == '/': t[0] = t[1] / t[3]
+ elif t[2] == '/': t[0] = t[1] / t[3]
def p_expression_uminus(t):
'expression : MINUS expression %prec UMINUS'
diff --git a/test/yacc_nop.exp b/test/yacc_nop.exp
deleted file mode 100644
index 515fff7..0000000
--- a/test/yacc_nop.exp
+++ /dev/null
@@ -1,2 +0,0 @@
-./yacc_nop.py:28: Warning. Possible grammar rule 'statement_expr' defined without p_ prefix.
-yacc: Generating LALR parsing table...
diff --git a/test/yacc_nop.py b/test/yacc_nop.py
index c0b431d..c6caa52 100644
--- a/test/yacc_nop.py
+++ b/test/yacc_nop.py
@@ -4,9 +4,8 @@
# Possible grammar rule defined without p_ prefix
# -----------------------------------------------------------------------------
import sys
-sys.tracebacklimit = 0
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
@@ -37,7 +36,7 @@ def p_expression_binop(t):
if t[2] == '+' : t[0] = t[1] + t[3]
elif t[2] == '-': t[0] = t[1] - t[3]
elif t[2] == '*': t[0] = t[1] * t[3]
- elif t[3] == '/': t[0] = t[1] / t[3]
+ elif t[2] == '/': t[0] = t[1] / t[3]
def p_expression_uminus(t):
'expression : MINUS expression %prec UMINUS'
diff --git a/test/yacc_notfunc.exp b/test/yacc_notfunc.exp
deleted file mode 100644
index f73bc93..0000000
--- a/test/yacc_notfunc.exp
+++ /dev/null
@@ -1,4 +0,0 @@
-yacc: Warning. 'p_statement_assign' not defined as a function
-yacc: Warning. Token 'EQUALS' defined, but not used.
-yacc: Warning. There is 1 unused token.
-yacc: Generating LALR parsing table...
diff --git a/test/yacc_notfunc.py b/test/yacc_notfunc.py
index 8389355..384a267 100644
--- a/test/yacc_notfunc.py
+++ b/test/yacc_notfunc.py
@@ -4,9 +4,8 @@
# p_rule not defined as a function
# -----------------------------------------------------------------------------
import sys
-sys.tracebacklimit = 0
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
@@ -35,7 +34,7 @@ def p_expression_binop(t):
if t[2] == '+' : t[0] = t[1] + t[3]
elif t[2] == '-': t[0] = t[1] - t[3]
elif t[2] == '*': t[0] = t[1] * t[3]
- elif t[3] == '/': t[0] = t[1] / t[3]
+ elif t[2] == '/': t[0] = t[1] / t[3]
def p_expression_uminus(t):
'expression : MINUS expression %prec UMINUS'
diff --git a/test/yacc_notok.exp b/test/yacc_notok.exp
deleted file mode 100644
index d2399fe..0000000
--- a/test/yacc_notok.exp
+++ /dev/null
@@ -1 +0,0 @@
-ply.yacc.YaccError: module does not define a list 'tokens'
diff --git a/test/yacc_notok.py b/test/yacc_notok.py
index e566a1b..d4f901a 100644
--- a/test/yacc_notok.py
+++ b/test/yacc_notok.py
@@ -5,9 +5,8 @@
# -----------------------------------------------------------------------------
import sys
-sys.tracebacklimit = 0
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
# Parsing rules
@@ -36,7 +35,7 @@ def p_expression_binop(t):
if t[2] == '+' : t[0] = t[1] + t[3]
elif t[2] == '-': t[0] = t[1] - t[3]
elif t[2] == '*': t[0] = t[1] * t[3]
- elif t[3] == '/': t[0] = t[1] / t[3]
+ elif t[2] == '/': t[0] = t[1] / t[3]
def p_expression_uminus(t):
'expression : MINUS expression %prec UMINUS'
diff --git a/test/yacc_prec1.py b/test/yacc_prec1.py
new file mode 100644
index 0000000..6d7ad98
--- /dev/null
+++ b/test/yacc_prec1.py
@@ -0,0 +1,68 @@
+# -----------------------------------------------------------------------------
+# yacc_prec1.py
+#
+# Tests the case where the precedence specifiers don't match up with any terminals
+# -----------------------------------------------------------------------------
+import sys
+
+if ".." not in sys.path: sys.path.insert(0,"..")
+import ply.yacc as yacc
+
+from calclex import tokens
+
+# Parsing rules
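+# The precedence table below names the literal characters '+', '-', '*' and '/',
+# while the grammar only uses the PLUS/MINUS/TIMES/DIVIDE tokens, so yacc is
+# expected to warn about precedence rules for unknown symbols.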
+precedence = (
+ ('left','+','-'),
+ ('left','*','/'),
+ ('right','UMINUS'),
+ )
+
+# dictionary of names
+names = { }
+
+def p_statement_assign(t):
+ 'statement : NAME EQUALS expression'
+ names[t[1]] = t[3]
+
+def p_statement_expr(t):
+ 'statement : expression'
+ print t[1]
+
+def p_expression_binop(t):
+ '''expression : expression PLUS expression
+ | expression MINUS expression
+ | expression TIMES expression
+ | expression DIVIDE expression'''
+ if t[2] == '+' : t[0] = t[1] + t[3]
+ elif t[2] == '-': t[0] = t[1] - t[3]
+ elif t[2] == '*': t[0] = t[1] * t[3]
+ elif t[2] == '/': t[0] = t[1] / t[3]
+
+def p_expression_uminus(t):
+ 'expression : MINUS expression %prec UMINUS'
+ t[0] = -t[2]
+
+def p_expression_group(t):
+ 'expression : LPAREN expression RPAREN'
+ t[0] = t[2]
+
+def p_expression_number(t):
+ 'expression : NUMBER'
+ t[0] = t[1]
+
+def p_expression_name(t):
+ 'expression : NAME'
+ try:
+ t[0] = names[t[1]]
+ except LookupError:
+ print "Undefined name '%s'" % t[1]
+ t[0] = 0
+
+def p_error(t):
+ print "Syntax error at '%s'" % t.value
+
+yacc.yacc()
+
+
+
+
diff --git a/test/yacc_rr.exp b/test/yacc_rr.exp
deleted file mode 100644
index f73cefd..0000000
--- a/test/yacc_rr.exp
+++ /dev/null
@@ -1,2 +0,0 @@
-yacc: Generating LALR parsing table...
-yacc: 1 reduce/reduce conflict
diff --git a/test/yacc_rr.py b/test/yacc_rr.py
index bb8cba2..86c70b7 100644
--- a/test/yacc_rr.py
+++ b/test/yacc_rr.py
@@ -4,9 +4,8 @@
# A grammar with a reduce/reduce conflict
# -----------------------------------------------------------------------------
import sys
-sys.tracebacklimit = 0
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
@@ -41,7 +40,7 @@ def p_expression_binop(t):
if t[2] == '+' : t[0] = t[1] + t[3]
elif t[2] == '-': t[0] = t[1] - t[3]
elif t[2] == '*': t[0] = t[1] * t[3]
- elif t[3] == '/': t[0] = t[1] / t[3]
+ elif t[2] == '/': t[0] = t[1] / t[3]
def p_expression_uminus(t):
'expression : MINUS expression %prec UMINUS'
diff --git a/test/yacc_simple.exp b/test/yacc_simple.exp
deleted file mode 100644
index 3836031..0000000
--- a/test/yacc_simple.exp
+++ /dev/null
@@ -1 +0,0 @@
-yacc: Generating LALR parsing table...
diff --git a/test/yacc_simple.py b/test/yacc_simple.py
index b5dc9f3..7ff336c 100644
--- a/test/yacc_simple.py
+++ b/test/yacc_simple.py
@@ -4,9 +4,8 @@
# A simple, properly specified grammar
# -----------------------------------------------------------------------------
import sys
-sys.tracebacklimit = 0
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
@@ -37,7 +36,7 @@ def p_expression_binop(t):
if t[2] == '+' : t[0] = t[1] + t[3]
elif t[2] == '-': t[0] = t[1] - t[3]
elif t[2] == '*': t[0] = t[1] * t[3]
- elif t[3] == '/': t[0] = t[1] / t[3]
+ elif t[2] == '/': t[0] = t[1] / t[3]
def p_expression_uminus(t):
'expression : MINUS expression %prec UMINUS'
diff --git a/test/yacc_sr.exp b/test/yacc_sr.exp
deleted file mode 100644
index 1b76450..0000000
--- a/test/yacc_sr.exp
+++ /dev/null
@@ -1,2 +0,0 @@
-yacc: Generating LALR parsing table...
-yacc: 20 shift/reduce conflicts
diff --git a/test/yacc_sr.py b/test/yacc_sr.py
index e2f03ec..055efe3 100644
--- a/test/yacc_sr.py
+++ b/test/yacc_sr.py
@@ -4,9 +4,8 @@
# A grammar with shift-reduce conflicts
# -----------------------------------------------------------------------------
import sys
-sys.tracebacklimit = 0
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
@@ -32,7 +31,7 @@ def p_expression_binop(t):
if t[2] == '+' : t[0] = t[1] + t[3]
elif t[2] == '-': t[0] = t[1] - t[3]
elif t[2] == '*': t[0] = t[1] * t[3]
- elif t[3] == '/': t[0] = t[1] / t[3]
+ elif t[2] == '/': t[0] = t[1] / t[3]
def p_expression_uminus(t):
'expression : MINUS expression'
diff --git a/test/yacc_term1.exp b/test/yacc_term1.exp
deleted file mode 100644
index 40f9bdf..0000000
--- a/test/yacc_term1.exp
+++ /dev/null
@@ -1,2 +0,0 @@
-./yacc_term1.py:25: Illegal rule name 'NUMBER'. Already defined as a token.
-ply.yacc.YaccError: Unable to construct parser.
diff --git a/test/yacc_term1.py b/test/yacc_term1.py
index bbc52da..c0756dd 100644
--- a/test/yacc_term1.py
+++ b/test/yacc_term1.py
@@ -4,9 +4,8 @@
# Terminal used on the left-hand-side
# -----------------------------------------------------------------------------
import sys
-sys.tracebacklimit = 0
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
@@ -37,7 +36,7 @@ def p_expression_binop(t):
if t[2] == '+' : t[0] = t[1] + t[3]
elif t[2] == '-': t[0] = t[1] - t[3]
elif t[2] == '*': t[0] = t[1] * t[3]
- elif t[3] == '/': t[0] = t[1] / t[3]
+ elif t[2] == '/': t[0] = t[1] / t[3]
def p_expression_uminus(t):
'expression : MINUS expression %prec UMINUS'
diff --git a/test/yacc_unused.exp b/test/yacc_unused.exp
deleted file mode 100644
index 6caafd2..0000000
--- a/test/yacc_unused.exp
+++ /dev/null
@@ -1,4 +0,0 @@
-./yacc_unused.py:63: Symbol 'COMMA' used, but not defined as a token or a rule.
-yacc: Symbol 'COMMA' is unreachable.
-yacc: Symbol 'exprlist' is unreachable.
-ply.yacc.YaccError: Unable to construct parser.
diff --git a/test/yacc_unused.py b/test/yacc_unused.py
index 3a61f99..b51a7f1 100644
--- a/test/yacc_unused.py
+++ b/test/yacc_unused.py
@@ -4,9 +4,8 @@
# A grammar with an unused rule
# -----------------------------------------------------------------------------
import sys
-sys.tracebacklimit = 0
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
@@ -37,7 +36,7 @@ def p_expression_binop(t):
if t[2] == '+' : t[0] = t[1] + t[3]
elif t[2] == '-': t[0] = t[1] - t[3]
elif t[2] == '*': t[0] = t[1] * t[3]
- elif t[3] == '/': t[0] = t[1] / t[3]
+ elif t[2] == '/': t[0] = t[1] / t[3]
def p_expression_uminus(t):
'expression : MINUS expression %prec UMINUS'
diff --git a/test/yacc_unused_rule.py b/test/yacc_unused_rule.py
new file mode 100644
index 0000000..4a336db
--- /dev/null
+++ b/test/yacc_unused_rule.py
@@ -0,0 +1,72 @@
+# -----------------------------------------------------------------------------
+# yacc_unused_rule.py
+#
+# Grammar with an unused rule
+# -----------------------------------------------------------------------------
+import sys
+
+if ".." not in sys.path: sys.path.insert(0,"..")
+import ply.yacc as yacc
+
+from calclex import tokens
+
+# Parsing rules
+precedence = (
+ ('left','PLUS','MINUS'),
+ ('left','TIMES','DIVIDE'),
+ ('right','UMINUS'),
+ )
+
+# dictionary of names
+names = { }
+
+def p_statement_assign(t):
+ 'statement : NAME EQUALS expression'
+ names[t[1]] = t[3]
+
+def p_statement_expr(t):
+ 'statement : expression'
+ print t[1]
+
+def p_expression_binop(t):
+ '''expression : expression PLUS expression
+ | expression MINUS expression
+ | expression TIMES expression
+ | expression DIVIDE expression'''
+ if t[2] == '+' : t[0] = t[1] + t[3]
+ elif t[2] == '-': t[0] = t[1] - t[3]
+ elif t[2] == '*': t[0] = t[1] * t[3]
+ elif t[2] == '/': t[0] = t[1] / t[3]
+
+def p_expression_uminus(t):
+ 'expression : MINUS expression %prec UMINUS'
+ t[0] = -t[2]
+
+def p_expression_group(t):
+ 'expression : LPAREN expression RPAREN'
+ t[0] = t[2]
+
+def p_expression_number(t):
+ 'expression : NUMBER'
+ t[0] = t[1]
+
+def p_expression_name(t):
+ 'expression : NAME'
+ try:
+ t[0] = names[t[1]]
+ except LookupError:
+ print "Undefined name '%s'" % t[1]
+ t[0] = 0
+
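+# 'integer' is never referenced by any other rule; yacc is expected to warn that
+# it is defined but not used and to report the symbol as unreachable.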
+def p_integer(t):
+ 'integer : NUMBER'
+ t[0] = t[1]
+
+def p_error(t):
+ print "Syntax error at '%s'" % t.value
+
+yacc.yacc()
+
+
+
+
diff --git a/test/yacc_uprec.exp b/test/yacc_uprec.exp
deleted file mode 100644
index eb9a398..0000000
--- a/test/yacc_uprec.exp
+++ /dev/null
@@ -1,2 +0,0 @@
-./yacc_uprec.py:38: Nothing known about the precedence of 'UMINUS'
-ply.yacc.YaccError: Unable to construct parser.
diff --git a/test/yacc_uprec.py b/test/yacc_uprec.py
index 0e8711e..3702f6f 100644
--- a/test/yacc_uprec.py
+++ b/test/yacc_uprec.py
@@ -4,9 +4,8 @@
# A grammar with a bad %prec specifier
# -----------------------------------------------------------------------------
import sys
-sys.tracebacklimit = 0
-sys.path.insert(0,"..")
+if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
@@ -32,7 +31,7 @@ def p_expression_binop(t):
if t[2] == '+' : t[0] = t[1] + t[3]
elif t[2] == '-': t[0] = t[1] - t[3]
elif t[2] == '*': t[0] = t[1] * t[3]
- elif t[3] == '/': t[0] = t[1] / t[3]
+ elif t[2] == '/': t[0] = t[1] / t[3]
def p_expression_uminus(t):
'expression : MINUS expression %prec UMINUS'
diff --git a/test/yacc_uprec2.py b/test/yacc_uprec2.py
new file mode 100644
index 0000000..d7b1f0d
--- /dev/null
+++ b/test/yacc_uprec2.py
@@ -0,0 +1,63 @@
+# -----------------------------------------------------------------------------
+# yacc_uprec2.py
+#
+# A grammar with a bad %prec specifier
+# -----------------------------------------------------------------------------
+import sys
+
+if ".." not in sys.path: sys.path.insert(0,"..")
+import ply.yacc as yacc
+
+from calclex import tokens
+
+# Parsing rules
+
+# dictionary of names
+names = { }
+
+def p_statement_assign(t):
+ 'statement : NAME EQUALS expression'
+ names[t[1]] = t[3]
+
+def p_statement_expr(t):
+ 'statement : expression'
+ print t[1]
+
+def p_expression_binop(t):
+ '''expression : expression PLUS expression
+ | expression MINUS expression
+ | expression TIMES expression
+ | expression DIVIDE expression'''
+ if t[2] == '+' : t[0] = t[1] + t[3]
+ elif t[2] == '-': t[0] = t[1] - t[3]
+ elif t[2] == '*': t[0] = t[1] * t[3]
+ elif t[2] == '/': t[0] = t[1] / t[3]
+
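+# No symbol follows %prec below; yacc is expected to report "Nothing follows %prec."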
+def p_expression_uminus(t):
+ 'expression : MINUS expression %prec'
+ t[0] = -t[2]
+
+def p_expression_group(t):
+ 'expression : LPAREN expression RPAREN'
+ t[0] = t[2]
+
+def p_expression_number(t):
+ 'expression : NUMBER'
+ t[0] = t[1]
+
+def p_expression_name(t):
+ 'expression : NAME'
+ try:
+ t[0] = names[t[1]]
+ except LookupError:
+ print "Undefined name '%s'" % t[1]
+ t[0] = 0
+
+def p_error(t):
+ print "Syntax error at '%s'" % t.value
+
+yacc.yacc()
+
+
+
+