summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--CHANGES7
-rw-r--r--MANIFEST.in4
-rw-r--r--Makefile26
-rw-r--r--README.md14
-rw-r--r--doc/ply.md8
-rw-r--r--example/BASIC/basic.py3
-rw-r--r--example/BASIC/basiclog.py2
-rw-r--r--example/GardenSnake/GardenSnake.py773
-rw-r--r--example/GardenSnake/README5
-rw-r--r--example/ansic/clex.py3
-rw-r--r--example/ansic/cparse.py1
-rw-r--r--example/calc/calc.py3
-rw-r--r--example/calcdebug/calc.py3
-rw-r--r--example/calceof/calc.py3
-rwxr-xr-xexample/classcalc/calc.py3
-rw-r--r--example/closurecalc/calc.py3
-rw-r--r--example/yply/ylex.py3
-rwxr-xr-xexample/yply/yply.py1
-rw-r--r--ply-2022_01_02.tar.gzbin49761 -> 0 bytes
-rw-r--r--ply-tests.tar.gzbin14317 -> 0 bytes
-rw-r--r--pyproject.toml3
-rw-r--r--setup.cfg18
-rw-r--r--src/ply/__init__.py (renamed from ply/__init__.py)2
-rw-r--r--src/ply/lex.py (renamed from ply/lex.py)0
-rw-r--r--src/ply/yacc.py (renamed from ply/yacc.py)0
-rw-r--r--test/testcpp.py153
-rw-r--r--tests/README (renamed from test/README)0
-rw-r--r--tests/calclex.py (renamed from test/calclex.py)3
-rwxr-xr-xtests/cleanup.sh (renamed from test/cleanup.sh)0
-rw-r--r--tests/lex_closure.py (renamed from test/lex_closure.py)3
-rw-r--r--tests/lex_doc1.py (renamed from test/lex_doc1.py)3
-rw-r--r--tests/lex_dup1.py (renamed from test/lex_dup1.py)3
-rw-r--r--tests/lex_dup2.py (renamed from test/lex_dup2.py)3
-rw-r--r--tests/lex_dup3.py (renamed from test/lex_dup3.py)3
-rw-r--r--tests/lex_empty.py (renamed from test/lex_empty.py)3
-rw-r--r--tests/lex_error1.py (renamed from test/lex_error1.py)3
-rw-r--r--tests/lex_error2.py (renamed from test/lex_error2.py)3
-rw-r--r--tests/lex_error3.py (renamed from test/lex_error3.py)3
-rw-r--r--tests/lex_error4.py (renamed from test/lex_error4.py)3
-rw-r--r--tests/lex_hedit.py (renamed from test/lex_hedit.py)2
-rw-r--r--tests/lex_ignore.py (renamed from test/lex_ignore.py)3
-rw-r--r--tests/lex_ignore2.py (renamed from test/lex_ignore2.py)3
-rw-r--r--tests/lex_literal1.py (renamed from test/lex_literal1.py)3
-rw-r--r--tests/lex_literal2.py (renamed from test/lex_literal2.py)3
-rw-r--r--tests/lex_literal3.py (renamed from test/lex_literal3.py)3
-rw-r--r--tests/lex_many_tokens.py (renamed from test/lex_many_tokens.py)2
-rw-r--r--tests/lex_module.py (renamed from test/lex_module.py)3
-rw-r--r--tests/lex_module_import.py (renamed from test/lex_module_import.py)0
-rw-r--r--tests/lex_object.py (renamed from test/lex_object.py)2
-rw-r--r--tests/lex_re1.py (renamed from test/lex_re1.py)3
-rw-r--r--tests/lex_re2.py (renamed from test/lex_re2.py)3
-rw-r--r--tests/lex_re3.py (renamed from test/lex_re3.py)3
-rw-r--r--tests/lex_rule1.py (renamed from test/lex_rule1.py)3
-rw-r--r--tests/lex_rule2.py (renamed from test/lex_rule2.py)3
-rw-r--r--tests/lex_rule3.py (renamed from test/lex_rule3.py)3
-rw-r--r--tests/lex_state1.py (renamed from test/lex_state1.py)3
-rw-r--r--tests/lex_state2.py (renamed from test/lex_state2.py)3
-rw-r--r--tests/lex_state3.py (renamed from test/lex_state3.py)3
-rw-r--r--tests/lex_state4.py (renamed from test/lex_state4.py)3
-rw-r--r--tests/lex_state5.py (renamed from test/lex_state5.py)3
-rw-r--r--tests/lex_state_noerror.py (renamed from test/lex_state_noerror.py)3
-rw-r--r--tests/lex_state_norule.py (renamed from test/lex_state_norule.py)3
-rw-r--r--tests/lex_state_try.py (renamed from test/lex_state_try.py)3
-rw-r--r--tests/lex_token1.py (renamed from test/lex_token1.py)3
-rw-r--r--tests/lex_token2.py (renamed from test/lex_token2.py)3
-rw-r--r--tests/lex_token3.py (renamed from test/lex_token3.py)3
-rw-r--r--tests/lex_token4.py (renamed from test/lex_token4.py)3
-rw-r--r--tests/lex_token_dup.py (renamed from test/lex_token_dup.py)3
-rw-r--r--tests/test_cpp_nonascii.c (renamed from test/test_cpp_nonascii.c)0
-rwxr-xr-xtests/testlex.py (renamed from test/testlex.py)19
-rw-r--r--tests/testyacc.py (renamed from test/testyacc.py)41
-rw-r--r--tests/yacc_badargs.py (renamed from test/yacc_badargs.py)2
-rw-r--r--tests/yacc_badid.py (renamed from test/yacc_badid.py)3
-rw-r--r--tests/yacc_badprec.py (renamed from test/yacc_badprec.py)3
-rw-r--r--tests/yacc_badprec2.py (renamed from test/yacc_badprec2.py)3
-rw-r--r--tests/yacc_badprec3.py (renamed from test/yacc_badprec3.py)3
-rw-r--r--tests/yacc_badrule.py (renamed from test/yacc_badrule.py)3
-rw-r--r--tests/yacc_badtok.py (renamed from test/yacc_badtok.py)3
-rw-r--r--tests/yacc_dup.py (renamed from test/yacc_dup.py)3
-rw-r--r--tests/yacc_error1.py (renamed from test/yacc_error1.py)3
-rw-r--r--tests/yacc_error2.py (renamed from test/yacc_error2.py)3
-rw-r--r--tests/yacc_error3.py (renamed from test/yacc_error3.py)3
-rw-r--r--tests/yacc_error4.py (renamed from test/yacc_error4.py)3
-rw-r--r--tests/yacc_error5.py (renamed from test/yacc_error5.py)3
-rw-r--r--tests/yacc_error6.py (renamed from test/yacc_error6.py)3
-rw-r--r--tests/yacc_error7.py (renamed from test/yacc_error7.py)3
-rw-r--r--tests/yacc_inf.py (renamed from test/yacc_inf.py)3
-rw-r--r--tests/yacc_literal.py (renamed from test/yacc_literal.py)3
-rw-r--r--tests/yacc_misplaced.py (renamed from test/yacc_misplaced.py)3
-rw-r--r--tests/yacc_missing1.py (renamed from test/yacc_missing1.py)3
-rw-r--r--tests/yacc_nested.py (renamed from test/yacc_nested.py)3
-rw-r--r--tests/yacc_nodoc.py (renamed from test/yacc_nodoc.py)3
-rw-r--r--tests/yacc_noerror.py (renamed from test/yacc_noerror.py)3
-rw-r--r--tests/yacc_nop.py (renamed from test/yacc_nop.py)3
-rw-r--r--tests/yacc_notfunc.py (renamed from test/yacc_notfunc.py)3
-rw-r--r--tests/yacc_notok.py (renamed from test/yacc_notok.py)4
-rw-r--r--tests/yacc_prec1.py (renamed from test/yacc_prec1.py)3
-rw-r--r--tests/yacc_rr.py (renamed from test/yacc_rr.py)3
-rw-r--r--tests/yacc_rr_unused.py (renamed from test/yacc_rr_unused.py)3
-rw-r--r--tests/yacc_simple.py (renamed from test/yacc_simple.py)3
-rw-r--r--tests/yacc_sr.py (renamed from test/yacc_sr.py)3
-rw-r--r--tests/yacc_term1.py (renamed from test/yacc_term1.py)3
-rw-r--r--tests/yacc_unicode_literals.py (renamed from test/yacc_unicode_literals.py)3
-rw-r--r--tests/yacc_unused.py (renamed from test/yacc_unused.py)3
-rw-r--r--tests/yacc_unused_rule.py (renamed from test/yacc_unused_rule.py)3
-rw-r--r--tests/yacc_uprec.py (renamed from test/yacc_uprec.py)3
-rw-r--r--tests/yacc_uprec2.py (renamed from test/yacc_uprec2.py)3
107 files changed, 880 insertions, 443 deletions
diff --git a/CHANGES b/CHANGES
index aefb2f9..0a88915 100644
--- a/CHANGES
+++ b/CHANGES
@@ -6,6 +6,13 @@ maintained as a mature library. No new major features are planned, but
issues reported for bugs are still welcome. Any changes to the
software will be noted here.
+Version 2022.10.27
+------------------
+10/27/22 Reorganization/modernization of the build process. PLY continues
+ to make no package-installable releases. However, you can now
+ use the Makefile to build artifacts installable via pip.
+ Use `make test` to test and `make build` to build.
+
Version 2022_01_02
------------------
12/12/21 PLY is no longer being developed in public. Instead
diff --git a/MANIFEST.in b/MANIFEST.in
new file mode 100644
index 0000000..cfcf771
--- /dev/null
+++ b/MANIFEST.in
@@ -0,0 +1,4 @@
+include Makefile CONTRIBUTING.md
+recursive-include example *
+recursive-include tests *
+recursive-include docs *
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..683f97c
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,26 @@
+PYTHON=python3
+VENV=.venv
+
+# Setup and install all of the required tools for building, testing,
+# and deploying
+setup::
+ rm -rf $(VENV)
+ $(PYTHON) -m venv $(VENV)
+ ./$(VENV)/bin/python -m pip install pytest
+ ./$(VENV)/bin/python -m pip install pytest-cov
+ ./$(VENV)/bin/python -m pip install build
+ ./$(VENV)/bin/python -m pip install twine
+
+# Run unit tests
+test::
+ ./$(VENV)/bin/python -m pip install .
+ ./$(VENV)/bin/python tests/testlex.py
+ ./$(VENV)/bin/python tests/testyacc.py
+
+# Build an artifact suitable for installing with pip
+build::
+ ./$(VENV)/bin/python -m build
+
+# Install into the default Python
+install::
+ $(PYTHON) -m pip install .
diff --git a/README.md b/README.md
index cb48f59..1c10842 100644
--- a/README.md
+++ b/README.md
@@ -18,14 +18,17 @@ flexibility in terms of how you decide to use it. You can use PLY to
build Abstract Syntax Trees (ASTs), simple one-pass compilers,
protocol decoders, or even a more advanced parsing framework.
-## Download
+## Important Notice - October 27, 2022
-* [Current Release (ply-2022_01_02)](https://github.com/dabeaz/ply/raw/master/ply-2022_01_02.tar.gz)
-* [Historial Releases](https://github.com/dabeaz/archive/tree/main/ply)
+The PLY project will make no further package-installable releases.
+If you want the latest version, you'll need to download it here
+or clone the repo.
+
+## Requirements
The current release of PLY requires the use of Python 3.6 or
greater. If you need to support an older version, download one of the
-historical releases.
+historical releases at https://github.com/dabeaz/archive/tree/main/ply.
## How to Install and Use
@@ -33,7 +36,8 @@ Although PLY is open-source, it is not distributed or installed by
package manager. There are only two files: `lex.py` and `yacc.py`,
both of which are contained in a `ply` package directory. To use PLY,
copy the `ply` directory into your project and import `lex` and `yacc`
-from the associated `ply` subpackage.
+from the associated `ply` subpackage. Alternatively, you can install
+these files into your working python using `make install`.
```python
from .ply import lex
diff --git a/doc/ply.md b/doc/ply.md
index 37c42af..a7d72c1 100644
--- a/doc/ply.md
+++ b/doc/ply.md
@@ -477,13 +477,13 @@ parse. Here is an example of how this works:
# Get more input (Example)
more = input('... ')
if more:
- self.lexer.input(more)
- return self.lexer.token()
+ t.lexer.input(more)
+ return t.lexer.token()
return None
The EOF function should return the next available token (by calling
-`self.lexer.token())` or `None` to indicate no more data. Be aware that
-setting more input with the `self.lexer.input()` method does NOT reset
+`t.lexer.token())` or `None` to indicate no more data. Be aware that
+setting more input with the `t.lexer.input()` method does NOT reset
the lexer state or the `lineno` attribute used for position tracking.
The `lexpos` attribute is reset so be aware of that if you\'re using it
in error reporting.
diff --git a/example/BASIC/basic.py b/example/BASIC/basic.py
index 8a8a500..a8a0ac9 100644
--- a/example/BASIC/basic.py
+++ b/example/BASIC/basic.py
@@ -1,9 +1,6 @@
# An implementation of Dartmouth BASIC (1964)
#
-import sys
-sys.path.insert(0, "../..")
-
import basiclex
import basparse
import basinterp
diff --git a/example/BASIC/basiclog.py b/example/BASIC/basiclog.py
index 9258e29..2463bc4 100644
--- a/example/BASIC/basiclog.py
+++ b/example/BASIC/basiclog.py
@@ -2,8 +2,6 @@
#
import sys
-sys.path.insert(0, "../..")
-
if sys.version_info[0] >= 3:
raw_input = input
diff --git a/example/GardenSnake/GardenSnake.py b/example/GardenSnake/GardenSnake.py
new file mode 100644
index 0000000..0633f87
--- /dev/null
+++ b/example/GardenSnake/GardenSnake.py
@@ -0,0 +1,773 @@
+# GardenSnake - a parser generator demonstration program
+#
+# This implements a modified version of a subset of Python:
+# - only 'def', 'return' and 'if' statements
+# - 'if' only has 'then' clause (no elif nor else)
+# - single-quoted strings only, content in raw format
+# - numbers are decimal.Decimal instances (not integers or floats)
+# - no print statement; use the built-in 'print' function
+# - only < > == + - / * implemented (and unary + -)
+# - assignment and tuple assignment work
+# - no generators of any sort
+# - no ... well, not quite a lot
+
+# Why? I'm thinking about a new indentation-based configuration
+# language for a project and wanted to figure out how to do it. Once
+# I got that working I needed a way to test it out. My original AST
+# was dumb so I decided to target Python's AST and compile it into
+# Python code. Plus, it's pretty cool that it only took a day or so
+# from sitting down with Ply to having working code.
+
+# This uses David Beazley's Ply from http://www.dabeaz.com/ply/
+
+# This work is hereby released into the Public Domain. To view a copy of
+# the public domain dedication, visit
+# http://creativecommons.org/licenses/publicdomain/ or send a letter to
+# Creative Commons, 543 Howard Street, 5th Floor, San Francisco,
+# California, 94105, USA.
+#
+# Portions of this work are derived from Python's Grammar definition
+# and may be covered under the Python copyright and license
+#
+# Andrew Dalke / Dalke Scientific Software, LLC
+# 30 August 2006 / Cape Town, South Africa
+#
+# Lolbot Iichan / Moonworks
+# 12 July 2020 / Moscow, Russia
+
+# Changelog:
+# 30 August - added link to CC license; removed the "swapcase" encoding
+# 12 July - ported to Python 3; fixed whitespace in some regex
+
+# Modifications for inclusion in PLY distribution
+import sys
+sys.path.insert(0, "../..")
+from ply import *
+
+##### Lexer ######
+#import lex
+import decimal
+
+tokens = (
+ 'DEF',
+ 'IF',
+ 'NAME',
+ 'NUMBER', # Python decimals
+ 'STRING', # single quoted strings only; syntax of raw strings
+ 'LPAR',
+ 'RPAR',
+ 'COLON',
+ 'EQ',
+ 'ASSIGN',
+ 'LT',
+ 'GT',
+ 'PLUS',
+ 'MINUS',
+ 'MULT',
+ 'DIV',
+ 'RETURN',
+ 'WS',
+ 'NEWLINE',
+ 'COMMA',
+ 'SEMICOLON',
+ 'INDENT',
+ 'DEDENT',
+ 'ENDMARKER',
+)
+
+#t_NUMBER = r'\d+'
+# taken from decimal.py but without the leading sign
+
+
+def t_NUMBER(t):
+ r"""(\d+(\.\d*)?|\.\d+)([eE][-+]?\d+)?"""
+ t.value = decimal.Decimal(t.value)
+ if t.value == int(t.value):
+ t.value = int(t.value)
+ else:
+ t.value = float(t.value)
+ return t
+
+
+def t_STRING(t):
+ r"'([^\\']+|\\'|\\\\)*'" # I think this is right ...
+ t.value = t.value[1:-1].encode().decode("unicode_escape") # .swapcase() # for fun
+ return t
+
+t_COLON = r':'
+t_EQ = r'=='
+t_ASSIGN = r'='
+t_LT = r'<'
+t_GT = r'>'
+t_PLUS = r'\+'
+t_MINUS = r'-'
+t_MULT = r'\*'
+t_DIV = r'/'
+t_COMMA = r','
+t_SEMICOLON = r';'
+
+# Ply nicely documented how to do this.
+
+RESERVED = {
+ "def": "DEF",
+ "if": "IF",
+ "return": "RETURN",
+}
+
+
+def t_NAME(t):
+ r'[a-zA-Z_][a-zA-Z0-9_]*'
+ t.type = RESERVED.get(t.value, "NAME")
+ return t
+
+# Putting this before t_WS let it consume lines with only comments in
+# them so the latter code never sees the WS part. Not consuming the
+# newline. Needed for "if 1: #comment"
+
+
+def t_comment(t):
+ r"[ ]*\043[^\n]*" # \043 is '#'
+ pass
+
+
+# Whitespace
+def t_WS(t):
+ r'[ ]+'
+ if t.lexer.at_line_start and t.lexer.paren_count == 0:
+ return t
+
+# Don't generate newline tokens when inside of parenthesis, eg
+# a = (1,
+# 2, 3)
+
+
+def t_newline(t):
+ r'\n+'
+ t.lexer.lineno += len(t.value)
+ t.type = "NEWLINE"
+ if t.lexer.paren_count == 0:
+ return t
+
+
+def t_LPAR(t):
+ r'\('
+ t.lexer.paren_count += 1
+ return t
+
+
+def t_RPAR(t):
+ r'\)'
+ # check for underflow? should be the job of the parser
+ t.lexer.paren_count -= 1
+ return t
+
+
+def t_error(t):
+ raise SyntaxError("Unknown symbol %r" % (t.value[0],))
+ print("Skipping", repr(t.value[0]))
+ t.lexer.skip(1)
+
+# I implemented INDENT / DEDENT generation as a post-processing filter
+
+# The original lex token stream contains WS and NEWLINE characters.
+# WS will only occur before any other tokens on a line.
+
+# I have three filters. One tags tokens by adding two attributes.
+# "must_indent" is True if the token must be indented from the
+# previous code. The other is "at_line_start" which is True for WS
+# and the first non-WS/non-NEWLINE on a line. It flags the check to
+# see if the new line has changed indentation level.
+
+# Python's syntax has three INDENT states
+# 0) no colon hence no need to indent
+# 1) "if 1: go()" - simple statements have a COLON but no need for an indent
+# 2) "if 1:\n go()" - complex statements have a COLON NEWLINE and must indent
+NO_INDENT = 0
+MAY_INDENT = 1
+MUST_INDENT = 2
+
+# only care about whitespace at the start of a line
+
+
+def track_tokens_filter(lexer, tokens):
+ lexer.at_line_start = at_line_start = True
+ indent = NO_INDENT
+ saw_colon = False
+ for token in tokens:
+ token.at_line_start = at_line_start
+
+ if token.type == "COLON":
+ at_line_start = False
+ indent = MAY_INDENT
+ token.must_indent = False
+
+ elif token.type == "NEWLINE":
+ at_line_start = True
+ if indent == MAY_INDENT:
+ indent = MUST_INDENT
+ token.must_indent = False
+
+ elif token.type == "WS":
+ assert token.at_line_start == True
+ at_line_start = True
+ token.must_indent = False
+
+ else:
+ # A real token; only indent after COLON NEWLINE
+ if indent == MUST_INDENT:
+ token.must_indent = True
+ else:
+ token.must_indent = False
+ at_line_start = False
+ indent = NO_INDENT
+
+ yield token
+ lexer.at_line_start = at_line_start
+
+
+def _new_token(type, lineno):
+ tok = lex.LexToken()
+ tok.type = type
+ tok.value = None
+ tok.lineno = lineno
+ tok.lexpos = 0
+ return tok
+
+# Synthesize a DEDENT tag
+
+
+def DEDENT(lineno):
+ return _new_token("DEDENT", lineno)
+
+# Synthesize an INDENT tag
+
+
+def INDENT(lineno):
+ return _new_token("INDENT", lineno)
+
+
+# Track the indentation level and emit the right INDENT / DEDENT events.
+def indentation_filter(tokens):
+ # A stack of indentation levels; will never pop item 0
+ levels = [0]
+ token = None
+ depth = 0
+ prev_was_ws = False
+ for token in tokens:
+ # if 1:
+ # print "Process", token,
+ # if token.at_line_start:
+ # print "at_line_start",
+ # if token.must_indent:
+ # print "must_indent",
+ # print
+
+ # WS only occurs at the start of the line
+ # There may be WS followed by NEWLINE so
+ # only track the depth here. Don't indent/dedent
+ # until there's something real.
+ if token.type == "WS":
+ assert depth == 0
+ depth = len(token.value)
+ prev_was_ws = True
+ # WS tokens are never passed to the parser
+ continue
+
+ if token.type == "NEWLINE":
+ depth = 0
+ if prev_was_ws or token.at_line_start:
+ # ignore blank lines
+ continue
+ # pass the other cases on through
+ yield token
+ continue
+
+ # then it must be a real token (not WS, not NEWLINE)
+ # which can affect the indentation level
+
+ prev_was_ws = False
+ if token.must_indent:
+ # The current depth must be larger than the previous level
+ if not (depth > levels[-1]):
+ raise IndentationError("expected an indented block")
+
+ levels.append(depth)
+ yield INDENT(token.lineno)
+
+ elif token.at_line_start:
+ # Must be on the same level or one of the previous levels
+ if depth == levels[-1]:
+ # At the same level
+ pass
+ elif depth > levels[-1]:
+ raise IndentationError(
+ "indentation increase but not in new block")
+ else:
+ # Back up; but only if it matches a previous level
+ try:
+ i = levels.index(depth)
+ except ValueError:
+ raise IndentationError("inconsistent indentation")
+ for _ in range(i + 1, len(levels)):
+ yield DEDENT(token.lineno)
+ levels.pop()
+
+ yield token
+
+ ### Finished processing ###
+
+ # Must dedent any remaining levels
+ if len(levels) > 1:
+ assert token is not None
+ for _ in range(1, len(levels)):
+ yield DEDENT(token.lineno)
+
+
+# The top-level filter adds an ENDMARKER, if requested.
+# Python's grammar uses it.
+def filter(lexer, add_endmarker=True):
+ token = None
+ tokens = iter(lexer.token, None)
+ tokens = track_tokens_filter(lexer, tokens)
+ for token in indentation_filter(tokens):
+ yield token
+
+ if add_endmarker:
+ lineno = 1
+ if token is not None:
+ lineno = token.lineno
+ yield _new_token("ENDMARKER", lineno)
+
+# Combine Ply and my filters into a new lexer
+
+
+class IndentLexer(object):
+
+ def __init__(self, debug=0, reflags=0):
+ self.lexer = lex.lex(debug=debug, reflags=reflags)
+ self.token_stream = None
+
+ def input(self, s, add_endmarker=True):
+ self.lexer.paren_count = 0
+ self.lexer.input(s)
+ self.token_stream = filter(self.lexer, add_endmarker)
+
+ def token(self):
+ try:
+ return next(self.token_stream)
+ except StopIteration:
+ return None
+
+########## Parser (tokens -> AST) ######
+
+# also part of Ply
+#import yacc
+
+# I use the Python AST
+import ast
+
+# Helper function
+
+
+def Assign(left, right):
+ names = []
+ if isinstance(left, ast.Name):
+ # Single assignment on left
+ return ast.Assign([ast.Name(left.id, ctx=ast.Store())], right)
+ elif isinstance(left, ast.Tuple):
+ # List of things - make sure they are Name nodes
+ names = []
+ for child in left.elts:
+ if not isinstance(child, ast.Name):
+ raise SyntaxError("that assignment not supported")
+ names.append(child.id)
+ ass_list = [ast.Name(name, ctx=ast.Store()) for name in names]
+ return ast.Assign([ast.Tuple(ass_list, ctx=ast.Store())], right)
+ else:
+ raise SyntaxError("Can't do that yet")
+
+
+# The grammar comments come from Python's Grammar/Grammar file
+
+# NB: compound_stmt in single_input is followed by extra NEWLINE!
+# file_input: (NEWLINE | stmt)* ENDMARKER
+def p_file_input_end(p):
+ """file_input_end : file_input ENDMARKER"""
+ p[0] = p[1]
+
+
+def p_file_input(p):
+ """file_input : file_input NEWLINE
+ | file_input stmt
+ | NEWLINE
+ | stmt"""
+ if isinstance(p[len(p) - 1], str):
+ if len(p) == 3:
+ p[0] = p[1]
+ else:
+ p[0] = [] # p == 2 --> only a blank line
+ else:
+ if len(p) == 3:
+ p[0] = p[1] + p[2]
+ else:
+ p[0] = p[1]
+
+
+# funcdef: [decorators] 'def' NAME parameters ':' suite
+# ignoring decorators
+def p_funcdef(p):
+ "funcdef : DEF NAME parameters COLON suite"
+ p[0] = ast.FunctionDef(p[2], args=ast.arguments([ast.arg(x, None) for x in p[3]], None, [], [], None, []), body=p[5], decorator_list=[], returns=None)
+
+# parameters: '(' [varargslist] ')'
+
+
+def p_parameters(p):
+ """parameters : LPAR RPAR
+ | LPAR varargslist RPAR"""
+ if len(p) == 3:
+ p[0] = []
+ else:
+ p[0] = p[2]
+
+
+# varargslist: (fpdef ['=' test] ',')* ('*' NAME [',' '**' NAME] | '**' NAME) |
+# highly simplified
+def p_varargslist(p):
+ """varargslist : varargslist COMMA NAME
+ | NAME"""
+ if len(p) == 4:
+ p[0] = p[1] + p[3]
+ else:
+ p[0] = [p[1]]
+
+# stmt: simple_stmt | compound_stmt
+
+
+def p_stmt_simple(p):
+ """stmt : simple_stmt"""
+ # simple_stmt is a list
+ p[0] = p[1]
+
+
+def p_stmt_compound(p):
+ """stmt : compound_stmt"""
+ p[0] = [p[1]]
+
+# simple_stmt: small_stmt (';' small_stmt)* [';'] NEWLINE
+
+
+def p_simple_stmt(p):
+ """simple_stmt : small_stmts NEWLINE
+ | small_stmts SEMICOLON NEWLINE"""
+ p[0] = p[1]
+
+
+def p_small_stmts(p):
+ """small_stmts : small_stmts SEMICOLON small_stmt
+ | small_stmt"""
+ if len(p) == 4:
+ p[0] = p[1] + [p[3]]
+ else:
+ p[0] = [p[1]]
+
+# small_stmt: expr_stmt | print_stmt | del_stmt | pass_stmt | flow_stmt |
+# import_stmt | global_stmt | exec_stmt | assert_stmt
+
+
+def p_small_stmt(p):
+ """small_stmt : flow_stmt
+ | expr_stmt"""
+ p[0] = p[1]
+
+# expr_stmt: testlist (augassign (yield_expr|testlist) |
+# ('=' (yield_expr|testlist))*)
+# augassign: ('+=' | '-=' | '*=' | '/=' | '%=' | '&=' | '|=' | '^=' |
+# '<<=' | '>>=' | '**=' | '//=')
+
+
+def p_expr_stmt(p):
+ """expr_stmt : testlist ASSIGN testlist
+ | testlist """
+ if len(p) == 2:
+ # a list of expressions
+ p[0] = ast.Expr(p[1])
+ else:
+ p[0] = Assign(p[1], p[3])
+
+
+def p_flow_stmt(p):
+ "flow_stmt : return_stmt"
+ p[0] = p[1]
+
+# return_stmt: 'return' [testlist]
+
+
+def p_return_stmt(p):
+ "return_stmt : RETURN testlist"
+ p[0] = ast.Return(p[2])
+
+
+def p_compound_stmt(p):
+ """compound_stmt : if_stmt
+ | funcdef"""
+ p[0] = p[1]
+
+
+def p_if_stmt(p):
+ 'if_stmt : IF test COLON suite'
+ p[0] = ast.If(p[2], p[4], [])
+
+
+def p_suite(p):
+ """suite : simple_stmt
+ | NEWLINE INDENT stmts DEDENT"""
+ if len(p) == 2:
+ p[0] = p[1]
+ else:
+ p[0] = p[3]
+
+
+def p_stmts(p):
+ """stmts : stmts stmt
+ | stmt"""
+ if len(p) == 3:
+ p[0] = p[1] + p[2]
+ else:
+ p[0] = p[1]
+
+# Not using Python's approach because Ply supports precedence
+
+# comparison: expr (comp_op expr)*
+# arith_expr: term (('+'|'-') term)*
+# term: factor (('*'|'/'|'%'|'//') factor)*
+# factor: ('+'|'-'|'~') factor | power
+# comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not'
+
+
+binary_ops = {
+ "+": ast.Add,
+ "-": ast.Sub,
+ "*": ast.Mult,
+ "/": ast.Div,
+}
+compare_ops = {
+ "<": ast.Lt,
+ ">": ast.Gt,
+ "==": ast.Eq,
+}
+unary_ops = {
+ "+": ast.UAdd,
+ "-": ast.USub,
+}
+precedence = (
+ ("left", "EQ", "GT", "LT"),
+ ("left", "PLUS", "MINUS"),
+ ("left", "MULT", "DIV"),
+)
+
+
+def p_comparison(p):
+ """comparison : comparison PLUS comparison
+ | comparison MINUS comparison
+ | comparison MULT comparison
+ | comparison DIV comparison
+ | comparison LT comparison
+ | comparison EQ comparison
+ | comparison GT comparison
+ | PLUS comparison
+ | MINUS comparison
+ | power"""
+ if len(p) == 4:
+ if p[2] in binary_ops:
+ p[0] = ast.BinOp(p[1], binary_ops[p[2]](), p[3])
+ else:
+ p[0] = ast.Compare(p[1], [compare_ops[p[2]]()], [p[3]])
+ elif len(p) == 3:
+ p[0] = ast.UnaryOp(unary_ops[p[1]](), p[2])
+ else:
+ p[0] = p[1]
+
+# power: atom trailer* ['**' factor]
+# trailers enables function calls. I only allow one level of calls
+# so this is 'trailer'
+
+
+def p_power(p):
+ """power : atom
+ | atom trailer"""
+ if len(p) == 2:
+ p[0] = p[1]
+ else:
+ if p[2][0] == "CALL":
+ p[0] = ast.Call(p[1], p[2][1], [])
+ else:
+ raise AssertionError("not implemented")
+
+
+def p_atom_name(p):
+ """atom : NAME"""
+ p[0] = ast.Name(p[1],ctx=ast.Load())
+
+
+def p_atom_number(p):
+ """atom : NUMBER"""
+ p[0] = ast.Num(p[1])
+
+def p_atom_string(p):
+ """atom : STRING"""
+ p[0] = ast.Str(p[1])
+
+def p_atom_tuple(p):
+ """atom : LPAR testlist RPAR"""
+ p[0] = p[2]
+
+# trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' NAME
+
+
+def p_trailer(p):
+ "trailer : LPAR arglist RPAR"
+ p[0] = ("CALL", p[2])
+
+# testlist: test (',' test)* [',']
+# Contains shift/reduce error
+
+
+def p_testlist(p):
+ """testlist : testlist_multi COMMA
+ | testlist_multi """
+ if len(p) == 2:
+ p[0] = p[1]
+ else:
+ # May need to promote singleton to tuple
+ if isinstance(p[1], list):
+ p[0] = p[1]
+ else:
+ p[0] = [p[1]]
+ # Convert into a tuple?
+ if isinstance(p[0], list):
+ p[0] = ast.Tuple(p[0], ctx=ast.Load())
+
+
+def p_testlist_multi(p):
+ """testlist_multi : testlist_multi COMMA test
+ | test"""
+ if len(p) == 2:
+ # singleton
+ p[0] = p[1]
+ else:
+ if isinstance(p[1], list):
+ p[0] = p[1] + [p[3]]
+ else:
+ # singleton -> tuple
+ p[0] = [p[1], p[3]]
+
+
+# test: or_test ['if' or_test 'else' test] | lambdef
+# as I don't support 'and', 'or', and 'not' this works down to 'comparison'
+def p_test(p):
+ "test : comparison"
+ p[0] = p[1]
+
+
+# arglist: (argument ',')* (argument [',']| '*' test [',' '**' test] | '**' test)
+# XXX INCOMPLETE: this doesn't allow the trailing comma
+def p_arglist(p):
+ """arglist : arglist COMMA argument
+ | argument"""
+ if len(p) == 4:
+ p[0] = p[1] + [p[3]]
+ else:
+ p[0] = [p[1]]
+
+# argument: test [gen_for] | test '=' test # Really [keyword '='] test
+
+
+def p_argument(p):
+ "argument : test"
+ p[0] = p[1]
+
+
+def p_error(p):
+ # print "Error!", repr(p)
+ raise SyntaxError(p)
+
+
+class GardenSnakeParser(object):
+
+ def __init__(self, lexer=None):
+ if lexer is None:
+ lexer = IndentLexer()
+ self.lexer = lexer
+ self.parser = yacc.yacc(start="file_input_end")
+
+ def parse(self, code):
+ self.lexer.input(code)
+ result = self.parser.parse(lexer=self.lexer)
+ return ast.Module(result)
+
+
+###### Code generation ######
+
+class GardenSnakeCompiler(object):
+
+ def __init__(self):
+ self.parser = GardenSnakeParser()
+
+ def do_compile(self, code, filename="<string>"):
+ tree = self.parser.parse(code)
+ tree = ast.fix_missing_locations(tree)
+ # print(ast.dump(tree))
+ return compile(tree, filename, "exec")
+
+####### Test code #######
+
+do_compile = GardenSnakeCompiler().do_compile
+
+code = r"""
+
+print('LET\'S TRY THIS \\OUT')
+
+#Comment here
+def x(a):
+ print('called with',a)
+ if a == 1:
+ return 2
+ if a*2 > 10: return 999 / 4
+ # Another comment here
+
+ return a+2*3
+
+ints = (1, 2,
+ 3, 4,
+5)
+print('mutiline-expression', ints)
+
+t = 4+1/3*2+6*(9-5+1)
+print('predence test; should be 34+2/3:', t, t==(34+2/3))
+
+print('numbers', 1,2,3,4,5)
+if 1:
+ 8
+ a=9
+ print(x(a))
+
+print(x(1))
+print(x(2))
+print(x(8),'3')
+print('this is decimal', 1/5)
+print('BIG DECIMAL', 1.234567891234567e12345)
+print('LITTE DECIMAL', 1.234567891234567e-12345)
+
+print(t,a)
+a,t = t,a
+print(t,a)
+"""
+
+# Set up the GardenSnake run-time environment
+
+
+compiled_code = do_compile(code)
+
+exec(compiled_code, globals())
+print("Done")
diff --git a/example/GardenSnake/README b/example/GardenSnake/README
new file mode 100644
index 0000000..4d8be2d
--- /dev/null
+++ b/example/GardenSnake/README
@@ -0,0 +1,5 @@
+This example is Andrew Dalke's GardenSnake language. It shows how to process an
+indentation-like language like Python. Further details can be found here:
+
+http://dalkescientific.com/writings/diary/archive/2006/08/30/gardensnake_language.html
+
diff --git a/example/ansic/clex.py b/example/ansic/clex.py
index 4bde1d7..5d99416 100644
--- a/example/ansic/clex.py
+++ b/example/ansic/clex.py
@@ -4,9 +4,6 @@
# A lexer for ANSI C.
# ----------------------------------------------------------------------
-import sys
-sys.path.insert(0, "../..")
-
import ply.lex as lex
# Reserved words
diff --git a/example/ansic/cparse.py b/example/ansic/cparse.py
index 5fe9bce..21c3478 100644
--- a/example/ansic/cparse.py
+++ b/example/ansic/cparse.py
@@ -4,7 +4,6 @@
# Simple parser for ANSI C. Based on the grammar in K&R, 2nd Ed.
# -----------------------------------------------------------------------------
-import sys
import clex
import ply.yacc as yacc
diff --git a/example/calc/calc.py b/example/calc/calc.py
index 406d83c..3eae2b2 100644
--- a/example/calc/calc.py
+++ b/example/calc/calc.py
@@ -5,9 +5,6 @@
# "Lex and Yacc", p. 63.
# -----------------------------------------------------------------------------
-import sys
-sys.path.insert(0, "../..")
-
tokens = (
'NAME', 'NUMBER',
)
diff --git a/example/calcdebug/calc.py b/example/calcdebug/calc.py
index 386000e..35de8ab 100644
--- a/example/calcdebug/calc.py
+++ b/example/calcdebug/calc.py
@@ -5,9 +5,6 @@
# with output routed to a logging object.
# -----------------------------------------------------------------------------
-import sys
-sys.path.insert(0, "../..")
-
tokens = (
'NAME', 'NUMBER',
)
diff --git a/example/calceof/calc.py b/example/calceof/calc.py
index 7bb7e0f..1ec8298 100644
--- a/example/calceof/calc.py
+++ b/example/calceof/calc.py
@@ -5,9 +5,6 @@
# demonstrates the use of the t_eof() rule.
# -----------------------------------------------------------------------------
-import sys
-sys.path.insert(0, "../..")
-
tokens = (
'NAME', 'NUMBER',
)
diff --git a/example/classcalc/calc.py b/example/classcalc/calc.py
index 6f35195..93bfeaa 100755
--- a/example/classcalc/calc.py
+++ b/example/classcalc/calc.py
@@ -9,9 +9,6 @@
# Class-based example contributed to PLY by David McNab
# -----------------------------------------------------------------------------
-import sys
-sys.path.insert(0, "../..")
-
import ply.lex as lex
import ply.yacc as yacc
import os
diff --git a/example/closurecalc/calc.py b/example/closurecalc/calc.py
index 59c9d6f..4fc0cd7 100644
--- a/example/closurecalc/calc.py
+++ b/example/closurecalc/calc.py
@@ -6,9 +6,6 @@
# lexing rules, parsing rules, and internal state are held inside the function.
# -----------------------------------------------------------------------------
-import sys
-sys.path.insert(0, "../..")
-
# Make a calculator function
diff --git a/example/yply/ylex.py b/example/yply/ylex.py
index 16410e2..6fb3692 100644
--- a/example/yply/ylex.py
+++ b/example/yply/ylex.py
@@ -3,9 +3,6 @@
# Author: David Beazley (dave@dabeaz.com)
# Date : October 2, 2006
-import sys
-sys.path.append("../..")
-
from ply import *
tokens = (
diff --git a/example/yply/yply.py b/example/yply/yply.py
index e24616c..b730fbb 100755
--- a/example/yply/yply.py
+++ b/example/yply/yply.py
@@ -21,7 +21,6 @@
#
import sys
-sys.path.insert(0, "../..")
import ylex
import yparse
diff --git a/ply-2022_01_02.tar.gz b/ply-2022_01_02.tar.gz
deleted file mode 100644
index b464a14..0000000
--- a/ply-2022_01_02.tar.gz
+++ /dev/null
Binary files differ
diff --git a/ply-tests.tar.gz b/ply-tests.tar.gz
deleted file mode 100644
index 8ffa1aa..0000000
--- a/ply-tests.tar.gz
+++ /dev/null
Binary files differ
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 0000000..9787c3b
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,3 @@
+[build-system]
+requires = ["setuptools", "wheel"]
+build-backend = "setuptools.build_meta"
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 0000000..b4ba4ed
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,18 @@
+[metadata]
+name = ply
+version = 2022.10.27
+url = https://github.com/dabeaz/ply
+author = David Beazley
+author_email = "David Beazley" <dave@dabeaz.com>
+description = "PLY - Sly Lex Yacc"
+long_description = "PLY is an implementation of lex and yacc. No longer maintained on PyPI. Latest version on GitHub."
+license = MIT
+license_files = LICENSE
+classifiers =
+ License :: OSI Approved :: MIT License
+
+[options]
+package_dir =
+ =src
+
+packages = ply
diff --git a/ply/__init__.py b/src/ply/__init__.py
index 0ef83ef..45f28c5 100644
--- a/ply/__init__.py
+++ b/src/ply/__init__.py
@@ -2,4 +2,4 @@
# Author: David Beazley (dave@dabeaz.com)
# https://github.com/dabeaz/ply
-__version__ = '2022_01_02'
+__version__ = '2022.10.27'
diff --git a/ply/lex.py b/src/ply/lex.py
index de011fe..de011fe 100644
--- a/ply/lex.py
+++ b/src/ply/lex.py
diff --git a/ply/yacc.py b/src/ply/yacc.py
index 6528796..6528796 100644
--- a/ply/yacc.py
+++ b/src/ply/yacc.py
diff --git a/test/testcpp.py b/test/testcpp.py
deleted file mode 100644
index dbfb3e4..0000000
--- a/test/testcpp.py
+++ /dev/null
@@ -1,153 +0,0 @@
-from unittest import TestCase, main
-
-from multiprocessing import Process, Queue
-from six.moves.queue import Empty
-
-import sys
-import locale
-
-if ".." not in sys.path:
- sys.path.insert(0, "..")
-
-from ply.lex import lex
-from ply.cpp import *
-
-
-def preprocessing(in_, out_queue):
- out = None
-
- try:
- p = Preprocessor(lex())
- p.parse(in_)
- tokens = [t.value for t in p.parser]
- out = "".join(tokens)
- finally:
- out_queue.put(out)
-
-class CPPTests(TestCase):
- "Tests related to ANSI-C style lexical preprocessor."
-
- def __test_preprocessing(self, in_, expected, time_limit = 1.0):
- out_queue = Queue()
-
- preprocessor = Process(
- name = "PLY`s C preprocessor",
- target = preprocessing,
- args = (in_, out_queue)
- )
-
- preprocessor.start()
-
- try:
- out = out_queue.get(timeout = time_limit)
- except Empty:
- preprocessor.terminate()
- raise RuntimeError("Time limit exceeded!")
- else:
- self.assertMultiLineEqual(out, expected)
-
- def test_infinite_argument_expansion(self):
- # CPP does not drags set of currently expanded macros through macro
- # arguments expansion. If there is a match between an argument value
- # and name of an already expanded macro then CPP falls into infinite
- # recursion.
- self.__test_preprocessing("""\
-#define a(x) x
-#define b a(b)
-b
-""" , """\
-
-
-b"""
- )
-
-
- def test_concatenation(self):
- self.__test_preprocessing("""\
-#define a(x) x##_
-#define b(x) _##x
-#define c(x) _##x##_
-#define d(x,y) _##x##y##_
-
-a(i)
-b(j)
-c(k)
-d(q,s)"""
- , """\
-
-
-
-
-
-i_
-_j
-_k_
-_qs_"""
- )
-
- def test_deadloop_macro(self):
- # If there is a word which equals to name of a parametrized macro, then
- # attempt to expand such word as a macro manages the parser to fall
- # into an infinite loop.
-
- self.__test_preprocessing("""\
-#define a(x) x
-
-a;"""
- , """\
-
-
-a;"""
- )
-
- def test_index_error(self):
- # If there are no tokens after a word ("a") which equals to name of
- # a parameterized macro, then attempt to expand this word leads to
- # IndexError.
-
- self.__test_preprocessing("""\
-#define a(x) x
-
-a"""
- , """\
-
-
-a"""
- )
-
- def test_evalexpr(self):
- # #if 1 != 2 is not processed correctly; undefined values are converted
- # to 0L instead of 0 (issue #195)
- #
- self.__test_preprocessing("""\
-#if (1!=0) && (!x || (!(1==2)))
-a;
-#else
-b;
-#endif
-"""
- , """\
-
-a;
-
-"""
- )
-
- def test_include_nonascii(self):
- # Issue #196: #included files are read using the current locale's
- # getdefaultencoding. if a #included file contains non-ascii characters,
- # while default encoding is e.g. US_ASCII, this causes an error
- locale.setlocale(locale.LC_ALL, 'C')
- self.__test_preprocessing("""\
-#include "test_cpp_nonascii.c"
-x;
-
-"""
- , """\
-
-
-1;
-"""
- )
-
-main()
diff --git a/test/README b/tests/README
index 03b167c..03b167c 100644
--- a/test/README
+++ b/tests/README
diff --git a/test/calclex.py b/tests/calclex.py
index 030a986..4862753 100644
--- a/test/calclex.py
+++ b/tests/calclex.py
@@ -1,9 +1,6 @@
# -----------------------------------------------------------------------------
# calclex.py
# -----------------------------------------------------------------------------
-import sys
-
-if ".." not in sys.path: sys.path.insert(0,"..")
import ply.lex as lex
tokens = (
diff --git a/test/cleanup.sh b/tests/cleanup.sh
index 9374f2c..9374f2c 100755
--- a/test/cleanup.sh
+++ b/tests/cleanup.sh
diff --git a/test/lex_closure.py b/tests/lex_closure.py
index 30ee679..0b34cff 100644
--- a/test/lex_closure.py
+++ b/tests/lex_closure.py
@@ -1,9 +1,6 @@
# -----------------------------------------------------------------------------
# lex_closure.py
# -----------------------------------------------------------------------------
-import sys
-
-if ".." not in sys.path: sys.path.insert(0,"..")
import ply.lex as lex
tokens = (
diff --git a/test/lex_doc1.py b/tests/lex_doc1.py
index 8a2bfcc..4b62609 100644
--- a/test/lex_doc1.py
+++ b/tests/lex_doc1.py
@@ -2,9 +2,6 @@
#
# Missing documentation string
-import sys
-if ".." not in sys.path: sys.path.insert(0,"..")
-
import ply.lex as lex
tokens = [
diff --git a/test/lex_dup1.py b/tests/lex_dup1.py
index fd04cdb..b9dc3c5 100644
--- a/test/lex_dup1.py
+++ b/tests/lex_dup1.py
@@ -2,9 +2,6 @@
#
# Duplicated rule specifiers
-import sys
-if ".." not in sys.path: sys.path.insert(0,"..")
-
import ply.lex as lex
tokens = [
diff --git a/test/lex_dup2.py b/tests/lex_dup2.py
index 870e5e7..47bf5b5 100644
--- a/test/lex_dup2.py
+++ b/tests/lex_dup2.py
@@ -2,9 +2,6 @@
#
# Duplicated rule specifiers
-import sys
-if ".." not in sys.path: sys.path.insert(0,"..")
-
import ply.lex as lex
tokens = [
diff --git a/test/lex_dup3.py b/tests/lex_dup3.py
index 94b5592..5398f48 100644
--- a/test/lex_dup3.py
+++ b/tests/lex_dup3.py
@@ -2,9 +2,6 @@
#
# Duplicated rule specifiers
-import sys
-if ".." not in sys.path: sys.path.insert(0,"..")
-
import ply.lex as lex
tokens = [
diff --git a/test/lex_empty.py b/tests/lex_empty.py
index e0368bf..d0baf31 100644
--- a/test/lex_empty.py
+++ b/tests/lex_empty.py
@@ -2,9 +2,6 @@
#
# No rules defined
-import sys
-if ".." not in sys.path: sys.path.insert(0,"..")
-
import ply.lex as lex
tokens = [
diff --git a/test/lex_error1.py b/tests/lex_error1.py
index 4508a80..fec0282 100644
--- a/test/lex_error1.py
+++ b/tests/lex_error1.py
@@ -2,9 +2,6 @@
#
# Missing t_error() rule
-import sys
-if ".." not in sys.path: sys.path.insert(0,"..")
-
import ply.lex as lex
tokens = [
diff --git a/test/lex_error2.py b/tests/lex_error2.py
index 8040d39..8914ab6 100644
--- a/test/lex_error2.py
+++ b/tests/lex_error2.py
@@ -2,9 +2,6 @@
#
# t_error defined, but not function
-import sys
-if ".." not in sys.path: sys.path.insert(0,"..")
-
import ply.lex as lex
tokens = [
diff --git a/test/lex_error3.py b/tests/lex_error3.py
index 1feefb6..71ca903 100644
--- a/test/lex_error3.py
+++ b/tests/lex_error3.py
@@ -2,9 +2,6 @@
#
# t_error defined as function, but with wrong # args
-import sys
-if ".." not in sys.path: sys.path.insert(0,"..")
-
import ply.lex as lex
tokens = [
diff --git a/test/lex_error4.py b/tests/lex_error4.py
index f4f48db..5387808 100644
--- a/test/lex_error4.py
+++ b/tests/lex_error4.py
@@ -2,9 +2,6 @@
#
# t_error defined as function, but too many args
-import sys
-if ".." not in sys.path: sys.path.insert(0,"..")
-
import ply.lex as lex
tokens = [
diff --git a/test/lex_hedit.py b/tests/lex_hedit.py
index 34f15a1..9f724d0 100644
--- a/test/lex_hedit.py
+++ b/tests/lex_hedit.py
@@ -13,8 +13,6 @@
# This example shows how to modify the state of the lexer to parse
# such tokens
# -----------------------------------------------------------------------------
-import sys
-if ".." not in sys.path: sys.path.insert(0,"..")
import ply.lex as lex
diff --git a/test/lex_ignore.py b/tests/lex_ignore.py
index 6c43b4c..b31fb39 100644
--- a/test/lex_ignore.py
+++ b/tests/lex_ignore.py
@@ -2,9 +2,6 @@
#
# Improperly specific ignore declaration
-import sys
-if ".." not in sys.path: sys.path.insert(0,"..")
-
import ply.lex as lex
tokens = [
diff --git a/test/lex_ignore2.py b/tests/lex_ignore2.py
index f60987a..de0958a 100644
--- a/test/lex_ignore2.py
+++ b/tests/lex_ignore2.py
@@ -2,9 +2,6 @@
#
# ignore declaration as a raw string
-import sys
-if ".." not in sys.path: sys.path.insert(0,"..")
-
import ply.lex as lex
tokens = [
diff --git a/test/lex_literal1.py b/tests/lex_literal1.py
index db389c3..510b379 100644
--- a/test/lex_literal1.py
+++ b/tests/lex_literal1.py
@@ -2,9 +2,6 @@
#
# Bad literal specification
-import sys
-if ".." not in sys.path: sys.path.insert(0,"..")
-
import ply.lex as lex
tokens = [
diff --git a/test/lex_literal2.py b/tests/lex_literal2.py
index b50b92c..a7a2c56 100644
--- a/test/lex_literal2.py
+++ b/tests/lex_literal2.py
@@ -2,9 +2,6 @@
#
# Bad literal specification
-import sys
-if ".." not in sys.path: sys.path.insert(0,"..")
-
import ply.lex as lex
tokens = [
diff --git a/test/lex_literal3.py b/tests/lex_literal3.py
index 91ab980..9d697c9 100644
--- a/test/lex_literal3.py
+++ b/tests/lex_literal3.py
@@ -3,9 +3,6 @@
# An empty literal specification given as a list
# Issue 8 : Literals empty list causes IndexError
-import sys
-if ".." not in sys.path: sys.path.insert(0,"..")
-
import ply.lex as lex
tokens = [
diff --git a/test/lex_many_tokens.py b/tests/lex_many_tokens.py
index 81ae57a..02e3a11 100644
--- a/test/lex_many_tokens.py
+++ b/tests/lex_many_tokens.py
@@ -4,8 +4,6 @@
# 100-group limit of the re module)
import sys
-if ".." not in sys.path: sys.path.insert(0,"..")
-
import ply.lex as lex
tokens = ["TOK%d" % i for i in range(1000)]
diff --git a/test/lex_module.py b/tests/lex_module.py
index 8bdd3ed..0fa8544 100644
--- a/test/lex_module.py
+++ b/tests/lex_module.py
@@ -1,9 +1,6 @@
# lex_module.py
#
-import sys
-if ".." not in sys.path: sys.path.insert(0,"..")
-
import ply.lex as lex
import lex_module_import
lex.lex(module=lex_module_import)
diff --git a/test/lex_module_import.py b/tests/lex_module_import.py
index df42082..df42082 100644
--- a/test/lex_module_import.py
+++ b/tests/lex_module_import.py
diff --git a/test/lex_object.py b/tests/lex_object.py
index 7e9f389..2fa7537 100644
--- a/test/lex_object.py
+++ b/tests/lex_object.py
@@ -1,9 +1,7 @@
# -----------------------------------------------------------------------------
# lex_object.py
# -----------------------------------------------------------------------------
-import sys
-if ".." not in sys.path: sys.path.insert(0,"..")
import ply.lex as lex
class CalcLexer:
diff --git a/test/lex_re1.py b/tests/lex_re1.py
index 5be7aef..d6b7c38 100644
--- a/test/lex_re1.py
+++ b/tests/lex_re1.py
@@ -2,9 +2,6 @@
#
# Bad regular expression in a string
-import sys
-if ".." not in sys.path: sys.path.insert(0,"..")
-
import ply.lex as lex
tokens = [
diff --git a/test/lex_re2.py b/tests/lex_re2.py
index 8dfb8e3..f5ce695 100644
--- a/test/lex_re2.py
+++ b/tests/lex_re2.py
@@ -2,9 +2,6 @@
#
# Regular expression rule matches empty string
-import sys
-if ".." not in sys.path: sys.path.insert(0,"..")
-
import ply.lex as lex
tokens = [
diff --git a/test/lex_re3.py b/tests/lex_re3.py
index e179925..8f594f3 100644
--- a/test/lex_re3.py
+++ b/tests/lex_re3.py
@@ -2,9 +2,6 @@
#
# Regular expression rule matches empty string
-import sys
-if ".." not in sys.path: sys.path.insert(0,"..")
-
import ply.lex as lex
tokens = [
diff --git a/test/lex_rule1.py b/tests/lex_rule1.py
index 0406c6f..ae279e9 100644
--- a/test/lex_rule1.py
+++ b/tests/lex_rule1.py
@@ -2,9 +2,6 @@
#
# Rule function with incorrect number of arguments
-import sys
-if ".." not in sys.path: sys.path.insert(0,"..")
-
import ply.lex as lex
tokens = [
diff --git a/test/lex_rule2.py b/tests/lex_rule2.py
index 1c29d87..adefee2 100644
--- a/test/lex_rule2.py
+++ b/tests/lex_rule2.py
@@ -2,9 +2,6 @@
#
# Rule function with incorrect number of arguments
-import sys
-if ".." not in sys.path: sys.path.insert(0,"..")
-
import ply.lex as lex
tokens = [
diff --git a/test/lex_rule3.py b/tests/lex_rule3.py
index 9ea94da..bd61040 100644
--- a/test/lex_rule3.py
+++ b/tests/lex_rule3.py
@@ -2,9 +2,6 @@
#
# Rule function with incorrect number of arguments
-import sys
-if ".." not in sys.path: sys.path.insert(0,"..")
-
import ply.lex as lex
tokens = [
diff --git a/test/lex_state1.py b/tests/lex_state1.py
index 7528c91..6fa5e9f 100644
--- a/test/lex_state1.py
+++ b/tests/lex_state1.py
@@ -2,9 +2,6 @@
#
# Bad state declaration
-import sys
-if ".." not in sys.path: sys.path.insert(0,"..")
-
import ply.lex as lex
tokens = [
diff --git a/test/lex_state2.py b/tests/lex_state2.py
index 3aef69e..9300ed7 100644
--- a/test/lex_state2.py
+++ b/tests/lex_state2.py
@@ -2,9 +2,6 @@
#
# Bad state declaration
-import sys
-if ".." not in sys.path: sys.path.insert(0,"..")
-
import ply.lex as lex
tokens = [
diff --git a/test/lex_state3.py b/tests/lex_state3.py
index 616e484..2e5b7cf 100644
--- a/test/lex_state3.py
+++ b/tests/lex_state3.py
@@ -2,9 +2,6 @@
#
# Bad state declaration
-import sys
-if ".." not in sys.path: sys.path.insert(0,"..")
-
import ply.lex as lex
tokens = [
diff --git a/test/lex_state4.py b/tests/lex_state4.py
index 1825016..fb147c8 100644
--- a/test/lex_state4.py
+++ b/tests/lex_state4.py
@@ -2,9 +2,6 @@
#
# Bad state declaration
-import sys
-if ".." not in sys.path: sys.path.insert(0,"..")
-
import ply.lex as lex
tokens = [
diff --git a/test/lex_state5.py b/tests/lex_state5.py
index 4ce828e..fb2f3a7 100644
--- a/test/lex_state5.py
+++ b/tests/lex_state5.py
@@ -2,9 +2,6 @@
#
# Bad state declaration
-import sys
-if ".." not in sys.path: sys.path.insert(0,"..")
-
import ply.lex as lex
tokens = [
diff --git a/test/lex_state_noerror.py b/tests/lex_state_noerror.py
index 90bbea8..54b892e 100644
--- a/test/lex_state_noerror.py
+++ b/tests/lex_state_noerror.py
@@ -2,9 +2,6 @@
#
# Declaration of a state for which no rules are defined
-import sys
-if ".." not in sys.path: sys.path.insert(0,"..")
-
import ply.lex as lex
tokens = [
diff --git a/test/lex_state_norule.py b/tests/lex_state_norule.py
index 64ec6d3..4b979b2 100644
--- a/test/lex_state_norule.py
+++ b/tests/lex_state_norule.py
@@ -2,9 +2,6 @@
#
# Declaration of a state for which no rules are defined
-import sys
-if ".." not in sys.path: sys.path.insert(0,"..")
-
import ply.lex as lex
tokens = [
diff --git a/test/lex_state_try.py b/tests/lex_state_try.py
index fd5ba22..c5f3448 100644
--- a/test/lex_state_try.py
+++ b/tests/lex_state_try.py
@@ -2,9 +2,6 @@
#
# Declaration of a state for which no rules are defined
-import sys
-if ".." not in sys.path: sys.path.insert(0,"..")
-
import ply.lex as lex
tokens = [
diff --git a/test/lex_token1.py b/tests/lex_token1.py
index 6fca300..1001a99 100644
--- a/test/lex_token1.py
+++ b/tests/lex_token1.py
@@ -2,9 +2,6 @@
#
# Tests for absence of tokens variable
-import sys
-if ".." not in sys.path: sys.path.insert(0,"..")
-
import ply.lex as lex
t_PLUS = r'\+'
diff --git a/test/lex_token2.py b/tests/lex_token2.py
index 6e65ab0..00e758b 100644
--- a/test/lex_token2.py
+++ b/tests/lex_token2.py
@@ -2,9 +2,6 @@
#
# Tests for tokens of wrong type
-import sys
-if ".." not in sys.path: sys.path.insert(0,"..")
-
import ply.lex as lex
tokens = "PLUS MINUS NUMBER"
diff --git a/test/lex_token3.py b/tests/lex_token3.py
index 636452e..3b71b3a 100644
--- a/test/lex_token3.py
+++ b/tests/lex_token3.py
@@ -2,9 +2,6 @@
#
# tokens is right type, but is missing a token for one rule
-import sys
-if ".." not in sys.path: sys.path.insert(0,"..")
-
import ply.lex as lex
tokens = [
diff --git a/test/lex_token4.py b/tests/lex_token4.py
index 52947e9..89bc2e2 100644
--- a/test/lex_token4.py
+++ b/tests/lex_token4.py
@@ -2,9 +2,6 @@
#
# Bad token name
-import sys
-if ".." not in sys.path: sys.path.insert(0,"..")
-
import ply.lex as lex
tokens = [
diff --git a/test/lex_token_dup.py b/tests/lex_token_dup.py
index 384f4e9..631535f 100644
--- a/test/lex_token_dup.py
+++ b/tests/lex_token_dup.py
@@ -2,9 +2,6 @@
#
# Duplicate token name in tokens
-import sys
-if ".." not in sys.path: sys.path.insert(0,"..")
-
import ply.lex as lex
tokens = [
diff --git a/test/test_cpp_nonascii.c b/tests/test_cpp_nonascii.c
index 3e97d81..3e97d81 100644
--- a/test/test_cpp_nonascii.c
+++ b/tests/test_cpp_nonascii.c
diff --git a/test/testlex.py b/tests/testlex.py
index a41fda8..7f2adad 100755
--- a/test/testlex.py
+++ b/tests/testlex.py
@@ -11,7 +11,6 @@ import os
import warnings
import platform
-sys.path.insert(0,"..")
sys.tracebacklimit = 0
import ply.lex
@@ -98,24 +97,24 @@ class LexErrorWarningTests(unittest.TestCase):
self.assertRaises(SyntaxError,run_import,"lex_doc1")
result = sys.stderr.getvalue()
self.assertTrue(check_expected(result,
- "lex_doc1.py:18: No regular expression defined for rule 't_NUMBER'\n"))
+ "lex_doc1.py:15: No regular expression defined for rule 't_NUMBER'\n"))
def test_lex_dup1(self):
self.assertRaises(SyntaxError,run_import,"lex_dup1")
result = sys.stderr.getvalue()
self.assertTrue(check_expected(result,
- "lex_dup1.py:20: Rule t_NUMBER redefined. Previously defined on line 18\n" ))
+ "lex_dup1.py:17: Rule t_NUMBER redefined. Previously defined on line 15\n" ))
def test_lex_dup2(self):
self.assertRaises(SyntaxError,run_import,"lex_dup2")
result = sys.stderr.getvalue()
self.assertTrue(check_expected(result,
- "lex_dup2.py:22: Rule t_NUMBER redefined. Previously defined on line 18\n" ))
+ "lex_dup2.py:19: Rule t_NUMBER redefined. Previously defined on line 15\n" ))
def test_lex_dup3(self):
self.assertRaises(SyntaxError,run_import,"lex_dup3")
result = sys.stderr.getvalue()
self.assertTrue(check_expected(result,
- "lex_dup3.py:20: Rule t_NUMBER redefined. Previously defined on line 18\n" ))
+ "lex_dup3.py:17: Rule t_NUMBER redefined. Previously defined on line 15\n" ))
def test_lex_empty(self):
self.assertRaises(SyntaxError,run_import,"lex_empty")
@@ -141,19 +140,19 @@ class LexErrorWarningTests(unittest.TestCase):
self.assertRaises(SyntaxError,run_import,"lex_error3")
result = sys.stderr.getvalue()
self.assertTrue(check_expected(result,
- "lex_error3.py:20: Rule 't_error' requires an argument\n"))
+ "lex_error3.py:17: Rule 't_error' requires an argument\n"))
def test_lex_error4(self):
self.assertRaises(SyntaxError,run_import,"lex_error4")
result = sys.stderr.getvalue()
self.assertTrue(check_expected(result,
- "lex_error4.py:20: Rule 't_error' has too many arguments\n"))
+ "lex_error4.py:17: Rule 't_error' has too many arguments\n"))
def test_lex_ignore(self):
self.assertRaises(SyntaxError,run_import,"lex_ignore")
result = sys.stderr.getvalue()
self.assertTrue(check_expected(result,
- "lex_ignore.py:20: Rule 't_ignore' must be defined as a string\n"))
+ "lex_ignore.py:17: Rule 't_ignore' must be defined as a string\n"))
def test_lex_ignore2(self):
run_import("lex_ignore2")
@@ -206,13 +205,13 @@ class LexErrorWarningTests(unittest.TestCase):
self.assertRaises(SyntaxError,run_import,"lex_rule2")
result = sys.stderr.getvalue()
self.assertTrue(check_expected(result,
- "lex_rule2.py:18: Rule 't_NUMBER' requires an argument\n"))
+ "lex_rule2.py:15: Rule 't_NUMBER' requires an argument\n"))
def test_lex_rule3(self):
self.assertRaises(SyntaxError,run_import,"lex_rule3")
result = sys.stderr.getvalue()
self.assertTrue(check_expected(result,
- "lex_rule3.py:18: Rule 't_NUMBER' has too many arguments\n"))
+ "lex_rule3.py:15: Rule 't_NUMBER' has too many arguments\n"))
def test_lex_state1(self):
diff --git a/test/testyacc.py b/tests/testyacc.py
index c52d0ac..b488bf7 100644
--- a/test/testyacc.py
+++ b/tests/testyacc.py
@@ -12,7 +12,6 @@ import warnings
import re
import platform
-sys.path.insert(0,"..")
sys.tracebacklimit = 0
import ply.yacc
@@ -105,8 +104,8 @@ class YaccErrorWarningTests(unittest.TestCase):
self.assertRaises(ply.yacc.YaccError,run_import,"yacc_badid")
result = sys.stderr.getvalue()
self.assertTrue(check_expected(result,
- "yacc_badid.py:32: Illegal name 'bad&rule' in rule 'statement'\n"
- "yacc_badid.py:36: Illegal rule name 'bad&rule'\n"
+ "yacc_badid.py:29: Illegal name 'bad&rule' in rule 'statement'\n"
+ "yacc_badid.py:33: Illegal rule name 'bad&rule'\n"
))
def test_yacc_badprec(self):
@@ -137,10 +136,10 @@ class YaccErrorWarningTests(unittest.TestCase):
self.assertRaises(ply.yacc.YaccError,run_import,"yacc_badrule")
result = sys.stderr.getvalue()
self.assertTrue(check_expected(result,
- "yacc_badrule.py:24: Syntax error. Expected ':'\n"
- "yacc_badrule.py:28: Syntax error in rule 'statement'\n"
- "yacc_badrule.py:33: Syntax error. Expected ':'\n"
- "yacc_badrule.py:42: Syntax error. Expected ':'\n"
+ "yacc_badrule.py:21: Syntax error. Expected ':'\n"
+ "yacc_badrule.py:25: Syntax error in rule 'statement'\n"
+ "yacc_badrule.py:30: Syntax error. Expected ':'\n"
+ "yacc_badrule.py:39: Syntax error. Expected ':'\n"
))
def test_yacc_badtok(self):
@@ -155,7 +154,7 @@ class YaccErrorWarningTests(unittest.TestCase):
run_import("yacc_dup")
result = sys.stderr.getvalue()
self.assertTrue(check_expected(result,
- "yacc_dup.py:27: Function p_statement redefined. Previously defined on line 23\n"
+ "yacc_dup.py:24: Function p_statement redefined. Previously defined on line 20\n"
"Token 'EQUALS' defined, but not used\n"
"There is 1 unused token\n"
"Generating LALR tables\n"
@@ -167,7 +166,7 @@ class YaccErrorWarningTests(unittest.TestCase):
except ply.yacc.YaccError:
result = sys.stderr.getvalue()
self.assertTrue(check_expected(result,
- "yacc_error1.py:61: p_error() requires 1 argument\n"))
+ "yacc_error1.py:58: p_error() requires 1 argument\n"))
def test_yacc_error2(self):
try:
@@ -175,7 +174,7 @@ class YaccErrorWarningTests(unittest.TestCase):
except ply.yacc.YaccError:
result = sys.stderr.getvalue()
self.assertTrue(check_expected(result,
- "yacc_error2.py:61: p_error() requires 1 argument\n"))
+ "yacc_error2.py:58: p_error() requires 1 argument\n"))
def test_yacc_error3(self):
try:
@@ -190,7 +189,7 @@ class YaccErrorWarningTests(unittest.TestCase):
self.assertRaises(ply.yacc.YaccError,run_import,"yacc_error4")
result = sys.stderr.getvalue()
self.assertTrue(check_expected(result,
- "yacc_error4.py:62: Illegal rule name 'error'. Already defined as a token\n"
+ "yacc_error4.py:59: Illegal rule name 'error'. Already defined as a token\n"
))
@@ -237,20 +236,20 @@ class YaccErrorWarningTests(unittest.TestCase):
self.assertRaises(ply.yacc.YaccError,run_import,"yacc_literal")
result = sys.stderr.getvalue()
self.assertTrue(check_expected(result,
- "yacc_literal.py:36: Literal token '**' in rule 'expression' may only be a single character\n"
+ "yacc_literal.py:33: Literal token '**' in rule 'expression' may only be a single character\n"
))
def test_yacc_misplaced(self):
self.assertRaises(ply.yacc.YaccError,run_import,"yacc_misplaced")
result = sys.stderr.getvalue()
self.assertTrue(check_expected(result,
- "yacc_misplaced.py:32: Misplaced '|'\n"
+ "yacc_misplaced.py:29: Misplaced '|'\n"
))
def test_yacc_missing1(self):
self.assertRaises(ply.yacc.YaccError,run_import,"yacc_missing1")
result = sys.stderr.getvalue()
self.assertTrue(check_expected(result,
- "yacc_missing1.py:24: Symbol 'location' used, but not defined as a token or a rule\n"
+ "yacc_missing1.py:21: Symbol 'location' used, but not defined as a token or a rule\n"
))
def test_yacc_nested(self):
@@ -266,7 +265,7 @@ class YaccErrorWarningTests(unittest.TestCase):
run_import("yacc_nodoc")
result = sys.stderr.getvalue()
self.assertTrue(check_expected(result,
- "yacc_nodoc.py:27: No documentation string specified in function 'p_statement_expr' (ignored)\n"
+ "yacc_nodoc.py:24: No documentation string specified in function 'p_statement_expr' (ignored)\n"
"Generating LALR tables\n"
))
@@ -282,7 +281,7 @@ class YaccErrorWarningTests(unittest.TestCase):
run_import("yacc_nop")
result = sys.stderr.getvalue()
self.assertTrue(check_expected(result,
- "yacc_nop.py:27: Possible grammar rule 'statement_expr' defined without p_ prefix\n"
+ "yacc_nop.py:24: Possible grammar rule 'statement_expr' defined without p_ prefix\n"
"Generating LALR tables\n"
))
@@ -349,7 +348,7 @@ class YaccErrorWarningTests(unittest.TestCase):
self.assertRaises(ply.yacc.YaccError,run_import,"yacc_term1")
result = sys.stderr.getvalue()
self.assertTrue(check_expected(result,
- "yacc_term1.py:24: Illegal rule name 'NUMBER'. Already defined as a token\n"
+ "yacc_term1.py:21: Illegal rule name 'NUMBER'. Already defined as a token\n"
))
def test_yacc_unicode_literals(self):
@@ -363,7 +362,7 @@ class YaccErrorWarningTests(unittest.TestCase):
self.assertRaises(ply.yacc.YaccError,run_import,"yacc_unused")
result = sys.stderr.getvalue()
self.assertTrue(check_expected(result,
- "yacc_unused.py:62: Symbol 'COMMA' used, but not defined as a token or a rule\n"
+ "yacc_unused.py:59: Symbol 'COMMA' used, but not defined as a token or a rule\n"
"Symbol 'COMMA' is unreachable\n"
"Symbol 'exprlist' is unreachable\n"
))
@@ -371,7 +370,7 @@ class YaccErrorWarningTests(unittest.TestCase):
run_import("yacc_unused_rule")
result = sys.stderr.getvalue()
self.assertTrue(check_expected(result,
- "yacc_unused_rule.py:62: Rule 'integer' defined, but not used\n"
+ "yacc_unused_rule.py:59: Rule 'integer' defined, but not used\n"
"There is 1 unused rule\n"
"Symbol 'integer' is unreachable\n"
"Generating LALR tables\n"
@@ -381,14 +380,14 @@ class YaccErrorWarningTests(unittest.TestCase):
self.assertRaises(ply.yacc.YaccError,run_import,"yacc_uprec")
result = sys.stderr.getvalue()
self.assertTrue(check_expected(result,
- "yacc_uprec.py:37: Nothing known about the precedence of 'UMINUS'\n"
+ "yacc_uprec.py:34: Nothing known about the precedence of 'UMINUS'\n"
))
def test_yacc_uprec2(self):
self.assertRaises(ply.yacc.YaccError,run_import,"yacc_uprec2")
result = sys.stderr.getvalue()
self.assertTrue(check_expected(result,
- "yacc_uprec2.py:37: Syntax error. Nothing follows %prec\n"
+ "yacc_uprec2.py:34: Syntax error. Nothing follows %prec\n"
))
def test_yacc_prec1(self):
diff --git a/test/yacc_badargs.py b/tests/yacc_badargs.py
index 9a1d03f..cd4b1e7 100644
--- a/test/yacc_badargs.py
+++ b/tests/yacc_badargs.py
@@ -5,7 +5,7 @@
# -----------------------------------------------------------------------------
import sys
sys.tracebacklimit = 0
-sys.path.insert(0,"..")
+
import ply.yacc as yacc
from calclex import tokens
diff --git a/test/yacc_badid.py b/tests/yacc_badid.py
index e4b9f5e..f624aca 100644
--- a/test/yacc_badid.py
+++ b/tests/yacc_badid.py
@@ -3,9 +3,6 @@
#
# Attempt to define a rule with a bad-identifier name
# -----------------------------------------------------------------------------
-import sys
-
-if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
diff --git a/test/yacc_badprec.py b/tests/yacc_badprec.py
index 3013bb6..95e9db2 100644
--- a/test/yacc_badprec.py
+++ b/tests/yacc_badprec.py
@@ -3,9 +3,6 @@
#
# Bad precedence specifier
# -----------------------------------------------------------------------------
-import sys
-
-if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
diff --git a/test/yacc_badprec2.py b/tests/yacc_badprec2.py
index 83093b4..c33303d 100644
--- a/test/yacc_badprec2.py
+++ b/tests/yacc_badprec2.py
@@ -3,9 +3,6 @@
#
# Bad precedence
# -----------------------------------------------------------------------------
-import sys
-
-if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
diff --git a/test/yacc_badprec3.py b/tests/yacc_badprec3.py
index d925ecd..e1ce3a6 100644
--- a/test/yacc_badprec3.py
+++ b/tests/yacc_badprec3.py
@@ -3,9 +3,6 @@
#
# Bad precedence
# -----------------------------------------------------------------------------
-import sys
-
-if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
diff --git a/test/yacc_badrule.py b/tests/yacc_badrule.py
index 92af646..898a8c7 100644
--- a/test/yacc_badrule.py
+++ b/tests/yacc_badrule.py
@@ -3,9 +3,6 @@
#
# Syntax problems in the rule strings
# -----------------------------------------------------------------------------
-import sys
-
-if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
diff --git a/test/yacc_badtok.py b/tests/yacc_badtok.py
index fc4afe1..567aadf 100644
--- a/test/yacc_badtok.py
+++ b/tests/yacc_badtok.py
@@ -3,9 +3,6 @@
#
# A grammar, but tokens is a bad datatype
# -----------------------------------------------------------------------------
-
-import sys
-if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
tokens = "Hello"
diff --git a/test/yacc_dup.py b/tests/yacc_dup.py
index 309ba32..94238a4 100644
--- a/test/yacc_dup.py
+++ b/tests/yacc_dup.py
@@ -3,9 +3,6 @@
#
# Duplicated rule name
# -----------------------------------------------------------------------------
-import sys
-
-if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
diff --git a/test/yacc_error1.py b/tests/yacc_error1.py
index 10ac6a9..39b9124 100644
--- a/test/yacc_error1.py
+++ b/tests/yacc_error1.py
@@ -3,9 +3,6 @@
#
# Bad p_error() function
# -----------------------------------------------------------------------------
-import sys
-
-if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
diff --git a/test/yacc_error2.py b/tests/yacc_error2.py
index 7591418..2463ee9 100644
--- a/test/yacc_error2.py
+++ b/tests/yacc_error2.py
@@ -3,9 +3,6 @@
#
# Bad p_error() function
# -----------------------------------------------------------------------------
-import sys
-
-if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
diff --git a/test/yacc_error3.py b/tests/yacc_error3.py
index 4604a48..f181253 100644
--- a/test/yacc_error3.py
+++ b/tests/yacc_error3.py
@@ -3,9 +3,6 @@
#
# Bad p_error() function
# -----------------------------------------------------------------------------
-import sys
-
-if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
diff --git a/test/yacc_error4.py b/tests/yacc_error4.py
index 9c550cd..f6bef7e 100644
--- a/test/yacc_error4.py
+++ b/tests/yacc_error4.py
@@ -3,9 +3,6 @@
#
# Attempt to define a rule named 'error'
# -----------------------------------------------------------------------------
-import sys
-
-if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
diff --git a/test/yacc_error5.py b/tests/yacc_error5.py
index 9eb0f85..5d4b683 100644
--- a/test/yacc_error5.py
+++ b/tests/yacc_error5.py
@@ -3,9 +3,6 @@
#
# Lineno and position tracking with error tokens
# -----------------------------------------------------------------------------
-import sys
-
-if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
diff --git a/test/yacc_error6.py b/tests/yacc_error6.py
index 8d0ec85..fde3c4d 100644
--- a/test/yacc_error6.py
+++ b/tests/yacc_error6.py
@@ -3,9 +3,6 @@
#
# Panic mode recovery test
# -----------------------------------------------------------------------------
-import sys
-
-if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
diff --git a/test/yacc_error7.py b/tests/yacc_error7.py
index abdc834..17cc4f1 100644
--- a/test/yacc_error7.py
+++ b/tests/yacc_error7.py
@@ -3,9 +3,6 @@
#
# Panic mode recovery test using deprecated functionality
# -----------------------------------------------------------------------------
-import sys
-
-if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
diff --git a/test/yacc_inf.py b/tests/yacc_inf.py
index efd3612..f8b4c30 100644
--- a/test/yacc_inf.py
+++ b/tests/yacc_inf.py
@@ -3,9 +3,6 @@
#
# Infinite recursion
# -----------------------------------------------------------------------------
-import sys
-
-if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
diff --git a/test/yacc_literal.py b/tests/yacc_literal.py
index 0d62803..687d916 100644
--- a/test/yacc_literal.py
+++ b/tests/yacc_literal.py
@@ -3,9 +3,6 @@
#
# Grammar with bad literal characters
# -----------------------------------------------------------------------------
-import sys
-
-if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
diff --git a/test/yacc_misplaced.py b/tests/yacc_misplaced.py
index 9159b01..7cc75be 100644
--- a/test/yacc_misplaced.py
+++ b/tests/yacc_misplaced.py
@@ -3,9 +3,6 @@
#
# A misplaced | in grammar rules
# -----------------------------------------------------------------------------
-import sys
-
-if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
diff --git a/test/yacc_missing1.py b/tests/yacc_missing1.py
index d1b5105..81955be 100644
--- a/test/yacc_missing1.py
+++ b/tests/yacc_missing1.py
@@ -3,9 +3,6 @@
#
# Grammar with a missing rule
# -----------------------------------------------------------------------------
-import sys
-
-if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
diff --git a/test/yacc_nested.py b/tests/yacc_nested.py
index a3543a9..93ff400 100644
--- a/test/yacc_nested.py
+++ b/tests/yacc_nested.py
@@ -1,6 +1,3 @@
-import sys
-
-if ".." not in sys.path: sys.path.insert(0,"..")
from ply import lex, yacc
diff --git a/test/yacc_nodoc.py b/tests/yacc_nodoc.py
index 0f61920..a2f34dd 100644
--- a/test/yacc_nodoc.py
+++ b/tests/yacc_nodoc.py
@@ -3,9 +3,6 @@
#
# Rule with a missing doc-string
# -----------------------------------------------------------------------------
-import sys
-
-if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
diff --git a/test/yacc_noerror.py b/tests/yacc_noerror.py
index b38c758..2262745 100644
--- a/test/yacc_noerror.py
+++ b/tests/yacc_noerror.py
@@ -3,9 +3,6 @@
#
# No p_error() rule defined.
# -----------------------------------------------------------------------------
-import sys
-
-if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
diff --git a/test/yacc_nop.py b/tests/yacc_nop.py
index 789a9cf..3ff17ac 100644
--- a/test/yacc_nop.py
+++ b/tests/yacc_nop.py
@@ -3,9 +3,6 @@
#
# Possible grammar rule defined without p_ prefix
# -----------------------------------------------------------------------------
-import sys
-
-if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
diff --git a/test/yacc_notfunc.py b/tests/yacc_notfunc.py
index 5093a74..c8bd123 100644
--- a/test/yacc_notfunc.py
+++ b/tests/yacc_notfunc.py
@@ -3,9 +3,6 @@
#
# p_rule not defined as a function
# -----------------------------------------------------------------------------
-import sys
-
-if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
diff --git a/test/yacc_notok.py b/tests/yacc_notok.py
index cff55a8..6e271a0 100644
--- a/test/yacc_notok.py
+++ b/tests/yacc_notok.py
@@ -3,10 +3,6 @@
#
# A grammar, but we forgot to import the tokens list
# -----------------------------------------------------------------------------
-
-import sys
-
-if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
# Parsing rules
diff --git a/test/yacc_prec1.py b/tests/yacc_prec1.py
index 99fcd90..d4fad6e 100644
--- a/test/yacc_prec1.py
+++ b/tests/yacc_prec1.py
@@ -3,9 +3,6 @@
#
# Tests case where precedence specifier doesn't match up to terminals
# -----------------------------------------------------------------------------
-import sys
-
-if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
diff --git a/test/yacc_rr.py b/tests/yacc_rr.py
index e7336c2..de78a58 100644
--- a/test/yacc_rr.py
+++ b/tests/yacc_rr.py
@@ -3,9 +3,6 @@
#
# A grammar with a reduce/reduce conflict
# -----------------------------------------------------------------------------
-import sys
-
-if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
diff --git a/test/yacc_rr_unused.py b/tests/yacc_rr_unused.py
index 1ca5f7e..70a8c78 100644
--- a/test/yacc_rr_unused.py
+++ b/tests/yacc_rr_unused.py
@@ -4,9 +4,6 @@
# A grammar with reduce/reduce conflicts and a rule that never
# gets reduced.
# -----------------------------------------------------------------------------
-import sys
-
-if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
tokens = ('A', 'B', 'C')
diff --git a/test/yacc_simple.py b/tests/yacc_simple.py
index bd989f4..5261838 100644
--- a/test/yacc_simple.py
+++ b/tests/yacc_simple.py
@@ -3,9 +3,6 @@
#
# A simple, properly specifier grammar
# -----------------------------------------------------------------------------
-import sys
-
-if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
diff --git a/test/yacc_sr.py b/tests/yacc_sr.py
index 69a1e9c..0f15a50 100644
--- a/test/yacc_sr.py
+++ b/tests/yacc_sr.py
@@ -3,9 +3,6 @@
#
# A grammar with shift-reduce conflicts
# -----------------------------------------------------------------------------
-import sys
-
-if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
diff --git a/test/yacc_term1.py b/tests/yacc_term1.py
index eaa36e9..1c1d8c2 100644
--- a/test/yacc_term1.py
+++ b/tests/yacc_term1.py
@@ -3,9 +3,6 @@
#
# Terminal used on the left-hand-side
# -----------------------------------------------------------------------------
-import sys
-
-if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
diff --git a/test/yacc_unicode_literals.py b/tests/yacc_unicode_literals.py
index 5ae4f5b..04f6cfd 100644
--- a/test/yacc_unicode_literals.py
+++ b/tests/yacc_unicode_literals.py
@@ -5,9 +5,6 @@
# -----------------------------------------------------------------------------
from __future__ import unicode_literals
-import sys
-
-if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
diff --git a/test/yacc_unused.py b/tests/yacc_unused.py
index 55b677b..7bc6749 100644
--- a/test/yacc_unused.py
+++ b/tests/yacc_unused.py
@@ -3,9 +3,6 @@
#
# A grammar with an unused rule
# -----------------------------------------------------------------------------
-import sys
-
-if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
diff --git a/test/yacc_unused_rule.py b/tests/yacc_unused_rule.py
index 4868ef8..2e08d6d 100644
--- a/test/yacc_unused_rule.py
+++ b/tests/yacc_unused_rule.py
@@ -3,9 +3,6 @@
#
# Grammar with an unused rule
# -----------------------------------------------------------------------------
-import sys
-
-if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
diff --git a/test/yacc_uprec.py b/tests/yacc_uprec.py
index 569adb8..f173509 100644
--- a/test/yacc_uprec.py
+++ b/tests/yacc_uprec.py
@@ -3,9 +3,6 @@
#
# A grammar with a bad %prec specifier
# -----------------------------------------------------------------------------
-import sys
-
-if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
diff --git a/test/yacc_uprec2.py b/tests/yacc_uprec2.py
index 73274bf..85c0f2f 100644
--- a/test/yacc_uprec2.py
+++ b/tests/yacc_uprec2.py
@@ -3,9 +3,6 @@
#
# A grammar with a bad %prec specifier
# -----------------------------------------------------------------------------
-import sys
-
-if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens