author     Georg Brandl <georg@python.org>  2010-07-22 12:07:25 +0100
committer  Georg Brandl <georg@python.org>  2010-07-22 12:07:25 +0100
commit     301646912b4ea0cc30668cd4de6066fdb51fff66 (patch)
tree       f990d27297f424aab8e919ea1fe7a8d8a1990738 /tests/test_examplefiles.py
parent     3148fab68d09dcf172ab67056a63dc0b990761e1 (diff)
Allow storing and comparing output between test runs.
Diffstat (limited to 'tests/test_examplefiles.py')
 -rw-r--r--  tests/test_examplefiles.py  37
 1 file changed, 35 insertions(+), 2 deletions(-)
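
The change makes check_lexer() optionally persist each example file's token stream as a pickle under tests/examplefiles/output/ and, on later runs, compare the fresh stream against the stored one, printing a unified diff of the pretty-printed token lists when they differ. Below is a minimal sketch of that store-then-compare pattern, pulled out of the test harness; the helper name compare_with_baseline and its argument names are illustrative only, and the sketch uses the plain pickle module where the committed Python 2 code uses cPickle.

import difflib
import os
import pickle
import pprint

def compare_with_baseline(tokens, baseline_path):
    # First run: no baseline yet, so store the token stream and stop.
    if not os.path.isfile(baseline_path):
        with open(baseline_path, 'wb') as fp:
            pickle.dump(tokens, fp)
        return
    # Later runs: load the stored tokens and compare them with the new ones.
    with open(baseline_path, 'rb') as fp:
        stored_tokens = pickle.load(fp)
    if stored_tokens != tokens:
        old = pprint.pformat(stored_tokens).splitlines()
        new = pprint.pformat(tokens).splitlines()
        print('\n'.join(difflib.unified_diff(old, new)))
        raise AssertionError('token output changed: %s' % baseline_path)
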
diff --git a/tests/test_examplefiles.py b/tests/test_examplefiles.py
index 691ae92a..74ae6a53 100644
--- a/tests/test_examplefiles.py
+++ b/tests/test_examplefiles.py
@@ -8,19 +8,27 @@
"""
import os
+import pprint
+import difflib
+import cPickle as pickle
from pygments.lexers import get_lexer_for_filename, get_lexer_by_name
from pygments.token import Error
from pygments.util import ClassNotFound, b
+STORE_OUTPUT = False
# generate methods
def test_example_files():
testdir = os.path.dirname(__file__)
+ outdir = os.path.join(testdir, 'examplefiles', 'output')
+ if STORE_OUTPUT and not os.path.isdir(outdir):
+ os.makedirs(outdir)
for fn in os.listdir(os.path.join(testdir, 'examplefiles')):
absfn = os.path.join(testdir, 'examplefiles', fn)
if not os.path.isfile(absfn):
continue
+ outfn = os.path.join(outdir, fn)
try:
lx = get_lexer_for_filename(absfn)
@@ -35,9 +43,9 @@ def test_example_files():
lx = get_lexer_by_name(name)
except ClassNotFound:
raise AssertionError('no lexer found for file %r' % fn)
- yield check_lexer, lx, absfn
+ yield check_lexer, lx, absfn, outfn
-def check_lexer(lx, absfn):
+def check_lexer(lx, absfn, outfn):
text = open(absfn, 'rb').read()
text = text.replace(b('\r\n'), b('\n'))
text = text.strip(b('\n')) + b('\n')
@@ -46,9 +54,34 @@ def check_lexer(lx, absfn):
except UnicodeError:
text = text.decode('latin1')
ntext = []
+ tokens = []
for type, val in lx.get_tokens(text):
ntext.append(val)
assert type != Error, 'lexer %s generated error token for %s' % \
(lx, absfn)
+ tokens.append((type, val))
if u''.join(ntext) != text:
raise AssertionError('round trip failed for ' + absfn)
+
+ # check output against previous run if enabled
+ if STORE_OUTPUT:
+ # no previous output -- store it
+ if not os.path.isfile(outfn):
+ fp = open(outfn, 'wb')
+ try:
+ pickle.dump(tokens, fp)
+ finally:
+ fp.close()
+ return
+ # otherwise load it and compare
+ fp = open(outfn, 'rb')
+ try:
+ stored_tokens = pickle.load(fp)
+ finally:
+ fp.close()
+ if stored_tokens != tokens:
+ f1 = pprint.pformat(stored_tokens)
+ f2 = pprint.pformat(tokens)
+ print '\n'.join(difflib.unified_diff(f1.splitlines(),
+ f2.splitlines()))
+ assert False, absfn
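
To use the new check, a developer sets STORE_OUTPUT = True in tests/test_examplefiles.py and runs the example-file tests more than once: the first run writes one pickle per example file into tests/examplefiles/output/, and any later run whose token stream no longer matches the stored pickle fails with a unified diff of the two pretty-printed token lists. With the flag left at its default of False, the tests behave exactly as before.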