author | Georg Brandl <georg@python.org> | 2010-08-22 12:02:06 +0200 |
---|---|---|
committer | Georg Brandl <georg@python.org> | 2010-08-22 12:02:06 +0200 |
commit | d5122e5bba3d40a2a957eaa2ed230d713f558bb8 | |
tree | 040ced54e3158e6d8be829bbb7661537773a58c8 | /tests/test_examplefiles.py |
parent | fba2500b4ebbd0b1073280c636f18b923671073d | |
parent | 71d3ab5ede6b2535f09538d18a7f63037f9e845d | |
merge with tim
Diffstat (limited to 'tests/test_examplefiles.py')
-rw-r--r-- | tests/test_examplefiles.py | 37 |
1 files changed, 35 insertions, 2 deletions
```diff
diff --git a/tests/test_examplefiles.py b/tests/test_examplefiles.py
index 25d02233..877cbecf 100644
--- a/tests/test_examplefiles.py
+++ b/tests/test_examplefiles.py
@@ -8,15 +8,22 @@
 """
 
 import os
+import pprint
+import difflib
+import cPickle as pickle
 
 from pygments.lexers import get_lexer_for_filename, get_lexer_by_name
 from pygments.token import Error
 from pygments.util import ClassNotFound, b
 
+STORE_OUTPUT = False
 
 # generate methods
 def test_example_files():
     testdir = os.path.dirname(__file__)
+    outdir = os.path.join(testdir, 'examplefiles', 'output')
+    if STORE_OUTPUT and not os.path.isdir(outdir):
+        os.makedirs(outdir)
     for fn in os.listdir(os.path.join(testdir, 'examplefiles')):
         if fn.startswith('.') or fn.endswith('#'):
             continue
@@ -24,6 +31,7 @@ def test_example_files():
         absfn = os.path.join(testdir, 'examplefiles', fn)
         if not os.path.isfile(absfn):
             continue
+        outfn = os.path.join(outdir, fn)
 
         try:
             lx = get_lexer_for_filename(absfn)
@@ -38,9 +46,9 @@ def test_example_files():
                 lx = get_lexer_by_name(name)
             except ClassNotFound:
                 raise AssertionError('no lexer found for file %r' % fn)
-        yield check_lexer, lx, absfn
+        yield check_lexer, lx, absfn, outfn
 
-def check_lexer(lx, absfn):
+def check_lexer(lx, absfn, outfn):
     text = open(absfn, 'rb').read()
     text = text.replace(b('\r\n'), b('\n'))
     text = text.strip(b('\n')) + b('\n')
@@ -49,9 +57,34 @@ def check_lexer(lx, absfn):
     except UnicodeError:
         text = text.decode('latin1')
     ntext = []
+    tokens = []
     for type, val in lx.get_tokens(text):
         ntext.append(val)
         assert type != Error, 'lexer %s generated error token for %s' % \
             (lx, absfn)
+        tokens.append((type, val))
     if u''.join(ntext) != text:
         raise AssertionError('round trip failed for ' + absfn)
+
+    # check output against previous run if enabled
+    if STORE_OUTPUT:
+        # no previous output -- store it
+        if not os.path.isfile(outfn):
+            fp = open(outfn, 'wb')
+            try:
+                pickle.dump(tokens, fp)
+            finally:
+                fp.close()
+            return
+        # otherwise load it and compare
+        fp = open(outfn, 'rb')
+        try:
+            stored_tokens = pickle.load(fp)
+        finally:
+            fp.close()
+        if stored_tokens != tokens:
+            f1 = pprint.pformat(stored_tokens)
+            f2 = pprint.pformat(tokens)
+            print '\n'.join(difflib.unified_diff(f1.splitlines(),
+                                                 f2.splitlines()))
+            assert False, absfn
```
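For readers skimming the change: the new `STORE_OUTPUT` flag turns the example-file test into an opt-in golden-file check. On the first run the token stream produced for each example file is pickled into `tests/examplefiles/output/`, and on later runs the freshly generated tokens are compared against that stored snapshot, with `difflib` printing a unified diff of the two `pprint` dumps when they differ. Because the flag defaults to `False`, nothing is stored or compared unless it is edited by hand. The sketch below shows the same store-then-compare pattern in isolation; it is not the repository's test code, the `check_against_snapshot`/`snapshot_path` names are invented for illustration, and it uses Python 3's `pickle` and `print()` where the patch uses `cPickle` and the Python 2 print statement.

```python
import difflib
import os
import pickle
import pprint


def check_against_snapshot(snapshot_path, current_tokens):
    """Golden-file check: store tokens on the first run, compare on later runs."""
    if not os.path.isfile(snapshot_path):
        # No previous output -- store the current tokens and succeed.
        with open(snapshot_path, 'wb') as fp:
            pickle.dump(current_tokens, fp)
        return

    # Otherwise load the stored tokens and compare them with the fresh ones.
    with open(snapshot_path, 'rb') as fp:
        stored_tokens = pickle.load(fp)

    if stored_tokens != current_tokens:
        old = pprint.pformat(stored_tokens).splitlines()
        new = pprint.pformat(current_tokens).splitlines()
        print('\n'.join(difflib.unified_diff(old, new)))
        raise AssertionError('token output changed: %s' % snapshot_path)


if __name__ == '__main__':
    # First call stores the snapshot; a second call with the same data passes.
    tokens = [('Token.Keyword', 'print'), ('Token.Text', '\n')]
    check_against_snapshot('example.tokens', tokens)
    check_against_snapshot('example.tokens', tokens)
```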