diff options
author | Georg Brandl <georg@python.org> | 2014-09-20 10:04:09 +0200 |
---|---|---|
committer | Georg Brandl <georg@python.org> | 2014-09-20 10:04:09 +0200 |
commit | 40170ef1e363bfa8f3bfca8c2ce3756c014a63fb (patch) | |
tree | 3235aada6c34acd3318bd5dbfc9dc2042ccd642d /tests/test_examplefiles.py | |
parent | 5390fd6d21e81e8b4e9cb1cfab34b2718a9f15d1 (diff) | |
download | pygments-40170ef1e363bfa8f3bfca8c2ce3756c014a63fb.tar.gz |
Keep track of speed stats in test_examplefiles and print a list of slowest absolute/relative lexers at the end (run with TEST=-s to see).
Diffstat (limited to 'tests/test_examplefiles.py')
-rw-r--r-- | tests/test_examplefiles.py | 24 |
1 files changed, 22 insertions, 2 deletions
diff --git a/tests/test_examplefiles.py b/tests/test_examplefiles.py
index 0547ffd3..6e11003a 100644
--- a/tests/test_examplefiles.py
+++ b/tests/test_examplefiles.py
@@ -20,8 +20,10 @@ from pygments.util import ClassNotFound

 STORE_OUTPUT = False

+
 # generate methods
 def test_example_files():
+    stats = {}
     testdir = os.path.dirname(__file__)
     outdir = os.path.join(testdir, 'examplefiles', 'output')
     if STORE_OUTPUT and not os.path.isdir(outdir):
@@ -57,9 +59,22 @@ def test_example_files():
                 'nor is of the form <lexer>_filename '
                 'for overriding, thus no lexer found.' % fn)
-        yield check_lexer, lx, absfn, outfn
+        yield check_lexer, lx, absfn, outfn, stats
+
+    N = 7
+    stats = list(stats.items())
+    stats.sort(key=lambda x: x[1][1])
+    print('\nExample files that took longest absolute time:')
+    for fn, t in stats[-N:]:
+        print('%-30s %6d chars %8.2f ms %7.3f s/char' % ((fn,) + t))
+    print()
+    stats.sort(key=lambda x: x[1][2])
+    print('\nExample files that took longest relative time:')
+    for fn, t in stats[-N:]:
+        print('%-30s %6d chars %8.2f ms %7.3f s/char' % ((fn,) + t))
+


-def check_lexer(lx, absfn, outfn):
+def check_lexer(lx, absfn, outfn, stats):
     fp = open(absfn, 'rb')
     try:
         text = fp.read()
@@ -75,12 +90,17 @@ def check_lexer(lx, absfn, outfn):
         text = text.decode('latin1')
     ntext = []
     tokens = []
+    import time
+    t1 = time.time()
     for type, val in lx.get_tokens(text):
         ntext.append(val)
         assert type != Error, \
             'lexer %s generated error token for %s: %r at position %d' % \
             (lx, absfn, val, len(u''.join(ntext)))
         tokens.append((type, val))
+    t2 = time.time()
+    stats[os.path.basename(absfn)] = (len(text),
+        1000 * (t2 - t1), 1000 * (t2 - t1) / len(text))
     if u''.join(ntext) != text:
         print('\n'.join(difflib.unified_diff(u''.join(ntext).splitlines(),
                                              text.splitlines())))