author     Oleh Prypin <oleh@pryp.in>  2021-01-20 10:48:45 +0100
committer  GitHub <noreply@github.com>  2021-01-20 10:48:45 +0100
commit     6f4309217326430145564ae8b1bb393ea684f39f (patch)
tree       bf4025a5e709426dc927c4afc4fd2286f8450ed9 /tests/conftest.py
parent     f0445be718da83541ea3401aad882f3937147263 (diff)
download   pygments-git-6f4309217326430145564ae8b1bb393ea684f39f.tar.gz
Also add auto-updatable output-based tests to examplefiles (#1689)
Co-authored-by: Georg Brandl <georg@python.org>
Diffstat (limited to 'tests/conftest.py')
-rw-r--r--  tests/conftest.py  108
1 file changed, 108 insertions(+), 0 deletions(-)
diff --git a/tests/conftest.py b/tests/conftest.py
index def1b770..7ff5ff18 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,5 +1,113 @@
+"""
+ Generated lexer tests
+ ~~~~~~~~~~~~~~~~~~~~~
+
+ Checks that lexers output the expected tokens for each sample
+ under lexers/*/test_*.txt.
+
+ After making a change, rather than updating the samples manually,
+ run `pytest --update-goldens tests/lexers tests/examplefiles`.
+
+ To add a new sample, create a new file matching this pattern.
+ The directory must match the alias of the lexer to be used.
+    Populate only the input, then run `--update-goldens` to fill in the tokens.
+
+    :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS.
+ :license: BSD, see LICENSE for details.
+"""
+from pathlib import Path
+
import pytest
+import pygments.lexers
+
def pytest_addoption(parser):
    parser.addoption('--update-goldens', action='store_true', help='reset golden master benchmarks')
+
+
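+# Base class for golden-file lexer tests. Subclasses provide self.input and
+# self.expected, plus an overwrite() that writes regenerated output back to
+# disk; the lexer alias is taken from the sample's parent directory name.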
+class LexerTestItem(pytest.Item):
+ def __init__(self, name, parent):
+ super().__init__(name, parent)
+ self.lexer = Path(str(self.fspath)).parent.name
+ self.actual = None
+
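+    # Render each (token, value) pair as the value's repr, left-aligned in a
+    # 13-character column, followed by the token type without its "Token."
+    # prefix; a blank line follows every newline-terminated value.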
+ @classmethod
+ def _prettyprint_tokens(cls, tokens):
+ for tok, val in tokens:
+ yield '{!r:<13} {}'.format(val, str(tok)[6:])
+ if val.endswith('\n'):
+ yield ''
+
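+    # Lex the sample and compare against the golden output. Under
+    # --update-goldens the assertion is skipped and the fresh output is
+    # written back by maybe_overwrite() during teardown.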
+ def runtest(self):
+ lexer = pygments.lexers.get_lexer_by_name(self.lexer)
+ tokens = lexer.get_tokens(self.input)
+ self.actual = '\n'.join(self._prettyprint_tokens(tokens)).rstrip('\n') + '\n'
+ if not self.config.getoption('--update-goldens'):
+ assert self.actual == self.expected
+
+ def _test_file_rel_path(self):
+ return Path(str(self.fspath)).relative_to(Path(__file__).parent.parent)
+
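+    # For golden mismatches, prefix pytest's assertion diff with a hint
+    # naming the lexer, the sample file and the exact command that
+    # regenerates it.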
+ def repr_failure(self, excinfo):
+ if isinstance(excinfo.value, AssertionError):
+ rel_path = self._test_file_rel_path()
+ message = (
+ 'The tokens produced by the "{}" lexer differ from the '
+ 'expected ones in the file "{}".\n'
+ 'Run `pytest {} --update-goldens` to update it.'
+ ).format(self.lexer, rel_path, Path(*rel_path.parts[:2]))
+ diff = str(excinfo.value).split('\n', 1)[-1]
+ return message + '\n\n' + diff
+
+ def reportinfo(self):
+ return self.fspath, None, str(self._test_file_rel_path())
+
+ def maybe_overwrite(self):
+ if self.actual is not None and self.config.getoption('--update-goldens'):
+ self.overwrite()
+
+
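+# Two-file variant (examplefiles style): the sample input lives in its own
+# file, the expected tokens in a sibling '<sample>.output' file. A missing
+# output file compares as empty until regenerated.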
+class LexerSeparateTestItem(LexerTestItem):
+ def __init__(self, name, parent):
+ super().__init__(name, parent)
+
+ self.input = self.fspath.read_binary()
+ output_path = self.fspath + '.output'
+ if output_path.check():
+ self.expected = output_path.read_text(encoding='utf-8')
+ else:
+ self.expected = ''
+
+ def overwrite(self):
+ output_path = self.fspath + '.output'
+ output_path.write_text(self.actual, encoding='utf-8')
+
+
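+# Single-file variant (snippet style): an optional comment, the input and
+# the expected tokens share one file, separated by '---input---' and
+# '---tokens---' markers.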
+class LexerInlineTestItem(LexerTestItem):
+ def __init__(self, name, parent):
+ super().__init__(name, parent)
+
+ content = self.fspath.read_text('utf-8')
+ content, _, self.expected = content.partition('\n---tokens---\n')
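+        # If the file starts directly with the input marker (no leading
+        # comment), prepend a newline so the rpartition below still matches.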
+ if content.startswith('---input---\n'):
+ content = '\n' + content
+ self.comment, _, self.input = content.rpartition('\n---input---\n')
+ if not self.input.endswith('\n'):
+ self.input += '\n'
+ self.comment = self.comment.strip()
+
+ def overwrite(self):
+ with self.fspath.open('w', encoding='utf-8') as f:
+ f.write(self.comment)
+ if self.comment:
+ f.write('\n\n')
+ f.write('---input---\n')
+ f.write(self.input)
+ f.write('\n---tokens---\n')
+ f.write(self.actual)
+
+
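+# Persist freshly generated tokens after each lexer test when
+# --update-goldens is active.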
+def pytest_runtest_teardown(item, nextitem):
+ if isinstance(item, LexerTestItem):
+ item.maybe_overwrite()
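For reference, a single-file sample consumed by LexerInlineTestItem could look like the sketch below, stored for example under tests/lexers/python/test_hello.txt (a hypothetical name; the directory must match the lexer alias). The token column is illustrative: in practice it is generated by `pytest --update-goldens` rather than written by hand.

    ---input---
    print("hi")

    ---tokens---
    'print'       Name.Builtin
    '('           Punctuation
    '"'           Literal.String.Double
    'hi'          Literal.String.Double
    '"'           Literal.String.Double
    ')'           Punctuation
    '\n'          Text

A separate-file sample works the same way, except the input is the sample file itself and the token listing lives in '<sample>.output' next to it.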