diff options
author | Georg Brandl <georg@python.org> | 2019-05-06 18:02:47 +0200 |
---|---|---|
committer | Georg Brandl <georg@python.org> | 2019-05-06 18:02:47 +0200 |
commit | 4d27daa332b301a8cfcf0cac6345d56bfc17e5fe (patch) | |
tree | 333c0921378f7f2b66afe0f2207a76d3fae04b7b /tests/test_token.py | |
parent | 65873fba7b51171e35d4496fdf5bf38f54285dfa (diff) | |
download | pygments-4d27daa332b301a8cfcf0cac6345d56bfc17e5fe.tar.gz |
Remove unittest classes from the test suite.
Diffstat (limited to 'tests/test_token.py')
-rw-r--r-- | tests/test_token.py | 64 |
1 file changed, 31 insertions, 33 deletions
diff --git a/tests/test_token.py b/tests/test_token.py index 94522373..11e4d375 100644 --- a/tests/test_token.py +++ b/tests/test_token.py @@ -3,52 +3,50 @@ Test suite for the token module ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS. + :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ import copy -import unittest -from pygments import token +import pytest +from pygments import token -class TokenTest(unittest.TestCase): - def test_tokentype(self): - e = self.assertEqual +def test_tokentype(): + t = token.String + assert t.split() == [token.Token, token.Literal, token.String] + assert t.__class__ is token._TokenType - t = token.String - e(t.split(), [token.Token, token.Literal, token.String]) +def test_functions(): + assert token.is_token_subtype(token.String, token.String) + assert token.is_token_subtype(token.String, token.Literal) + assert not token.is_token_subtype(token.Literal, token.String) - e(t.__class__, token._TokenType) + assert token.string_to_tokentype(token.String) is token.String + assert token.string_to_tokentype('') is token.Token + assert token.string_to_tokentype('String') is token.String - def test_functions(self): - self.assertTrue(token.is_token_subtype(token.String, token.String)) - self.assertTrue(token.is_token_subtype(token.String, token.Literal)) - self.assertFalse(token.is_token_subtype(token.Literal, token.String)) - self.assertTrue(token.string_to_tokentype(token.String) is token.String) - self.assertTrue(token.string_to_tokentype('') is token.Token) - self.assertTrue(token.string_to_tokentype('String') is token.String) +def test_sanity_check(): + stp = token.STANDARD_TYPES.copy() + stp[token.Token] = '---' # Token and Text do conflict, that is okay + t = {} + for k, v in stp.items(): + t.setdefault(v, []).append(k) + if len(t) == len(stp): + return # Okay - def test_sanity_check(self): - stp = token.STANDARD_TYPES.copy() 
- stp[token.Token] = '---' # Token and Text do conflict, that is okay - t = {} - for k, v in stp.items(): - t.setdefault(v, []).append(k) - if len(t) == len(stp): - return # Okay + for k, v in t.items(): + if len(v) > 1: + pytest.fail("%r has more than one key: %r" % (k, v)) - for k, v in t.items(): - if len(v) > 1: - self.fail("%r has more than one key: %r" % (k, v)) - def test_copying(self): - # Token instances are supposed to be singletons, so copying or even - # deepcopying should return themselves - t = token.String - self.assertIs(t, copy.copy(t)) - self.assertIs(t, copy.deepcopy(t)) +def test_copying(): + # Token instances are supposed to be singletons, so copying or even + # deepcopying should return themselves + t = token.String + assert t is copy.copy(t) + assert t is copy.deepcopy(t) |