author     Georg Brandl <georg@python.org>    2012-03-11 08:17:52 +0100
committer  Georg Brandl <georg@python.org>    2012-03-11 08:17:52 +0100
commit     6f8e79cdf72575dac0047deaf7b2a32540676198 (patch)
tree       e0ced08a2f775d7e8c9fc4d523566bc797fbcb43 /tests/test_token.py
parent     9e80de2655ac91392e77fdb851cb8af940cff89d (diff)
download   pygments-6f8e79cdf72575dac0047deaf7b2a32540676198.tar.gz
Fixes #748: clean up deprecation and resource warnings in the test suite when run with python3 -Wd.
Diffstat (limited to 'tests/test_token.py')
-rw-r--r--  tests/test_token.py | 17
1 file changed, 7 insertions(+), 10 deletions(-)
diff --git a/tests/test_token.py b/tests/test_token.py
index 490c966c..a9d1edeb 100644
--- a/tests/test_token.py
+++ b/tests/test_token.py
@@ -8,8 +8,6 @@
 """

 import unittest
-import StringIO
-import sys

 from pygments import token

@@ -17,8 +15,7 @@ from pygments import token
 class TokenTest(unittest.TestCase):

     def test_tokentype(self):
-        e = self.assertEquals
-        r = self.assertRaises
+        e = self.assertEqual

         t = token.String

@@ -27,13 +24,13 @@ class TokenTest(unittest.TestCase):
         e(t.__class__, token._TokenType)

     def test_functions(self):
-        self.assert_(token.is_token_subtype(token.String, token.String))
-        self.assert_(token.is_token_subtype(token.String, token.Literal))
-        self.failIf(token.is_token_subtype(token.Literal, token.String))
+        self.assertTrue(token.is_token_subtype(token.String, token.String))
+        self.assertTrue(token.is_token_subtype(token.String, token.Literal))
+        self.assertFalse(token.is_token_subtype(token.Literal, token.String))

-        self.assert_(token.string_to_tokentype(token.String) is token.String)
-        self.assert_(token.string_to_tokentype('') is token.Token)
-        self.assert_(token.string_to_tokentype('String') is token.String)
+        self.assertTrue(token.string_to_tokentype(token.String) is token.String)
+        self.assertTrue(token.string_to_tokentype('') is token.Token)
+        self.assertTrue(token.string_to_tokentype('String') is token.String)

     def test_sanity_check(self):
         stp = token.STANDARD_TYPES.copy()
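
For readers unfamiliar with the renamed assertions: assertEquals, assert_, and failIf are long-deprecated aliases of assertEqual, assertTrue, and assertFalse in the standard-library unittest module, and they emit DeprecationWarning when the interpreter runs with warnings enabled (e.g. python3 -Wd, as in the commit message). A minimal, hypothetical sketch (not part of Pygments) using the modern names:

    import unittest


    class AliasCleanupExample(unittest.TestCase):
        """Hypothetical test illustrating the modern assertion names.

        The legacy aliases (assertEquals, assert_, failIf) still work on
        many Python 3 versions but raise DeprecationWarning under -Wd.
        """

        def test_modern_assertions(self):
            # assertEqual replaces assertEquals
            self.assertEqual(1 + 1, 2)
            # assertTrue replaces assert_
            self.assertTrue(isinstance("x", str))
            # assertFalse replaces failIf
            self.assertFalse("x".isdigit())


    if __name__ == "__main__":
        unittest.main()

Running such a file with python3 -Wd -m unittest should produce no deprecation warnings, whereas the legacy aliases would.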