diff options
author | Georg Brandl <georg@python.org> | 2019-05-06 18:02:47 +0200 |
---|---|---|
committer | Georg Brandl <georg@python.org> | 2019-11-10 10:15:13 +0100 |
commit | 7827966acdb6431636520d20fc3c148ce52de59b (patch) | |
tree | 13a9316eb3eb964c22da0a08f046d44cd81470d0 /tests/test_r.py | |
parent | a281ff8367a3a5f4cc17c9956e9273593558d336 (diff) | |
download | pygments-git-7827966acdb6431636520d20fc3c148ce52de59b.tar.gz |
Remove unittest classes from the test suite.
Diffstat (limited to 'tests/test_r.py')
-rw-r--r-- | tests/test_r.py | 111 |
1 file changed, 58 insertions, 53 deletions
diff --git a/tests/test_r.py b/tests/test_r.py
index 70148e53..72cb8afc 100644
--- a/tests/test_r.py
+++ b/tests/test_r.py
@@ -1,70 +1,75 @@
 # -*- coding: utf-8 -*-
 """
     R Tests
-    ~~~~~~~~~
+    ~~~~~~~
 
-    :copyright: Copyright 2006-2016 by the Pygments team, see AUTHORS.
+    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
     :license: BSD, see LICENSE for details.
 """
 
-import unittest
+import pytest
 
 from pygments.lexers import SLexer
 from pygments.token import Token, Name, Punctuation
 
 
-class RTest(unittest.TestCase):
-    def setUp(self):
-        self.lexer = SLexer()
+@pytest.fixture(scope='module')
+def lexer():
+    yield SLexer()
 
-    def testCall(self):
-        fragment = u'f(1, a)\n'
-        tokens = [
-            (Name.Function, u'f'),
-            (Punctuation, u'('),
-            (Token.Literal.Number, u'1'),
-            (Punctuation, u','),
-            (Token.Text, u' '),
-            (Token.Name, u'a'),
-            (Punctuation, u')'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
 
-    def testName1(self):
-        fragment = u'._a_2.c'
-        tokens = [
-            (Name, u'._a_2.c'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+def test_call(lexer):
+    fragment = u'f(1, a)\n'
+    tokens = [
+        (Name.Function, u'f'),
+        (Punctuation, u'('),
+        (Token.Literal.Number, u'1'),
+        (Punctuation, u','),
+        (Token.Text, u' '),
+        (Token.Name, u'a'),
+        (Punctuation, u')'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
 
-    def testName2(self):
-        # Invalid names are valid if backticks are used
-        fragment = u'`.1 blah`'
-        tokens = [
-            (Name, u'`.1 blah`'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
 
-    def testName3(self):
-        # Internal backticks can be escaped
-        fragment = u'`.1 \\` blah`'
-        tokens = [
-            (Name, u'`.1 \\` blah`'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+def test_name1(lexer):
+    fragment = u'._a_2.c'
+    tokens = [
+        (Name, u'._a_2.c'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
 
-    def testCustomOperator(self):
-        fragment = u'7 % and % 8'
-        tokens = [
-            (Token.Literal.Number, u'7'),
-            (Token.Text, u' '),
-            (Token.Operator, u'% and %'),
-            (Token.Text, u' '),
-            (Token.Literal.Number, u'8'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+def test_name2(lexer):
+    # Invalid names are valid if backticks are used
+    fragment = u'`.1 blah`'
+    tokens = [
+        (Name, u'`.1 blah`'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_name3(lexer):
+    # Internal backticks can be escaped
+    fragment = u'`.1 \\` blah`'
+    tokens = [
+        (Name, u'`.1 \\` blah`'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
+
+
+def test_custom_operator(lexer):
+    fragment = u'7 % and % 8'
+    tokens = [
+        (Token.Literal.Number, u'7'),
+        (Token.Text, u' '),
+        (Token.Operator, u'% and %'),
+        (Token.Text, u' '),
+        (Token.Literal.Number, u'8'),
+        (Token.Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens