diff options
author | JJ Atria <jjatria@gmail.com> | 2016-03-19 18:32:55 +0000 |
---|---|---|
committer | JJ Atria <jjatria@gmail.com> | 2016-03-19 18:32:55 +0000 |
commit | c90bfd3c4beb542909e7ee9f4c0acdbbd439cd1d (patch) | |
tree | 6f7d213875c18c8ffe6c8067c019f2ea83eecb4e | |
parent | 3c47a863609e2c3fcf69830a58ce934692373162 (diff) | |
download | pygments-c90bfd3c4beb542909e7ee9f4c0acdbbd439cd1d.tar.gz |
Added automated tests for Praat lexer
-rw-r--r-- | tests/test_praat.py | 130 |
1 file changed, 130 insertions, 0 deletions
# -*- coding: utf-8 -*-
"""
    Praat lexer tests
    ~~~~~~~~~~~~~~~~~

    Token-level regression tests for :class:`pygments.lexers.PraatLexer`.
    Each test feeds a small Praat script fragment to the lexer and asserts
    the exact token stream it produces.

    :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

import unittest

from pygments.token import Token
from pygments.lexers import PraatLexer


class PraatTest(unittest.TestCase):
    """Exercise the Praat lexer against representative script fragments."""

    def setUp(self):
        # A fresh lexer per test; maxDiff=None so full token-list diffs
        # are shown when an assertion fails.
        self.lexer = PraatLexer()
        self.maxDiff = None

    def testNumericAssignment(self):
        """A numeric assignment: the sign is an Operator, not part of the Number."""
        fragment = u'var = -15e4\n'
        tokens = [
            (Token.Text, u'var'),
            (Token.Text, u' '),
            (Token.Operator, u'='),
            (Token.Text, u' '),
            (Token.Operator, u'-'),
            (Token.Literal.Number, u'15e4'),
            (Token.Text, u'\n'),
        ]
        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))

    def testStringAssignment(self):
        """A string assignment: quotes and contents are separate String tokens."""
        fragment = u'var$ = "foo"\n'
        tokens = [
            (Token.Text, u'var$'),
            (Token.Text, u' '),
            (Token.Operator, u'='),
            (Token.Text, u' '),
            (Token.Literal.String, u'"'),
            (Token.Literal.String, u'foo'),
            (Token.Literal.String, u'"'),
            (Token.Text, u'\n'),
        ]
        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))

    def testStringEscapedQuotes(self):
        """Praat escapes a quote by doubling it; each quote is its own token."""
        fragment = u'"it said ""foo"""\n'
        tokens = [
            (Token.Literal.String, u'"'),
            (Token.Literal.String, u'it said '),
            (Token.Literal.String, u'"'),
            (Token.Literal.String, u'"'),
            (Token.Literal.String, u'foo'),
            (Token.Literal.String, u'"'),
            (Token.Literal.String, u'"'),
            (Token.Literal.String, u'"'),
            (Token.Text, u'\n'),
        ]
        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))

    def testFunctionCall(self):
        """A built-in call with nested parens; inner parens lex as plain Text."""
        fragment = u'selected("Sound", i+(a*b))\n'
        tokens = [
            (Token.Name.Function, u'selected'),
            (Token.Punctuation, u'('),
            (Token.Literal.String, u'"'),
            (Token.Literal.String, u'Sound'),
            (Token.Literal.String, u'"'),
            (Token.Punctuation, u','),
            (Token.Text, u' '),
            (Token.Text, u'i'),
            (Token.Operator, u'+'),
            (Token.Text, u'('),
            (Token.Text, u'a'),
            (Token.Operator, u'*'),
            (Token.Text, u'b'),
            (Token.Text, u')'),
            (Token.Punctuation, u')'),
            (Token.Text, u'\n'),
        ]
        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))

    def testBrokenUnquotedString(self):
        """An unquoted string continued with '...' and containing interpolation."""
        fragment = u'printline string\n... \'interpolated\' string\n'
        tokens = [
            (Token.Keyword, u'printline'),
            (Token.Text, u' '),
            (Token.Literal.String, u'string'),
            (Token.Text, u'\n'),
            (Token.Punctuation, u'...'),
            (Token.Text, u' '),
            (Token.Literal.String.Interpol, u"'"),
            (Token.Literal.String.Interpol, u'interpolated'),
            (Token.Literal.String.Interpol, u"'"),
            (Token.Text, u' '),
            (Token.Literal.String, u'string'),
            (Token.Text, u'\n'),
        ]
        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))

    def testInlineIf(self):
        """An inline if/then/else/fi expression; keywords lex as Keyword.

        NOTE: renamed from the original typo ``testInlinIf``; unittest
        discovery only requires the ``test`` prefix, so this is safe.
        The fragment has no trailing newline — the lexer's ``ensurenl``
        behaviour is expected to supply the final ``'\\n'`` token.
        """
        fragment = u'var = if true == 1 then -1 else 0 fi'
        tokens = [
            (Token.Text, u'var'),
            (Token.Text, u' '),
            (Token.Operator, u'='),
            (Token.Text, u' '),
            (Token.Keyword, u'if'),
            (Token.Text, u' '),
            (Token.Text, u'true'),
            (Token.Text, u' '),
            (Token.Operator, u'=='),
            (Token.Text, u' '),
            (Token.Literal.Number, u'1'),
            (Token.Text, u' '),
            (Token.Keyword, u'then'),
            (Token.Text, u' '),
            (Token.Operator, u'-'),
            (Token.Literal.Number, u'1'),
            (Token.Text, u' '),
            (Token.Keyword, u'else'),
            (Token.Text, u' '),
            (Token.Literal.Number, u'0'),
            (Token.Text, u' '),
            (Token.Keyword, u'fi'),
            (Token.Text, u'\n'),
        ]
        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))