author     domspad <devnull@localhost>    2016-06-02 16:05:43 -0700
committer  domspad <devnull@localhost>    2016-06-02 16:05:43 -0700
commit     4242a01c1bc53bd76fad358a56b25af961bc2628 (patch)
tree       6780c3202453b2c2e41ebd17e67f8fcad2e83cee
parent     c45584d2421d92a2134058e86e54f0bea3f121e5 (diff)
download   pygments-4242a01c1bc53bd76fad358a56b25af961bc2628.tar.gz
test for unicode chars in julia
-rw-r--r--  tests/test_julia.py  60
1 file changed, 60 insertions(+), 0 deletions(-)
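
For context, the sketch below feeds the same fragment the new test uses straight to JuliaLexer and prints the resulting token stream. It assumes Pygments is importable from this checkout; the exact tokens can vary between Pygments versions.

    # -*- coding: utf-8 -*-
    # Minimal sketch of what the new test exercises; assumes Pygments is
    # importable from this checkout, and token output may vary by version.
    from pygments.lexers import JuliaLexer

    lexer = JuliaLexer()
    fragment = u's = \u221a((1/n) * sum(count .^ 2) - mu .^2)\n'
    for token_type, value in lexer.get_tokens(fragment):
        print(token_type, repr(value))
    # The key check: \u221a (the square-root sign) lexes as Token.Name
    # rather than Token.Error.
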
diff --git a/tests/test_julia.py b/tests/test_julia.py
new file mode 100644
index 00000000..856aba3b
--- /dev/null
+++ b/tests/test_julia.py
@@ -0,0 +1,60 @@
+# -*- coding: utf-8 -*-
+"""
+    Julia Tests
+    ~~~~~~~~~~~
+
+    :copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
+    :license: BSD, see LICENSE for details.
+"""
+
+import unittest
+
+from pygments.lexers import JuliaLexer
+from pygments.token import Token
+
+
+class Julia(unittest.TestCase):
+    def setUp(self):
+        self.lexer = JuliaLexer()
+
+    def test_unicode(self):
+        """
+        Test that a Unicode character (√) in an expression is recognized.
+        """
+        fragment = u's = \u221a((1/n) * sum(count .^ 2) - mu .^2)\n'
+        tokens = [
+            (Token.Name, u's'),
+            (Token.Text, u' '),
+            (Token.Operator, u'='),
+            (Token.Text, u' '),
+            (Token.Name, u'\u221a'),
+            (Token.Punctuation, u'('),
+            (Token.Punctuation, u'('),
+            (Token.Literal.Number.Integer, u'1'),
+            (Token.Operator, u'/'),
+            (Token.Name, u'n'),
+            (Token.Punctuation, u')'),
+            (Token.Text, u' '),
+            (Token.Operator, u'*'),
+            (Token.Text, u' '),
+            (Token.Name, u'sum'),
+            (Token.Punctuation, u'('),
+            (Token.Name, u'count'),
+            (Token.Text, u' '),
+            (Token.Operator, u'.'),
+            (Token.Operator, u'^'),
+            (Token.Text, u' '),
+            (Token.Literal.Number.Integer, u'2'),
+            (Token.Punctuation, u')'),
+            (Token.Text, u' '),
+            (Token.Operator, u'-'),
+            (Token.Text, u' '),
+            (Token.Name, u'mu'),
+            (Token.Text, u' '),
+            (Token.Operator, u'.'),
+            (Token.Operator, u'^'),
+            (Token.Literal.Number.Integer, u'2'),
+            (Token.Punctuation, u')'),
+            (Token.Text, u'\n'),
+        ]
+        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
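
To run just this module from the checkout root, a standard-library-only sketch (equivalent to python -m unittest tests.test_julia; this makes no assumption about the project's preferred test runner):

    # Load and run the new test module by dotted name; run from the
    # repository root so 'tests' and 'pygments' are both importable.
    import unittest

    suite = unittest.defaultTestLoader.loadTestsFromName('tests.test_julia')
    unittest.TextTestRunner(verbosity=2).run(suite)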