diff options
author | Matthäus G. Chajdas <dev@anteru.net> | 2019-11-10 13:56:53 +0100 |
---|---|---|
committer | Matthäus G. Chajdas <dev@anteru.net> | 2019-11-10 13:56:53 +0100 |
commit | 1dd3124a9770e11b6684e5dd1e6bc15a0aa3bc67 (patch) | |
tree | 87a171383266dd1f64196589af081bc2f8e497c3 /tests/test_julia.py | |
parent | f1c080e184dc1bbc36eaa7cd729ff3a499de568a (diff) | |
download | pygments-master.tar.gz |
Diffstat (limited to 'tests/test_julia.py')
-rw-r--r-- | tests/test_julia.py | 58 |
1 file changed, 0 insertions, 58 deletions
diff --git a/tests/test_julia.py b/tests/test_julia.py deleted file mode 100644 index eda04b1c..00000000 --- a/tests/test_julia.py +++ /dev/null @@ -1,58 +0,0 @@ -# -*- coding: utf-8 -*- -""" - Julia Tests - ~~~~~~~~~~~ - - :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS. - :license: BSD, see LICENSE for details. -""" - -import unittest - -from pygments.lexers import JuliaLexer -from pygments.token import Token - - -class JuliaTests(unittest.TestCase): - def setUp(self): - self.lexer = JuliaLexer() - - def test_unicode(self): - """ - Test that unicode character, √, in an expression is recognized - """ - fragment = u's = \u221a((1/n) * sum(count .^ 2) - mu .^2)\n' - tokens = [ - (Token.Name, u's'), - (Token.Text, u' '), - (Token.Operator, u'='), - (Token.Text, u' '), - (Token.Operator, u'\u221a'), - (Token.Punctuation, u'('), - (Token.Punctuation, u'('), - (Token.Literal.Number.Integer, u'1'), - (Token.Operator, u'/'), - (Token.Name, u'n'), - (Token.Punctuation, u')'), - (Token.Text, u' '), - (Token.Operator, u'*'), - (Token.Text, u' '), - (Token.Name, u'sum'), - (Token.Punctuation, u'('), - (Token.Name, u'count'), - (Token.Text, u' '), - (Token.Operator, u'.^'), - (Token.Text, u' '), - (Token.Literal.Number.Integer, u'2'), - (Token.Punctuation, u')'), - (Token.Text, u' '), - (Token.Operator, u'-'), - (Token.Text, u' '), - (Token.Name, u'mu'), - (Token.Text, u' '), - (Token.Operator, u'.^'), - (Token.Literal.Number.Integer, u'2'), - (Token.Punctuation, u')'), - (Token.Text, u'\n'), - ] - self.assertEqual(tokens, list(self.lexer.get_tokens(fragment))) |