Diffstat (limited to 'tests/test_julia.py')
-rw-r--r--  tests/test_julia.py  64
1 file changed, 32 insertions(+), 32 deletions(-)
diff --git a/tests/test_julia.py b/tests/test_julia.py
index e041377b..14bcdee0 100644
--- a/tests/test_julia.py
+++ b/tests/test_julia.py
@@ -22,38 +22,38 @@ def test_unicode(lexer):
"""
Test that unicode character, √, in an expression is recognized
"""
- fragment = u's = \u221a((1/n) * sum(count .^ 2) - mu .^2)\n'
+ fragment = 's = \u221a((1/n) * sum(count .^ 2) - mu .^2)\n'
tokens = [
- (Token.Name, u's'),
- (Token.Text, u' '),
- (Token.Operator, u'='),
- (Token.Text, u' '),
- (Token.Operator, u'\u221a'),
- (Token.Punctuation, u'('),
- (Token.Punctuation, u'('),
- (Token.Literal.Number.Integer, u'1'),
- (Token.Operator, u'/'),
- (Token.Name, u'n'),
- (Token.Punctuation, u')'),
- (Token.Text, u' '),
- (Token.Operator, u'*'),
- (Token.Text, u' '),
- (Token.Name, u'sum'),
- (Token.Punctuation, u'('),
- (Token.Name, u'count'),
- (Token.Text, u' '),
- (Token.Operator, u'.^'),
- (Token.Text, u' '),
- (Token.Literal.Number.Integer, u'2'),
- (Token.Punctuation, u')'),
- (Token.Text, u' '),
- (Token.Operator, u'-'),
- (Token.Text, u' '),
- (Token.Name, u'mu'),
- (Token.Text, u' '),
- (Token.Operator, u'.^'),
- (Token.Literal.Number.Integer, u'2'),
- (Token.Punctuation, u')'),
- (Token.Text, u'\n'),
+ (Token.Name, 's'),
+ (Token.Text, ' '),
+ (Token.Operator, '='),
+ (Token.Text, ' '),
+ (Token.Operator, '\u221a'),
+ (Token.Punctuation, '('),
+ (Token.Punctuation, '('),
+ (Token.Literal.Number.Integer, '1'),
+ (Token.Operator, '/'),
+ (Token.Name, 'n'),
+ (Token.Punctuation, ')'),
+ (Token.Text, ' '),
+ (Token.Operator, '*'),
+ (Token.Text, ' '),
+ (Token.Name, 'sum'),
+ (Token.Punctuation, '('),
+ (Token.Name, 'count'),
+ (Token.Text, ' '),
+ (Token.Operator, '.^'),
+ (Token.Text, ' '),
+ (Token.Literal.Number.Integer, '2'),
+ (Token.Punctuation, ')'),
+ (Token.Text, ' '),
+ (Token.Operator, '-'),
+ (Token.Text, ' '),
+ (Token.Name, 'mu'),
+ (Token.Text, ' '),
+ (Token.Operator, '.^'),
+ (Token.Literal.Number.Integer, '2'),
+ (Token.Punctuation, ')'),
+ (Token.Text, '\n'),
]
assert list(lexer.get_tokens(fragment)) == tokens
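For context, the `lexer` argument in the test above is a pytest fixture, and `get_tokens()` is the standard Pygments lexer API. Below is a minimal sketch of how the fixture is typically wired up and how the same fragment can be tokenized outside pytest; the fixture name and module scope are assumptions, while `JuliaLexer` and `get_tokens()` are the real Pygments calls used in the diff.

    # Sketch: fixture wiring assumed; JuliaLexer/get_tokens are real Pygments API.
    import pytest
    from pygments.lexers import JuliaLexer

    @pytest.fixture(scope='module')
    def lexer():
        yield JuliaLexer()

    # Standalone usage: inspect the token stream for the same fragment.
    if __name__ == '__main__':
        fragment = 's = \u221a((1/n) * sum(count .^ 2) - mu .^2)\n'
        for token_type, value in JuliaLexer().get_tokens(fragment):
            print(token_type, repr(value))

Running the standalone block prints each (token type, value) pair, which is the same sequence the test compares against its expected `tokens` list.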