diff options
author | Matthäus G. Chajdas <dev@anteru.net> | 2020-09-08 20:33:25 +0200 |
---|---|---|
committer | Matthäus G. Chajdas <dev@anteru.net> | 2020-09-08 20:33:25 +0200 |
commit | 203ef1eff6daebab6f95b0b49e6e6a58168073fb (patch) | |
tree | 7defa199f48a34787f980b6400d8bbaa9380039a /tests/test_julia.py | |
parent | e09d4e0cf23d7c6069ddc690942ceb4cd23fd556 (diff) | |
parent | b2c91c70ee536b0472100d1273818f8bb45529fe (diff) | |
download | pygments-git-bug/angular-html.tar.gz |
Merge branch 'master' into bug/angular-htmlbug/angular-html
# Conflicts:
# tests/test_shell.py
Diffstat (limited to 'tests/test_julia.py')
-rw-r--r-- | tests/test_julia.py | 64 |
1 file changed, 32 insertions, 32 deletions
diff --git a/tests/test_julia.py b/tests/test_julia.py index e041377b..14bcdee0 100644 --- a/tests/test_julia.py +++ b/tests/test_julia.py @@ -22,38 +22,38 @@ def test_unicode(lexer): """ Test that unicode character, √, in an expression is recognized """ - fragment = u's = \u221a((1/n) * sum(count .^ 2) - mu .^2)\n' + fragment = 's = \u221a((1/n) * sum(count .^ 2) - mu .^2)\n' tokens = [ - (Token.Name, u's'), - (Token.Text, u' '), - (Token.Operator, u'='), - (Token.Text, u' '), - (Token.Operator, u'\u221a'), - (Token.Punctuation, u'('), - (Token.Punctuation, u'('), - (Token.Literal.Number.Integer, u'1'), - (Token.Operator, u'/'), - (Token.Name, u'n'), - (Token.Punctuation, u')'), - (Token.Text, u' '), - (Token.Operator, u'*'), - (Token.Text, u' '), - (Token.Name, u'sum'), - (Token.Punctuation, u'('), - (Token.Name, u'count'), - (Token.Text, u' '), - (Token.Operator, u'.^'), - (Token.Text, u' '), - (Token.Literal.Number.Integer, u'2'), - (Token.Punctuation, u')'), - (Token.Text, u' '), - (Token.Operator, u'-'), - (Token.Text, u' '), - (Token.Name, u'mu'), - (Token.Text, u' '), - (Token.Operator, u'.^'), - (Token.Literal.Number.Integer, u'2'), - (Token.Punctuation, u')'), - (Token.Text, u'\n'), + (Token.Name, 's'), + (Token.Text, ' '), + (Token.Operator, '='), + (Token.Text, ' '), + (Token.Operator, '\u221a'), + (Token.Punctuation, '('), + (Token.Punctuation, '('), + (Token.Literal.Number.Integer, '1'), + (Token.Operator, '/'), + (Token.Name, 'n'), + (Token.Punctuation, ')'), + (Token.Text, ' '), + (Token.Operator, '*'), + (Token.Text, ' '), + (Token.Name, 'sum'), + (Token.Punctuation, '('), + (Token.Name, 'count'), + (Token.Text, ' '), + (Token.Operator, '.^'), + (Token.Text, ' '), + (Token.Literal.Number.Integer, '2'), + (Token.Punctuation, ')'), + (Token.Text, ' '), + (Token.Operator, '-'), + (Token.Text, ' '), + (Token.Name, 'mu'), + (Token.Text, ' '), + (Token.Operator, '.^'), + (Token.Literal.Number.Integer, '2'), + (Token.Punctuation, ')'), + (Token.Text, 
'\n'), ] assert list(lexer.get_tokens(fragment)) == tokens |