author     Matthäus G. Chajdas <dev@anteru.net>  2020-09-08 20:33:25 +0200
committer  Matthäus G. Chajdas <dev@anteru.net>  2020-09-08 20:33:25 +0200
commit     203ef1eff6daebab6f95b0b49e6e6a58168073fb (patch)
tree       7defa199f48a34787f980b6400d8bbaa9380039a /tests/test_python.py
parent     e09d4e0cf23d7c6069ddc690942ceb4cd23fd556 (diff)
parent     b2c91c70ee536b0472100d1273818f8bb45529fe (diff)
download   pygments-git-203ef1eff6daebab6f95b0b49e6e6a58168073fb.tar.gz
Merge branch 'master' into bug/angular-html

# Conflicts:
#   tests/test_shell.py
Diffstat (limited to 'tests/test_python.py')
-rw-r--r--  tests/test_python.py | 118
1 file changed, 59 insertions(+), 59 deletions(-)
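
The change below is a mechanical Python 3 cleanup: the redundant u'' string
prefix is dropped from every string literal in the test file. A minimal sketch
of why this is behavior-preserving (this holds on any Python 3 interpreter;
the example strings are taken from the hunks below):

    # In Python 3 the u'' prefix survives only for 2.x compatibility;
    # u'...' and '...' spell the very same str object and compare equal.
    assert u'S = H @ beta' == 'S = H @ beta'
    assert type(u'x') is type('x') is str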
diff --git a/tests/test_python.py b/tests/test_python.py
index 8e53677b..73993936 100644
--- a/tests/test_python.py
+++ b/tests/test_python.py
@@ -61,57 +61,57 @@ def test_needs_name(lexer3):
"""
Tests that '@' is recognized as an Operator
"""
- fragment = u'S = (H @ beta - r).T @ inv(H @ V @ H.T) @ (H @ beta - r)\n'
+ fragment = 'S = (H @ beta - r).T @ inv(H @ V @ H.T) @ (H @ beta - r)\n'
tokens = [
- (Token.Name, u'S'),
- (Token.Text, u' '),
- (Token.Operator, u'='),
- (Token.Text, u' '),
- (Token.Punctuation, u'('),
- (Token.Name, u'H'),
- (Token.Text, u' '),
- (Token.Operator, u'@'),
- (Token.Text, u' '),
- (Token.Name, u'beta'),
- (Token.Text, u' '),
- (Token.Operator, u'-'),
- (Token.Text, u' '),
- (Token.Name, u'r'),
- (Token.Punctuation, u')'),
- (Token.Operator, u'.'),
- (Token.Name, u'T'),
- (Token.Text, u' '),
- (Token.Operator, u'@'),
- (Token.Text, u' '),
- (Token.Name, u'inv'),
- (Token.Punctuation, u'('),
- (Token.Name, u'H'),
- (Token.Text, u' '),
- (Token.Operator, u'@'),
- (Token.Text, u' '),
- (Token.Name, u'V'),
- (Token.Text, u' '),
- (Token.Operator, u'@'),
- (Token.Text, u' '),
- (Token.Name, u'H'),
- (Token.Operator, u'.'),
- (Token.Name, u'T'),
- (Token.Punctuation, u')'),
- (Token.Text, u' '),
- (Token.Operator, u'@'),
- (Token.Text, u' '),
- (Token.Punctuation, u'('),
- (Token.Name, u'H'),
- (Token.Text, u' '),
- (Token.Operator, u'@'),
- (Token.Text, u' '),
- (Token.Name, u'beta'),
- (Token.Text, u' '),
- (Token.Operator, u'-'),
- (Token.Text, u' '),
- (Token.Name, u'r'),
- (Token.Punctuation, u')'),
- (Token.Text, u'\n'),
+ (Token.Name, 'S'),
+ (Token.Text, ' '),
+ (Token.Operator, '='),
+ (Token.Text, ' '),
+ (Token.Punctuation, '('),
+ (Token.Name, 'H'),
+ (Token.Text, ' '),
+ (Token.Operator, '@'),
+ (Token.Text, ' '),
+ (Token.Name, 'beta'),
+ (Token.Text, ' '),
+ (Token.Operator, '-'),
+ (Token.Text, ' '),
+ (Token.Name, 'r'),
+ (Token.Punctuation, ')'),
+ (Token.Operator, '.'),
+ (Token.Name, 'T'),
+ (Token.Text, ' '),
+ (Token.Operator, '@'),
+ (Token.Text, ' '),
+ (Token.Name, 'inv'),
+ (Token.Punctuation, '('),
+ (Token.Name, 'H'),
+ (Token.Text, ' '),
+ (Token.Operator, '@'),
+ (Token.Text, ' '),
+ (Token.Name, 'V'),
+ (Token.Text, ' '),
+ (Token.Operator, '@'),
+ (Token.Text, ' '),
+ (Token.Name, 'H'),
+ (Token.Operator, '.'),
+ (Token.Name, 'T'),
+ (Token.Punctuation, ')'),
+ (Token.Text, ' '),
+ (Token.Operator, '@'),
+ (Token.Text, ' '),
+ (Token.Punctuation, '('),
+ (Token.Name, 'H'),
+ (Token.Text, ' '),
+ (Token.Operator, '@'),
+ (Token.Text, ' '),
+ (Token.Name, 'beta'),
+ (Token.Text, ' '),
+ (Token.Operator, '-'),
+ (Token.Text, ' '),
+ (Token.Name, 'r'),
+ (Token.Punctuation, ')'),
+ (Token.Text, '\n'),
]
assert list(lexer3.get_tokens(fragment)) == tokens
@@ -121,18 +121,18 @@ def test_pep_515(lexer3):
Tests that the lexer can parse numeric literals with underscores
"""
fragments = (
- (Token.Literal.Number.Integer, u'1_000_000'),
- (Token.Literal.Number.Float, u'1_000.000_001'),
- (Token.Literal.Number.Float, u'1_000e1_000j'),
- (Token.Literal.Number.Hex, u'0xCAFE_F00D'),
- (Token.Literal.Number.Bin, u'0b_0011_1111_0100_1110'),
- (Token.Literal.Number.Oct, u'0o_777_123'),
+ (Token.Literal.Number.Integer, '1_000_000'),
+ (Token.Literal.Number.Float, '1_000.000_001'),
+ (Token.Literal.Number.Float, '1_000e1_000j'),
+ (Token.Literal.Number.Hex, '0xCAFE_F00D'),
+ (Token.Literal.Number.Bin, '0b_0011_1111_0100_1110'),
+ (Token.Literal.Number.Oct, '0o_777_123'),
)
for token, fragment in fragments:
tokens = [
(token, fragment),
- (Token.Text, u'\n'),
+ (Token.Text, '\n'),
]
assert list(lexer3.get_tokens(fragment)) == tokens
@@ -141,7 +141,7 @@ def test_walrus_operator(lexer3):
"""
Tests that ':=' is recognized as an Operator
"""
- fragment = u'if (a := 2) > 4:'
+ fragment = 'if (a := 2) > 4:'
tokens = [
(Token.Keyword, 'if'),
(Token.Text, ' '),
@@ -191,7 +191,7 @@ def test_fstring(lexer3):
(Token.Literal.String.Interpol, '}'),
(Token.Literal.String.Single, '.'),
(Token.Literal.String.Single, "'"),
- (Token.Text, u'\n')
+ (Token.Text, '\n')
]
), (
"f'He said his name is {name!r}.'\n",