author     Matthäus G. Chajdas <dev@anteru.net>    2020-09-08 20:33:25 +0200
committer  Matthäus G. Chajdas <dev@anteru.net>    2020-09-08 20:33:25 +0200
commit     203ef1eff6daebab6f95b0b49e6e6a58168073fb (patch)
tree       7defa199f48a34787f980b6400d8bbaa9380039a /tests/test_objectiveclexer.py
parent     e09d4e0cf23d7c6069ddc690942ceb4cd23fd556 (diff)
parent     b2c91c70ee536b0472100d1273818f8bb45529fe (diff)
Merge branch 'master' into bug/angular-html
# Conflicts:
# tests/test_shell.py
Diffstat (limited to 'tests/test_objectiveclexer.py')
-rw-r--r--   tests/test_objectiveclexer.py   84
1 file changed, 42 insertions(+), 42 deletions(-)
diff --git a/tests/test_objectiveclexer.py b/tests/test_objectiveclexer.py
index 31f833cf..7264bad8 100644
--- a/tests/test_objectiveclexer.py
+++ b/tests/test_objectiveclexer.py
@@ -19,78 +19,78 @@ def lexer():
 
 
 def test_literal_number_int(lexer):
-    fragment = u'@(1);\n'
+    fragment = '@(1);\n'
     expected = [
-        (Token.Literal, u'@('),
-        (Token.Literal.Number.Integer, u'1'),
-        (Token.Literal, u')'),
-        (Token.Punctuation, u';'),
-        (Token.Text, u'\n'),
+        (Token.Literal, '@('),
+        (Token.Literal.Number.Integer, '1'),
+        (Token.Literal, ')'),
+        (Token.Punctuation, ';'),
+        (Token.Text, '\n'),
     ]
     assert list(lexer.get_tokens(fragment)) == expected
 
 
 def test_literal_number_expression(lexer):
-    fragment = u'@(1+2);\n'
+    fragment = '@(1+2);\n'
     expected = [
-        (Token.Literal, u'@('),
-        (Token.Literal.Number.Integer, u'1'),
-        (Token.Operator, u'+'),
-        (Token.Literal.Number.Integer, u'2'),
-        (Token.Literal, u')'),
-        (Token.Punctuation, u';'),
-        (Token.Text, u'\n'),
+        (Token.Literal, '@('),
+        (Token.Literal.Number.Integer, '1'),
+        (Token.Operator, '+'),
+        (Token.Literal.Number.Integer, '2'),
+        (Token.Literal, ')'),
+        (Token.Punctuation, ';'),
+        (Token.Text, '\n'),
     ]
     assert list(lexer.get_tokens(fragment)) == expected
 
 
 def test_literal_number_nested_expression(lexer):
-    fragment = u'@(1+(2+3));\n'
+    fragment = '@(1+(2+3));\n'
    expected = [
-        (Token.Literal, u'@('),
-        (Token.Literal.Number.Integer, u'1'),
-        (Token.Operator, u'+'),
-        (Token.Punctuation, u'('),
-        (Token.Literal.Number.Integer, u'2'),
-        (Token.Operator, u'+'),
-        (Token.Literal.Number.Integer, u'3'),
-        (Token.Punctuation, u')'),
-        (Token.Literal, u')'),
-        (Token.Punctuation, u';'),
-        (Token.Text, u'\n'),
+        (Token.Literal, '@('),
+        (Token.Literal.Number.Integer, '1'),
+        (Token.Operator, '+'),
+        (Token.Punctuation, '('),
+        (Token.Literal.Number.Integer, '2'),
+        (Token.Operator, '+'),
+        (Token.Literal.Number.Integer, '3'),
+        (Token.Punctuation, ')'),
+        (Token.Literal, ')'),
+        (Token.Punctuation, ';'),
+        (Token.Text, '\n'),
     ]
     assert list(lexer.get_tokens(fragment)) == expected
 
 
 def test_literal_number_bool(lexer):
-    fragment = u'@NO;\n'
+    fragment = '@NO;\n'
     expected = [
-        (Token.Literal.Number, u'@NO'),
-        (Token.Punctuation, u';'),
-        (Token.Text, u'\n'),
+        (Token.Literal.Number, '@NO'),
+        (Token.Punctuation, ';'),
+        (Token.Text, '\n'),
     ]
     assert list(lexer.get_tokens(fragment)) == expected
 
 
 def test_literal_number_bool_expression(lexer):
-    fragment = u'@(YES);\n'
+    fragment = '@(YES);\n'
     expected = [
-        (Token.Literal, u'@('),
-        (Token.Name.Builtin, u'YES'),
-        (Token.Literal, u')'),
-        (Token.Punctuation, u';'),
-        (Token.Text, u'\n'),
+        (Token.Literal, '@('),
+        (Token.Name.Builtin, 'YES'),
+        (Token.Literal, ')'),
+        (Token.Punctuation, ';'),
+        (Token.Text, '\n'),
     ]
     assert list(lexer.get_tokens(fragment)) == expected
 
 
 def test_module_import(lexer):
-    fragment = u'@import ModuleA;\n'
+    fragment = '@import ModuleA;\n'
     expected = [
-        (Token.Keyword, u'@import'),
-        (Token.Text, u' '),
-        (Token.Name, u'ModuleA'),
-        (Token.Punctuation, u';'),
-        (Token.Text, u'\n'),
+        (Token.Keyword, '@import'),
+        (Token.Text, ' '),
+        (Token.Name, 'ModuleA'),
+        (Token.Punctuation, ';'),
+        (Token.Text, '\n'),
     ]
     assert list(lexer.get_tokens(fragment)) == expected
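
The change is mechanical: the redundant u'' string prefixes are dropped from the test fixtures, since on Python 3 string literals are unicode by default and the prefix is a no-op. Below is a minimal standalone sketch of what these tests exercise; it is not part of the diff and assumes Pygments is installed, using its public ObjectiveCLexer and Token APIs with one of the token streams taken verbatim from the tests above.

# On Python 3 the u'' prefix is accepted but redundant, so the rewritten
# fixtures are byte-for-byte equal to the originals at runtime.
assert u'@(1);\n' == '@(1);\n'

# A pared-down version of test_literal_number_int, run outside pytest.
from pygments.token import Token
from pygments.lexers import ObjectiveCLexer

lexer = ObjectiveCLexer()
fragment = '@(1);\n'
expected = [
    (Token.Literal, '@('),
    (Token.Literal.Number.Integer, '1'),
    (Token.Literal, ')'),
    (Token.Punctuation, ';'),
    (Token.Text, '\n'),
]
# get_tokens() yields (token_type, value) pairs; the expected stream here
# matches the lexer's behaviour as of the Pygments revision in this commit.
assert list(lexer.get_tokens(fragment)) == expected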