summaryrefslogtreecommitdiff
path: root/tests/test_textfmts.py
diff options
context:
space:
mode:
authorMatthäus G. Chajdas <dev@anteru.net>2020-09-08 20:33:25 +0200
committerMatthäus G. Chajdas <dev@anteru.net>2020-09-08 20:33:25 +0200
commit203ef1eff6daebab6f95b0b49e6e6a58168073fb (patch)
tree7defa199f48a34787f980b6400d8bbaa9380039a /tests/test_textfmts.py
parente09d4e0cf23d7c6069ddc690942ceb4cd23fd556 (diff)
parentb2c91c70ee536b0472100d1273818f8bb45529fe (diff)
downloadpygments-git-bug/angular-html.tar.gz
Merge branch 'master' into bug/angular-htmlbug/angular-html
# Conflicts:
#	tests/test_shell.py
Diffstat (limited to 'tests/test_textfmts.py')
-rw-r--r--	tests/test_textfmts.py	64
1 files changed, 32 insertions, 32 deletions
diff --git a/tests/test_textfmts.py b/tests/test_textfmts.py
index f4ce9b33..b8e25b51 100644
--- a/tests/test_textfmts.py
+++ b/tests/test_textfmts.py
@@ -19,62 +19,62 @@ def lexer():
def test_http_status_line(lexer):
- fragment = u'HTTP/1.1 200 OK\n'
+ fragment = 'HTTP/1.1 200 OK\n'
tokens = [
- (Token.Keyword.Reserved, u'HTTP'),
- (Token.Operator, u'/'),
- (Token.Number, u'1.1'),
- (Token.Text, u' '),
- (Token.Number, u'200'),
- (Token.Text, u' '),
- (Token.Name.Exception, u'OK'),
- (Token.Text, u'\n'),
+ (Token.Keyword.Reserved, 'HTTP'),
+ (Token.Operator, '/'),
+ (Token.Number, '1.1'),
+ (Token.Text, ' '),
+ (Token.Number, '200'),
+ (Token.Text, ' '),
+ (Token.Name.Exception, 'OK'),
+ (Token.Text, '\n'),
]
assert list(lexer.get_tokens(fragment)) == tokens
def test_http_status_line_without_reason_phrase(lexer):
- fragment = u'HTTP/1.1 200\n'
+ fragment = 'HTTP/1.1 200\n'
tokens = [
- (Token.Keyword.Reserved, u'HTTP'),
- (Token.Operator, u'/'),
- (Token.Number, u'1.1'),
- (Token.Text, u' '),
- (Token.Number, u'200'),
- (Token.Text, u'\n'),
+ (Token.Keyword.Reserved, 'HTTP'),
+ (Token.Operator, '/'),
+ (Token.Number, '1.1'),
+ (Token.Text, ' '),
+ (Token.Number, '200'),
+ (Token.Text, '\n'),
]
assert list(lexer.get_tokens(fragment)) == tokens
def test_http_status_line_without_reason_phrase_rfc_7230(lexer):
- fragment = u'HTTP/1.1 200 \n'
+ fragment = 'HTTP/1.1 200 \n'
tokens = [
- (Token.Keyword.Reserved, u'HTTP'),
- (Token.Operator, u'/'),
- (Token.Number, u'1.1'),
- (Token.Text, u' '),
- (Token.Number, u'200'),
- (Token.Text, u' '),
- (Token.Text, u'\n'),
+ (Token.Keyword.Reserved, 'HTTP'),
+ (Token.Operator, '/'),
+ (Token.Number, '1.1'),
+ (Token.Text, ' '),
+ (Token.Number, '200'),
+ (Token.Text, ' '),
+ (Token.Text, '\n'),
]
assert list(lexer.get_tokens(fragment)) == tokens
def test_application_xml(lexer):
- fragment = u'GET / HTTP/1.0\nContent-Type: application/xml\n\n<foo>\n'
+ fragment = 'GET / HTTP/1.0\nContent-Type: application/xml\n\n<foo>\n'
tokens = [
- (Token.Name.Tag, u'<foo'),
- (Token.Name.Tag, u'>'),
- (Token.Text, u'\n'),
+ (Token.Name.Tag, '<foo'),
+ (Token.Name.Tag, '>'),
+ (Token.Text, '\n'),
]
assert list(lexer.get_tokens(fragment))[-len(tokens):] == tokens
def test_application_calendar_xml(lexer):
- fragment = u'GET / HTTP/1.0\nContent-Type: application/calendar+xml\n\n<foo>\n'
+ fragment = 'GET / HTTP/1.0\nContent-Type: application/calendar+xml\n\n<foo>\n'
tokens = [
- (Token.Name.Tag, u'<foo'),
- (Token.Name.Tag, u'>'),
- (Token.Text, u'\n'),
+ (Token.Name.Tag, '<foo'),
+ (Token.Name.Tag, '>'),
+ (Token.Text, '\n'),
]
assert list(lexer.get_tokens(fragment))[-len(tokens):] == tokens