author    Matthäus G. Chajdas <dev@anteru.net>  2020-09-08 20:33:25 +0200
committer Matthäus G. Chajdas <dev@anteru.net>  2020-09-08 20:33:25 +0200
commit    203ef1eff6daebab6f95b0b49e6e6a58168073fb (patch)
tree      7defa199f48a34787f980b6400d8bbaa9380039a /tests/test_data.py
parent    e09d4e0cf23d7c6069ddc690942ceb4cd23fd556 (diff)
parent    b2c91c70ee536b0472100d1273818f8bb45529fe (diff)
Merge branch 'master' into bug/angular-html

# Conflicts:
#	tests/test_shell.py
Diffstat (limited to 'tests/test_data.py')
-rw-r--r--  tests/test_data.py  102
1 file changed, 51 insertions(+), 51 deletions(-)
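The tests below all follow one pattern: feed a fragment to a Pygments lexer's get_tokens() and compare the resulting (token type, value) pairs against an expected list. A minimal standalone sketch of that pattern, assuming only an installed Pygments (the fragment is illustrative, not taken from the suite):

    from pygments.lexers import JsonLexer
    from pygments.token import Token

    lexer = JsonLexer()
    # get_tokens() yields (token_type, value) pairs covering the whole input,
    # including the trailing newline Pygments normalizes onto the fragment.
    tokens = list(lexer.get_tokens('{"foo": "bar"}\n'))
    assert tokens[0] == (Token.Punctuation, '{')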
diff --git a/tests/test_data.py b/tests/test_data.py
index 9724d235..22d4ee79 100644
--- a/tests/test_data.py
+++ b/tests/test_data.py
@@ -29,29 +29,29 @@ def lexer_yaml():
 def test_basic_json(lexer_json):
-    fragment = u'{"foo": "bar", "foo2": [1, 2, 3]}\n'
+    fragment = '{"foo": "bar", "foo2": [1, 2, 3]}\n'
     tokens = [
-        (Token.Punctuation, u'{'),
-        (Token.Name.Tag, u'"foo"'),
-        (Token.Punctuation, u':'),
-        (Token.Text, u' '),
-        (Token.Literal.String.Double, u'"bar"'),
-        (Token.Punctuation, u','),
-        (Token.Text, u' '),
-        (Token.Name.Tag, u'"foo2"'),
-        (Token.Punctuation, u':'),
-        (Token.Text, u' '),
-        (Token.Punctuation, u'['),
-        (Token.Literal.Number.Integer, u'1'),
-        (Token.Punctuation, u','),
-        (Token.Text, u' '),
-        (Token.Literal.Number.Integer, u'2'),
-        (Token.Punctuation, u','),
-        (Token.Text, u' '),
-        (Token.Literal.Number.Integer, u'3'),
-        (Token.Punctuation, u']'),
-        (Token.Punctuation, u'}'),
-        (Token.Text, u'\n'),
+        (Token.Punctuation, '{'),
+        (Token.Name.Tag, '"foo"'),
+        (Token.Punctuation, ':'),
+        (Token.Text, ' '),
+        (Token.Literal.String.Double, '"bar"'),
+        (Token.Punctuation, ','),
+        (Token.Text, ' '),
+        (Token.Name.Tag, '"foo2"'),
+        (Token.Punctuation, ':'),
+        (Token.Text, ' '),
+        (Token.Punctuation, '['),
+        (Token.Literal.Number.Integer, '1'),
+        (Token.Punctuation, ','),
+        (Token.Text, ' '),
+        (Token.Literal.Number.Integer, '2'),
+        (Token.Punctuation, ','),
+        (Token.Text, ' '),
+        (Token.Literal.Number.Integer, '3'),
+        (Token.Punctuation, ']'),
+        (Token.Punctuation, '}'),
+        (Token.Text, '\n'),
     ]
     assert list(lexer_json.get_tokens(fragment)) == tokens
@@ -62,7 +62,7 @@ def test_json_escape_backtracking(lexer_json):
     # this test will hang and that's how we know it's broken :(
     fragment = r'{"\u00D0000\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\63CD'
     tokens = (
-        [(Token.Punctuation, u'{'),
+        [(Token.Punctuation, '{'),
          (Token.Error, r'"'),
          (Token.Error, '\\'),
          (Token.Error, r'u'),
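The hunk above is from test_json_escape_backtracking, which guards against catastrophic regex backtracking: a malformed \u escape followed by a long run of backslashes. As the comment notes, a regression hangs rather than fails. A sketch of the same idea with an explicit time budget (the 1-second limit and the shortened fragment are illustrative assumptions, not part of the suite):

    import time
    from pygments.lexers import JsonLexer

    # A malformed \u escape followed by many backslashes is a classic
    # catastrophic-backtracking trigger; a healthy lexer finishes instantly.
    fragment = r'{"\u00D0000' + '\\' * 200 + '63CD'
    start = time.monotonic()
    list(JsonLexer().get_tokens(fragment))
    assert time.monotonic() - start < 1.0  # would hang/fail on a regression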
@@ -87,27 +87,27 @@ def test_json_escape_backtracking(lexer_json):
 def test_basic_bare(lexer_bare):
     # This is the same as testBasic for JsonLexer above, except the
     # enclosing curly braces are removed.
-    fragment = u'"foo": "bar", "foo2": [1, 2, 3]\n'
+    fragment = '"foo": "bar", "foo2": [1, 2, 3]\n'
     tokens = [
-        (Token.Name.Tag, u'"foo"'),
-        (Token.Punctuation, u':'),
-        (Token.Text, u' '),
-        (Token.Literal.String.Double, u'"bar"'),
-        (Token.Punctuation, u','),
-        (Token.Text, u' '),
-        (Token.Name.Tag, u'"foo2"'),
-        (Token.Punctuation, u':'),
-        (Token.Text, u' '),
-        (Token.Punctuation, u'['),
-        (Token.Literal.Number.Integer, u'1'),
-        (Token.Punctuation, u','),
-        (Token.Text, u' '),
-        (Token.Literal.Number.Integer, u'2'),
-        (Token.Punctuation, u','),
-        (Token.Text, u' '),
-        (Token.Literal.Number.Integer, u'3'),
-        (Token.Punctuation, u']'),
-        (Token.Text, u'\n'),
+        (Token.Name.Tag, '"foo"'),
+        (Token.Punctuation, ':'),
+        (Token.Text, ' '),
+        (Token.Literal.String.Double, '"bar"'),
+        (Token.Punctuation, ','),
+        (Token.Text, ' '),
+        (Token.Name.Tag, '"foo2"'),
+        (Token.Punctuation, ':'),
+        (Token.Text, ' '),
+        (Token.Punctuation, '['),
+        (Token.Literal.Number.Integer, '1'),
+        (Token.Punctuation, ','),
+        (Token.Text, ' '),
+        (Token.Literal.Number.Integer, '2'),
+        (Token.Punctuation, ','),
+        (Token.Text, ' '),
+        (Token.Literal.Number.Integer, '3'),
+        (Token.Punctuation, ']'),
+        (Token.Text, '\n'),
     ]
     assert list(lexer_bare.get_tokens(fragment)) == tokens
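test_basic_bare drives the lexer_bare fixture, which presumably wraps Pygments' JsonBareObjectLexer: the variant that accepts key/value pairs without the enclosing braces. A small sketch of that lexer, hedged on the exact token types it emits:

    from pygments.lexers import JsonBareObjectLexer

    # Same fragment shape as the test, minus the braces; inspect the stream.
    for ttype, value in JsonBareObjectLexer().get_tokens('"foo": "bar"\n'):
        print(ttype, repr(value))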
@@ -139,14 +139,14 @@ def test_closing_curly_in_value(lexer_bare):
 def test_yaml(lexer_yaml):
     # Bug #1528: This previously parsed 'token # innocent' as a tag
-    fragment = u'here: token # innocent: comment\n'
+    fragment = 'here: token # innocent: comment\n'
     tokens = [
-        (Token.Name.Tag, u'here'),
-        (Token.Punctuation, u':'),
-        (Token.Text, u' '),
-        (Token.Literal.Scalar.Plain, u'token'),
-        (Token.Text, u' '),
-        (Token.Comment.Single, u'# innocent: comment'),
-        (Token.Text, u'\n'),
+        (Token.Name.Tag, 'here'),
+        (Token.Punctuation, ':'),
+        (Token.Text, ' '),
+        (Token.Literal.Scalar.Plain, 'token'),
+        (Token.Text, ' '),
+        (Token.Comment.Single, '# innocent: comment'),
+        (Token.Text, '\n'),
     ]
     assert list(lexer_yaml.get_tokens(fragment)) == tokens
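Every change in this diff is mechanical: in Python 3 the u prefix on a string literal is a no-op kept only for Python 2 compatibility, so removing it cannot alter what any test asserts. A quick self-contained check:

    # u'...' and '...' denote the same str type in Python 3 and compare
    # equal, so every expected token value is unchanged by this diff.
    assert u'"foo"' == '"foo"'
    assert type(u'here') is str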