Diffstat (limited to 'tests/test_data.py')
-rw-r--r--  tests/test_data.py  102
1 file changed, 51 insertions(+), 51 deletions(-)
diff --git a/tests/test_data.py b/tests/test_data.py
index 9724d235..22d4ee79 100644
--- a/tests/test_data.py
+++ b/tests/test_data.py
@@ -29,29 +29,29 @@ def lexer_yaml():
 def test_basic_json(lexer_json):
-    fragment = u'{"foo": "bar", "foo2": [1, 2, 3]}\n'
+    fragment = '{"foo": "bar", "foo2": [1, 2, 3]}\n'
     tokens = [
-        (Token.Punctuation, u'{'),
-        (Token.Name.Tag, u'"foo"'),
-        (Token.Punctuation, u':'),
-        (Token.Text, u' '),
-        (Token.Literal.String.Double, u'"bar"'),
-        (Token.Punctuation, u','),
-        (Token.Text, u' '),
-        (Token.Name.Tag, u'"foo2"'),
-        (Token.Punctuation, u':'),
-        (Token.Text, u' '),
-        (Token.Punctuation, u'['),
-        (Token.Literal.Number.Integer, u'1'),
-        (Token.Punctuation, u','),
-        (Token.Text, u' '),
-        (Token.Literal.Number.Integer, u'2'),
-        (Token.Punctuation, u','),
-        (Token.Text, u' '),
-        (Token.Literal.Number.Integer, u'3'),
-        (Token.Punctuation, u']'),
-        (Token.Punctuation, u'}'),
-        (Token.Text, u'\n'),
+        (Token.Punctuation, '{'),
+        (Token.Name.Tag, '"foo"'),
+        (Token.Punctuation, ':'),
+        (Token.Text, ' '),
+        (Token.Literal.String.Double, '"bar"'),
+        (Token.Punctuation, ','),
+        (Token.Text, ' '),
+        (Token.Name.Tag, '"foo2"'),
+        (Token.Punctuation, ':'),
+        (Token.Text, ' '),
+        (Token.Punctuation, '['),
+        (Token.Literal.Number.Integer, '1'),
+        (Token.Punctuation, ','),
+        (Token.Text, ' '),
+        (Token.Literal.Number.Integer, '2'),
+        (Token.Punctuation, ','),
+        (Token.Text, ' '),
+        (Token.Literal.Number.Integer, '3'),
+        (Token.Punctuation, ']'),
+        (Token.Punctuation, '}'),
+        (Token.Text, '\n'),
     ]
     assert list(lexer_json.get_tokens(fragment)) == tokens
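Aside: the assertion pattern in this hunk is easy to reproduce outside the test suite. A minimal sketch, assuming the lexer_json fixture (defined above this hunk) simply returns pygments.lexers.JsonLexer(), and that you run it against the Pygments version this commit targets:

from pygments.lexers import JsonLexer
from pygments.token import Token

# get_tokens() yields (token_type, value) pairs and always emits a
# trailing newline token, which is why each expected list ends with '\n'.
tokens = list(JsonLexer().get_tokens('{"foo": "bar", "foo2": [1, 2, 3]}\n'))

assert tokens[0] == (Token.Punctuation, '{')
assert tokens[1] == (Token.Name.Tag, '"foo"')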
@@ -62,7 +62,7 @@ def test_json_escape_backtracking(lexer_json):
     # this test will hang and that's how we know it's broken :(
fragment = r'{"\u00D0000\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\63CD'
     tokens = (
-        [(Token.Punctuation, u'{'),
+        [(Token.Punctuation, '{'),
          (Token.Error, r'"'),
          (Token.Error, '\\'),
          (Token.Error, r'u'),
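The comment in this hunk leans on the test runner visibly hanging if the escape-sequence handling ever regresses to catastrophic backtracking. A hedged sketch of how such a hang could be converted into a fast failure instead; this is a hypothetical helper, not part of this change, and Unix-only because it relies on SIGALRM:

import signal
from pygments.lexers import JsonLexer

def tokens_with_deadline(fragment, seconds=10):
    # Hypothetical guard: if lexing backtracks catastrophically, the
    # alarm fires and we fail fast rather than hanging the test run.
    def on_timeout(signum, frame):
        raise TimeoutError('lexing exceeded deadline; likely backtracking')
    previous = signal.signal(signal.SIGALRM, on_timeout)
    signal.alarm(seconds)
    try:
        return list(JsonLexer().get_tokens(fragment))
    finally:
        signal.alarm(0)
        signal.signal(signal.SIGALRM, previous)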
@@ -87,27 +87,27 @@ def test_json_escape_backtracking(lexer_json):
 def test_basic_bare(lexer_bare):
     # This is the same as testBasic for JsonLexer above, except the
     # enclosing curly braces are removed.
-    fragment = u'"foo": "bar", "foo2": [1, 2, 3]\n'
+    fragment = '"foo": "bar", "foo2": [1, 2, 3]\n'
     tokens = [
-        (Token.Name.Tag, u'"foo"'),
-        (Token.Punctuation, u':'),
-        (Token.Text, u' '),
-        (Token.Literal.String.Double, u'"bar"'),
-        (Token.Punctuation, u','),
-        (Token.Text, u' '),
-        (Token.Name.Tag, u'"foo2"'),
-        (Token.Punctuation, u':'),
-        (Token.Text, u' '),
-        (Token.Punctuation, u'['),
-        (Token.Literal.Number.Integer, u'1'),
-        (Token.Punctuation, u','),
-        (Token.Text, u' '),
-        (Token.Literal.Number.Integer, u'2'),
-        (Token.Punctuation, u','),
-        (Token.Text, u' '),
-        (Token.Literal.Number.Integer, u'3'),
-        (Token.Punctuation, u']'),
-        (Token.Text, u'\n'),
+        (Token.Name.Tag, '"foo"'),
+        (Token.Punctuation, ':'),
+        (Token.Text, ' '),
+        (Token.Literal.String.Double, '"bar"'),
+        (Token.Punctuation, ','),
+        (Token.Text, ' '),
+        (Token.Name.Tag, '"foo2"'),
+        (Token.Punctuation, ':'),
+        (Token.Text, ' '),
+        (Token.Punctuation, '['),
+        (Token.Literal.Number.Integer, '1'),
+        (Token.Punctuation, ','),
+        (Token.Text, ' '),
+        (Token.Literal.Number.Integer, '2'),
+        (Token.Punctuation, ','),
+        (Token.Text, ' '),
+        (Token.Literal.Number.Integer, '3'),
+        (Token.Punctuation, ']'),
+        (Token.Text, '\n'),
     ]
     assert list(lexer_bare.get_tokens(fragment)) == tokens
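As with the JSON test, this hunk's expectations can be checked standalone. A minimal sketch, assuming lexer_bare wraps pygments.lexers.JsonBareObjectLexer (the fixture definition sits outside this diff):

from pygments.lexers import JsonBareObjectLexer
from pygments.token import Token

# Same content as the JSON fragment but with no enclosing braces, so
# no '{' or '}' punctuation tokens should appear in the stream.
tokens = list(JsonBareObjectLexer().get_tokens('"foo": "bar", "foo2": [1, 2, 3]\n'))

assert tokens[0] == (Token.Name.Tag, '"foo"')
assert (Token.Punctuation, '{') not in tokens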
@@ -139,14 +139,14 @@ def test_closing_curly_in_value(lexer_bare):
 def test_yaml(lexer_yaml):
     # Bug #1528: This previously parsed 'token # innocent' as a tag
-    fragment = u'here: token # innocent: comment\n'
+    fragment = 'here: token # innocent: comment\n'
     tokens = [
-        (Token.Name.Tag, u'here'),
-        (Token.Punctuation, u':'),
-        (Token.Text, u' '),
-        (Token.Literal.Scalar.Plain, u'token'),
-        (Token.Text, u' '),
-        (Token.Comment.Single, u'# innocent: comment'),
-        (Token.Text, u'\n'),
+        (Token.Name.Tag, 'here'),
+        (Token.Punctuation, ':'),
+        (Token.Text, ' '),
+        (Token.Literal.Scalar.Plain, 'token'),
+        (Token.Text, ' '),
+        (Token.Comment.Single, '# innocent: comment'),
+        (Token.Text, '\n'),
     ]
     assert list(lexer_yaml.get_tokens(fragment)) == tokens
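The YAML regression above is likewise easy to probe directly. A minimal sketch, assuming lexer_yaml wraps pygments.lexers.YamlLexer:

from pygments.lexers import YamlLexer
from pygments.token import Token

# Probe for bug #1528: the trailing comment must surface as one
# Comment.Single token instead of being parsed as a second tag.
tokens = list(YamlLexer().get_tokens('here: token # innocent: comment\n'))

assert (Token.Comment.Single, '# innocent: comment') in tokens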