author     Matthäus G. Chajdas <dev@anteru.net>  2020-09-08 20:33:25 +0200
committer  Matthäus G. Chajdas <dev@anteru.net>  2020-09-08 20:33:25 +0200
commit     203ef1eff6daebab6f95b0b49e6e6a58168073fb (patch)
tree       7defa199f48a34787f980b6400d8bbaa9380039a /tests/test_ruby.py
parent     e09d4e0cf23d7c6069ddc690942ceb4cd23fd556 (diff)
parent     b2c91c70ee536b0472100d1273818f8bb45529fe (diff)
download   pygments-git-203ef1eff6daebab6f95b0b49e6e6a58168073fb.tar.gz
Merge branch 'master' into bug/angular-html

# Conflicts:
#   tests/test_shell.py
Diffstat (limited to 'tests/test_ruby.py')
-rw-r--r--  tests/test_ruby.py  186
1 file changed, 93 insertions(+), 93 deletions(-)
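The diff below is a mechanical cleanup: it strips the u'' prefix from string literals, which has been redundant since Python 3, where every str literal is Unicode. A minimal standalone sketch of the equivalence (not part of the test suite itself):

    # In Python 3 the u prefix is a no-op kept for backward compatibility:
    # both spellings denote the same str value, so the tests are unchanged.
    assert u'1..3\n' == '1..3\n'
    assert type(u'1..3\n') is type('1..3\n') is str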
diff --git a/tests/test_ruby.py b/tests/test_ruby.py
index 86a9ee77..24e3bef1 100644
--- a/tests/test_ruby.py
+++ b/tests/test_ruby.py
@@ -19,131 +19,131 @@ def lexer():
 def test_range_syntax1(lexer):
-    fragment = u'1..3\n'
+    fragment = '1..3\n'
     tokens = [
-        (Number.Integer, u'1'),
-        (Operator, u'..'),
-        (Number.Integer, u'3'),
-        (Text, u'\n'),
+        (Number.Integer, '1'),
+        (Operator, '..'),
+        (Number.Integer, '3'),
+        (Text, '\n'),
     ]
     assert list(lexer.get_tokens(fragment)) == tokens


 def test_range_syntax2(lexer):
-    fragment = u'1...3\n'
+    fragment = '1...3\n'
     tokens = [
-        (Number.Integer, u'1'),
-        (Operator, u'...'),
-        (Number.Integer, u'3'),
-        (Text, u'\n'),
+        (Number.Integer, '1'),
+        (Operator, '...'),
+        (Number.Integer, '3'),
+        (Text, '\n'),
     ]
     assert list(lexer.get_tokens(fragment)) == tokens


 def test_range_syntax3(lexer):
-    fragment = u'1 .. 3\n'
+    fragment = '1 .. 3\n'
     tokens = [
-        (Number.Integer, u'1'),
-        (Text, u' '),
-        (Operator, u'..'),
-        (Text, u' '),
-        (Number.Integer, u'3'),
-        (Text, u'\n'),
+        (Number.Integer, '1'),
+        (Text, ' '),
+        (Operator, '..'),
+        (Text, ' '),
+        (Number.Integer, '3'),
+        (Text, '\n'),
     ]
     assert list(lexer.get_tokens(fragment)) == tokens


 def test_interpolation_nested_curly(lexer):
     fragment = (
-        u'"A#{ (3..5).group_by { |x| x/2}.map '
-        u'do |k,v| "#{k}" end.join }" + "Z"\n')
+        '"A#{ (3..5).group_by { |x| x/2}.map '
+        'do |k,v| "#{k}" end.join }" + "Z"\n')
     tokens = [
-        (Token.Literal.String.Double, u'"'),
-        (Token.Literal.String.Double, u'A'),
-        (Token.Literal.String.Interpol, u'#{'),
-        (Token.Text, u' '),
-        (Token.Punctuation, u'('),
-        (Token.Literal.Number.Integer, u'3'),
-        (Token.Operator, u'..'),
-        (Token.Literal.Number.Integer, u'5'),
-        (Token.Punctuation, u')'),
-        (Token.Operator, u'.'),
-        (Token.Name, u'group_by'),
-        (Token.Text, u' '),
-        (Token.Literal.String.Interpol, u'{'),
-        (Token.Text, u' '),
-        (Token.Operator, u'|'),
-        (Token.Name, u'x'),
-        (Token.Operator, u'|'),
-        (Token.Text, u' '),
-        (Token.Name, u'x'),
-        (Token.Operator, u'/'),
-        (Token.Literal.Number.Integer, u'2'),
-        (Token.Literal.String.Interpol, u'}'),
-        (Token.Operator, u'.'),
-        (Token.Name, u'map'),
-        (Token.Text, u' '),
-        (Token.Keyword, u'do'),
-        (Token.Text, u' '),
-        (Token.Operator, u'|'),
-        (Token.Name, u'k'),
-        (Token.Punctuation, u','),
-        (Token.Name, u'v'),
-        (Token.Operator, u'|'),
-        (Token.Text, u' '),
-        (Token.Literal.String.Double, u'"'),
-        (Token.Literal.String.Interpol, u'#{'),
-        (Token.Name, u'k'),
-        (Token.Literal.String.Interpol, u'}'),
-        (Token.Literal.String.Double, u'"'),
-        (Token.Text, u' '),
-        (Token.Keyword, u'end'),
-        (Token.Operator, u'.'),
-        (Token.Name, u'join'),
-        (Token.Text, u' '),
-        (Token.Literal.String.Interpol, u'}'),
-        (Token.Literal.String.Double, u'"'),
-        (Token.Text, u' '),
-        (Token.Operator, u'+'),
-        (Token.Text, u' '),
-        (Token.Literal.String.Double, u'"'),
-        (Token.Literal.String.Double, u'Z'),
-        (Token.Literal.String.Double, u'"'),
-        (Token.Text, u'\n'),
+        (Token.Literal.String.Double, '"'),
+        (Token.Literal.String.Double, 'A'),
+        (Token.Literal.String.Interpol, '#{'),
+        (Token.Text, ' '),
+        (Token.Punctuation, '('),
+        (Token.Literal.Number.Integer, '3'),
+        (Token.Operator, '..'),
+        (Token.Literal.Number.Integer, '5'),
+        (Token.Punctuation, ')'),
+        (Token.Operator, '.'),
+        (Token.Name, 'group_by'),
+        (Token.Text, ' '),
+        (Token.Literal.String.Interpol, '{'),
+        (Token.Text, ' '),
+        (Token.Operator, '|'),
+        (Token.Name, 'x'),
+        (Token.Operator, '|'),
+        (Token.Text, ' '),
+        (Token.Name, 'x'),
+        (Token.Operator, '/'),
+        (Token.Literal.Number.Integer, '2'),
+        (Token.Literal.String.Interpol, '}'),
+        (Token.Operator, '.'),
+        (Token.Name, 'map'),
+        (Token.Text, ' '),
+        (Token.Keyword, 'do'),
+        (Token.Text, ' '),
+        (Token.Operator, '|'),
+        (Token.Name, 'k'),
+        (Token.Punctuation, ','),
+        (Token.Name, 'v'),
+        (Token.Operator, '|'),
+        (Token.Text, ' '),
+        (Token.Literal.String.Double, '"'),
+        (Token.Literal.String.Interpol, '#{'),
+        (Token.Name, 'k'),
+        (Token.Literal.String.Interpol, '}'),
+        (Token.Literal.String.Double, '"'),
+        (Token.Text, ' '),
+        (Token.Keyword, 'end'),
+        (Token.Operator, '.'),
+        (Token.Name, 'join'),
+        (Token.Text, ' '),
+        (Token.Literal.String.Interpol, '}'),
+        (Token.Literal.String.Double, '"'),
+        (Token.Text, ' '),
+        (Token.Operator, '+'),
+        (Token.Text, ' '),
+        (Token.Literal.String.Double, '"'),
+        (Token.Literal.String.Double, 'Z'),
+        (Token.Literal.String.Double, '"'),
+        (Token.Text, '\n'),
     ]
     assert list(lexer.get_tokens(fragment)) == tokens


 def test_operator_methods(lexer):
-    fragment = u'x.==4\n'
+    fragment = 'x.==4\n'
     tokens = [
-        (Token.Name, u'x'),
-        (Token.Operator, u'.'),
-        (Token.Name.Operator, u'=='),
-        (Token.Literal.Number.Integer, u'4'),
-        (Token.Text, u'\n'),
+        (Token.Name, 'x'),
+        (Token.Operator, '.'),
+        (Token.Name.Operator, '=='),
+        (Token.Literal.Number.Integer, '4'),
+        (Token.Text, '\n'),
     ]
     assert list(lexer.get_tokens(fragment)) == tokens


 def test_escaped_bracestring(lexer):
-    fragment = u'str.gsub(%r{\\\\\\\\}, "/")\n'
+    fragment = 'str.gsub(%r{\\\\\\\\}, "/")\n'
     tokens = [
-        (Token.Name, u'str'),
-        (Token.Operator, u'.'),
-        (Token.Name, u'gsub'),
-        (Token.Punctuation, u'('),
-        (Token.Literal.String.Regex, u'%r{'),
-        (Token.Literal.String.Regex, u'\\\\'),
-        (Token.Literal.String.Regex, u'\\\\'),
-        (Token.Literal.String.Regex, u'}'),
-        (Token.Punctuation, u','),
-        (Token.Text, u' '),
-        (Token.Literal.String.Double, u'"'),
-        (Token.Literal.String.Double, u'/'),
-        (Token.Literal.String.Double, u'"'),
-        (Token.Punctuation, u')'),
-        (Token.Text, u'\n'),
+        (Token.Name, 'str'),
+        (Token.Operator, '.'),
+        (Token.Name, 'gsub'),
+        (Token.Punctuation, '('),
+        (Token.Literal.String.Regex, '%r{'),
+        (Token.Literal.String.Regex, '\\\\'),
+        (Token.Literal.String.Regex, '\\\\'),
+        (Token.Literal.String.Regex, '}'),
+        (Token.Punctuation, ','),
+        (Token.Text, ' '),
+        (Token.Literal.String.Double, '"'),
+        (Token.Literal.String.Double, '/'),
+        (Token.Literal.String.Double, '"'),
+        (Token.Punctuation, ')'),
+        (Token.Text, '\n'),
     ]
     assert list(lexer.get_tokens(fragment)) == tokens
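The lexer fixture named in the hunk header (def lexer():) lies outside this hunk. In the Pygments test suite it is presumably the usual module-scoped pytest fixture, along these lines (a sketch assuming that standard pattern; only RubyLexer and get_tokens are confirmed by the diff itself):

    import pytest

    from pygments.lexers import RubyLexer


    @pytest.fixture(scope='module')
    def lexer():
        # Presumed fixture: the tests above share one RubyLexer instance and
        # compare list(lexer.get_tokens(fragment)) against the expected
        # (token type, value) pairs, e.g. list(lexer.get_tokens('1..3\n'))
        # == [(Number.Integer, '1'), (Operator, '..'),
        #     (Number.Integer, '3'), (Text, '\n')].
        yield RubyLexer()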