summary refs log tree commit diff
path: root/tests/test_qbasiclexer.py
diff options
context:
space:
mode:
author Matthäus G. Chajdas <dev@anteru.net> 2020-09-08 20:33:25 +0200
committer Matthäus G. Chajdas <dev@anteru.net> 2020-09-08 20:33:25 +0200
commit 203ef1eff6daebab6f95b0b49e6e6a58168073fb (patch)
tree 7defa199f48a34787f980b6400d8bbaa9380039a /tests/test_qbasiclexer.py
parent e09d4e0cf23d7c6069ddc690942ceb4cd23fd556 (diff)
parent b2c91c70ee536b0472100d1273818f8bb45529fe (diff)
download pygments-git-bug/angular-html.tar.gz
Merge branch 'master' into bug/angular-html
# Conflicts: # tests/test_shell.py
Diffstat (limited to 'tests/test_qbasiclexer.py')
-rw-r--r-- tests/test_qbasiclexer.py 34
1 file changed, 17 insertions, 17 deletions
diff --git a/tests/test_qbasiclexer.py b/tests/test_qbasiclexer.py
index fb721437..83077e7e 100644
--- a/tests/test_qbasiclexer.py
+++ b/tests/test_qbasiclexer.py
@@ -19,23 +19,23 @@ def lexer():
def test_keywords_with_dollar(lexer):
- fragment = u'DIM x\nx = RIGHT$("abc", 1)\n'
+ fragment = 'DIM x\nx = RIGHT$("abc", 1)\n'
expected = [
- (Token.Keyword.Declaration, u'DIM'),
- (Token.Text.Whitespace, u' '),
- (Token.Name.Variable.Global, u'x'),
- (Token.Text, u'\n'),
- (Token.Name.Variable.Global, u'x'),
- (Token.Text.Whitespace, u' '),
- (Token.Operator, u'='),
- (Token.Text.Whitespace, u' '),
- (Token.Keyword.Reserved, u'RIGHT$'),
- (Token.Punctuation, u'('),
- (Token.Literal.String.Double, u'"abc"'),
- (Token.Punctuation, u','),
- (Token.Text.Whitespace, u' '),
- (Token.Literal.Number.Integer.Long, u'1'),
- (Token.Punctuation, u')'),
- (Token.Text, u'\n'),
+ (Token.Keyword.Declaration, 'DIM'),
+ (Token.Text.Whitespace, ' '),
+ (Token.Name.Variable.Global, 'x'),
+ (Token.Text, '\n'),
+ (Token.Name.Variable.Global, 'x'),
+ (Token.Text.Whitespace, ' '),
+ (Token.Operator, '='),
+ (Token.Text.Whitespace, ' '),
+ (Token.Keyword.Reserved, 'RIGHT$'),
+ (Token.Punctuation, '('),
+ (Token.Literal.String.Double, '"abc"'),
+ (Token.Punctuation, ','),
+ (Token.Text.Whitespace, ' '),
+ (Token.Literal.Number.Integer.Long, '1'),
+ (Token.Punctuation, ')'),
+ (Token.Text, '\n'),
]
assert list(lexer.get_tokens(fragment)) == expected