summaryrefslogtreecommitdiff
path: root/tests/test_basic_api.py
diff options
context:
space:
mode:
authorMatthäus G. Chajdas <dev@anteru.net>2020-09-08 20:33:25 +0200
committerMatthäus G. Chajdas <dev@anteru.net>2020-09-08 20:33:25 +0200
commit203ef1eff6daebab6f95b0b49e6e6a58168073fb (patch)
tree7defa199f48a34787f980b6400d8bbaa9380039a /tests/test_basic_api.py
parente09d4e0cf23d7c6069ddc690942ceb4cd23fd556 (diff)
parentb2c91c70ee536b0472100d1273818f8bb45529fe (diff)
downloadpygments-git-203ef1eff6daebab6f95b0b49e6e6a58168073fb.tar.gz
Merge branch 'master' into bug/angular-html
# Conflicts:
#	tests/test_shell.py
Diffstat (limited to 'tests/test_basic_api.py')
-rw-r--r--tests/test_basic_api.py20
1 file changed, 10 insertions(+), 10 deletions(-)
diff --git a/tests/test_basic_api.py b/tests/test_basic_api.py
index 378ea5e4..bbfbb14e 100644
--- a/tests/test_basic_api.py
+++ b/tests/test_basic_api.py
@@ -173,20 +173,20 @@ def test_formatter_encodings():
# unicode output
fmt = HtmlFormatter()
- tokens = [(Text, u"ä")]
+ tokens = [(Text, "ä")]
out = format(tokens, fmt)
assert type(out) is str
- assert u"ä" in out
+ assert "ä" in out
# encoding option
fmt = HtmlFormatter(encoding="latin1")
- tokens = [(Text, u"ä")]
- assert u"ä".encode("latin1") in format(tokens, fmt)
+ tokens = [(Text, "ä")]
+ assert "ä".encode("latin1") in format(tokens, fmt)
# encoding and outencoding option
fmt = HtmlFormatter(encoding="latin1", outencoding="utf8")
- tokens = [(Text, u"ä")]
- assert u"ä".encode("utf8") in format(tokens, fmt)
+ tokens = [(Text, "ä")]
+ assert "ä".encode("utf8") in format(tokens, fmt)
@pytest.mark.parametrize('cls', [getattr(formatters, name)
@@ -307,7 +307,7 @@ class TestFilters:
def test_codetag(self):
lx = lexers.PythonLexer()
lx.add_filter('codetagify')
- text = u'# BUG: text'
+ text = '# BUG: text'
tokens = list(lx.get_tokens(text))
assert '# ' == tokens[0][1]
assert 'BUG' == tokens[1][1]
@@ -316,15 +316,15 @@ class TestFilters:
# ticket #368
lx = lexers.PythonLexer()
lx.add_filter('codetagify')
- text = u'# DEBUG: text'
+ text = '# DEBUG: text'
tokens = list(lx.get_tokens(text))
assert '# DEBUG: text' == tokens[0][1]
def test_symbols(self):
lx = lexers.IsabelleLexer()
lx.add_filter('symbols')
- text = u'lemma "A \\<Longrightarrow> B"'
+ text = 'lemma "A \\<Longrightarrow> B"'
tokens = list(lx.get_tokens(text))
assert 'lemma' == tokens[0][1]
assert 'A ' == tokens[3][1]
- assert u'\U000027f9' == tokens[4][1]
+ assert '\U000027f9' == tokens[4][1]