diff options
| author | Fredrik Larsen <fredrik.h.larsen@gmail.com> | 2017-11-10 17:22:12 +0100 |
|---|---|---|
| committer | Fredrik Larsen <fredrik.h.larsen@gmail.com> | 2017-11-10 17:22:12 +0100 |
| commit | fb4871a83060c2dff8cac3ce9e9e230248ddc19a (patch) | |
| tree | 4dea38ce2e0f95c385a4558a49a33488ace401a3 | |
| parent | 1b966038502c0b386a6645d4b5125f623d0947bb (diff) | |
| download | pygments-git-fb4871a83060c2dff8cac3ce9e9e230248ddc19a.tar.gz | |
Fix issue with markdown lexer code fences
Fixes the bug reported in #1389, where the markdown lexer does not emit a
token for the closing code fence.
Issue: #1389
| -rw-r--r-- | pygments/lexers/markup.py | 7 | ||||
| -rw-r--r-- | tests/test_markdown_lexer.py | 31 |
2 files changed, 34 insertions, 4 deletions
diff --git a/pygments/lexers/markup.py b/pygments/lexers/markup.py index 92dc9e7a..e6265f40 100644 --- a/pygments/lexers/markup.py +++ b/pygments/lexers/markup.py @@ -536,10 +536,9 @@ class MarkdownLexer(RegexLexer): # no lexer for this language. handle it like it was a code block if lexer is None: yield match.start(4), String, code - return - - for item in do_insertions([], lexer.get_tokens_unprocessed(code)): - yield item + else: + for item in do_insertions([], lexer.get_tokens_unprocessed(code)): + yield item yield match.start(5), String , match.group(5) diff --git a/tests/test_markdown_lexer.py b/tests/test_markdown_lexer.py new file mode 100644 index 00000000..16d1f28d --- /dev/null +++ b/tests/test_markdown_lexer.py @@ -0,0 +1,31 @@ +# -*- coding: utf-8 -*- +""" + Pygments regex lexer tests + ~~~~~~~~~~~~~~~~~~~~~~~~~~ + + :copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS. + :license: BSD, see LICENSE for details. +""" +import unittest + +from pygments.lexers.markup import MarkdownLexer + + +class SameTextTests(unittest.TestCase): + + lexer = MarkdownLexer() + + def assert_same_text(self, text): + """Show that lexed markdown does not remove any content. """ + tokens = list(self.lexer.get_tokens_unprocessed(text)) + output = ''.join(t[2] for t in tokens) + self.assertEqual(text, output) + + def test_code_fence(self): + self.assert_same_text(r'```\nfoo\n```\n') + + def test_code_fence_gsm(self): + self.assert_same_text(r'```markdown\nfoo\n```\n') + + def test_code_fence_gsm_with_no_lexer(self): + self.assert_same_text(r'```invalid-lexer\nfoo\n```\n') |
