author | Matthäus G. Chajdas <dev@anteru.net> | 2019-11-10 13:56:53 +0100 |
committer | Matthäus G. Chajdas <dev@anteru.net> | 2019-11-10 13:56:53 +0100 |
commit | 1dd3124a9770e11b6684e5dd1e6bc15a0aa3bc67 (patch) | |
tree | 87a171383266dd1f64196589af081bc2f8e497c3 /tests/test_data.py | |
parent | f1c080e184dc1bbc36eaa7cd729ff3a499de568a (diff) | |
download | pygments-master.tar.gz |
Diffstat (limited to 'tests/test_data.py')
-rw-r--r-- | tests/test_data.py | 117 |
1 file changed, 0 insertions, 117 deletions
diff --git a/tests/test_data.py b/tests/test_data.py
deleted file mode 100644
index 20e74be0..00000000
--- a/tests/test_data.py
+++ /dev/null
@@ -1,117 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    Data Tests
-    ~~~~~~~~~~
-
-    :copyright: Copyright 2006-2016 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import unittest
-
-from pygments.lexers import JsonLexer, JsonBareObjectLexer, YamlLexer
-from pygments.token import Token
-
-
-class JsonTest(unittest.TestCase):
-    def setUp(self):
-        self.lexer = JsonLexer()
-
-    def testBasic(self):
-        fragment = u'{"foo": "bar", "foo2": [1, 2, 3]}\n'
-        tokens = [
-            (Token.Punctuation, u'{'),
-            (Token.Name.Tag, u'"foo"'),
-            (Token.Punctuation, u':'),
-            (Token.Text, u' '),
-            (Token.Literal.String.Double, u'"bar"'),
-            (Token.Punctuation, u','),
-            (Token.Text, u' '),
-            (Token.Name.Tag, u'"foo2"'),
-            (Token.Punctuation, u':'),
-            (Token.Text, u' '),
-            (Token.Punctuation, u'['),
-            (Token.Literal.Number.Integer, u'1'),
-            (Token.Punctuation, u','),
-            (Token.Text, u' '),
-            (Token.Literal.Number.Integer, u'2'),
-            (Token.Punctuation, u','),
-            (Token.Text, u' '),
-            (Token.Literal.Number.Integer, u'3'),
-            (Token.Punctuation, u']'),
-            (Token.Punctuation, u'}'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-
-class JsonBareObjectTest(unittest.TestCase):
-    def setUp(self):
-        self.lexer = JsonBareObjectLexer()
-
-    def testBasic(self):
-        # This is the same as testBasic for JsonLexer above, except the
-        # enclosing curly braces are removed.
-        fragment = u'"foo": "bar", "foo2": [1, 2, 3]\n'
-        tokens = [
-            (Token.Name.Tag, u'"foo"'),
-            (Token.Punctuation, u':'),
-            (Token.Text, u' '),
-            (Token.Literal.String.Double, u'"bar"'),
-            (Token.Punctuation, u','),
-            (Token.Text, u' '),
-            (Token.Name.Tag, u'"foo2"'),
-            (Token.Punctuation, u':'),
-            (Token.Text, u' '),
-            (Token.Punctuation, u'['),
-            (Token.Literal.Number.Integer, u'1'),
-            (Token.Punctuation, u','),
-            (Token.Text, u' '),
-            (Token.Literal.Number.Integer, u'2'),
-            (Token.Punctuation, u','),
-            (Token.Text, u' '),
-            (Token.Literal.Number.Integer, u'3'),
-            (Token.Punctuation, u']'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-
-    def testClosingCurly(self):
-        # This can be an Error token, but should not be a can't-pop-from-stack
-        # exception.
-        fragment = '}"a"\n'
-        tokens = [
-            (Token.Error, '}'),
-            (Token.Name.Tag, '"a"'),
-            (Token.Text, '\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-
-    def testClosingCurlyInValue(self):
-        fragment = '"": ""}\n'
-        tokens = [
-            (Token.Name.Tag, '""'),
-            (Token.Punctuation, ':'),
-            (Token.Text, ' '),
-            (Token.Literal.String.Double, '""'),
-            (Token.Error, '}'),
-            (Token.Text, '\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-
-class YamlTest(unittest.TestCase):
-    def setUp(self):
-        self.lexer = YamlLexer()
-
-    def testColonInComment(self):
-        # Bug #1528: This previously parsed 'token # innocent' as a tag
-        fragment = u'here: token # innocent: comment\n'
-        tokens = [
-            (Token.Name.Tag, u'here'),
-            (Token.Punctuation, u':'),
-            (Token.Text, u' '),
-            (Token.Literal.Scalar.Plain, u'token'),
-            (Token.Text, u' '),
-            (Token.Comment.Single, u'# innocent: comment'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
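The removed cases all follow the same pattern: feed a fragment to a lexer and compare the full stream of (token type, value) pairs returned by get_tokens(). The snippet below is a minimal sketch, not part of this commit, that reproduces the first assertions of the removed JsonLexer testBasic(); it assumes a Pygments version contemporary with this change, since the JSON lexer's token output may differ in other releases.

```python
# Minimal sketch reproducing the kind of check performed by the removed
# tests (assumption: a Pygments version contemporary with this commit).
from pygments.lexers import JsonLexer
from pygments.token import Token

lexer = JsonLexer()
fragment = u'{"foo": "bar", "foo2": [1, 2, 3]}\n'

# get_tokens() yields (token type, value) pairs for the whole fragment.
tokens = list(lexer.get_tokens(fragment))

# Spot-check a few of the pairs the removed testBasic() asserted in full.
assert tokens[0] == (Token.Punctuation, u'{')
assert tokens[1] == (Token.Name.Tag, u'"foo"')
assert tokens[-1] == (Token.Text, u'\n')
```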