author | Matthäus G. Chajdas <dev@anteru.net> | 2019-11-10 13:56:53 +0100 |
---|---|---|
committer | Matthäus G. Chajdas <dev@anteru.net> | 2019-11-10 13:56:53 +0100 |
commit | 1dd3124a9770e11b6684e5dd1e6bc15a0aa3bc67 (patch) | |
tree | 87a171383266dd1f64196589af081bc2f8e497c3 /tests/test_python.py | |
parent | f1c080e184dc1bbc36eaa7cd729ff3a499de568a (diff) | |
download | pygments-master.tar.gz |
Diffstat (limited to 'tests/test_python.py')
-rw-r--r-- | tests/test_python.py | 133 |
1 file changed, 0 insertions, 133 deletions
diff --git a/tests/test_python.py b/tests/test_python.py
deleted file mode 100644
index b9c6c49b..00000000
--- a/tests/test_python.py
+++ /dev/null
@@ -1,133 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-    Python Tests
-    ~~~~~~~~~~~~
-
-    :copyright: Copyright 2006-2019 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import unittest
-
-from pygments.lexers import PythonLexer, Python3Lexer
-from pygments.token import Token
-
-
-class PythonTest(unittest.TestCase):
-    def setUp(self):
-        self.lexer = PythonLexer()
-
-    def test_cls_builtin(self):
-        """
-        Tests that a cls token gets interpreted as a Token.Name.Builtin.Pseudo
-
-        """
-        fragment = 'class TestClass():\n    @classmethod\n    def hello(cls):\n        pass\n'
-        tokens = [
-            (Token.Keyword, 'class'),
-            (Token.Text, ' '),
-            (Token.Name.Class, 'TestClass'),
-            (Token.Punctuation, '('),
-            (Token.Punctuation, ')'),
-            (Token.Punctuation, ':'),
-            (Token.Text, '\n'),
-            (Token.Text, '    '),
-            (Token.Name.Decorator, '@classmethod'),
-            (Token.Text, '\n'),
-            (Token.Text, '    '),
-            (Token.Keyword, 'def'),
-            (Token.Text, ' '),
-            (Token.Name.Function, 'hello'),
-            (Token.Punctuation, '('),
-            (Token.Name.Builtin.Pseudo, 'cls'),
-            (Token.Punctuation, ')'),
-            (Token.Punctuation, ':'),
-            (Token.Text, '\n'),
-            (Token.Text, '        '),
-            (Token.Keyword, 'pass'),
-            (Token.Text, '\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-
-
-class Python3Test(unittest.TestCase):
-    def setUp(self):
-        self.lexer = Python3Lexer()
-
-    def testNeedsName(self):
-        """
-        Tests that '@' is recognized as an Operator
-        """
-        fragment = u'S = (H @ beta - r).T @ inv(H @ V @ H.T) @ (H @ beta - r)\n'
-        tokens = [
-            (Token.Name, u'S'),
-            (Token.Text, u' '),
-            (Token.Operator, u'='),
-            (Token.Text, u' '),
-            (Token.Punctuation, u'('),
-            (Token.Name, u'H'),
-            (Token.Text, u' '),
-            (Token.Operator, u'@'),
-            (Token.Text, u' '),
-            (Token.Name, u'beta'),
-            (Token.Text, u' '),
-            (Token.Operator, u'-'),
-            (Token.Text, u' '),
-            (Token.Name, u'r'),
-            (Token.Punctuation, u')'),
-            (Token.Operator, u'.'),
-            (Token.Name, u'T'),
-            (Token.Text, u' '),
-            (Token.Operator, u'@'),
-            (Token.Text, u' '),
-            (Token.Name, u'inv'),
-            (Token.Punctuation, u'('),
-            (Token.Name, u'H'),
-            (Token.Text, u' '),
-            (Token.Operator, u'@'),
-            (Token.Text, u' '),
-            (Token.Name, u'V'),
-            (Token.Text, u' '),
-            (Token.Operator, u'@'),
-            (Token.Text, u' '),
-            (Token.Name, u'H'),
-            (Token.Operator, u'.'),
-            (Token.Name, u'T'),
-            (Token.Punctuation, u')'),
-            (Token.Text, u' '),
-            (Token.Operator, u'@'),
-            (Token.Text, u' '),
-            (Token.Punctuation, u'('),
-            (Token.Name, u'H'),
-            (Token.Text, u' '),
-            (Token.Operator, u'@'),
-            (Token.Text, u' '),
-            (Token.Name, u'beta'),
-            (Token.Text, u' '),
-            (Token.Operator, u'-'),
-            (Token.Text, u' '),
-            (Token.Name, u'r'),
-            (Token.Punctuation, u')'),
-            (Token.Text, u'\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
-
-    def test_pep_515(self):
-        """
-        Tests that the lexer can parse numeric literals with underscores
-        """
-        fragments = (
-            (Token.Literal.Number.Integer, u'1_000_000'),
-            (Token.Literal.Number.Float, u'1_000.000_001'),
-            (Token.Literal.Number.Float, u'1_000e1_000j'),
-            (Token.Literal.Number.Hex, u'0xCAFE_F00D'),
-            (Token.Literal.Number.Bin, u'0b_0011_1111_0100_1110'),
-            (Token.Literal.Number.Oct, u'0o_777_123'),
-        )
-
-        for token, fragment in fragments:
-            tokens = [
-                (token, fragment),
-                (Token.Text, u'\n'),
-            ]
-            self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
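For reference, the behaviour covered by the deleted `test_cls_builtin` can still be checked against the public lexer API without the `unittest` scaffolding. The sketch below is illustrative only and is not part of this commit: the standalone pytest-style layout and the function name `test_cls_is_pseudo_builtin` are assumptions; `PythonLexer`, `Token`, and `get_tokens()` are the same APIs exercised in the removed file.

```python
# Minimal sketch (not part of this commit): the deleted cls-builtin check
# rewritten as a standalone pytest-style function. The pytest layout is an
# assumption for illustration only.
from pygments.lexers import PythonLexer
from pygments.token import Token


def test_cls_is_pseudo_builtin():
    fragment = ('class TestClass():\n'
                '    @classmethod\n'
                '    def hello(cls):\n'
                '        pass\n')
    # get_tokens() yields (token_type, value) pairs for the whole fragment.
    tokens = list(PythonLexer().get_tokens(fragment))
    # The 'cls' parameter should be classified as a pseudo-builtin name.
    assert (Token.Name.Builtin.Pseudo, 'cls') in tokens
```

Run with `pytest` against a Pygments checkout; the assertion only inspects the one token the original test cared about rather than the full token stream.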