| author | Georg Brandl <georg@python.org> | 2019-05-06 18:02:47 +0200 |
|---|---|---|
| committer | Georg Brandl <georg@python.org> | 2019-11-10 10:15:13 +0100 |
| commit | 7827966acdb6431636520d20fc3c148ce52de59b (patch) | |
| tree | 13a9316eb3eb964c22da0a08f046d44cd81470d0 /tests/test_java.py | |
| parent | a281ff8367a3a5f4cc17c9956e9273593558d336 (diff) | |
| download | pygments-git-7827966acdb6431636520d20fc3c148ce52de59b.tar.gz | |
Remove unittest classes from the test suite.
Diffstat (limited to 'tests/test_java.py')
| -rw-r--r-- | tests/test_java.py | 122 |
1 file changed, 61 insertions, 61 deletions
diff --git a/tests/test_java.py b/tests/test_java.py
index 5f520853..40a1ec1b 100644
--- a/tests/test_java.py
+++ b/tests/test_java.py
@@ -7,72 +7,72 @@
     :license: BSD, see LICENSE for details.
 """
 
-import unittest
+import pytest
 
 from pygments.token import Text, Name, Operator, Keyword, Number
 from pygments.lexers import JavaLexer
 
 
-class JavaTest(unittest.TestCase):
+@pytest.fixture(scope='module')
+def lexer():
+    yield JavaLexer()
 
-    def setUp(self):
-        self.lexer = JavaLexer()
-        self.maxDiff = None
 
-    def testEnhancedFor(self):
-        fragment = u'label:\nfor(String var2: var1) {}\n'
-        tokens = [
-            (Name.Label, u'label:'),
-            (Text, u'\n'),
-            (Keyword, u'for'),
-            (Operator, u'('),
-            (Name, u'String'),
-            (Text, u' '),
-            (Name, u'var2'),
-            (Operator, u':'),
-            (Text, u' '),
-            (Name, u'var1'),
-            (Operator, u')'),
-            (Text, u' '),
-            (Operator, u'{'),
-            (Operator, u'}'),
-            (Text, u'\n'),
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+def testEnhancedFor(lexer):
+    fragment = u'label:\nfor(String var2: var1) {}\n'
+    tokens = [
+        (Name.Label, u'label:'),
+        (Text, u'\n'),
+        (Keyword, u'for'),
+        (Operator, u'('),
+        (Name, u'String'),
+        (Text, u' '),
+        (Name, u'var2'),
+        (Operator, u':'),
+        (Text, u' '),
+        (Name, u'var1'),
+        (Operator, u')'),
+        (Text, u' '),
+        (Operator, u'{'),
+        (Operator, u'}'),
+        (Text, u'\n'),
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
 
-    def testNumericLiterals(self):
-        fragment = '0 5L 9__542_72l 0xbEEf 0X9_A 0_35 01 0b0___101_0'
-        fragment += ' 0. .7_17F 3e-1_3d 1f 6_01.9e+3 0x.1Fp3 0XEP8D\n'
-        tokens = [
-            (Number.Integer, '0'),
-            (Text, ' '),
-            (Number.Integer, '5L'),
-            (Text, ' '),
-            (Number.Integer, '9__542_72l'),
-            (Text, ' '),
-            (Number.Hex, '0xbEEf'),
-            (Text, ' '),
-            (Number.Hex, '0X9_A'),
-            (Text, ' '),
-            (Number.Oct, '0_35'),
-            (Text, ' '),
-            (Number.Oct, '01'),
-            (Text, ' '),
-            (Number.Bin, '0b0___101_0'),
-            (Text, ' '),
-            (Number.Float, '0.'),
-            (Text, ' '),
-            (Number.Float, '.7_17F'),
-            (Text, ' '),
-            (Number.Float, '3e-1_3d'),
-            (Text, ' '),
-            (Number.Float, '1f'),
-            (Text, ' '),
-            (Number.Float, '6_01.9e+3'),
-            (Text, ' '),
-            (Number.Float, '0x.1Fp3'),
-            (Text, ' '),
-            (Number.Float, '0XEP8D'),
-            (Text, '\n')
-        ]
-        self.assertEqual(tokens, list(self.lexer.get_tokens(fragment)))
+
+def test_numeric_literals(lexer):
+    fragment = '0 5L 9__542_72l 0xbEEf 0X9_A 0_35 01 0b0___101_0'
+    fragment += ' 0. .7_17F 3e-1_3d 1f 6_01.9e+3 0x.1Fp3 0XEP8D\n'
+    tokens = [
+        (Number.Integer, '0'),
+        (Text, ' '),
+        (Number.Integer, '5L'),
+        (Text, ' '),
+        (Number.Integer, '9__542_72l'),
+        (Text, ' '),
+        (Number.Hex, '0xbEEf'),
+        (Text, ' '),
+        (Number.Hex, '0X9_A'),
+        (Text, ' '),
+        (Number.Oct, '0_35'),
+        (Text, ' '),
+        (Number.Oct, '01'),
+        (Text, ' '),
+        (Number.Bin, '0b0___101_0'),
+        (Text, ' '),
+        (Number.Float, '0.'),
+        (Text, ' '),
+        (Number.Float, '.7_17F'),
+        (Text, ' '),
+        (Number.Float, '3e-1_3d'),
+        (Text, ' '),
+        (Number.Float, '1f'),
+        (Text, ' '),
+        (Number.Float, '6_01.9e+3'),
+        (Text, ' '),
+        (Number.Float, '0x.1Fp3'),
+        (Text, ' '),
+        (Number.Float, '0XEP8D'),
+        (Text, '\n')
+    ]
+    assert list(lexer.get_tokens(fragment)) == tokens
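
The pattern applied throughout this diff, dropping the `unittest.TestCase` wrapper in favour of a module-scoped pytest fixture plus plain test functions, can be seen in isolation below. This is a minimal sketch, assuming pygments and pytest are installed; `test_for_keyword` and its fragment are illustrative only and are not taken from the file above, and the assertion is limited to the first token to stay independent of lexer version details.

```python
import pytest

from pygments.lexers import JavaLexer
from pygments.token import Keyword


# Module-scoped fixture: one JavaLexer instance is created and shared by
# every test function in the module, replacing the per-test setUp() of
# the old unittest.TestCase.
@pytest.fixture(scope='module')
def lexer():
    yield JavaLexer()


# A plain function instead of a TestCase method; pytest injects the
# fixture through the `lexer` argument, and a bare assert replaces
# self.assertEqual(). This fragment is a hypothetical example.
def test_for_keyword(lexer):
    tokens = list(lexer.get_tokens(u'for\n'))
    # Only the first token is checked to keep the sketch robust.
    assert tokens[0] == (Keyword, u'for')
```

Running `pytest tests/test_java.py` collects functions whose names start with `test`, which is why `testEnhancedFor` above still runs without any class or runner boilerplate.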
