summaryrefslogtreecommitdiff
path: root/tests/test_kotlin.py
blob: 9b9e898d43384cc4157f805e994714617993ea3f (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
# -*- coding: utf-8 -*-
"""
    Basic KotlinLexer Test
    ~~~~~~~~~~~~~~~~~~~~~~

    :copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
    :license: BSD, see LICENSE for details.
"""

import pytest

from pygments.token import Text, Name, Keyword, Punctuation, String
from pygments.lexers import KotlinLexer


@pytest.fixture(scope='module')
def lexer():
    """Provide one KotlinLexer instance shared by every test in this module."""
    kotlin_lexer = KotlinLexer()
    yield kotlin_lexer


def test_can_cope_with_backtick_names_in_functions(lexer):
    """A backtick-quoted function name containing a space lexes as one Name.Function token."""
    code = 'fun `wo bble`'
    expected = [
        (Keyword, 'fun'),
        (Text, ' '),
        (Name.Function, '`wo bble`'),
        (Text, '\n'),
    ]
    assert expected == list(lexer.get_tokens(code))


def test_can_cope_with_commas_and_dashes_in_backtick_Names(lexer):
    """Commas and dashes inside a backtick-quoted name stay within one Name.Function token."""
    code = 'fun `wo,-bble`'
    expected = [
        (Keyword, 'fun'),
        (Text, ' '),
        (Name.Function, '`wo,-bble`'),
        (Text, '\n'),
    ]
    assert expected == list(lexer.get_tokens(code))


def test_can_cope_with_destructuring(lexer):
    """Components of a `val (a, b)` destructuring declaration lex as Name.Property."""
    code = 'val (a, b) = '
    # Expected token stream, expressed as parallel type/value lists.
    token_types = [
        Keyword, Text, Punctuation, Name.Property, Punctuation, Text,
        Name.Property, Punctuation, Text, Punctuation, Text, Text,
    ]
    token_values = [
        'val', ' ', '(', 'a', ',', ' ',
        'b', ')', ' ', '=', ' ', '\n',
    ]
    assert list(lexer.get_tokens(code)) == list(zip(token_types, token_values))


def test_can_cope_generics_in_destructuring(lexer):
    """Generic type annotations inside a destructuring declaration tokenise correctly."""
    code = 'val (a: List<Something>, b: Set<Wobble>) ='
    expected = [
        # val (a: List<Something>,
        (Keyword, 'val'), (Text, ' '), (Punctuation, '('),
        (Name.Property, 'a'), (Punctuation, ':'), (Text, ' '),
        (Name.Property, 'List'), (Punctuation, '<'), (Name, 'Something'),
        (Punctuation, '>'), (Punctuation, ','), (Text, ' '),
        # b: Set<Wobble>) =
        (Name.Property, 'b'), (Punctuation, ':'), (Text, ' '),
        (Name.Property, 'Set'), (Punctuation, '<'), (Name, 'Wobble'),
        (Punctuation, '>'), (Punctuation, ')'), (Text, ' '),
        (Punctuation, '='), (Text, '\n'),
    ]
    assert expected == list(lexer.get_tokens(code))


def test_can_cope_with_generics(lexer):
    """A generic extension-function signature lexes with the expected token classes."""
    code = ('inline fun <reified T : ContractState> '
            'VaultService.queryBy(): Vault.Page<T> {')
    expected = [
        # inline fun <reified T : ContractState>
        (Keyword, 'inline fun'), (Text, ' '), (Punctuation, '<'),
        (Keyword, 'reified'), (Text, ' '), (Name, 'T'),
        (Text, ' '), (Punctuation, ':'), (Text, ' '),
        (Name, 'ContractState'), (Punctuation, '>'), (Text, ' '),
        # VaultService.queryBy():
        (Name.Class, 'VaultService'), (Punctuation, '.'), (Name.Function, 'queryBy'),
        (Punctuation, '('), (Punctuation, ')'), (Punctuation, ':'),
        # Vault.Page<T> {
        (Text, ' '), (Name, 'Vault'), (Punctuation, '.'),
        (Name, 'Page'), (Punctuation, '<'), (Name, 'T'),
        (Punctuation, '>'), (Text, ' '), (Punctuation, '{'),
        (Text, '\n'),
    ]
    assert expected == list(lexer.get_tokens(code))


def test_should_cope_with_multiline_comments(lexer):
    """A triple-quoted multiline string lexes as a single String token.

    NOTE(review): despite the test name, the fragment is a triple-quoted
    *string* literal (Kotlin raw string), not a comment -- the lexer emits
    a String token for the whole span.
    """
    code = '"""\nthis\nis\na\ncomment"""'
    assert list(lexer.get_tokens(code)) == [
        (String, code),
        (Text, '\n'),
    ]