# -*- coding: utf-8 -*-
"""
Basic GasLexer/NasmLexer Test
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: Copyright 2006-2020 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""

import pytest

from pygments.token import Token
from pygments.lexers import NasmLexer, GasLexer


@pytest.fixture(scope='module')
def lexer_gas():
    yield GasLexer()


@pytest.fixture(scope='module')
def lexer_nasm():
    yield NasmLexer()


def test_comments(lexer_gas):
    fragment = '''
 lock addq $0, /* comments */ (%rsp) /*
 // comments
 */ xorq %rax, %rax // comments
 '''
    tokens = [
        (Token.Text, ' '),
        (Token.Name.Attribute, 'lock'),
        (Token.Text, ' '),
        (Token.Name.Function, 'addq'),
        (Token.Text, ' '),
        (Token.Name.Constant, '$0'),
        (Token.Punctuation, ','),
        (Token.Text, ' '),
        (Token.Comment.Multiline, '/* comments */'),
        (Token.Text, ' '),
        (Token.Punctuation, '('),
        (Token.Name.Variable, '%rsp'),
        (Token.Punctuation, ')'),
        (Token.Text, ' '),
        (Token.Comment.Multiline, '/*\n // comments\n */'),
        (Token.Text, ' '),
        (Token.Name.Function, 'xorq'),
        (Token.Text, ' '),
        (Token.Name.Variable, '%rax'),
        (Token.Punctuation, ','),
        (Token.Text, ' '),
        (Token.Name.Variable, '%rax'),
        (Token.Text, ' '),
        (Token.Comment.Single, '// comments\n'),
        (Token.Text, ' \n')
    ]
    assert list(lexer_gas.get_tokens(fragment)) == tokens


def test_cpuid(lexer_nasm):
    # CPU is a valid directive, and we don't want to parse this as
    # cpu id, but as a single token. See bug #1517
    fragment = 'cpuid'
    expected = [
        (Token.Name.Function, 'cpuid'),
        (Token.Text, '\n'),
    ]
    assert expected == list(lexer_nasm.get_tokens(fragment))