Diffstat (limited to 'tests/test_praat.py')
-rw-r--r--  tests/test_praat.py  230
1 file changed, 119 insertions, 111 deletions
diff --git a/tests/test_praat.py b/tests/test_praat.py
index e3997671..cced8983 100644
--- a/tests/test_praat.py
+++ b/tests/test_praat.py
@@ -19,187 +19,195 @@ def lexer():
def test_numeric_assignment(lexer):
- fragment = u'var = -15e4\n'
+ fragment = 'var = -15e4\n'
tokens = [
- (Token.Text, u'var'),
- (Token.Text, u' '),
- (Token.Operator, u'='),
- (Token.Text, u' '),
- (Token.Operator, u'-'),
- (Token.Literal.Number, u'15e4'),
- (Token.Text, u'\n'),
+ (Token.Text, 'var'),
+ (Token.Text, ' '),
+ (Token.Operator, '='),
+ (Token.Text, ' '),
+ (Token.Operator, '-'),
+ (Token.Literal.Number, '15e4'),
+ (Token.Text, '\n'),
]
assert list(lexer.get_tokens(fragment)) == tokens

def test_string_assignment(lexer):
- fragment = u'var$ = "foo"\n'
+ fragment = 'var$ = "foo"\n'
tokens = [
- (Token.Text, u'var$'),
- (Token.Text, u' '),
- (Token.Operator, u'='),
- (Token.Text, u' '),
- (Token.Literal.String, u'"'),
- (Token.Literal.String, u'foo'),
- (Token.Literal.String, u'"'),
- (Token.Text, u'\n'),
+ (Token.Text, 'var$'),
+ (Token.Text, ' '),
+ (Token.Operator, '='),
+ (Token.Text, ' '),
+ (Token.Literal.String, '"'),
+ (Token.Literal.String, 'foo'),
+ (Token.Literal.String, '"'),
+ (Token.Text, '\n'),
]
assert list(lexer.get_tokens(fragment)) == tokens

def test_string_escaped_quotes(lexer):
- fragment = u'"it said ""foo"""\n'
+ fragment = '"it said ""foo"""\n'
tokens = [
- (Token.Literal.String, u'"'),
- (Token.Literal.String, u'it said '),
- (Token.Literal.String, u'"'),
- (Token.Literal.String, u'"'),
- (Token.Literal.String, u'foo'),
- (Token.Literal.String, u'"'),
- (Token.Literal.String, u'"'),
- (Token.Literal.String, u'"'),
- (Token.Text, u'\n'),
+ (Token.Literal.String, '"'),
+ (Token.Literal.String, 'it said '),
+ (Token.Literal.String, '"'),
+ (Token.Literal.String, '"'),
+ (Token.Literal.String, 'foo'),
+ (Token.Literal.String, '"'),
+ (Token.Literal.String, '"'),
+ (Token.Literal.String, '"'),
+ (Token.Text, '\n'),
]
assert list(lexer.get_tokens(fragment)) == tokens

def test_function_call(lexer):
- fragment = u'selected("Sound", i+(a*b))\n'
+ fragment = 'selected("Sound", i+(a*b))\n'
tokens = [
- (Token.Name.Function, u'selected'),
- (Token.Punctuation, u'('),
- (Token.Literal.String, u'"'),
- (Token.Literal.String, u'Sound'),
- (Token.Literal.String, u'"'),
- (Token.Punctuation, u','),
- (Token.Text, u' '),
- (Token.Text, u'i'),
- (Token.Operator, u'+'),
- (Token.Text, u'('),
- (Token.Text, u'a'),
- (Token.Operator, u'*'),
- (Token.Text, u'b'),
- (Token.Text, u')'),
- (Token.Punctuation, u')'),
- (Token.Text, u'\n'),
+ (Token.Name.Function, 'selected'),
+ (Token.Punctuation, '('),
+ (Token.Literal.String, '"'),
+ (Token.Literal.String, 'Sound'),
+ (Token.Literal.String, '"'),
+ (Token.Punctuation, ','),
+ (Token.Text, ' '),
+ (Token.Text, 'i'),
+ (Token.Operator, '+'),
+ (Token.Text, '('),
+ (Token.Text, 'a'),
+ (Token.Operator, '*'),
+ (Token.Text, 'b'),
+ (Token.Text, ')'),
+ (Token.Punctuation, ')'),
+ (Token.Text, '\n'),
]
assert list(lexer.get_tokens(fragment)) == tokens

def test_broken_unquoted_string(lexer):
- fragment = u'printline string\n... \'interpolated\' string\n'
+ fragment = 'printline string\n... \'interpolated\' string\n'
tokens = [
- (Token.Keyword, u'printline'),
- (Token.Text, u' '),
- (Token.Literal.String, u'string'),
- (Token.Text, u'\n'),
- (Token.Punctuation, u'...'),
- (Token.Text, u' '),
- (Token.Literal.String.Interpol, u"'interpolated'"),
- (Token.Text, u' '),
- (Token.Literal.String, u'string'),
- (Token.Text, u'\n'),
+ (Token.Keyword, 'printline'),
+ (Token.Text, ' '),
+ (Token.Literal.String, 'string'),
+ (Token.Text, '\n'),
+ (Token.Punctuation, '...'),
+ (Token.Text, ' '),
+ (Token.Literal.String.Interpol, "'interpolated'"),
+ (Token.Text, ' '),
+ (Token.Literal.String, 'string'),
+ (Token.Text, '\n'),
]
assert list(lexer.get_tokens(fragment)) == tokens

def test_inline_if(lexer):
- fragment = u'var = if true == 1 then -1 else 0 fi'
+ fragment = 'var = if true == 1 then -1 else 0 fi'
tokens = [
- (Token.Text, u'var'),
- (Token.Text, u' '),
- (Token.Operator, u'='),
- (Token.Text, u' '),
- (Token.Keyword, u'if'),
- (Token.Text, u' '),
- (Token.Text, u'true'),
- (Token.Text, u' '),
- (Token.Operator, u'=='),
- (Token.Text, u' '),
- (Token.Literal.Number, u'1'),
- (Token.Text, u' '),
- (Token.Keyword, u'then'),
- (Token.Text, u' '),
- (Token.Operator, u'-'),
- (Token.Literal.Number, u'1'),
- (Token.Text, u' '),
- (Token.Keyword, u'else'),
- (Token.Text, u' '),
- (Token.Literal.Number, u'0'),
- (Token.Text, u' '),
- (Token.Keyword, u'fi'),
- (Token.Text, u'\n'),
+ (Token.Text, 'var'),
+ (Token.Text, ' '),
+ (Token.Operator, '='),
+ (Token.Text, ' '),
+ (Token.Keyword, 'if'),
+ (Token.Text, ' '),
+ (Token.Text, 'true'),
+ (Token.Text, ' '),
+ (Token.Operator, '=='),
+ (Token.Text, ' '),
+ (Token.Literal.Number, '1'),
+ (Token.Text, ' '),
+ (Token.Keyword, 'then'),
+ (Token.Text, ' '),
+ (Token.Operator, '-'),
+ (Token.Literal.Number, '1'),
+ (Token.Text, ' '),
+ (Token.Keyword, 'else'),
+ (Token.Text, ' '),
+ (Token.Literal.Number, '0'),
+ (Token.Text, ' '),
+ (Token.Keyword, 'fi'),
+ (Token.Text, '\n'),
]
assert list(lexer.get_tokens(fragment)) == tokens
+
def test_interpolation_boundary(lexer):
- fragment = u'"\'" + "\'"'
+ fragment = '"\'" + "\'"'
tokens = [
- (Token.Literal.String, u'"'),
- (Token.Literal.String, u"'"),
- (Token.Literal.String, u'"'),
- (Token.Text, u' '),
- (Token.Operator, u'+'),
- (Token.Text, u' '),
- (Token.Literal.String, u'"'),
- (Token.Literal.String, u"'"),
- (Token.Literal.String, u'"'),
- (Token.Text, u'\n'),
+ (Token.Literal.String, '"'),
+ (Token.Literal.String, "'"),
+ (Token.Literal.String, '"'),
+ (Token.Text, ' '),
+ (Token.Operator, '+'),
+ (Token.Text, ' '),
+ (Token.Literal.String, '"'),
+ (Token.Literal.String, "'"),
+ (Token.Literal.String, '"'),
+ (Token.Text, '\n'),
]
assert list(lexer.get_tokens(fragment)) == tokens
+
def test_interpolated_numeric_indexed(lexer):
- fragment = u"'a[3]'"
+ fragment = "'a[3]'"
tokens = [
- (Token.Literal.String.Interpol, u"'a[3]'"),
- (Token.Text, u'\n'),
+ (Token.Literal.String.Interpol, "'a[3]'"),
+ (Token.Text, '\n'),
]
assert list(lexer.get_tokens(fragment)) == tokens
+
def test_interpolated_numeric_hash(lexer):
- fragment = u"'a[\"b\"]'"
+ fragment = "'a[\"b\"]'"
tokens = [
- (Token.Literal.String.Interpol, u"'a[\"b\"]'"),
- (Token.Text, u'\n'),
+ (Token.Literal.String.Interpol, "'a[\"b\"]'"),
+ (Token.Text, '\n'),
]
assert list(lexer.get_tokens(fragment)) == tokens
+
def test_interpolated_string_indexed(lexer):
- fragment = u"'a$[3]'"
+ fragment = "'a$[3]'"
tokens = [
- (Token.Literal.String.Interpol, u"'a$[3]'"),
- (Token.Text, u'\n'),
+ (Token.Literal.String.Interpol, "'a$[3]'"),
+ (Token.Text, '\n'),
]
assert list(lexer.get_tokens(fragment)) == tokens
+
def test_interpolated_string_hash(lexer):
- fragment = u"'a$[\"b\"]'"
+ fragment = "'a$[\"b\"]'"
tokens = [
- (Token.Literal.String.Interpol, u"'a$[\"b\"]'"),
- (Token.Text, u'\n'),
+ (Token.Literal.String.Interpol, "'a$[\"b\"]'"),
+ (Token.Text, '\n'),
]
assert list(lexer.get_tokens(fragment)) == tokens
+
def test_interpolated_numeric_with_precision(lexer):
- fragment = u"'a:3'"
+ fragment = "'a:3'"
tokens = [
- (Token.Literal.String.Interpol, u"'a:3'"),
- (Token.Text, u'\n'),
+ (Token.Literal.String.Interpol, "'a:3'"),
+ (Token.Text, '\n'),
]
assert list(lexer.get_tokens(fragment)) == tokens
+
def test_interpolated_indexed_numeric_with_precision(lexer):
- fragment = u"'a[3]:3'"
+ fragment = "'a[3]:3'"
tokens = [
- (Token.Literal.String.Interpol, u"'a[3]:3'"),
- (Token.Text, u'\n'),
+ (Token.Literal.String.Interpol, "'a[3]:3'"),
+ (Token.Text, '\n'),
]
assert list(lexer.get_tokens(fragment)) == tokens
+
def test_interpolated_local_numeric_with_precision(lexer):
- fragment = u"'a.a:3'"
+ fragment = "'a.a:3'"
tokens = [
- (Token.Literal.String.Interpol, u"'a.a:3'"),
- (Token.Text, u'\n'),
+ (Token.Literal.String.Interpol, "'a.a:3'"),
+ (Token.Text, '\n'),
]
assert list(lexer.get_tokens(fragment)) == tokens
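
Note: the `lexer` fixture named in the hunk header is defined above line 19 and is not part of this diff. A minimal sketch of what these tests assume, following the standard pygments test layout (the exact fixture body is an assumption, not shown in the commit):

    import pytest
    from pygments.lexers import PraatLexer
    from pygments.token import Token

    @pytest.fixture(scope='module')
    def lexer():
        # Yield a fresh PraatLexer; its get_tokens() method is the
        # pygments API every assertion in this file exercises.
        yield PraatLexer()

    # Usage, mirroring the first test above: get_tokens() returns
    # (token_type, value) pairs, with a trailing newline token appended.
    lx = PraatLexer()
    assert list(lx.get_tokens('var = -15e4\n'))[0] == (Token.Text, 'var')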