summaryrefslogtreecommitdiff
path: root/tests
diff options
context:
space:
mode:
authorBob Halley <halley@nominum.com>2010-01-12 13:31:58 -0800
committerBob Halley <halley@nominum.com>2010-01-12 13:31:58 -0800
commit45f795046415a44d41b203e2ba10b72a2abcf610 (patch)
tree8a0c5145bdfda87b2cc739bdaeddb450a616fbaf /tests
parent7f397a5a53b65a5c434c6686318c501f8548cc18 (diff)
downloaddnspython-45f795046415a44d41b203e2ba10b72a2abcf610.tar.gz
make a proper token object to facilitate future tokenization work
Diffstat (limited to 'tests')
-rw-r--r--tests/tokenizer.py95
1 file changed, 45 insertions, 50 deletions
diff --git a/tests/tokenizer.py b/tests/tokenizer.py
index 7ae981b..c5d41ac 100644
--- a/tests/tokenizer.py
+++ b/tests/tokenizer.py
@@ -18,112 +18,106 @@ import unittest
import dns.exception
import dns.tokenizer
+Token = dns.tokenizer.Token
+
class TokenizerTestCase(unittest.TestCase):
-
+
def testQuotedString1(self):
tok = dns.tokenizer.Tokenizer(r'"foo"')
- (ttype, value) = tok.get()
- self.failUnless(ttype == dns.tokenizer.QUOTED_STRING and
- value == 'foo')
+ token = tok.get()
+ self.failUnless(token == Token(dns.tokenizer.QUOTED_STRING, 'foo'))
def testQuotedString2(self):
tok = dns.tokenizer.Tokenizer(r'""')
- (ttype, value) = tok.get()
- self.failUnless(ttype == dns.tokenizer.QUOTED_STRING and
- value == '')
+ token = tok.get()
+ self.failUnless(token == Token(dns.tokenizer.QUOTED_STRING, ''))
def testQuotedString3(self):
tok = dns.tokenizer.Tokenizer(r'"\"foo\""')
- (ttype, value) = tok.get()
- self.failUnless(ttype == dns.tokenizer.QUOTED_STRING and
- value == '"foo"')
+ token = tok.get()
+ self.failUnless(token == Token(dns.tokenizer.QUOTED_STRING, '"foo"'))
def testQuotedString4(self):
tok = dns.tokenizer.Tokenizer(r'"foo\010bar"')
- (ttype, value) = tok.get()
- self.failUnless(ttype == dns.tokenizer.QUOTED_STRING and
- value == 'foo\x0abar')
+ token = tok.get()
+ self.failUnless(token == Token(dns.tokenizer.QUOTED_STRING, 'foo\x0abar'))
def testQuotedString5(self):
def bad():
tok = dns.tokenizer.Tokenizer(r'"foo')
- (ttype, value) = tok.get()
+ token = tok.get()
self.failUnlessRaises(dns.exception.UnexpectedEnd, bad)
def testQuotedString6(self):
def bad():
tok = dns.tokenizer.Tokenizer(r'"foo\01')
- (ttype, value) = tok.get()
+ token = tok.get()
self.failUnlessRaises(dns.exception.SyntaxError, bad)
def testQuotedString7(self):
def bad():
tok = dns.tokenizer.Tokenizer('"foo\nbar"')
- (ttype, value) = tok.get()
+ token = tok.get()
self.failUnlessRaises(dns.exception.SyntaxError, bad)
def testEmpty1(self):
tok = dns.tokenizer.Tokenizer('')
- (ttype, value) = tok.get()
- self.failUnless(ttype == dns.tokenizer.EOF)
+ token = tok.get()
+ self.failUnless(token.is_eof())
def testEmpty2(self):
tok = dns.tokenizer.Tokenizer('')
- (ttype1, value1) = tok.get()
- (ttype2, value2) = tok.get()
- self.failUnless(ttype1 == dns.tokenizer.EOF and
- ttype2 == dns.tokenizer.EOF)
+ token1 = tok.get()
+ token2 = tok.get()
+ self.failUnless(token1.is_eof() and token2.is_eof())
def testEOL(self):
tok = dns.tokenizer.Tokenizer('\n')
- (ttype1, value1) = tok.get()
- (ttype2, value2) = tok.get()
- self.failUnless(ttype1 == dns.tokenizer.EOL and
- ttype2 == dns.tokenizer.EOF)
+ token1 = tok.get()
+ token2 = tok.get()
+ self.failUnless(token1.is_eol() and token2.is_eof())
def testWS1(self):
tok = dns.tokenizer.Tokenizer(' \n')
- (ttype1, value1) = tok.get()
- self.failUnless(ttype1 == dns.tokenizer.EOL)
+ token1 = tok.get()
+ self.failUnless(token1.is_eol())
def testWS2(self):
tok = dns.tokenizer.Tokenizer(' \n')
- (ttype1, value1) = tok.get(want_leading=True)
- self.failUnless(ttype1 == dns.tokenizer.WHITESPACE)
+ token1 = tok.get(want_leading=True)
+ self.failUnless(token1.is_whitespace())
def testComment1(self):
tok = dns.tokenizer.Tokenizer(' ;foo\n')
- (ttype1, value1) = tok.get()
- self.failUnless(ttype1 == dns.tokenizer.EOL)
+ token1 = tok.get()
+ self.failUnless(token1.is_eol())
def testComment2(self):
tok = dns.tokenizer.Tokenizer(' ;foo\n')
- (ttype1, value1) = tok.get(want_comment = True)
- (ttype2, value2) = tok.get()
- self.failUnless(ttype1 == dns.tokenizer.COMMENT and
- value1 == 'foo' and
- ttype2 == dns.tokenizer.EOL)
+ token1 = tok.get(want_comment = True)
+ token2 = tok.get()
+ self.failUnless(token1 == Token(dns.tokenizer.COMMENT, 'foo') and
+ token2.is_eol())
def testComment3(self):
tok = dns.tokenizer.Tokenizer(' ;foo bar\n')
- (ttype1, value1) = tok.get(want_comment = True)
- (ttype2, value2) = tok.get()
- self.failUnless(ttype1 == dns.tokenizer.COMMENT and
- value1 == 'foo bar' and
- ttype2 == dns.tokenizer.EOL)
+ token1 = tok.get(want_comment = True)
+ token2 = tok.get()
+ self.failUnless(token1 == Token(dns.tokenizer.COMMENT, 'foo bar') and
+ token2.is_eol())
def testMultiline1(self):
tok = dns.tokenizer.Tokenizer('( foo\n\n bar\n)')
tokens = list(iter(tok))
- self.failUnless(tokens == [(dns.tokenizer.IDENTIFIER, 'foo'),
- (dns.tokenizer.IDENTIFIER, 'bar')])
+ self.failUnless(tokens == [Token(dns.tokenizer.IDENTIFIER, 'foo'),
+ Token(dns.tokenizer.IDENTIFIER, 'bar')])
def testMultiline2(self):
tok = dns.tokenizer.Tokenizer('( foo\n\n bar\n)\n')
tokens = list(iter(tok))
- self.failUnless(tokens == [(dns.tokenizer.IDENTIFIER, 'foo'),
- (dns.tokenizer.IDENTIFIER, 'bar'),
- (dns.tokenizer.EOL, '\n')])
+ self.failUnless(tokens == [Token(dns.tokenizer.IDENTIFIER, 'foo'),
+ Token(dns.tokenizer.IDENTIFIER, 'bar'),
+ Token(dns.tokenizer.EOL, '\n')])
def testMultiline3(self):
def bad():
tok = dns.tokenizer.Tokenizer('foo)')
@@ -141,7 +135,8 @@ class TokenizerTestCase(unittest.TestCase):
t1 = tok.get()
tok.unget(t1)
t2 = tok.get()
- self.failUnless(t1 == t2 and t1 == (dns.tokenizer.IDENTIFIER, 'foo'))
+ self.failUnless(t1 == t2 and t1.ttype == dns.tokenizer.IDENTIFIER and \
+ t1.value == 'foo')
def testUnget2(self):
def bad():
@@ -164,12 +159,12 @@ class TokenizerTestCase(unittest.TestCase):
def testEscapedDelimiter1(self):
tok = dns.tokenizer.Tokenizer(r'ch\ ld')
t = tok.get()
- self.failUnless(t == (dns.tokenizer.IDENTIFIER, r'ch ld'))
+ self.failUnless(t.ttype == dns.tokenizer.IDENTIFIER and t.value == r'ch ld')
def testEscapedDelimiter2(self):
tok = dns.tokenizer.Tokenizer(r'ch\0ld')
t = tok.get()
- self.failUnless(t == (dns.tokenizer.IDENTIFIER, r'ch\0ld'))
+ self.failUnless(t.ttype == dns.tokenizer.IDENTIFIER and t.value == r'ch\0ld')
if __name__ == '__main__':
unittest.main()