| author | Andi Albrecht <albrecht.andi@gmail.com> | 2009-05-03 20:54:57 +0200 |
|---|---|---|
| committer | Andi Albrecht <albrecht.andi@gmail.com> | 2009-05-03 20:54:57 +0200 |
| commit | 45db01becdc17108bc1c504503ee7b997059970d (patch) | |
| tree | 33d2369fbe6b9be39d71e8a833588e268839e272 | |
| parent | 1ac975c3af6b7a69b86935c549888cf10d70dfb5 (diff) | |
| download | sqlparse-45db01becdc17108bc1c504503ee7b997059970d.tar.gz | |
Added unittest for issue1 (linebreaks).
| -rw-r--r-- | tests/test_tokenize.py | 14 |
1 file changed, 14 insertions, 0 deletions
```diff
diff --git a/tests/test_tokenize.py b/tests/test_tokenize.py
index 74cf1fc..127b4db 100644
--- a/tests/test_tokenize.py
+++ b/tests/test_tokenize.py
@@ -24,3 +24,17 @@ class TestTokenize(unittest.TestCase):
         tokens = list(lexer.tokenize(sql))
         self.assertEqual(len(tokens), 3)
         self.assertEqual(tokens[0], (Name, u'`foo`'))
+
+    def test_linebreaks(self): # issue1
+        sql = 'foo\nbar\n'
+        tokens = lexer.tokenize(sql)
+        self.assertEqual(''.join(str(x[1]) for x in tokens), sql)
+        sql = 'foo\rbar\r'
+        tokens = lexer.tokenize(sql)
+        self.assertEqual(''.join(str(x[1]) for x in tokens), sql)
+        sql = 'foo\r\nbar\r\n'
+        tokens = lexer.tokenize(sql)
+        self.assertEqual(''.join(str(x[1]) for x in tokens), sql)
+        sql = 'foo\r\nbar\n'
+        tokens = lexer.tokenize(sql)
+        self.assertEqual(''.join(str(x[1]) for x in tokens), sql)
```
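The property this test pins down is that tokenization is lossless: concatenating the token values must reproduce the input verbatim, whatever line-ending convention it uses (`\n`, `\r`, or `\r\n`). A minimal standalone sketch of that round-trip check, assuming `lexer.tokenize()` yields `(tokentype, value)` pairs as in this revision:

```python
# Round-trip sketch for issue1: joining the lexer's token values must
# rebuild the input SQL exactly, including its linebreak characters.
# Assumes sqlparse is importable and lexer.tokenize() yields
# (tokentype, value) pairs, as the test above relies on.
from sqlparse import lexer

for sql in ('foo\nbar\n', 'foo\rbar\r', 'foo\r\nbar\r\n', 'foo\r\nbar\n'):
    tokens = lexer.tokenize(sql)  # generator of (tokentype, value) pairs
    roundtrip = ''.join(str(value) for _, value in tokens)
    assert roundtrip == sql, 'linebreaks were mangled for %r' % sql
```

Note that the last input mixes `\r\n` and `\n` in a single statement; a lexer that silently normalized line endings would still pass the three uniform cases but fail that one, which is presumably why it is included.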
