diff options
| author | Andi Albrecht <albrecht.andi@gmail.com> | 2009-06-24 06:13:29 +0200 |
|---|---|---|
| committer | Andi Albrecht <albrecht.andi@gmail.com> | 2009-06-24 06:13:29 +0200 |
| commit | d47657d07ab900604547efd0a4eb288775ad43b9 (patch) | |
| tree | 2c3587ac075c52ef3fce096081a505820f55f51a /tests | |
| parent | af3c47a5a5410ac48284e118d7f1c0fd7cfbd18a (diff) | |
| download | sqlparse-d47657d07ab900604547efd0a4eb288775ad43b9.tar.gz | |
Fix incorrect detection of keyword fragments in names (fixes issue7, reported and initial patch by andyboyko).
Diffstat (limited to 'tests')
| -rw-r--r-- | tests/test_tokenize.py | 21 |
1 file changed, 21 insertions, 0 deletions
diff --git a/tests/test_tokenize.py b/tests/test_tokenize.py
index 690ef31..a1dd4a8 100644
--- a/tests/test_tokenize.py
+++ b/tests/test_tokenize.py
@@ -41,6 +41,27 @@ class TestTokenize(unittest.TestCase):
         tokens = lexer.tokenize(sql)
         self.assertEqual(''.join(str(x[1]) for x in tokens), sql)
 
+    def test_inline_keywords(self):  # issue 7
+        sql = "create created_foo"
+        tokens = list(lexer.tokenize(sql))
+        self.assertEqual(len(tokens), 3)
+        self.assertEqual(tokens[0][0], Keyword.DDL)
+        self.assertEqual(tokens[2][0], Name)
+        self.assertEqual(tokens[2][1], u'created_foo')
+        sql = "enddate"
+        tokens = list(lexer.tokenize(sql))
+        self.assertEqual(len(tokens), 1)
+        self.assertEqual(tokens[0][0], Name)
+        sql = "join_col"
+        tokens = list(lexer.tokenize(sql))
+        self.assertEqual(len(tokens), 1)
+        self.assertEqual(tokens[0][0], Name)
+        sql = "left join_col"
+        tokens = list(lexer.tokenize(sql))
+        self.assertEqual(len(tokens), 3)
+        self.assertEqual(tokens[2][0], Name)
+        self.assertEqual(tokens[2][1], 'join_col')
+
 
 class TestToken(unittest.TestCase):
