diff options
| author | Andi Albrecht <albrecht.andi@gmail.com> | 2009-06-24 06:13:29 +0200 |
|---|---|---|
| committer | Andi Albrecht <albrecht.andi@gmail.com> | 2009-06-24 06:13:29 +0200 |
| commit | d47657d07ab900604547efd0a4eb288775ad43b9 (patch) | |
| tree | 2c3587ac075c52ef3fce096081a505820f55f51a | |
| parent | af3c47a5a5410ac48284e118d7f1c0fd7cfbd18a (diff) | |
| download | sqlparse-d47657d07ab900604547efd0a4eb288775ad43b9.tar.gz | |
Fix incorrect detection of keyword fragments in names (fixes issue7, reported and initial patch by andyboyko).
| -rw-r--r-- | CHANGES | 8 | ||||
| -rw-r--r-- | sqlparse/lexer.py | 6 | ||||
| -rw-r--r-- | tests/test_tokenize.py | 21 |
3 files changed, 32 insertions, 3 deletions
@@ -1,3 +1,11 @@ +In Development +-------------- + +Bug Fixes + * Fixed incorrect detection of keyword fragments embedded in names (issue7, + reported and initial patch by andyboyko). + + Release 0.1.1 (May 6, 2009) --------------------------- diff --git a/sqlparse/lexer.py b/sqlparse/lexer.py index 8ccc7de..1cdb98c 100644 --- a/sqlparse/lexer.py +++ b/sqlparse/lexer.py @@ -176,9 +176,9 @@ class Lexer: # TODO: Backslash escapes? (r"'(''|[^'])*'", String.Single), (r'"(""|[^"])*"', String.Symbol), # not a real string literal in ANSI SQL - (r'(LEFT |RIGHT )?(INNER |OUTER )?JOIN', Keyword), - (r'END( IF| LOOP)?', Keyword), - (r'CREATE( OR REPLACE)?', Keyword.DDL), + (r'(LEFT |RIGHT )?(INNER |OUTER )?JOIN\b', Keyword), + (r'END( IF| LOOP)?\b', Keyword), + (r'CREATE( OR REPLACE)?\b', Keyword.DDL), (r'[a-zA-Z_][a-zA-Z0-9_]*', is_keyword), (r'\$([a-zA-Z_][a-zA-Z0-9_]*)?\$', Name.Builtin), (r'[;:()\[\],\.]', Punctuation), diff --git a/tests/test_tokenize.py b/tests/test_tokenize.py index 690ef31..a1dd4a8 100644 --- a/tests/test_tokenize.py +++ b/tests/test_tokenize.py @@ -41,6 +41,27 @@ class TestTokenize(unittest.TestCase): tokens = lexer.tokenize(sql) self.assertEqual(''.join(str(x[1]) for x in tokens), sql) + def test_inline_keywords(self): # issue 7 + sql = "create created_foo" + tokens = list(lexer.tokenize(sql)) + self.assertEqual(len(tokens), 3) + self.assertEqual(tokens[0][0], Keyword.DDL) + self.assertEqual(tokens[2][0], Name) + self.assertEqual(tokens[2][1], u'created_foo') + sql = "enddate" + tokens = list(lexer.tokenize(sql)) + self.assertEqual(len(tokens), 1) + self.assertEqual(tokens[0][0], Name) + sql = "join_col" + tokens = list(lexer.tokenize(sql)) + self.assertEqual(len(tokens), 1) + self.assertEqual(tokens[0][0], Name) + sql = "left join_col" + tokens = list(lexer.tokenize(sql)) + self.assertEqual(len(tokens), 3) + self.assertEqual(tokens[2][0], Name) + self.assertEqual(tokens[2][1], 'join_col') + class TestToken(unittest.TestCase): |
