diff options
| author | Andi Albrecht <albrecht.andi@gmail.com> | 2011-07-25 18:24:02 +0200 |
|---|---|---|
| committer | Andi Albrecht <albrecht.andi@gmail.com> | 2011-07-25 18:24:02 +0200 |
| commit | 0ec64528c5fb1efb71122985eab9c7137a80603d (patch) | |
| tree | dbd4330ac149c12cb7dba54b0ad3fa54f108facd | |
| parent | c078f16a40a832df5c0dcb58c982d6941dbe879f (diff) | |
| download | sqlparse-0ec64528c5fb1efb71122985eab9c7137a80603d.tar.gz | |
Detection of placeholders in parameterized queries (fixes issue22).
| -rw-r--r-- | CHANGES | 2 | ||||
| -rw-r--r-- | sqlparse/lexer.py | 4 | ||||
| -rw-r--r-- | tests/test_parse.py | 20 |
3 files changed, 25 insertions, 1 deletions
@@ -14,6 +14,8 @@ Bug Fixes (issue36, nyuhu...@gmail.com). * Don't treat single characters as keywords (issue32). * Improve parsing of stand-alone comments (issue26). + * Detection of placeholders in paramterized queries (issue22, + reported by Glyph Lefkowitz). Release 0.1.2 (Nov 23, 2010) diff --git a/sqlparse/lexer.py b/sqlparse/lexer.py index 507217a..950ef1b 100644 --- a/sqlparse/lexer.py +++ b/sqlparse/lexer.py @@ -175,6 +175,9 @@ class Lexer: (r'CASE\b', tokens.Keyword), # extended CASE(foo) (r"`(``|[^`])*`", tokens.Name), (r"´(´´|[^´])*´", tokens.Name), + (r'\$([a-zA-Z_][a-zA-Z0-9_]*)?\$', tokens.Name.Builtin), + (r'\?{1}', tokens.Name.Placeholder), + (r'[$:?%][a-zA-Z0-9_]+[^$:?%]?', tokens.Name.Placeholder), (r'@[a-zA-Z_][a-zA-Z0-9_]+', tokens.Name), (r'[a-zA-Z_][a-zA-Z0-9_]*(?=[.(])', tokens.Name), # see issue39 (r'[<>=~!]+', tokens.Operator.Comparison), @@ -191,7 +194,6 @@ class Lexer: (r'NOT NULL\b', tokens.Keyword), (r'CREATE( OR REPLACE)?\b', tokens.Keyword.DDL), (r'[a-zA-Z_][a-zA-Z0-9_]*', is_keyword), - (r'\$([a-zA-Z_][a-zA-Z0-9_]*)?\$', tokens.Name.Builtin), (r'[;:()\[\],\.]', tokens.Punctuation), ], 'multiline-comments': [ diff --git a/tests/test_parse.py b/tests/test_parse.py index bcde674..6ff8dd8 100644 --- a/tests/test_parse.py +++ b/tests/test_parse.py @@ -75,3 +75,23 @@ class SQLParseTest(TestCaseBase): t = sqlparse.parse('12.5')[0].tokens self.assertEqual(len(t), 1) self.assert_(t[0].ttype is sqlparse.tokens.Number.Float) + + def test_placeholder(self): + def _get_tokens(sql): + return sqlparse.parse(sql)[0].tokens[-1].tokens + t = _get_tokens('select * from foo where user = ?') + self.assert_(t[-1].ttype is sqlparse.tokens.Name.Placeholder) + self.assertEqual(t[-1].value, '?') + t = _get_tokens('select * from foo where user = :1') + self.assert_(t[-1].ttype is sqlparse.tokens.Name.Placeholder) + self.assertEqual(t[-1].value, ':1') + t = _get_tokens('select * from foo where user = :name') + self.assert_(t[-1].ttype is sqlparse.tokens.Name.Placeholder) + self.assertEqual(t[-1].value, ':name') + t = _get_tokens('select * from foo where user = %s') + self.assert_(t[-1].ttype is sqlparse.tokens.Name.Placeholder) + self.assertEqual(t[-1].value, '%s') + t = _get_tokens('select * from foo where user = $a') + self.assert_(t[-1].ttype is sqlparse.tokens.Name.Placeholder) + self.assertEqual(t[-1].value, '$a') + |
