diff options
| author | Andi Albrecht <albrecht.andi@gmail.com> | 2011-04-13 16:04:56 +0200 |
|---|---|---|
| committer | Andi Albrecht <albrecht.andi@gmail.com> | 2011-04-13 16:04:56 +0200 |
| commit | 47e7f56a52870bdb278225fc05866efcb5e65a1d (patch) | |
| tree | 1b35d2b9e25a375cbc256fc406c0d55ac979f120 /sqlparse | |
| parent | ea2b23a46b4dacf1d76cdbdaa172e25e3b733466 (diff) | |
| download | sqlparse-47e7f56a52870bdb278225fc05866efcb5e65a1d.tar.gz | |
Don't group trailing whitespace in WHERE clauses (fixes issue35).
Diffstat (limited to 'sqlparse')
| -rw-r--r-- | sqlparse/engine/grouping.py | 3 | ||||
| -rw-r--r-- | sqlparse/sql.py | 11 |
2 files changed, 10 insertions, 4 deletions
diff --git a/sqlparse/engine/grouping.py b/sqlparse/engine/grouping.py index 3a2e3c3..d233772 100644 --- a/sqlparse/engine/grouping.py +++ b/sqlparse/engine/grouping.py @@ -253,7 +253,8 @@ def group_where(tlist): else: end = tlist.tokens[tlist.token_index(end) - 1] group = tlist.group_tokens(sql.Where, - tlist.tokens_between(token, end)) + tlist.tokens_between(token, end), + ignore_ws=True) idx = tlist.token_index(group) token = tlist.token_next_match(idx, T.Keyword, 'WHERE') diff --git a/sqlparse/sql.py b/sqlparse/sql.py index 8e2e82b..722204d 100644 --- a/sqlparse/sql.py +++ b/sqlparse/sql.py @@ -287,16 +287,21 @@ class TokenList(Token): If *exclude_end* is ``True`` (default is ``False``) the end token is included too. """ + # FIXME(andi): rename exclude_end to inlcude_end if exclude_end: offset = 0 else: offset = 1 - return self.tokens[ - self.token_index(start):self.token_index(end) + offset] + end_idx = self.token_index(end) + offset + start_idx = self.token_index(start) + return self.tokens[start_idx:end_idx] - def group_tokens(self, grp_cls, tokens): + def group_tokens(self, grp_cls, tokens, ignore_ws=False): """Replace tokens by an instance of *grp_cls*.""" idx = self.token_index(tokens[0]) + if ignore_ws: + while tokens and tokens[-1].is_whitespace(): + tokens = tokens[:-1] for t in tokens: self.tokens.remove(t) grp = grp_cls(tokens) |
