Diffstat (limited to 'sqlparse')
-rw-r--r--   sqlparse/engine/grouping.py   10
-rw-r--r--   sqlparse/filters/reindent.py   1
-rw-r--r--   sqlparse/sql.py               13
3 files changed, 9 insertions(+), 15 deletions(-)
diff --git a/sqlparse/engine/grouping.py b/sqlparse/engine/grouping.py
index e7072d0..c52a759 100644
--- a/sqlparse/engine/grouping.py
+++ b/sqlparse/engine/grouping.py
@@ -64,7 +64,9 @@ def _group_matching(tlist, cls):
# this indicates invalid sql and unbalanced tokens.
# instead of break, continue in case other "valid" groups exist
continue
- tlist.group_tokens(cls, open_token, token)
+ oidx = tlist.token_index(open_token)
+ cidx = tlist.token_index(token)
+ tlist.group_tokens(cls, oidx, cidx)
def group_if(tlist):
@@ -196,10 +198,9 @@ def group_parenthesis(tlist):
def group_comments(tlist):
tidx, token = tlist.token_next_by(t=T.Comment)
while token:
- end = tlist.token_not_matching(
+ eidx, end = tlist.token_not_matching(
lambda tk: imt(tk, t=T.Comment) or tk.is_whitespace(), idx=tidx)
if end is not None:
- eidx = tlist.token_index(end)
eidx, end = tlist.token_prev(eidx, skip_ws=False)
tlist.group_tokens(sql.Comment, tidx, eidx)
@@ -218,7 +219,8 @@ def group_where(tlist):
end = tlist.tokens[eidx - 1]
# TODO: convert this to eidx instead of end token.
# i think above values are len(tlist) and eidx-1
- tlist.group_tokens(sql.Where, tidx, end)
+ eidx = tlist.token_index(end)
+ tlist.group_tokens(sql.Where, tidx, eidx)
tidx, token = tlist.token_next_by(m=sql.Where.M_OPEN, idx=tidx)
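Note: with these hunks the grouping helpers resolve token objects to list positions via token_index() and hand plain integer indices to group_tokens(). A minimal, illustrative sketch against this tree (the SQL text is made up and not part of the commit):

    import sqlparse
    from sqlparse import sql

    stmt = sqlparse.parse('select a from t where a = 1')[0]
    # After full parsing, the WHERE clause is already a grouped sql.Where node.
    where = stmt.tokens[-1]
    assert isinstance(where, sql.Where)
    # token_index() maps a token object back to its position in tlist.tokens;
    # that integer is what group_tokens() now expects for start and end.
    print(stmt.token_index(where))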
diff --git a/sqlparse/filters/reindent.py b/sqlparse/filters/reindent.py
index 1b539c6..68595a5 100644
--- a/sqlparse/filters/reindent.py
+++ b/sqlparse/filters/reindent.py
@@ -62,7 +62,6 @@ class ReindentFilter(object):
def _split_kwds(self, tlist):
tidx, token = self._next_token(tlist)
while token:
- tidx = tlist.token_index(token)
pidx, prev_ = tlist.token_prev(tidx, skip_ws=False)
uprev = text_type(prev_)
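The filter's behaviour is unchanged; _split_kwds() simply reuses the index that _next_token() already returns instead of recomputing it. For reference, the filter is exercised through the public formatter (illustrative input):

    import sqlparse

    print(sqlparse.format('select a, b from t where a = 1 and b = 2',
                          reindent=True, keyword_case='upper'))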
diff --git a/sqlparse/sql.py b/sqlparse/sql.py
index 4b6abf1..9656390 100644
--- a/sqlparse/sql.py
+++ b/sqlparse/sql.py
@@ -248,7 +248,7 @@ class TokenList(Token):
def token_not_matching(self, funcs, idx):
funcs = (funcs,) if not isinstance(funcs, (list, tuple)) else funcs
funcs = [lambda tk: not func(tk) for func in funcs]
- return self._token_matching(funcs, idx)[1]
+ return self._token_matching(funcs, idx)
def token_matching(self, funcs, idx):
return self._token_matching(funcs, idx)[1]
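With this change token_not_matching() returns the full (index, token) pair from _token_matching(), mirroring token_prev()/token_next_by(); that is what lets group_comments() above drop its separate token_index() lookup. A small sketch against this revision (the predicate mirrors the call style used in the grouping hunk; the SQL text is made up):

    import sqlparse

    stmt = sqlparse.parse('select foo')[0]
    # First token from position 1 onwards that is *not* whitespace,
    # returned together with its index.
    idx, tok = stmt.token_not_matching(lambda tk: tk.is_whitespace(), idx=1)
    print(idx, repr(tok))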
@@ -297,13 +297,9 @@ class TokenList(Token):
def group_tokens(self, grp_cls, start, end, include_end=True,
extend=False):
"""Replace tokens by an instance of *grp_cls*."""
- if isinstance(start, int):
- start_idx = start
- start = self.tokens[start_idx]
- else:
- start_idx = self.token_index(start)
+ start_idx = start
+ start = self.tokens[start_idx]
- end = end if isinstance(end, int) else self.token_index(end, start_idx)
end_idx = end + include_end
# will be needed later for new group_clauses
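Here start and end are now plain indices, and include_end just widens the half-open slice by one (end_idx = end + include_end, with True counting as 1). A hedged sketch against this tree; grouping a whole statement into a bare sql.TokenList is purely illustrative:

    import sqlparse
    from sqlparse import sql

    stmt = sqlparse.parse('select a, b')[0]
    # Group every top-level token; include_end=True also pulls the token at
    # the end index into the new group.
    stmt.group_tokens(sql.TokenList, 0, len(stmt.tokens) - 1)
    print(len(stmt.tokens), type(stmt.tokens[0]).__name__)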
@@ -390,9 +386,6 @@ class TokenList(Token):
def _get_first_name(self, idx=None, reverse=False, keywords=False):
"""Returns the name of the first token with a name"""
- if idx and not isinstance(idx, int):
- idx = self.token_index(idx) + 1
-
tokens = self.tokens[idx:] if idx else self.tokens
tokens = reversed(tokens) if reverse else tokens
types = [T.Name, T.Wildcard, T.String.Symbol]
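After this hunk _get_first_name() assumes an integer idx (or None) and simply slices self.tokens[idx:]; callers reach it through helpers such as get_real_name() and get_alias(). A brief, illustrative sketch (the SQL text and the token position in the comment are assumptions, not part of the commit):

    import sqlparse

    stmt = sqlparse.parse('select foo.bar as baz from tbl')[0]
    ident = stmt.tokens[2]   # the grouped Identifier 'foo.bar as baz'
    # get_real_name()/get_alias() call down into _get_first_name() with
    # integer offsets only.
    print(ident.get_real_name(), ident.get_alias())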