author    Sjoerd Job Postmus <sjoerdjob@sjec.nl>    2016-06-02 10:08:00 +0200
committer Victor Uriarte <victor.m.uriarte@intel.com>    2016-06-12 17:38:13 -0700
commit    67dc823e1174eee9ea2159674c8eb016b2f95b54 (patch)
tree      72338f09570c72f56934f7618f1b74eef8ff30f1 /sqlparse/sql.py
parent    237575ef726e4232b60a5043177c43a72f370238 (diff)
Use specialized token_idx_next_by in group_aliased.
The method group_aliased was making a lot of calls to token_index. By specializing token_next_by into token_idx_next_by, which returns the index along with the token, those token_index calls became superfluous. Also use token_idx_next_by in group_identifier_list, which was likewise making a lot of calls; they are now reduced by more than half.
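For illustration, a minimal sketch of the call pattern this change removes. The tlist variable and the Identifier match stand in for the actual call sites in the grouping code, so treat this as an assumption rather than the literal old code:

    from sqlparse.sql import Identifier

    # Before: token_next_by returns only the matching token, so a caller
    # that also needs its position must re-scan the token list.
    token = tlist.token_next_by(i=Identifier)
    idx = tlist.token_index(token)   # extra linear scan per match

    # After: token_idx_next_by yields the position and the token in a
    # single pass, making the token_index call superfluous.
    idx, token = tlist.token_idx_next_by(i=Identifier)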
Diffstat (limited to 'sqlparse/sql.py')
-rw-r--r--    sqlparse/sql.py    20
1 files changed, 20 insertions, 0 deletions
diff --git a/sqlparse/sql.py b/sqlparse/sql.py
index dfe0430..928b784 100644
--- a/sqlparse/sql.py
+++ b/sqlparse/sql.py
@@ -225,6 +225,22 @@ class TokenList(Token):
     def _groupable_tokens(self):
         return self.tokens
 
+    def _token_idx_matching(self, funcs, start=0, end=None, reverse=False):
+        """next token that match functions"""
+        if start is None:
+            return None
+
+        if not isinstance(funcs, (list, tuple)):
+            funcs = (funcs,)
+
+        iterable = enumerate(self.tokens[start:end], start=start)
+
+        for idx, token in iterable:
+            for func in funcs:
+                if func(token):
+                    return idx, token
+        return None, None
+
     def _token_matching(self, funcs, start=0, end=None, reverse=False):
         """next token that match functions"""
         if start is None:
@@ -259,6 +275,10 @@
                                 (ignore_comments and imt(tk, i=Comment)))
         return self._token_matching(funcs)
 
+    def token_idx_next_by(self, i=None, m=None, t=None, idx=0, end=None):
+        funcs = lambda tk: imt(tk, i, m, t)
+        return self._token_idx_matching(funcs, idx, end)
+
     def token_next_by(self, i=None, m=None, t=None, idx=0, end=None):
         funcs = lambda tk: imt(tk, i, m, t)
         return self._token_matching(funcs, idx, end)
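A hypothetical usage sketch of the new method, assuming the sqlparse tree as of this commit; the SQL string and variable names are illustrative:

    import sqlparse
    from sqlparse.sql import Identifier

    stmt = sqlparse.parse('SELECT foo.bar AS baz FROM foo')[0]

    # One pass returns both the index and the token ...
    idx, token = stmt.token_idx_next_by(i=Identifier)

    # ... so the search can resume right after the match without a
    # token_index() lookup. On exhaustion, (None, None) is returned,
    # which keeps the tuple unpacking valid.
    while token is not None:
        idx, token = stmt.token_idx_next_by(i=Identifier, idx=idx + 1)

Note the asymmetry in _token_idx_matching: a None start short-circuits to a bare None, while an exhausted search returns the (None, None) pair that callers unpack.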