diff options
| author | Sjoerd Job Postmus <sjoerdjob@sjec.nl> | 2016-06-02 08:30:27 +0200 |
|---|---|---|
| committer | Victor Uriarte <victor.m.uriarte@intel.com> | 2016-06-12 17:33:15 -0700 |
| commit | d4cc0644c8348da5e49c58df5e26a3e969045249 (patch) | |
| tree | a23fcaa75e313369194a27e0f7ac040b20b9e23c /sqlparse/engine | |
| parent | 896774cb5298924abbcea81b9b90f1c7c10b3d6a (diff) | |
| download | sqlparse-d4cc0644c8348da5e49c58df5e26a3e969045249.tar.gz | |
Replace _group_matching with an inward-out grouping algorithm
All the matching between open/close was done all the time, first finding
the matching closing token, and then grouping the tokens in between, and
recurse over the newly created list.
Instead, it is more efficient to look for the previous open-token on
finding a closing-token, group these two together, and then continue on.
squashed: Handle token indices in group_tokens_between and find_matching.
Diffstat (limited to 'sqlparse/engine')
| -rw-r--r-- | sqlparse/engine/grouping.py | 28 |
1 files changed, 20 insertions, 8 deletions
```diff
diff --git a/sqlparse/engine/grouping.py b/sqlparse/engine/grouping.py
index ad7da9f..e004eae 100644
--- a/sqlparse/engine/grouping.py
+++ b/sqlparse/engine/grouping.py
@@ -2,7 +2,7 @@
 from sqlparse import sql
 from sqlparse import tokens as T
-from sqlparse.utils import recurse, imt, find_matching
+from sqlparse.utils import recurse, imt
 
 M_ROLE = (T.Keyword, ('null', 'role'))
 M_SEMICOLON = (T.Punctuation, ';')
@@ -39,13 +39,25 @@
 def _group_matching(tlist, cls):
     """Groups Tokens that have beginning and end. ie. parenthesis, brackets.."""
     idx = 1 if imt(tlist, i=cls) else 0
-    token = tlist.token_next_by(m=cls.M_OPEN, idx=idx)
-    while token:
-        end = find_matching(tlist, token, cls.M_OPEN, cls.M_CLOSE)
-        if end is not None:
-            token = tlist.group_tokens_between(cls, token, end)
-            _group_matching(token, cls)
-        token = tlist.token_next_by(m=cls.M_OPEN, idx=tlist.token_index(token) + 1)
+    opens = []
+
+    while True:
+        try:
+            token = tlist.tokens[idx]
+        except IndexError:
+            break
+
+        if token.match(*cls.M_OPEN):
+            opens.append(idx)
+        elif token.match(*cls.M_CLOSE):
+            try:
+                open_idx = opens.pop()
+            except IndexError:
+                break
+            tlist.group_tokens_between(cls, open_idx, idx)
+            idx = open_idx
+
+        idx += 1
 
 
 def group_if(tlist):
```
