| author | Victor Uriarte <victor.m.uriarte@intel.com> | 2016-06-15 10:52:35 -0700 |
|---|---|---|
| committer | Victor Uriarte <victor.m.uriarte@intel.com> | 2016-06-15 14:05:16 -0700 |
| commit | caefd84779d1115b6775ea7c0039f969f3b10d8a (patch) | |
| tree | 17fb2c8dbf8b8e165c546051408c2f4915280b3d /sqlparse | |
| parent | 228059eb4da5ed2389fc7e987dba37c6d05ea3ea (diff) | |
| download | sqlparse-caefd84779d1115b6775ea7c0039f969f3b10d8a.tar.gz | |
Change group_matching back to idx
Diffstat (limited to 'sqlparse')
| -rw-r--r-- | sqlparse/engine/grouping.py | 22 |
1 file changed, 16 insertions, 6 deletions
```diff
diff --git a/sqlparse/engine/grouping.py b/sqlparse/engine/grouping.py
index b2b46bb..62f37a6 100644
--- a/sqlparse/engine/grouping.py
+++ b/sqlparse/engine/grouping.py
@@ -17,7 +17,16 @@ T_NAME = (T.Name, T.Name.Placeholder)
 def _group_matching(tlist, cls):
     """Groups Tokens that have beginning and end."""
     opens = []
-    for token in list(tlist):
+    tidx_offset = 0
+    for idx, token in enumerate(list(tlist)):
+        tidx = idx - tidx_offset
+
+        if token.is_whitespace():
+            # ~50% of tokens will be whitespace. Will checking early
+            # for them avoid 3 comparisons, but then add 1 more comparison
+            # for the other ~50% of tokens...
+            continue
+
         if token.is_group() and not isinstance(token, cls):
             # Check inside previously grouped (ie. parenthesis) if group
             # of differnt type is inside (ie, case). though ideally should
@@ -26,17 +35,18 @@ def _group_matching(tlist, cls):
             continue
 
         if token.match(*cls.M_OPEN):
-            opens.append(token)
+            opens.append(tidx)
+
         elif token.match(*cls.M_CLOSE):
             try:
-                open_token = opens.pop()
+                open_idx = opens.pop()
             except IndexError:
                 # this indicates invalid sql and unbalanced tokens.
                 # instead of break, continue in case other "valid" groups exist
                 continue
-            oidx = tlist.token_index(open_token)
-            cidx = tlist.token_index(token)
-            tlist.group_tokens(cls, oidx, cidx)
+            close_idx = tidx
+            tlist.group_tokens(cls, open_idx, close_idx)
+            tidx_offset += close_idx - open_idx
 
 
 def group_brackets(tlist):
```
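The substance of the change is index bookkeeping: instead of looking up the positions of the opening and closing tokens with `tlist.token_index()` after the fact, the loop enumerates the original token list and keeps a running `tidx_offset`. Each time `group_tokens()` collapses the span `open_idx..close_idx` into a single grouped token, the list shrinks by `close_idx - open_idx`, so that amount is added to the offset and subtracted from every later `enumerate()` index. Below is a minimal, self-contained sketch of the same idea on a plain Python list; the `group_parens` helper and its sample tokens are hypothetical, for illustration only, and stand in for sqlparse's `TokenList.group_tokens()`.

```python
def group_parens(tokens):
    """Collapse each balanced '(' ... ')' run in-place into a nested list."""
    opens = []
    tidx_offset = 0
    for idx, token in enumerate(list(tokens)):   # iterate over a snapshot
        tidx = idx - tidx_offset                 # position in the *current* list

        if token == '(':
            opens.append(tidx)
        elif token == ')':
            if not opens:
                # unbalanced input: skip and keep looking for valid groups
                continue
            open_idx = opens.pop()
            close_idx = tidx
            # Replace the open..close span with one grouped element; the list
            # shrinks by (close_idx - open_idx), so later indices must shift.
            tokens[open_idx:close_idx + 1] = [tokens[open_idx:close_idx + 1]]
            tidx_offset += close_idx - open_idx


tokens = ['a', '(', 'b', '(', 'c', ')', ')', 'd']
group_parens(tokens)
print(tokens)  # ['a', ['(', 'b', ['(', 'c', ')'], ')'], 'd']
```

With this scheme both positions come straight from the loop counter, rather than from a per-token search over `tlist` for each open/close pair.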
