From bcaf3269b3a8746a4ee8c848256d5f2b11b25d27 Mon Sep 17 00:00:00 2001
From: Andi Albrecht
Date: Tue, 6 Sep 2011 08:01:12 +0200
Subject: Code cleanup.

---
 sqlparse/engine/filter.py   | 5 +++--
 sqlparse/engine/grouping.py | 6 ++++--
 sqlparse/sql.py             | 4 ++--
 3 files changed, 9 insertions(+), 6 deletions(-)

(limited to 'sqlparse')

diff --git a/sqlparse/engine/filter.py b/sqlparse/engine/filter.py
index 89d9b15..421b3f3 100644
--- a/sqlparse/engine/filter.py
+++ b/sqlparse/engine/filter.py
@@ -61,14 +61,15 @@ class StatementFilter(TokenFilter):
         if unified == 'END':
             # Should this respect a preceeding BEGIN?
             # In CASE ... WHEN ... END this results in a split level -1.
-            self._begin_depth = max(0, self._begin_depth-1)
+            self._begin_depth = max(0, self._begin_depth - 1)
             return -1
 
         if ttype is T.Keyword.DDL and unified.startswith('CREATE'):
             self._is_create = True
             return 0
 
-        if unified in ('IF', 'FOR') and self._is_create and self._begin_depth > 0:
+        if (unified in ('IF', 'FOR')
+            and self._is_create and self._begin_depth > 0):
             return 1
 
         # Default
diff --git a/sqlparse/engine/grouping.py b/sqlparse/engine/grouping.py
index 9bc9612..72f919b 100644
--- a/sqlparse/engine/grouping.py
+++ b/sqlparse/engine/grouping.py
@@ -267,13 +267,15 @@ def group_aliased(tlist):
     token = tlist.token_next_by_instance(idx, (sql.Identifier, sql.Function))
     while token:
         next_ = tlist.token_next(tlist.token_index(token))
-        if next_ is not None and isinstance(next_, (sql.Identifier, sql.Function)):
+        if next_ is not None and isinstance(next_,
+                                            (sql.Identifier, sql.Function)):
             grp = tlist.tokens_between(token, next_)[1:]
             token.tokens.extend(grp)
             for t in grp:
                 tlist.tokens.remove(t)
         idx = tlist.token_index(token) + 1
-        token = tlist.token_next_by_instance(idx, (sql.Identifier, sql.Function))
+        token = tlist.token_next_by_instance(idx,
+                                             (sql.Identifier, sql.Function))
 
 
 def group_typecasts(tlist):
diff --git a/sqlparse/sql.py b/sqlparse/sql.py
index 5b8f067..4d56bf3 100644
--- a/sqlparse/sql.py
+++ b/sqlparse/sql.py
@@ -146,7 +146,7 @@ class TokenList(Token):
 
     def _pprint_tree(self, max_depth=None, depth=0):
         """Pretty-print the object tree."""
-        indent = ' '*(depth*2)
+        indent = ' ' * (depth * 2)
         for idx, token in enumerate(self.tokens):
             if token.is_group():
                 pre = ' +-'
@@ -156,7 +156,7 @@
                 token._get_repr_name(), token._get_repr_value())
             if (token.is_group()
                 and (max_depth is None or depth < max_depth)):
-                token._pprint_tree(max_depth, depth+1)
+                token._pprint_tree(max_depth, depth + 1)
 
     def flatten(self):
         """Generator yielding ungrouped tokens.
--
cgit v1.2.1