diff options
| author | Jesús Leganés Combarro "Piranna" <piranna@gmail.com> | 2012-04-21 00:33:06 +0200 |
|---|---|---|
| committer | Jesús Leganés Combarro "Piranna" <piranna@gmail.com> | 2012-04-21 00:33:06 +0200 |
| commit | 91e44ff73d6bf0762b8bda4d18957e29ce2d52ae (patch) | |
| tree | b7b2a906ce92f6001b113c99fe5ea9b7fca7c274 /sqlparse | |
| parent | 3f23d26a4fe42e204ea3c337e2ccb69602cd5365 (diff) | |
| parent | 0afebf47e24d8a1ee1981faef39c0a15a798f7fd (diff) | |
| download | sqlparse-91e44ff73d6bf0762b8bda4d18957e29ce2d52ae.tar.gz | |
Merge branch 'master' of github.com:andialbrecht/sqlparse
Diffstat (limited to 'sqlparse')
| -rw-r--r-- | sqlparse/__init__.py | 4 | ||||
| -rw-r--r-- | sqlparse/engine/grouping.py | 32 | ||||
| -rw-r--r-- | sqlparse/filters.py | 12 | ||||
| -rw-r--r-- | sqlparse/sql.py | 10 |
4 files changed, 37 insertions, 21 deletions
diff --git a/sqlparse/__init__.py b/sqlparse/__init__.py index 5ccf092..f924c04 100644 --- a/sqlparse/__init__.py +++ b/sqlparse/__init__.py @@ -6,7 +6,7 @@ """Parse SQL statements.""" -__version__ = '0.1.3' +__version__ = '0.1.4' class SQLParseError(Exception): @@ -58,4 +58,4 @@ def split(sql): from sqlparse.engine.filter import StatementFilter def split2(stream): splitter = StatementFilter() - return list(splitter.process(None, stream))
\ No newline at end of file + return list(splitter.process(None, stream)) diff --git a/sqlparse/engine/grouping.py b/sqlparse/engine/grouping.py index b0c6c2c..55ec7e2 100644 --- a/sqlparse/engine/grouping.py +++ b/sqlparse/engine/grouping.py @@ -194,9 +194,12 @@ def group_identifier_list(tlist): lambda t: t.ttype == T.Name, lambda t: t.ttype == T.Wildcard, lambda t: t.match(T.Keyword, 'null'), + lambda t: t.match(T.Keyword, 'role'), lambda t: t.ttype == T.Number.Integer, lambda t: t.ttype == T.String.Single, + lambda t: t.ttype == T.Name.Placeholder, lambda t: isinstance(t, sql.Comparison), + lambda t: isinstance(t, sql.Comment), ] tcomma = tlist.token_next_match(idx, T.Punctuation, ',') start = None @@ -314,18 +317,19 @@ def group_functions(tlist): def group(tlist): - for func in [group_parenthesis, - group_functions, - group_comments, - group_where, - group_case, - group_identifier, - group_typecasts, - group_as, - group_aliased, - group_assignment, - group_comparison, - group_identifier_list, - group_if, - group_for]: + for func in [ + group_comments, + group_parenthesis, + group_functions, + group_where, + group_case, + group_identifier, + group_typecasts, + group_as, + group_aliased, + group_assignment, + group_comparison, + group_identifier_list, + group_if, + group_for]: func(tlist) diff --git a/sqlparse/filters.py b/sqlparse/filters.py index 6bb3415..bfa757d 100644 --- a/sqlparse/filters.py +++ b/sqlparse/filters.py @@ -218,10 +218,11 @@ class StripWhitespaceFilter(Filter): tlist.tokens.pop(-2) self._stripws_default(tlist) - def process(self, stack, stmt): - [self.process(stack, sgroup) for sgroup in stmt.get_sublists()] + def process(self, stack, stmt, depth=0): + [self.process(stack, sgroup, depth+1) + for sgroup in stmt.get_sublists()] self._stripws(stmt) - if stmt.tokens[-1].is_whitespace(): + if depth == 0 and stmt.tokens[-1].is_whitespace(): stmt.tokens.pop(-1) @@ -331,6 +332,9 @@ class ReindentFilter(Filter): self.offset += num_offset for token in 
identifiers[1:]: tlist.insert_before(token, self.nl()) + for token in tlist.tokens: + if isinstance(token, sql.Comment): + tlist.insert_after(token, self.nl()) self.offset -= num_offset self._process_default(tlist) @@ -610,4 +614,4 @@ class Limit(Filter): if index and token_type in Keyword and value == 'LIMIT': return stream[4 - index][1] - return -1
\ No newline at end of file + return -1 diff --git a/sqlparse/sql.py b/sqlparse/sql.py index 72609e2..9c7aeee 100644 --- a/sqlparse/sql.py +++ b/sqlparse/sql.py @@ -137,7 +137,7 @@ class TokenList(Token): if tokens is None: tokens = [] self.tokens = tokens - Token.__init__(self, None, None) + Token.__init__(self, None, unicode(self)) def __unicode__(self): return ''.join(unicode(x) for x in self.flatten()) @@ -322,6 +322,14 @@ class TokenList(Token): """Inserts *token* before *where*.""" self.tokens.insert(self.token_index(where), token) + def insert_after(self, where, token): + """Inserts *token* after *where*.""" + next_token = self.token_next(where) + if next_token is None: + self.tokens.append(token) + else: + self.tokens.insert(self.token_index(next_token), token) + def has_alias(self): """Returns ``True`` if an alias is present.""" return self.get_alias() is not None |
