author    Andi Albrecht <albrecht.andi@googlemail.com>  2012-04-27 22:16:30 -0700
committer Andi Albrecht <albrecht.andi@googlemail.com>  2012-04-27 22:16:30 -0700
commit    210dce4cdf97441fc9a4e9bfe6c96b8f34612e5b (patch)
tree      bd39f9ae5cb46f05b47d5bda0c6362d2ea6b0a61
parent    5cc2604dea7d6004ec9607fe77b48abf27d8e3ee (diff)
parent    28f9c777545bb18fd3141568e2a25de685c3c30f (diff)
Merge pull request #65 from piranna/master
parse() and parsestream() unification
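
After this merge, parse() is a thin wrapper that materializes the generator returned by parsestream(), so both entry points share one filter pipeline. A minimal usage sketch of the unified behaviour (the sample SQL string is illustrative only):

    import sqlparse

    sql = "select * from foo; select 1 from bar;"

    # parse() returns a tuple of Statement instances in one go ...
    statements = sqlparse.parse(sql)

    # ... while parsestream() yields equivalent statements lazily.
    for stmt in sqlparse.parsestream(sql):
        print(stmt)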
-rw-r--r--  AUTHORS               5
-rw-r--r--  sqlparse/__init__.py  6
-rw-r--r--  sqlparse/filters.py   3
3 files changed, 6 insertions, 8 deletions
diff --git a/AUTHORS b/AUTHORS
index 10b68b8..dd60070 100644
--- a/AUTHORS
+++ b/AUTHORS
@@ -4,6 +4,5 @@ This module contains code (namely the lexer and filter mechanism) from
 the pygments project that was written by Georg Brandl.
 
 Alphabetical list of contributors:
-* Jesús Leganés Combarro
-
-
+* Jesús Leganés Combarro "Piranna" <piranna@gmail.com>
+* quest <quest@wonky.windwards.net>
\ No newline at end of file
diff --git a/sqlparse/__init__.py b/sqlparse/__init__.py
index 58a560c..a380e33 100644
--- a/sqlparse/__init__.py
+++ b/sqlparse/__init__.py
@@ -26,13 +26,11 @@ def parse(sql):
     Returns a tuple of :class:`~sqlparse.sql.Statement` instances.
     """
-    stack = engine.FilterStack()
-    stack.full_analyze()
-    return tuple(stack.run(sql))
+    return tuple(parsestream(sql))
 
 
 def parsestream(stream):
-    """Pares sql statements from file-like object.
+    """Parse sql statements from file-like object.
 
     Returns a generator of Statement instances.
     """
diff --git a/sqlparse/filters.py b/sqlparse/filters.py
index bfa757d..99ef80c 100644
--- a/sqlparse/filters.py
+++ b/sqlparse/filters.py
@@ -219,7 +219,7 @@ class StripWhitespaceFilter(Filter):
         self._stripws_default(tlist)
 
     def process(self, stack, stmt, depth=0):
-        [self.process(stack, sgroup, depth+1)
+        [self.process(stack, sgroup, depth + 1)
          for sgroup in stmt.get_sublists()]
         self._stripws(stmt)
         if depth == 0 and stmt.tokens[-1].is_whitespace():
@@ -493,6 +493,7 @@ class SerializerUnicode(Filter):
             res += '\n'
         return res
 
+
 def Tokens2Unicode(stream):
     result = ""