diff options
 sqlparse/filters/create_table_info.py | 3 +++
 tests/test_create_table_info.py       | 5 +++--
 2 files changed, 6 insertions(+), 2 deletions(-)
diff --git a/sqlparse/filters/create_table_info.py b/sqlparse/filters/create_table_info.py index 3496221..178386d 100644 --- a/sqlparse/filters/create_table_info.py +++ b/sqlparse/filters/create_table_info.py @@ -9,6 +9,7 @@ from sqlparse import tokens as T from types import GeneratorType +# FIXME: Don't use Pipeline if not necessary. Replace with stream class Pipeline(list): """Pipeline to process filters sequentially""" @@ -49,6 +50,7 @@ def get_create_table_info(stream): return pipe(stream) +# FIXME: Use StripWhitespace Filter instead of removed StripWhitespace def StripWhitespace(stream): "Strip the useless whitespaces from a stream leaving only the minimal ones" last_type = None @@ -77,6 +79,7 @@ def StripWhitespace(stream): last_type = token_type +# FIXME: Refactor code into smaller functions class InfoCreateTable(object): # sqlparse outputs some tokens as Keyword at places where they are names ALLOWED_KEYWORD_AS_NAME = 'data', 'source', 'type' diff --git a/tests/test_create_table_info.py b/tests/test_create_table_info.py index d31922e..67f3124 100644 --- a/tests/test_create_table_info.py +++ b/tests/test_create_table_info.py @@ -7,6 +7,8 @@ from sqlparse.filters import get_create_table_info from sqlparse.lexer import tokenize +# TODO: Use Py.Test for testing compat from 2.6, 2.7, 3.3-3.5+ +# TODO: Format test class TestCasePy27Features(object): class __AssertRaisesContext(object): def __init__(self, expected_exception, expected_regexp): @@ -35,8 +37,7 @@ class TestCasePy27Features(object): return self.__AssertRaisesContext(expected_exception, expected_regexp) - - +# TODO: Update exception in test for Py3 compat class Test_GetCreateTableInfo(TestCase, TestCasePy27Features): sql1 = """ CREATE TABLE item ( |