-rw-r--r--  sqlparse/sql.py        | 13
-rw-r--r--  tests/test_tokenize.py |  6
2 files changed, 17 insertions(+), 2 deletions(-)
diff --git a/sqlparse/sql.py b/sqlparse/sql.py
index 53c16be..8b530bd 100644
--- a/sqlparse/sql.py
+++ b/sqlparse/sql.py
@@ -44,7 +44,12 @@ class Token(object):
     def __repr__(self):
         cls = self._get_repr_name()
         value = self._get_repr_value()
-        return "<{cls} '{value}' at 0x{id:2X}>".format(id=id(self), **locals())
+        if value.startswith("'") and value.endswith("'"):
+            q = '"'
+        else:
+            q = "'"
+        return "<{cls} {q}{value}{q} at 0x{id:2X}>".format(
+            id=id(self), **locals())
 
     def _get_repr_name(self):
         return str(self.ttype).split('.')[-1]
@@ -165,7 +170,11 @@ class TokenList(Token):
         for idx, token in enumerate(self.tokens):
             cls = token._get_repr_name()
             value = token._get_repr_value()
-            print("{indent}{idx:2d} {cls} '{value}'"
+            if value.startswith("'") and value.endswith("'"):
+                q = '"'
+            else:
+                q = "'"
+            print("{indent}{idx:2d} {cls} {q}{value}{q}"
                   .format(**locals()), file=f)
             if token.is_group() and (max_depth is None or depth < max_depth):
diff --git a/tests/test_tokenize.py b/tests/test_tokenize.py
index 6cc0dfa..93645d8 100644
--- a/tests/test_tokenize.py
+++ b/tests/test_tokenize.py
@@ -95,6 +95,12 @@ def test_tokenlist_repr():
     assert repr(p.tokens[0])[:len(tst)] == tst
 
 
+def test_single_quotes():
+    p = sqlparse.parse("'test'")[0]
+    tst = "<Single \"'test'\" at 0x"
+    assert repr(p.tokens[0])[:len(tst)] == tst
+
+
 def test_tokenlist_first():
     p = sqlparse.parse(' select foo')[0]
     first = p.token_first()
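
A minimal usage sketch (not part of the commit) of the behavior this patch introduces, assuming a sqlparse build that includes it: when a token's value already starts and ends with a single quote, its repr is wrapped in double quotes instead, so string-literal tokens no longer render as ''test''. The expected outputs in the comments are illustrative; the hex id varies per run.

    import sqlparse

    # A plain keyword token keeps the single-quoted repr form,
    # e.g. <DML 'select' at 0x...>.
    stmt = sqlparse.parse('select foo')[0]
    print(repr(stmt.token_first()))

    # A single-quoted string literal now switches the outer quotes to
    # double quotes, matching the new test: <Single "'test'" at 0x...>
    literal = sqlparse.parse("'test'")[0]
    print(repr(literal.tokens[0]))

The same quote flip is applied in TokenList._pprint_tree, so the printed token tree renders string-literal values consistently with repr().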