summaryrefslogtreecommitdiff
path: root/sqlparse/utils.py
diff options
context:
space:
mode:
Diffstat (limited to 'sqlparse/utils.py')
-rw-r--r--sqlparse/utils.py75
1 file changed, 26 insertions, 49 deletions
diff --git a/sqlparse/utils.py b/sqlparse/utils.py
index 3a49ac2..7595e9d 100644
--- a/sqlparse/utils.py
+++ b/sqlparse/utils.py
@@ -5,65 +5,42 @@ Created on 17/05/2012
'''
import re
+from collections import OrderedDict
-try:
- from collections import OrderedDict
-except ImportError:
- OrderedDict = None
+class Cache(OrderedDict):
+ """Cache with LRU algorithm using an OrderedDict as basis."""
+ def __init__(self, maxsize=100):
+ OrderedDict.__init__(self)
-if OrderedDict:
- class Cache(OrderedDict):
- """Cache with LRU algorithm using an OrderedDict as basis
- """
- def __init__(self, maxsize=100):
- OrderedDict.__init__(self)
+ self._maxsize = maxsize
- self._maxsize = maxsize
+ def __getitem__(self, key, *args, **kwargs):
+ # Get the key and remove it from the cache, or raise KeyError
+ value = OrderedDict.__getitem__(self, key)
+ del self[key]
- def __getitem__(self, key, *args, **kwargs):
- # Get the key and remove it from the cache, or raise KeyError
- value = OrderedDict.__getitem__(self, key)
- del self[key]
-
- # Insert the (key, value) pair on the front of the cache
- OrderedDict.__setitem__(self, key, value)
-
- # Return the value from the cache
- return value
-
- def __setitem__(self, key, value, *args, **kwargs):
- # Key was inserted before, remove it so we put it at front later
- if key in self:
- del self[key]
+ # Insert the (key, value) pair on the front of the cache
+ OrderedDict.__setitem__(self, key, value)
- # Too much items on the cache, remove the least recent used
- elif len(self) >= self._maxsize:
- self.popitem(False)
+ # Return the value from the cache
+ return value
- # Insert the (key, value) pair on the front of the cache
- OrderedDict.__setitem__(self, key, value, *args, **kwargs)
-
-else:
- class Cache(dict):
- """Cache that reset when gets full
- """
- def __init__(self, maxsize=100):
- dict.__init__(self)
-
- self._maxsize = maxsize
+ def __setitem__(self, key, value, *args, **kwargs):
+ # Key was inserted before, remove it so we put it at front later
+ if key in self:
+ del self[key]
- def __setitem__(self, key, value, *args, **kwargs):
- # Reset the cache if we have too much cached entries and start over
- if len(self) >= self._maxsize:
- self.clear()
+ # Too many items in the cache: remove the least recently used
+ elif len(self) >= self._maxsize:
+ self.popitem(False)
- # Insert the (key, value) pair on the front of the cache
- dict.__setitem__(self, key, value, *args, **kwargs)
+ # Insert the (key, value) pair on the front of the cache
+ OrderedDict.__setitem__(self, key, value, *args, **kwargs)
def memoize_generator(func):
- """Memoize decorator for generators
+ """Memoize decorator for generators.
Store `func` results in a cache according to their arguments as 'memoize'
does but instead this works on decorators instead of regular functions.
@@ -73,7 +50,6 @@ def memoize_generator(func):
cache = Cache()
def wrapped_func(*args, **kwargs):
-# params = (args, kwargs)
params = (args, tuple(sorted(kwargs.items())))
# Look if cached
@@ -120,6 +96,7 @@ SPLIT_REGEX = re.compile(r"""
LINE_MATCH = re.compile(r'(\r\n|\r|\n)')
+
def split_unquoted_newlines(text):
"""Split a string on all unquoted newlines.
@@ -134,4 +111,4 @@ def split_unquoted_newlines(text):
outputlines.append('')
else:
outputlines[-1] += line
- return outputlines \ No newline at end of file
+ return outputlines