summaryrefslogtreecommitdiff
path: root/sqlparse
diff options
context:
space:
mode:
authorJesús Leganés Combarro "Piranna" <piranna@gmail.com>2012-05-19 23:35:39 +0200
committerJesús Leganés Combarro "Piranna" <piranna@gmail.com>2012-05-19 23:35:39 +0200
commit3a7da1ca3402580e008ed8cd81addd43287acd47 (patch)
tree92157ac6a2a499acec0c9422f78b42b885c5fc78 /sqlparse
parent0a43050ab370a90f1036f37bf27b082e40419546 (diff)
downloadsqlparse-3a7da1ca3402580e008ed8cd81addd43287acd47.tar.gz
Improved cache system
Diffstat (limited to 'sqlparse')
-rw-r--r--sqlparse/utils.py62
1 file changed, 55 insertions, 7 deletions
diff --git a/sqlparse/utils.py b/sqlparse/utils.py
index 6321353..a349900 100644
--- a/sqlparse/utils.py
+++ b/sqlparse/utils.py
@@ -4,6 +4,60 @@ Created on 17/05/2012
@author: piranna
'''
+try:
+ from collections import OrderedDict
+except ImportError:
+ OrderedDict = None
+
+
+if OrderedDict:
+ class Cache(OrderedDict):
+ """Cache with LRU algorithm using an OrderedDict as basis
+ """
+ def __init__(self, maxsize=100):
+ OrderedDict.__init__(self)
+
+ self._maxsize = maxsize
+
+ def __getitem__(self, key, *args, **kwargs):
+ # Remove the (key, value) pair from the cache, or raise KeyError
+ value = self.pop(key)
+
+ # Insert the (key, value) pair on the front of the cache
+ OrderedDict.__setitem__(self, key, value)
+
+ # Return the value from the cache
+ return value
+
+ def __setitem__(self, key, value, *args, **kwargs):
+ # Key was inserted before, remove it so we put it at front later
+ if key in self:
+ del self[key]
+
+ # Too many items in the cache, remove the least recently used
+ elif len(self) >= self._maxsize:
+ self.popitem(False)
+
+ # Insert the (key, value) pair on the front of the cache
+ OrderedDict.__setitem__(self, key, value, *args, **kwargs)
+
+else:
+ class Cache(dict):
+ """Cache that resets when it gets full
+ """
+ def __init__(self, maxsize=100):
+ dict.__init__(self)
+
+ self._maxsize = maxsize
+
+ def __setitem__(self, key, value, *args, **kwargs):
+ # Reset the cache if we have too many cached entries and start over
+ if len(self) >= self._maxsize:
+ self.clear()
+
+ # Insert the (key, value) pair on the front of the cache
+ dict.__setitem__(self, key, value, *args, **kwargs)
+
def memoize_generator(func):
"""Memoize decorator for generators
@@ -13,7 +67,7 @@ def memoize_generator(func):
Obviously, this is only useful if the generator will always return the same
values for each specific parameters...
"""
- cache = {}
+ cache = Cache()
def wrapped_func(*args, **kwargs):
# params = (args, kwargs)
@@ -26,12 +80,6 @@ def memoize_generator(func):
# Not cached, exec and store it
except KeyError:
- # Reset the cache if we have too much cached entries and start over
- # In the future would be better to use an OrderedDict and drop the
- # Least Recent Used entries
- if len(cache) >= 10:
- cache.clear()
-
cached = []
for item in func(*args, **kwargs):