Skip to content

Instantly share code, notes, and snippets.

@idanz
Created September 16, 2012 05:51
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save idanz/3731181 to your computer and use it in GitHub Desktop.
Save idanz/3731181 to your computer and use it in GitHub Desktop.
ChainedCache Backend for Django
from django.core.cache import BaseCache
from django.core.cache import get_cache
from lock_factory import LockFactory
class ChainedCache(BaseCache):
    """A Django cache backend that chains several other cache backends.

    Reads walk the chain front-to-back (fastest cache first) and, on a hit
    in a deeper cache, back-fill the earlier levels for next time.  Writes
    and deletes are applied to every cache in the chain under a per-key
    lock so the levels cannot get out of sync with each other.

    ``params['CACHES']`` lists the names of the chained backends in lookup
    order; ``params['DEBUG']`` enables hit/miss tracing to stdout.

    NOTE: a miss is signalled by ``None``, so storing ``None`` as a value
    is indistinguishable from a missing key.
    """

    def __init__(self, name, params):
        # `name` is part of Django's backend constructor signature but is
        # not needed here.
        BaseCache.__init__(self, params)
        self.caches = [get_cache(cache_name)
                       for cache_name in params.get('CACHES', [])]
        self.debug = params.get('DEBUG', False)

    def add(self, key, value, timeout=None, version=None):
        """
        Set a value in the cache if the key does not already exist. If
        timeout is given, that timeout will be used for the key; otherwise
        the default cache timeout will be used.
        Returns True if the value was stored, False otherwise.
        """
        # Hold the per-key lock so the check-then-set is atomic with
        # respect to concurrent set()/delete() calls, which take the same
        # lock.  The lock is an RLock, so the nested self.set() below can
        # re-acquire it without deadlocking.
        with LockFactory.get_lock(self.make_key(key, version=version)):
            if self.has_key(key, version=version):
                return False
            self.set(key, value, timeout=timeout, version=version)
            return True

    def get(self, key, default=None, version=None):
        """
        Fetch a given key from the cache. If the key does not exist, return
        default, which itself defaults to None.
        """
        def recurse_get(cache_number=0):
            # Try level `cache_number`; on a miss, recurse into the deeper
            # levels and back-fill this one if they produce a value.
            if cache_number >= len(self.caches):
                return None
            cache = self.caches[cache_number]
            value = cache.get(key, version=version)
            if value is None:
                value = recurse_get(cache_number + 1)
                # Keep the value from the next cache in this cache for
                # next time.  Got to use the default timeout...
                if value is not None:
                    cache.set(key, value, version=version)
            elif self.debug:
                print('CACHE HIT FOR', key, 'ON LEVEL', cache_number)
            return value

        value = recurse_get()
        if value is None:
            if self.debug:
                print('CACHE MISS FOR', key)
            return default
        return value

    def set(self, key, value, timeout=None, version=None):
        """
        Set a value in the cache. If timeout is given, that timeout will be
        used for the key; otherwise the default cache timeout will be used.
        """
        # Just to be sure we don't get a race condition between different
        # caches, let's use a lock here.
        with LockFactory.get_lock(self.make_key(key, version=version)):
            for cache in self.caches:
                cache.set(key, value, timeout=timeout, version=version)

    def delete(self, key, version=None):
        """
        Delete a key from the cache, failing silently.
        """
        # Just to be sure we don't get a race condition between different
        # caches, let's use a lock here.
        with LockFactory.get_lock(self.make_key(key, version=version)):
            for cache in self.caches:
                cache.delete(key, version=version)

    def clear(self):
        """Remove *all* values from the cache at once."""
        # Clear back-to-front so a concurrent get() cannot re-populate an
        # already-cleared front cache from a not-yet-cleared deeper one.
        for cache in reversed(self.caches):
            cache.clear()
# For backwards compatibility
class CacheClass(ChainedCache):
    """Legacy alias kept so old-style BACKEND settings keep working."""
from weakref import WeakValueDictionary
from threading import RLock
class LockFactory(object):
    """Hands out one shared RLock per unique key.

    Locks live behind weak references, so a key's lock is garbage-collected
    automatically once no caller holds it any more.
    """

    def __init__(self):
        # key -> weakly-referenced RLock
        self._dct_locks = WeakValueDictionary()
        self._lock = RLock()

    def get_lock(self, key):
        """Return the lock registered for `key`, creating it on first use."""
        with self._lock:
            existing = self._dct_locks.get(key)
            if existing is not None:
                return existing
            # Bind the new lock to a local first: a strong reference must
            # exist while we insert it, or the weak entry would die at once.
            fresh = RLock()
            self._dct_locks[key] = fresh
            return fresh


# Module-level singleton; from here on the name refers to the instance,
# not the class.
LockFactory = LockFactory()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment