
@kylegibson
Created Jul 5, 2020
Distributed Memcache Linked List (Queue)
import time
import uuid
from contextlib import contextmanager

# cache = ...  (a memcache-style client exposing get/set/add/delete,
#               e.g. django.core.cache.cache)


class DistributedLinkedList(object):
    """A singly linked list (usable as a queue) stored in a shared cache.

    Each node is a cache entry holding (previous_key, data); separate keys
    track the current head and tail.
    """

    def __init__(self, cache_key_name, cache_timeout=86400):
        self.cache_key_name = cache_key_name
        self.cache_timeout = cache_timeout
        self.head_key_ref = f'distributed-list-{cache_key_name}-head'
        self.tail_key_ref = f'distributed-list-{cache_key_name}-tail'

    def _lock(self):
        return wait_for_cache_lock(f'{self.cache_key_name}-lock')

    @property
    def head(self):
        return cache.get(self.head_key_ref)

    @head.setter
    def head(self, value):
        cache.set(self.head_key_ref, value, self.cache_timeout)

    @property
    def tail(self):
        return cache.get(self.tail_key_ref)

    @tail.setter
    def tail(self, value):
        cache.set(self.tail_key_ref, value, self.cache_timeout)

    def add(self, data):
        # Store the new node under a unique key, pointing back at the old tail.
        this_key = f'{self.cache_key_name}-{uuid.uuid4()}'
        with self._lock():
            head_key, tail_key = self.head, self.tail
            cache.set(this_key, (tail_key, data), self.cache_timeout)
            self.tail = this_key
            if head_key is None:
                self.head = this_key

    def clear(self):
        # Drain the list: return everything currently in it and reset the pointers.
        with self._lock():
            items = list(self)
            self.tail = None
            self.head = None
            return items

    def items(self, stop=None):
        # Walk from the tail back toward the head, yielding newest items first.
        head_key, tail_key = self.head, self.tail
        if stop is None:
            stop = head_key
        while tail_key is not None:
            prev_key, data = cache.get(tail_key, (None, None))
            yield data
            if tail_key == stop:
                break
            tail_key = prev_key

    def __iter__(self):
        return self.items()


@contextmanager
def wait_for_cache_lock(name):
    # Spin until cache.add succeeds (i.e. no other process holds the lock),
    # then release the lock key when the block exits.
    key = f'{name}-cache_lock'
    value = 1
    timeout = 10
    try:
        while cache.add(key, value, timeout) is False:
            time.sleep(0.1)
        yield
    finally:
        cache.delete(key)
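
Below is a minimal usage sketch. It assumes `cache` has been bound to a shared memcache-style backend (for example Django's `django.core.cache.cache`); the key name 'pending-jobs' and the payload dicts are purely illustrative.

# Hypothetical usage, assuming `cache` is wired to a shared cache backend.
queue = DistributedLinkedList('pending-jobs', cache_timeout=3600)

queue.add({'job_id': 1})
queue.add({'job_id': 2})

# Iteration walks from the tail back to the head, so the most recently
# added item comes first.
print(list(queue))     # [{'job_id': 2}, {'job_id': 1}]

# clear() drains the list under the lock and returns whatever was in it.
drained = queue.clear()
print(list(queue))     # []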