Skip to content

Instantly share code, notes, and snippets.

@deedy5
Last active May 9, 2023 11:02
Show Gist options
  • Save deedy5/bb5b6651aa37a683b7bcdfd9dde7843c to your computer and use it in GitHub Desktop.
LRUCache (capacity + optional thread-safe)
import threading
class LRUCache:
    """A least-recently-used cache with a fixed capacity.

    Backed by a plain ``dict``, which preserves insertion order on
    Python 3.7+: the first key in iteration order is the least recently
    used and is evicted when the cache is full.  Both ``get`` and ``put``
    refresh an entry's recency by re-inserting it at the MRU end.

    Pass ``concurrent=True`` to guard all mutating operations
    (``get``/``put``/``del``) with an internal ``threading.Lock``.
    """

    def __init__(self, capacity, concurrent=False):
        """Create a cache holding at most *capacity* entries.

        Raises:
            ValueError: if *capacity* is not a positive integer
                (the original code crashed with StopIteration on the
                first eviction when capacity was 0).
        """
        if capacity < 1:
            raise ValueError("capacity must be a positive integer")
        self._capacity = capacity
        self._concurrent = concurrent
        self._cache = {}
        self._lock = threading.Lock()

    def __getitem__(self, key):
        # Delegates to get(); raises KeyError on a miss (dict semantics).
        return self.get(key)

    def __setitem__(self, key, value):
        self.put(key, value)

    def __delitem__(self, key):
        # Honor the lock here too (the original locked only put()).
        # Raises KeyError if the key is absent, matching dict semantics.
        if self._concurrent:
            with self._lock:
                self._cache.pop(key)
        else:
            self._cache.pop(key)

    def __contains__(self, key):
        return key in self._cache

    def __iter__(self):
        return iter(self._cache)

    def __len__(self):
        return len(self._cache)

    def _get(self, key, default):
        # Pop-and-reinsert moves the key to the MRU end, making reads
        # actually count toward recency (without this the cache evicted
        # in pure insertion order, i.e. FIFO, despite its name).
        try:
            value = self._cache.pop(key)
        except KeyError:
            # Bug fix: the original tested ``if default:``, so falsy
            # defaults (0, "", False) were never returned.  ``None``
            # still means "no default", which keeps __getitem__ raising
            # KeyError on a miss exactly as before.
            if default is not None:
                return default
            raise KeyError(key) from None
        self._cache[key] = value
        return value

    def get(self, key, default=None):
        """Return the value for *key* and refresh its recency.

        Args:
            key: the cache key to look up.
            default: value returned on a miss; when left as ``None``,
                a miss raises ``KeyError`` instead (so ``cache[key]``
                behaves like a dict lookup).

        Raises:
            KeyError: if *key* is absent and no default was supplied.
        """
        if self._concurrent:
            with self._lock:
                return self._get(key, default)
        return self._get(key, default)

    def _put(self, key, value):
        # Re-inserting an existing key moves it to the MRU end;
        # otherwise evict the LRU entry (first in iteration order)
        # when the cache is full.  Caller handles locking.
        if key in self._cache:
            self._cache.pop(key)
        elif len(self._cache) >= self._capacity:
            self._cache.pop(next(iter(self._cache)))
        self._cache[key] = value

    def put(self, key, value):
        """Insert or overwrite *key*, evicting the LRU entry if full.

        Returns ``None``, following the stdlib convention for in-place
        mutators (the original also returned None, via a redundant
        ``value = value = self._put(...)`` round-trip).
        """
        if self._concurrent:
            with self._lock:
                self._put(key, value)
        else:
            self._put(key, value)

    def items(self):
        """Return a live items view, ordered LRU-first."""
        return self._cache.items()
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment