@helix84
Forked from jmdacruz/timed_cache.py
Last active December 12, 2022 06:49
Python lru_cache with timeout
from datetime import datetime, timedelta
import functools


def timed_cache(**timedelta_kwargs):
    def _wrapper(f):
        maxsize = timedelta_kwargs.pop('maxsize', 128)
        typed = timedelta_kwargs.pop('typed', False)
        # Remaining keyword arguments (seconds=, minutes=, ...) define the expiry window
        update_delta = timedelta(**timedelta_kwargs)
        next_update = datetime.utcnow() - update_delta
        # Apply @lru_cache to f
        f = functools.lru_cache(maxsize=maxsize, typed=typed)(f)

        @functools.wraps(f)
        def _wrapped(*args, **kwargs):
            timed_cache_clear()
            return f(*args, **kwargs)

        def timed_cache_clear():
            """Clear the whole cache when the expiry window has passed"""
            nonlocal next_update
            now = datetime.utcnow()
            if now >= next_update:
                f.cache_clear()
                next_update = now + update_delta

        def cache_info():
            """Report cache statistics (after expiring the cache if due)"""
            timed_cache_clear()
            return f.cache_info()

        _wrapped.cache_info = cache_info
        _wrapped.cache_clear = f.cache_clear
        return _wrapped

    return _wrapper


def timed_cache_test():
    import timed_cache
    from time import sleep

    @timed_cache.timed_cache(maxsize=128, seconds=0.1)
    def test(a):
        return a + 1

    assert test.cache_info().currsize == 0
    test(1)
    assert test.cache_info().currsize == 1
    sleep(0.11)
    assert test.cache_info().currsize == 0
    test(1)
    assert test.cache_info().currsize == 1
    sleep(0.11)
    test(1)
    test(2)
    assert test.cache_info().currsize == 2
    sleep(0.11)
    assert test.cache_info().currsize == 0


# python -c 'import timed_cache; timed_cache.timed_cache_test()'
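
For everyday use, the decorator accepts the same maxsize and typed options as functools.lru_cache, and every other keyword argument is forwarded to datetime.timedelta to set the expiry window. A minimal usage sketch, assuming the module is saved as timed_cache.py (as the run command above implies) and using a made-up slow_square function:

from time import sleep

from timed_cache import timed_cache

@timed_cache(maxsize=32, minutes=5)
def slow_square(n):
    sleep(1)        # stand-in for an expensive computation
    return n * n

slow_square(3)      # takes about a second, result is cached
slow_square(3)      # served from the cache until the 5-minute window expires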
helix84 commented Nov 21, 2018

This was just an experiment. The disadvantage is that there is a single expiry window shared by all items in the cache: when it elapses, the whole cache is cleared at once.
I'll be using cachetools.TTLCache instead, which gives each cached item its own timeout.
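
For comparison, a minimal sketch of that cachetools approach (cachetools has to be installed separately; get_user and its argument are just placeholders here): each entry expires ttl seconds after it was stored, independently of the others, instead of the whole cache being dropped at once.

from cachetools import TTLCache, cached

@cached(cache=TTLCache(maxsize=128, ttl=600))
def get_user(user_id):
    # Placeholder for an expensive lookup, e.g. a database or API call;
    # the result for each user_id is kept for up to 600 seconds.
    ...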
