@crazy4groovy
Last active December 1, 2023 17:24
Simple TTL cache and cache repo; an excellent Promise/async deduper (JavaScript, TypeScript)
export function cacher<T>(thunk: () => Promise<T>, ttlSeconds = 60) {
  // Sentinel marking "nothing cached yet / cache expired".
  const noCache: unique symbol = Symbol("cache");
  let cache: Promise<T> | symbol = noCache;

  return function execThunk(): Promise<T> {
    // A cached Promise is returned as-is, so concurrent callers are deduplicated.
    if (typeof cache !== "symbol") {
      return cache;
    }
    // Run the thunk once and cache the in-flight Promise.
    cache = thunk();
    // Invalidate after the TTL so the next call re-runs the thunk.
    setTimeout(() => {
      cache = noCache;
    }, 1000 * ttlSeconds);
    return cache;
  };
}
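A minimal usage sketch, assuming an ES module with top-level await; fetchConfig and its URL are hypothetical stand-ins for any expensive () => Promise<T> thunk. Callers that hit the wrapper within the same TTL window share one in-flight Promise, so the underlying request runs only once.

// Hypothetical expensive async call; any () => Promise<T> thunk works here.
async function fetchConfig(): Promise<{ version: string }> {
  const res = await fetch("https://example.com/config.json");
  return res.json();
}

const getConfig = cacher(fetchConfig, 30); // re-run fetchConfig at most every ~30s

// Both calls start inside the TTL window, so they receive the same Promise
// and fetchConfig executes only once (request deduplication).
const [a, b] = await Promise.all([getConfig(), getConfig()]);
console.log(a === b); // true: same object resolved from the shared Promise

Note that if the cached Promise rejects, the rejection stays cached until the TTL expires; callers in that window will all see the same error.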
// The cache repo is really just a map of cache keys to cachers.
// `callback` is the function to cache; `props` are its call arguments,
// serialized with JSON.stringify to form the cache key.
export function cacheRepo<Props, Result, This = any>(
  callback: (this: This, props: Props) => Promise<Result>,
  ttlSeconds = 60
) {
  const repo = new Map<string, () => Promise<Result>>();

  return function execCallback(this: This, props: Props): Promise<Result> {
    const cacheKey = JSON.stringify(props);

    // Reuse an existing cacher for these props, if one is still live.
    let myCacher = repo.get(cacheKey);
    if (myCacher) {
      return myCacher();
    }

    // Otherwise create a cacher bound to this call's `this` and props.
    const myThunk = () => callback.call(this, props);
    myCacher = cacher(myThunk, ttlSeconds);
    repo.set(cacheKey, myCacher);

    setTimeout(() => {
      // Note: this critical delete step keeps the repo small and avoids memory leaks.
      repo.delete(cacheKey);
    }, 1000 * ttlSeconds);

    return myCacher();
  };
}
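A usage sketch for cacheRepo, again assuming top-level await; getUser and its endpoint are hypothetical. Calls with the same props share one cacher, while distinct props get their own entries keyed by JSON.stringify(props).

// Hypothetical keyed fetch; props become the cache key via JSON.stringify.
async function getUser(props: { id: number }): Promise<{ id: number; name: string }> {
  const res = await fetch(`https://example.com/users/${props.id}`);
  return res.json();
}

const getUserCached = cacheRepo(getUser, 60);

// Same props => same cache key, so the request runs once per TTL window.
await Promise.all([getUserCached({ id: 1 }), getUserCached({ id: 1 })]);

// Different props => a different key, so a separate request is made.
await getUserCached({ id: 2 });

Because JSON.stringify is key-order sensitive, { a: 1, b: 2 } and { b: 2, a: 1 } produce different cache keys even though they are structurally equal.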