Skip to content

Instantly share code, notes, and snippets.

@jviide
Last active March 19, 2021 12:06
Show Gist options
  • Save jviide/7bb86c381be2db3e775fec94d6266dab to your computer and use it in GitHub Desktop.
A small-ish TypeScript LRU cache implementation for string inputs
import { memoized } from "./lru";

// Example: memoize a transform that concatenates the input with itself.
const double = memoized((a) => a + a);

console.log(double("jee"));
// A node in the doubly-linked LRU queue, pairing a cached input string
// with its transformed output. The queue is circular through a sentinel
// root node; `prev` points toward the head (more recently used) and
// `next` toward the tail (less recently used).
type CacheNode<T> = {
input: string; // The cached input string (also the Map key).
output: T; // The memoized result of transform(input).
prev: CacheNode<T>; // Neighbor toward the most-recently-used end.
next: CacheNode<T>; // Neighbor toward the least-recently-used end.
};
// A LRU (Least Recently Used) caching implementation for string inputs.
//
// Wraps `transform` so that results for "cache-worthy" inputs (strings whose
// length falls within [minStringLength, maxStringLength]) are remembered.
// When the combined length of all remembered inputs exceeds maxTotalLength,
// the least recently used entries are evicted.
export function memoized<T>(
  transform: (s: string) => T,
  minStringLength = 16, // Minimum size for single cached input string
  maxStringLength = 1024, // Maximum size for single cached input string
  maxTotalLength = 1024 * 1024 // Maximum TOTAL size of cached input strings
): (s: string) => T {
  // A doubly-linked queue node pairing an input string with its output.
  type QueueNode = {
    input: string;
    output: T;
    prev: QueueNode;
    next: QueueNode;
  };

  // Sentinel node serving as both head and tail of the circular LRU queue.
  // Making it self-referencing requires a couple of casts up front.
  const sentinel: QueueNode = {
    input: "",
    output: (null as unknown) as T,
    prev: (null as unknown) as QueueNode,
    next: (null as unknown) as QueueNode,
  };
  sentinel.prev = sentinel;
  sentinel.next = sentinel;

  // Cached queue nodes keyed by their input string.
  const cache = new Map<string, QueueNode>();

  // Running sum of the lengths of all cached input strings.
  let cachedChars = 0;

  return (input) => {
    // Inputs outside the configured size window bypass the cache entirely:
    // tiny strings aren't worth the bookkeeping, huge ones would evict
    // everything else.
    if (input.length < minStringLength || input.length > maxStringLength) {
      return transform(input);
    }

    let entry = cache.get(input);
    if (entry === undefined) {
      // Cache miss: compute the result and record a fresh node.
      entry = { input, output: transform(input), next: sentinel, prev: sentinel };
      cachedChars += input.length;
      cache.set(input, entry);
    } else {
      // Cache hit: unlink the node from its current queue position;
      // it gets relinked at the front just below.
      entry.next.prev = entry.prev;
      entry.prev.next = entry.next;
    }

    // Splice the node in right behind the sentinel — the queue's
    // most-recently-used position.
    entry.next = sentinel.next;
    entry.prev = sentinel;
    sentinel.next.prev = entry;
    sentinel.next = entry;

    // Evict from the least-recently-used end (sentinel.prev) until the
    // total size of cached inputs fits the budget again.
    let cursor = sentinel.prev;
    while (cursor !== sentinel && cachedChars > maxTotalLength) {
      cachedChars -= cursor.input.length;
      cache.delete(cursor.input);
      cursor = cursor.prev;
    }
    // Detach all evicted nodes from the queue in a single relink.
    cursor.next = sentinel;
    sentinel.prev = cursor;

    return entry.output;
  };
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment