The interface was greatly inspired by Rails' low-level caching implementation.

    import { MemoryStore, Cache } from './cache'

    const store = new MemoryStore({ ttl: 30 * 1000, max: 10 })
    const cache = new Cache(store)
import MemoryStore from './memory-store' | |
/**
 * Low-level cache facade inspired by Rails' `Rails.cache.fetch`.
 * Delegates storage to a store object exposing async `fetch(key)` and
 * `write(key, value)`; defaults to the LRU-backed MemoryStore.
 */
class Cache {
  constructor(store = new MemoryStore()) {
    this.store = store
  }

  /**
   * Read-through fetch: return the cached value for `key`, or compute it
   * with `fetchFunction`, write it to the store, and return it.
   *
   * @param {*} key - cache key understood by the underlying store
   * @param {Function} fetchFunction - async (or sync) producer invoked on a miss
   * @returns {Promise<*>} the cached or freshly computed value
   */
  async fetch(key, fetchFunction) {
    let value = await this.store.fetch(key)
    // Only `undefined` marks a miss: falsy values such as 0, '' or false
    // are legitimate cached results and must not trigger a refetch.
    if (value === undefined) {
      value = await fetchFunction()
      // Await the write so a store failure surfaces to the caller instead
      // of becoming an unhandled promise rejection.
      await this.store.write(key, value)
    }
    return value
  }
}

export default Cache
// Barrel module: the public surface of the caching package.
export { default as MemoryStore } from './memory-store'
export { default as Cache } from './cache'
import { LRUCache } from 'lru-cache' | |
/**
 * In-memory store backed by `lru-cache`, bounding entries by count (`max`)
 * and by age (`ttl`, in milliseconds).
 */
class MemoryStore {
  constructor(options = {}) {
    this.lruCache = this.createCache(options)
  }

  /**
   * Build the lru-cache option object, letting caller-supplied keys
   * override the defaults.
   */
  createOptions(options) {
    const defaults = {
      // how long to live in ms (defaults to 5m)
      ttl: 5 * 60 * 1000,
      // max number of items to store (defaults to 500)
      max: 500,
    }
    return Object.assign({}, defaults, options)
  }

  /** Instantiate the underlying LRUCache from the merged options. */
  createCache(options) {
    return new LRUCache(this.createOptions(options))
  }

  /** Look up `key`; resolves to `undefined` on a miss. */
  async fetch(key) {
    return this.lruCache.get(key)
  }

  /** Store `value` under `key`. */
  async write(key, value) {
    return this.lruCache.set(key, value)
  }
}

export default MemoryStore