Add async cache.fetch() method, fetchMethod option
Fix: #102
isaacs committed Mar 17, 2022
1 parent 23708c7 commit 0191d32
Showing 3 changed files with 208 additions and 4 deletions.
26 changes: 26 additions & 0 deletions README.md
@@ -132,6 +132,13 @@ If you put more stuff in it, then items will fall out.

Deprecated alias: `length`

* `fetchMethod` Function used to make background asynchronous fetches,
called as `fetchMethod(key, staleValue)`. May return a Promise. (See the
example below.)

If `fetchMethod` is not provided, then `cache.fetch(key)` is equivalent
to `Promise.resolve(cache.get(key))`.

* `dispose` Function that is called on items when they are dropped
from the cache, as `this.dispose(value, key, reason)`.

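For illustration only (not part of this commit), `fetchMethod` can be wired
to any Promise-returning loader; `loadUser` below is a hypothetical stand-in:

```js
const LRUCache = require('lru-cache')

// hypothetical stand-in for any Promise-returning lookup (db, HTTP, etc.)
const loadUser = async (id) => ({ id, loadedAt: Date.now() })

const cache = new LRUCache({
  max: 500,
  ttl: 1000 * 60,
  // called by cache.fetch() on a cache miss or a stale entry
  fetchMethod: (key, staleValue) => loadUser(key),
})
```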
@@ -308,6 +315,25 @@ If you put more stuff in it, then items will fall out.
`cache.set(key, undefined)`. Use `cache.has()` to determine whether a
key is present in the cache at all.

* `async fetch(key, { updateAgeOnGet, allowStale } = {}) => Promise`

If the value is in the cache and not stale, then the returned Promise
resolves to the value.

If the key is not in the cache, or its entry is stale (beyond its TTL),
then `fetchMethod(key, staleValue)` is called, and the value it resolves
to is added to the cache.

If called with `allowStale`, and an asynchronous fetch is currently in
progress to reload a stale value, then the former stale value will be
returned.

Multiple fetches for the same `key` will only call `fetchMethod` a
single time; all of them resolve once the fetched value is available.
(See the example below.)

If `fetchMethod` is not specified, then this is an alias for
`Promise.resolve(cache.get(key))`.

* `peek(key, { allowStale } = {}) => value`

Like `get()` but doesn't update recency or delete stale items.
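Putting the pieces together, an illustrative use of `fetch()` with the
hypothetical cache sketched above (keys and timing are made up, not part of
this commit):

```js
// inside an async function, using the cache configured above
const [a, b] = await Promise.all([
  cache.fetch('user-1'),        // concurrent fetches for a missing key
  cache.fetch('user-1'),        // share a single fetchMethod call
])

// once the entry is past its ttl, allowStale resolves immediately with the
// stale value while fetchMethod refreshes the entry in the background
const maybeStale = await cache.fetch('user-1', { allowStale: true })

// without allowStale, fetch() waits for the refreshed value instead
const fresh = await cache.fetch('user-1')
```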
89 changes: 85 additions & 4 deletions index.js
@@ -85,6 +85,7 @@ class LRUCache {
noUpdateTTL,
maxSize,
sizeCalculation,
fetchMethod,
} = options

// deprecated options, don't trigger a warning for getting them if
@@ -115,6 +116,13 @@ class LRUCache {
throw new TypeError('sizeCalculation set to non-function')
}
}

this.fetchMethod = fetchMethod || null
if (this.fetchMethod && typeof this.fetchMethod !== 'function') {
throw new TypeError('fetchMethod must be a function if specified')
}


this.keyMap = new Map()
this.keyList = new Array(max).fill(null)
this.valList = new Array(max).fill(null)
@@ -479,22 +487,95 @@ class LRUCache {
}
}

backgroundFetch (k, index) {
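// kick off (or join) an async fetchMethod call for key `k`: the returned
// promise carries any previous value as `__staleWhileFetching` and sits in
// the cache slot until the fetch resolves and set() stores the real value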
const v = index === undefined ? undefined : this.valList[index]
if (this.isBackgroundFetch(v)) {
return v
}
const p = Promise.resolve(this.fetchMethod(k, v)).then(v => {
if (this.keyMap.get(k) === index && p === this.valList[index]) {
this.set(k, v)
}
return v
})
p.__staleWhileFetching = v
if (index === undefined) {
this.set(k, p)
index = this.keyMap.get(k)
} else {
this.valList[index] = p
}
return p
}

isBackgroundFetch (p) {
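// an in-flight background fetch is a thenable carrying the
// `__staleWhileFetching` marker attached in backgroundFetch()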
return p && typeof p === 'object' && typeof p.then === 'function' &&
Object.prototype.hasOwnProperty.call(p, '__staleWhileFetching')
}

async fetch (k, {
allowStale = this.allowStale,
updateAgeOnGet = this.updateAgeOnGet,
} = {}) {
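// resolve from the cache when the entry is fresh; otherwise delegate to
// backgroundFetch(), returning any stale value early when allowStale is set
// (with no fetchMethod, this is just a promise-wrapped get())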
if (!this.fetchMethod) {
return this.get(k, {allowStale, updateAgeOnGet})
}

let index = this.keyMap.get(k)
if (index === undefined) {
return this.backgroundFetch(k, index)
} else {
// in cache, maybe already fetching
const v = this.valList[index]
if (this.isBackgroundFetch(v)) {
return allowStale && v.__staleWhileFetching !== undefined
? v.__staleWhileFetching : v
}

if (!this.isStale(index)) {
this.moveToTail(index)
if (updateAgeOnGet) {
this.updateItemAge(index)
}
return v
}

// ok, it is stale, and not already fetching
// refresh the cache.
const p = this.backgroundFetch(k, index)
return allowStale && p.__staleWhileFetching !== undefined
? p.__staleWhileFetching : p
}
}

get (k, {
allowStale = this.allowStale,
updateAgeOnGet = this.updateAgeOnGet,
} = {}) {
const index = this.keyMap.get(k)
if (index !== undefined) {
const value = this.valList[index]
const fetching = this.isBackgroundFetch(value)
if (this.isStale(index)) {
// delete only if not an in-flight background fetch
if (!fetching) {
this.delete(k)
return allowStale ? value : undefined
} else {
return allowStale ? value.__staleWhileFetching : undefined
}
} else {
// if we're currently fetching it, we don't actually have it yet
// it's not stale, which means this isn't a staleWhileRefetching,
// so we just return undefined
if (fetching) {
return undefined
}
this.moveToTail(index)
if (updateAgeOnGet) {
this.updateItemAge(index)
}
return value
}
}
}
97 changes: 97 additions & 0 deletions test/fetch.js
@@ -0,0 +1,97 @@
const t = require('tap')
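// fetchMethod fixture: resolves 0 on a first fetch and staleValue + 1 on a
// refresh, so each background reload is observable in the assertions below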
const fn = async (k, v) =>
new Promise(res => setImmediate(() => res(v === undefined ? 0 : (v + 1))))

const Clock = require('clock-mock')
const clock = new Clock()
t.teardown(clock.enter())
clock.advance(1)

const LRU = require('../')
const c = new LRU({
fetchMethod: fn,
max: 5,
ttl: 5,
})

t.test('asynchronous fetching', async t => {
const v1 = await c.fetch('key')
t.equal(v1, 0, 'first fetch, no stale data, wait for initial value')
const v2 = await c.fetch('key')
t.equal(v2, 0, 'got same cached value')

clock.advance(10)

const v3 = await c.fetch('key', { allowStale: true })
t.equal(v3, 0, 'fetch while stale, allowStale, get stale data')
t.equal(await c.fetch('key', { allowStale: true }), 0,
'get stale data again while re-fetching because stale previously')
const v4 = await c.fetch('key')
t.equal(v4, 1, 'no allow stale, wait until fresh data available')
const v5 = await c.fetch('key')
t.equal(v5, 1, 'fetch while not stale, just get from cache')

clock.advance(10)

const v6 = await c.fetch('key', { allowStale: true })
t.equal(v6, 1, 'fetch while stale, starts new fetch, return stale data')
const v = c.valList[0]
t.equal(c.isBackgroundFetch(v), true)
t.equal(c.backgroundFetch('key', 0), v)
await v
const v7 = await c.fetch('key', { allowStale: true, updateAgeOnGet: true })
t.equal(v7, 2, 'fetch completed, so get new data')

clock.advance(100)

const v8 = await c.fetch('key', { allowStale: true })
const v9 = c.get('key', { allowStale: true })
t.equal(v8, 2, 'fetch returned stale while fetching')
t.equal(v9, 2, 'get() returned stale while fetching')

const v10 = c.fetch('key2')
const v11 = c.get('key2')
t.equal(v11, undefined, 'get while fetching but not yet returned')
t.equal(await v10, 0, 'eventually 0 is returned')
const v12 = c.get('key2')
t.equal(v12, 0, 'get cached value after fetch')

const v13 = c.fetch('key3')
c.delete('key3')
t.equal(await v13, 0, 'returned 0 eventually')
t.equal(c.has('key3'), false, 'but not inserted into cache')

const v14 = c.fetch('key4')
clock.advance(100)
const v15 = await c.fetch('key4', { allowStale: true })
t.equal(v15, 0, 'there was no stale data, even though we were ok with that')

c.set('key5', 0)
clock.advance(100)
const v16 = await c.fetch('key5')
t.equal(v16, 1, 'waited for new data, data in cache was stale')

const v17 = c.fetch('key4')
clock.advance(100)
const v18 = c.get('key4')
t.equal(v18, undefined, 'get while fetching, but did not want stale data')

const v19 = c.fetch('key6')
clock.advance(100)
const v20 = c.get('key6', { allowStale: true })
t.equal(v20, undefined, 'get while fetching, but no stale data to return')
})

t.test('fetchMethod must be a function', async t => {
t.throws(() => new LRU({fetchMethod: true, max: 2}))
})

t.test('fetch without fetch method', async t => {
const c = new LRU({ max: 3 })
c.set(0, 0)
c.set(1, 1)
t.same(await Promise.all([
c.fetch(0),
c.fetch(1),
]), [0, 1])
})
