diff --git a/.eslintrc.cjs b/.eslintrc.cjs index 602a670ef1..38299b57e4 100644 --- a/.eslintrc.cjs +++ b/.eslintrc.cjs @@ -37,6 +37,7 @@ module.exports = { }, ], 'unicorn/filename-case': ['error', { case: 'kebabCase' }], + 'unicorn/no-nested-ternary': 'off', 'unicorn/numeric-separators-style': 'off', }, overrides: [ diff --git a/src/run/storage/request-scoped-in-memory-cache.cts b/src/run/storage/request-scoped-in-memory-cache.cts index ddd803b991..420cbad5ca 100644 --- a/src/run/storage/request-scoped-in-memory-cache.cts +++ b/src/run/storage/request-scoped-in-memory-cache.cts @@ -9,13 +9,34 @@ import { recordWarning } from '../handlers/tracer.cjs' // lru-cache types don't like using `null` for values, so we use a symbol to represent it and do conversion // so it doesn't leak outside const NullValue = Symbol.for('null-value') -type BlobLRUCache = LRUCache> +type DataWithEtag = { data: BlobType; etag: string } + +const isDataWithEtag = (value: unknown): value is DataWithEtag => { + return typeof value === 'object' && value !== null && 'data' in value && 'etag' in value +} + +type BlobLRUCache = LRUCache< + string, + BlobType | typeof NullValue | Promise | DataWithEtag +> const IN_MEMORY_CACHE_MAX_SIZE = Symbol.for('nf-in-memory-cache-max-size') const IN_MEMORY_LRU_CACHE = Symbol.for('nf-in-memory-lru-cache') const extendedGlobalThis = globalThis as typeof globalThis & { [IN_MEMORY_CACHE_MAX_SIZE]?: number - [IN_MEMORY_LRU_CACHE]?: BlobLRUCache | null + [IN_MEMORY_LRU_CACHE]?: { + /** + * entries are scoped to request IDs + */ + perRequest: BlobLRUCache + /** + * global cache shared between requests, does not allow immediate re-use, but is used for + * conditional blob gets with etags and given blob key is first tried in given request. + * Map values are weak references to avoid this map strongly referencing blobs and allowing + * GC based on per request LRU cache evictions alone. 
+ */ + global: Map> + } | null } const DEFAULT_FALLBACK_MAX_SIZE = 50 * 1024 * 1024 // 50MB, same as default Next.js config @@ -31,40 +52,46 @@ const isPositiveNumber = (value: unknown): value is PositiveNumber => { } const BASE_BLOB_SIZE = 25 as PositiveNumber +const BASE_BLOB_WITH_ETAG_SIZE = (BASE_BLOB_SIZE + 34) as PositiveNumber const estimateBlobKnownTypeSize = ( - valueToStore: BlobType | null | Promise, + valueToStore: BlobType | null | Promise | DataWithEtag, ): number | undefined => { // very approximate size calculation to avoid expensive exact size calculation // inspired by https://github.com/vercel/next.js/blob/ed10f7ed0246fcc763194197eb9beebcbd063162/packages/next/src/server/lib/incremental-cache/file-system-cache.ts#L60-L79 - if (valueToStore === null || isPromise(valueToStore) || isTagManifest(valueToStore)) { + if (valueToStore === null || isPromise(valueToStore)) { return BASE_BLOB_SIZE } - if (isHtmlBlob(valueToStore)) { - return BASE_BLOB_SIZE + valueToStore.html.length + + const { data, baseSize } = isDataWithEtag(valueToStore) + ? { data: valueToStore.data, baseSize: BASE_BLOB_WITH_ETAG_SIZE } + : { data: valueToStore, baseSize: BASE_BLOB_SIZE } + + if (isTagManifest(data)) { + return baseSize + } + + if (isHtmlBlob(data)) { + return baseSize + data.html.length } - if (valueToStore.value?.kind === 'FETCH') { - return BASE_BLOB_SIZE + valueToStore.value.data.body.length + if (data.value?.kind === 'FETCH') { + return baseSize + data.value.data.body.length } - if (valueToStore.value?.kind === 'APP_PAGE') { - return ( - BASE_BLOB_SIZE + valueToStore.value.html.length + (valueToStore.value.rscData?.length ?? 0) - ) + if (data.value?.kind === 'APP_PAGE') { + return baseSize + data.value.html.length + (data.value.rscData?.length ?? 
0) } - if (valueToStore.value?.kind === 'PAGE' || valueToStore.value?.kind === 'PAGES') { - return ( - BASE_BLOB_SIZE + - valueToStore.value.html.length + - JSON.stringify(valueToStore.value.pageData).length - ) + if (data.value?.kind === 'PAGE' || data.value?.kind === 'PAGES') { + return baseSize + data.value.html.length + JSON.stringify(data.value.pageData).length } - if (valueToStore.value?.kind === 'ROUTE' || valueToStore.value?.kind === 'APP_ROUTE') { - return BASE_BLOB_SIZE + valueToStore.value.body.length + if (data.value?.kind === 'ROUTE' || data.value?.kind === 'APP_ROUTE') { + return baseSize + data.value.body.length } } -const estimateBlobSize = (valueToStore: BlobType | null | Promise): PositiveNumber => { +const estimateBlobSize = ( + valueToStore: BlobType | null | Promise | DataWithEtag, +): PositiveNumber => { let estimatedKnownTypeSize: number | undefined let estimateBlobKnownTypeSizeError: unknown try { @@ -98,23 +125,41 @@ function getInMemoryLRUCache() { ? extendedGlobalThis[IN_MEMORY_CACHE_MAX_SIZE] : DEFAULT_FALLBACK_MAX_SIZE - extendedGlobalThis[IN_MEMORY_LRU_CACHE] = - maxSize === 0 - ? null // if user sets 0 in their config, we should honor that and not use in-memory cache - : new LRUCache>({ - max: 1000, - maxSize, - sizeCalculation: (valueToStore) => { - return estimateBlobSize(valueToStore === NullValue ? null : valueToStore) - }, - }) + if (maxSize === 0) { + extendedGlobalThis[IN_MEMORY_LRU_CACHE] = null + } else { + const global = new Map>() + + const perRequest = new LRUCache< + string, + BlobType | typeof NullValue | Promise | DataWithEtag + >({ + max: 1000, + maxSize, + sizeCalculation: (valueToStore) => { + return estimateBlobSize(valueToStore === NullValue ? 
null : valueToStore) + }, + }) + + extendedGlobalThis[IN_MEMORY_LRU_CACHE] = { + perRequest, + global, + } + } } return extendedGlobalThis[IN_MEMORY_LRU_CACHE] } interface RequestScopedInMemoryCache { - get(key: string): BlobType | null | Promise | undefined - set(key: string, value: BlobType | null | Promise): void + get(key: string): + | { conditional: false; currentRequestValue: BlobType | null | Promise } + | { + conditional: true + globalValue: BlobType + etag: string + } + | undefined + set(key: string, value: BlobType | null | Promise | DataWithEtag): void } export const getRequestScopedInMemoryCache = (): RequestScopedInMemoryCache => { @@ -125,8 +170,35 @@ export const getRequestScopedInMemoryCache = (): RequestScopedInMemoryCache => { get(key) { if (!requestContext) return try { - const value = inMemoryLRUCache?.get(`${requestContext.requestID}:${key}`) - return value === NullValue ? null : value + const currentRequestValue = inMemoryLRUCache?.perRequest.get( + `${requestContext.requestID}:${key}`, + ) + if (currentRequestValue) { + return { + conditional: false, + currentRequestValue: + currentRequestValue === NullValue + ? null + : isDataWithEtag(currentRequestValue) + ? 
currentRequestValue.data : currentRequestValue, + } + } + + const globalEntry = inMemoryLRUCache?.global.get(key) + if (globalEntry) { + const dereferencedGlobalEntry = globalEntry.deref() + if (dereferencedGlobalEntry) { + return { + conditional: true, + globalValue: dereferencedGlobalEntry.data, + etag: dereferencedGlobalEntry.etag, + } + } + + // value has been GC'ed so we can clean up the entry from the map as it no longer points to existing value + inMemoryLRUCache?.global.delete(key) + } } catch (error) { // using in-memory store is perf optimization not requirement // trying to use optimization should NOT cause crashes @@ -137,7 +209,10 @@ export const getRequestScopedInMemoryCache = (): RequestScopedInMemoryCache => { set(key, value) { if (!requestContext) return try { - inMemoryLRUCache?.set(`${requestContext?.requestID}:${key}`, value ?? NullValue) + if (isDataWithEtag(value)) { + inMemoryLRUCache?.global.set(key, new WeakRef(value)) + } + inMemoryLRUCache?.perRequest.set(`${requestContext.requestID}:${key}`, value ?? 
NullValue) } catch (error) { // using in-memory store is perf optimization not requirement // trying to use optimization should NOT cause crashes diff --git a/src/run/storage/storage.cts b/src/run/storage/storage.cts index 98f11268f2..e16fbe383a 100644 --- a/src/run/storage/storage.cts +++ b/src/run/storage/storage.cts @@ -25,16 +25,48 @@ export const getMemoizedKeyValueStoreBackedByRegionalBlobStore = ( const inMemoryCache = getRequestScopedInMemoryCache() const memoizedValue = inMemoryCache.get(key) - if (typeof memoizedValue !== 'undefined') { - return memoizedValue as T | null | Promise + if ( + memoizedValue?.conditional === false && + typeof memoizedValue?.currentRequestValue !== 'undefined' + ) { + return memoizedValue.currentRequestValue as T | null | Promise } const blobKey = await encodeBlobKey(key) const getPromise = withActiveSpan(tracer, otelSpanTitle, async (span) => { - span?.setAttributes({ key, blobKey }) - const blob = (await store.get(blobKey, { type: 'json' })) as T | null - inMemoryCache.set(key, blob) - span?.addEvent(blob ? 'Hit' : 'Miss') + const { etag: previousEtag, globalValue: previousBlob } = memoizedValue?.conditional + ? memoizedValue + : {} + + span?.setAttributes({ key, blobKey, previousEtag }) + + const result = await store.getWithMetadata(blobKey, { + type: 'json', + etag: previousEtag, + }) + + const shouldReuseMemoizedBlob = result?.etag && previousEtag === result?.etag + + const blob = (shouldReuseMemoizedBlob ? previousBlob : result?.data) as T | null + + if (result?.etag && blob) { + inMemoryCache.set(key, { + data: blob, + etag: result?.etag, + }) + } else { + // if we don't get blob (null) or etag for some reason is missing, + // we still want to store resolved blob value so that it could be reused + // within the same request + inMemoryCache.set(key, blob) + } + + span?.setAttributes({ + etag: result?.etag, + reusingPreviouslyFetchedBlob: shouldReuseMemoizedBlob, + status: blob ? (shouldReuseMemoizedBlob ? 
'Hit, no change' : 'Hit') : 'Miss', + }) + return blob }) inMemoryCache.set(key, getPromise) @@ -48,7 +80,14 @@ export const getMemoizedKeyValueStoreBackedByRegionalBlobStore = ( const blobKey = await encodeBlobKey(key) return withActiveSpan(tracer, otelSpanTitle, async (span) => { span?.setAttributes({ key, blobKey }) - return await store.setJSON(blobKey, value) + const writeResult = await store.setJSON(blobKey, value) + if (writeResult?.etag) { + inMemoryCache.set(key, { + data: value, + etag: writeResult.etag, + }) + } + return writeResult }) }, } diff --git a/src/run/storage/storage.test.ts b/src/run/storage/storage.test.ts index e36e8e3f38..dfb9045841 100644 --- a/src/run/storage/storage.test.ts +++ b/src/run/storage/storage.test.ts @@ -1,20 +1,52 @@ +import { createHash } from 'node:crypto' + import { beforeEach, describe, expect, it, vi } from 'vitest' import { decodeBlobKey } from '../../../tests/utils/helpers.ts' -import { BlobType } from '../../shared/cache-types.cts' +import { BlobType } from '../../shared/blob-types.cts' import { createRequestContext, runWithRequestContext } from '../handlers/request-context.cts' import { getMemoizedKeyValueStoreBackedByRegionalBlobStore } from './storage.cts' -let mockBlobValues: Record = {} +function mockGenerateRecord(data: BlobType) { + const etag = `"${createHash('sha256').update(JSON.stringify(data)).digest('hex')}"` as const + return { data, etag } +} + +let mockBlobValues: Record = {} const mockedStore = { - get: vi.fn((blobKey) => { + getWithMetadata: vi.fn((blobKey, options) => { const key = decodeBlobKey(blobKey) + const record = mockBlobValues[key] + if (record && options?.etag === record.etag) { + // on etag matches blobs client will return data as null, with etag set + // indicating that cached value can be reused + return Promise.resolve({ + data: null, + etag: record.etag, + }) + } return Promise.resolve(mockBlobValues[key]) }), - setJSON: vi.fn(async (blobKey, value) => { + setJSON: vi.fn(async 
(blobKey, data) => { const key = decodeBlobKey(blobKey) - mockBlobValues[key] = value + const prevValue = mockBlobValues[key] + const currentValue = mockGenerateRecord(data) + + if (currentValue.etag && prevValue?.etag === currentValue.etag) { + // no changes + return { + etag: currentValue.etag, + modified: false, + } + } + + mockBlobValues[key] = currentValue + + return { + etag: currentValue.etag, + modified: true, + } }), } @@ -27,7 +59,8 @@ vi.mock('@netlify/blobs', () => { const OTEL_SPAN_TITLE = 'test' const TEST_KEY = 'foo' const TEST_DEFAULT_VALUE = { - revalidatedAt: 123, + staleAt: 123, + expireAt: 456, } satisfies BlobType function generate30MBBlobTypeValue(id: string): BlobType { @@ -43,8 +76,13 @@ function generate30MBBlobTypeValue(id: string): BlobType { } beforeEach(() => { + // reset in memory cache between tests + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const unTypedGlobalThis = globalThis as any + unTypedGlobalThis[Symbol.for('nf-in-memory-lru-cache')] = undefined + mockBlobValues = { - [TEST_KEY]: TEST_DEFAULT_VALUE, + [TEST_KEY]: mockGenerateRecord(TEST_DEFAULT_VALUE), } }) describe('getMemoizedKeyValueStoreBackedByRegionalBlobStore', () => { @@ -52,11 +90,11 @@ describe('getMemoizedKeyValueStoreBackedByRegionalBlobStore', () => { const store = getMemoizedKeyValueStoreBackedByRegionalBlobStore() const get1 = await store.get(TEST_KEY, OTEL_SPAN_TITLE) - expect(mockedStore.get, 'Blobs should be requested').toHaveBeenCalledTimes(1) + expect(mockedStore.getWithMetadata, 'Blobs should be requested').toHaveBeenCalledOnce() expect(get1, 'Expected blob should be returned').toBe(TEST_DEFAULT_VALUE) const get2 = await store.get(TEST_KEY, OTEL_SPAN_TITLE) - expect(mockedStore.get, 'Blobs should be requested twice').toHaveBeenCalledTimes(2) + expect(mockedStore.getWithMetadata, 'Blobs should be requested twice').toHaveBeenCalledTimes(2) expect(get2, 'Expected second .get to return the same as first one').toBe(get1) }) @@ -65,11 
+103,14 @@ describe('getMemoizedKeyValueStoreBackedByRegionalBlobStore', () => { const store = getMemoizedKeyValueStoreBackedByRegionalBlobStore() const get1 = await store.get(TEST_KEY, OTEL_SPAN_TITLE) - expect(mockedStore.get, 'Blobs should be requested').toHaveBeenCalledTimes(1) + expect(mockedStore.getWithMetadata, 'Blobs should be requested').toHaveBeenCalledOnce() expect(get1, 'Expected blob should be returned').toBe(TEST_DEFAULT_VALUE) const get2 = await store.get(TEST_KEY, OTEL_SPAN_TITLE) - expect(mockedStore.get, 'Blobs should be requested just once').toHaveBeenCalledTimes(1) + expect( + mockedStore.getWithMetadata, + 'Blobs should be requested just once', + ).toHaveBeenCalledOnce() expect(get2, 'Expected second .get to return the same as first one').toBe(get1) }) }) @@ -79,33 +120,81 @@ describe('getMemoizedKeyValueStoreBackedByRegionalBlobStore', () => { const store = getMemoizedKeyValueStoreBackedByRegionalBlobStore() const writeValue = { - revalidatedAt: 456, + staleAt: 456, + expireAt: 789, } satisfies BlobType await store.set(TEST_KEY, writeValue, OTEL_SPAN_TITLE) - expect(mockedStore.setJSON, 'Blobs should be posted').toHaveBeenCalledTimes(1) + expect(mockedStore.setJSON, 'Blobs should be posted').toHaveBeenCalledOnce() const get = await store.get(TEST_KEY, OTEL_SPAN_TITLE) - expect(mockedStore.get, 'Value should be read from memory').toHaveBeenCalledTimes(0) + expect(mockedStore.getWithMetadata, 'Value should be read from memory').not.toHaveBeenCalled() expect(get, 'Value from memory should be correct').toBe(writeValue) }) }) - it('is using separate in-memory caches when running in request contexts', async () => { + it('does not automatically reuse in-memory values when running in request contexts', async () => { const store = getMemoizedKeyValueStoreBackedByRegionalBlobStore() - await runWithRequestContext(createRequestContext(), async () => { - await store.get(TEST_KEY, OTEL_SPAN_TITLE) + const get1 = await 
runWithRequestContext(createRequestContext(), async () => { + return await store.get(TEST_KEY, OTEL_SPAN_TITLE) }) - await runWithRequestContext(createRequestContext(), async () => { - await store.get(TEST_KEY, OTEL_SPAN_TITLE) + const get2 = await runWithRequestContext(createRequestContext(), async () => { + return await store.get(TEST_KEY, OTEL_SPAN_TITLE) }) expect( - mockedStore.get, + mockedStore.getWithMetadata, 'Blobs should be requested separately for each request context', ).toHaveBeenCalledTimes(2) + + // first request context assertions + expect(get1, 'store.get in first request should return expected value').toEqual( + TEST_DEFAULT_VALUE, + ) + + expect( + mockedStore.getWithMetadata, + 'On first request context, we should not provide etag as we do not have any yet', + ).toHaveBeenNthCalledWith(1, expect.any(String), { + etag: undefined, + type: 'json', + }) + + expect( + mockedStore.getWithMetadata, + 'should return full value from blobs as it is first time being requested', + ).toHaveNthResolvedWith( + 1, + expect.objectContaining({ + data: TEST_DEFAULT_VALUE, + }), + ) + + // second request context assertions + expect(get2, 'store.get in second request should return expected value').toEqual( + TEST_DEFAULT_VALUE, + ) + + expect( + mockedStore.getWithMetadata, + 'On second request context, we should provide an etag as first request fetched same blob', + ).toHaveBeenNthCalledWith(2, expect.any(String), { + etag: expect.any(String), + type: 'json', + }) + + expect( + mockedStore.getWithMetadata, + 'On second request context, we should not get blob value, just indication that we can reuse blob', + ).toHaveNthResolvedWith( + 2, + expect.objectContaining({ + data: null, + etag: expect.any(String), + }), + ) }) it('writing in one request context should not affect in-memory value in another request context', async () => { @@ -115,31 +204,32 @@ describe('getMemoizedKeyValueStoreBackedByRegionalBlobStore', () => { const requestContext2 = createRequestContext() 
const writeValue = { - revalidatedAt: 456, + staleAt: 456, + expireAt: 789, } satisfies BlobType await runWithRequestContext(requestContext1, async () => { const get = await store.get(TEST_KEY, OTEL_SPAN_TITLE) expect(get, 'Value from memory should be the same as before').toBe(TEST_DEFAULT_VALUE) - expect(mockedStore.get, 'Blobs should be requested').toHaveBeenCalledTimes(1) + expect(mockedStore.getWithMetadata, 'Blobs should be requested').toHaveBeenCalledOnce() }) await runWithRequestContext(requestContext2, async () => { - mockedStore.get.mockClear() + mockedStore.getWithMetadata.mockClear() await store.set(TEST_KEY, writeValue, OTEL_SPAN_TITLE) const get = await store.get(TEST_KEY, OTEL_SPAN_TITLE) - expect(mockedStore.get, 'Value should be read from memory').toHaveBeenCalledTimes(0) + expect(mockedStore.getWithMetadata, 'Value should be read from memory').not.toHaveBeenCalled() expect(get, 'Value from memory should be correct').toBe(writeValue) }) await runWithRequestContext(requestContext1, async () => { - mockedStore.get.mockClear() + mockedStore.getWithMetadata.mockClear() const get = await store.get(TEST_KEY, OTEL_SPAN_TITLE) expect( get, 'Value from memory should be the same as before and not affected by other request context', ).toBe(TEST_DEFAULT_VALUE) - expect(mockedStore.get, 'Value should be read from memory').toHaveBeenCalledTimes(0) + expect(mockedStore.getWithMetadata, 'Value should be read from memory').not.toHaveBeenCalled() }) }) @@ -151,55 +241,58 @@ describe('getMemoizedKeyValueStoreBackedByRegionalBlobStore', () => { mockBlobValues = { // very heavy values that in-memory caches can only hold one value at a time - 'heavy-route-1': generate30MBBlobTypeValue('1'), - 'heavy-route-2': generate30MBBlobTypeValue('2'), + 'heavy-route-1': mockGenerateRecord(generate30MBBlobTypeValue('1')), + 'heavy-route-2': mockGenerateRecord(generate30MBBlobTypeValue('2')), } await runWithRequestContext(requestContext1, async () => { await 
store.get('heavy-route-1', OTEL_SPAN_TITLE) - expect(mockedStore.get, 'Value should be read from blobs').toHaveBeenCalledTimes(1) - mockedStore.get.mockClear() + expect(mockedStore.getWithMetadata, 'Value should be read from blobs').toHaveBeenCalledOnce() + mockedStore.getWithMetadata.mockClear() await store.get('heavy-route-1', OTEL_SPAN_TITLE) - expect(mockedStore.get, 'Value should be read from memory').toHaveBeenCalledTimes(0) - mockedStore.get.mockClear() + expect(mockedStore.getWithMetadata, 'Value should be read from memory').not.toHaveBeenCalled() + mockedStore.getWithMetadata.mockClear() await store.get('heavy-route-2', OTEL_SPAN_TITLE) - expect(mockedStore.get, 'Value should be read from blobs').toHaveBeenCalledTimes(1) - mockedStore.get.mockClear() + expect(mockedStore.getWithMetadata, 'Value should be read from blobs').toHaveBeenCalledOnce() + mockedStore.getWithMetadata.mockClear() // at this point we should exceed the memory limit and least recently used value should be evicted await store.get('heavy-route-1', OTEL_SPAN_TITLE) expect( - mockedStore.get, + mockedStore.getWithMetadata, 'Previously stored in-memory value should be evicted and fresh value should be read from blobs', - ).toHaveBeenCalledTimes(1) - mockedStore.get.mockClear() + ).toHaveBeenCalledOnce() + mockedStore.getWithMetadata.mockClear() await store.get('heavy-route-1', OTEL_SPAN_TITLE) - expect(mockedStore.get, 'Value should be read from memory again').toHaveBeenCalledTimes(0) - mockedStore.get.mockClear() + expect( + mockedStore.getWithMetadata, + 'Value should be read from memory again', + ).not.toHaveBeenCalled() + mockedStore.getWithMetadata.mockClear() }) await runWithRequestContext(requestContext2, async () => { await store.get('heavy-route-1', OTEL_SPAN_TITLE) - expect(mockedStore.get, 'Value should be read from blobs').toHaveBeenCalledTimes(1) - mockedStore.get.mockClear() + expect(mockedStore.getWithMetadata, 'Value should be read from blobs').toHaveBeenCalledOnce() + 
mockedStore.getWithMetadata.mockClear() await store.get('heavy-route-1', OTEL_SPAN_TITLE) - expect(mockedStore.get, 'Value should be read from memory').toHaveBeenCalledTimes(0) - mockedStore.get.mockClear() + expect(mockedStore.getWithMetadata, 'Value should be read from memory').not.toHaveBeenCalled() + mockedStore.getWithMetadata.mockClear() }) await runWithRequestContext(requestContext1, async () => { await store.get('heavy-route-1', OTEL_SPAN_TITLE) // operations in requestContext2 should result in evicting value for requestContext1 - expect(mockedStore.get, 'Value should be read from blobs').toHaveBeenCalledTimes(1) - mockedStore.get.mockClear() + expect(mockedStore.getWithMetadata, 'Value should be read from blobs').toHaveBeenCalledOnce() + mockedStore.getWithMetadata.mockClear() await store.get('heavy-route-1', OTEL_SPAN_TITLE) - expect(mockedStore.get, 'Value should be read from memory').toHaveBeenCalledTimes(0) - mockedStore.get.mockClear() + expect(mockedStore.getWithMetadata, 'Value should be read from memory').not.toHaveBeenCalled() + mockedStore.getWithMetadata.mockClear() }) }) })