forked from vercel/next.js
-
Notifications
You must be signed in to change notification settings - Fork 11
/
use-cache-wrapper.ts
365 lines (330 loc) · 13.3 KB
/
use-cache-wrapper.ts
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
import type { DeepReadonly } from '../../shared/lib/deep-readonly'
/* eslint-disable import/no-extraneous-dependencies */
import {
renderToReadableStream,
decodeReply,
createTemporaryReferenceSet as createServerTemporaryReferenceSet,
} from 'react-server-dom-webpack/server.edge'
/* eslint-disable import/no-extraneous-dependencies */
import {
createFromReadableStream,
encodeReply,
createTemporaryReferenceSet as createClientTemporaryReferenceSet,
} from 'react-server-dom-webpack/client.edge'
import type { StaticGenerationStore } from '../../client/components/static-generation-async-storage.external'
import { staticGenerationAsyncStorage } from '../../client/components/static-generation-async-storage.external'
import type { CacheStore } from '../app-render/cache-async-storage.external'
import { cacheAsyncStorage } from '../app-render/cache-async-storage.external'
import { runInCleanSnapshot } from '../app-render/clean-async-snapshot.external'
import type { ClientReferenceManifest } from '../../build/webpack/plugins/flight-manifest-plugin'
import {
getClientReferenceManifestSingleton,
getServerModuleMap,
} from '../app-render/encryption-utils'
import type { ManifestNode } from '../../build/webpack/plugins/flight-manifest-plugin'
// A single cached result as stored/returned by a CacheHandler.
type CacheEntry = {
  // The serialized RSC payload for this entry. ReadableStreams are
  // single-consumer, so a handler that serves the same entry twice must
  // tee it (see the default handler below).
  value: ReadableStream
  // In-memory caches are fragile and should not use stale-while-revalidate
  // semantics on the caches because it's not worth warming up an entry that's
  // likely going to get evicted before we get to use it anyway. However,
  // we also don't want to reuse a stale entry for too long so stale entries
  // should be considered expired/missing in such CacheHandlers.
  stale: boolean
}

// Pluggable storage backend for "use cache" results. The cache key is either
// a string (fast path for simple serialized arguments) or an ArrayBuffer when
// the serialized arguments contain binary data.
interface CacheHandler {
  // Resolves to undefined on a cache miss.
  get(cacheKey: string | ArrayBuffer): Promise<undefined | CacheEntry>
  // Stores the rendered stream so later get() calls can serve it.
  set(cacheKey: string | ArrayBuffer, value: ReadableStream): Promise<void>
}
// Registry of available cache handlers, looked up by kind in cache() below.
const cacheHandlerMap: Map<string, CacheHandler> = new Map()

// TODO: Move default implementation to be injectable.
const defaultCacheStorage: Map<string, ReadableStream> = new Map()
cacheHandlerMap.set('default', {
  async get(cacheKey: string | ArrayBuffer) {
    // TODO: Implement proper caching.
    if (typeof cacheKey !== 'string') {
      // TODO: Handle binary keys.
      return undefined
    }
    const stored = defaultCacheStorage.get(cacheKey)
    if (stored === undefined) {
      return undefined
    }
    // A ReadableStream can only be consumed once, so split the saved stream:
    // one half goes to the caller, the other half replaces the stored copy
    // so future hits can be served too.
    const [forCaller, keepSaved] = stored.tee()
    defaultCacheStorage.set(cacheKey, keepSaved)
    return {
      value: forCaller,
      stale: false,
    }
  },
  async set(cacheKey: string | ArrayBuffer, value: ReadableStream) {
    // TODO: Implement proper caching.
    if (typeof cacheKey === 'string') {
      defaultCacheStorage.set(cacheKey, value)
    } else {
      // TODO: Handle binary keys.
      await value.cancel()
    }
  },
})
/**
 * Starts generating a fresh cache entry for the given key by invoking `fn`
 * and returning its RSC stream (while also writing it through `cacheHandler`).
 *
 * We need to run this inside a clean AsyncLocalStorage snapshot so that the
 * cache generation cannot read anything from the context we're currently
 * executing, which might include request specific things like cookies()
 * inside a React.cache().
 *
 * Note: It is important that we await at least once before this because it
 * lets us pop out of any stack specific contexts as well - aka "Sync" Local
 * Storage.
 */
function generateCacheEntry(
  staticGenerationStore: StaticGenerationStore,
  clientReferenceManifest: DeepReadonly<ClientReferenceManifest>,
  cacheHandler: CacheHandler,
  serializedCacheKey: string | ArrayBuffer,
  encodedArguments: FormData | string,
  fn: any
): Promise<any> {
  return runInCleanSnapshot(
    generateCacheEntryWithRestoredStaticGenerationStore,
    staticGenerationStore,
    clientReferenceManifest,
    cacheHandler,
    serializedCacheKey,
    encodedArguments,
    fn
  )
}
/**
 * Re-establishes the StaticGenerationStore inside the clean snapshot, then
 * continues with cache entry generation.
 *
 * Since we cleared the AsyncLocalStorage we need to restore the
 * staticGenerationStore. Note: We explicitly don't restore the RequestStore
 * nor the PrerenderStore. We don't want any request specific information
 * leaking and we don't want to create a bloated fake request mock for every
 * cache call. So any feature that currently lives in RequestStore but should
 * be available to Caches needs to move to StaticGenerationStore.
 * PrerenderStore is not needed inside the cache scope because the outer most
 * one will be the one to report its result to the outer Prerender.
 */
function generateCacheEntryWithRestoredStaticGenerationStore(
  staticGenerationStore: StaticGenerationStore,
  clientReferenceManifest: DeepReadonly<ClientReferenceManifest>,
  cacheHandler: CacheHandler,
  serializedCacheKey: string | ArrayBuffer,
  encodedArguments: FormData | string,
  fn: any
) {
  return staticGenerationAsyncStorage.run(staticGenerationStore, () =>
    generateCacheEntryWithCacheContext(
      staticGenerationStore,
      clientReferenceManifest,
      cacheHandler,
      serializedCacheKey,
      encodedArguments,
      fn
    )
  )
}
/**
 * Enters a fresh CacheStore scope for this cache entry before running the
 * actual generation. Every entry gets its own (initially empty) store.
 */
function generateCacheEntryWithCacheContext(
  staticGenerationStore: StaticGenerationStore,
  clientReferenceManifest: DeepReadonly<ClientReferenceManifest>,
  cacheHandler: CacheHandler,
  serializedCacheKey: string | ArrayBuffer,
  encodedArguments: FormData | string,
  fn: any
) {
  // Initialize the Store for this Cache entry.
  const cacheStore: CacheStore = {}
  return cacheAsyncStorage.run(cacheStore, () =>
    generateCacheEntryImpl(
      staticGenerationStore,
      clientReferenceManifest,
      cacheHandler,
      serializedCacheKey,
      encodedArguments,
      fn
    )
  )
}
/**
 * Decodes the cached function's arguments, invokes it, renders the result to
 * an RSC stream, and tees that stream: one half is returned to the caller for
 * immediate streaming, the other is written to the CacheHandler (with render
 * errors replayed at the end of the saved stream).
 *
 * Fix over previous version: the saved stream's `cancel` now returns the
 * underlying reader's cancellation promise instead of dropping it, so the
 * consumer's cancel() settles only once cancellation completes and any
 * rejection is propagated rather than becoming an unhandled rejection.
 */
async function generateCacheEntryImpl(
  staticGenerationStore: StaticGenerationStore,
  clientReferenceManifest: DeepReadonly<ClientReferenceManifest>,
  cacheHandler: CacheHandler,
  serializedCacheKey: string | ArrayBuffer,
  encodedArguments: FormData | string,
  fn: any
): Promise<ReadableStream> {
  const temporaryReferences = createServerTemporaryReferenceSet()
  // The encoded payload is [buildId, id, args] (see cache() below); only the
  // original call arguments are needed here.
  const [, , args] = await decodeReply<any[]>(
    encodedArguments,
    getServerModuleMap(),
    {
      temporaryReferences,
    }
  )
  // Invoke the inner function to load a new result.
  const result = fn.apply(null, args)
  // Capture only the first error; it is the one replayed at stream end.
  let didError = false
  let firstError: any = null
  const stream = renderToReadableStream(
    result,
    clientReferenceManifest.clientModules,
    {
      environmentName: 'Cache',
      temporaryReferences,
      onError(error: any) {
        // Report the error.
        console.error(error)
        if (!didError) {
          didError = true
          firstError = error
        }
      },
    }
  )
  const [returnStream, savedStream] = stream.tee()
  // We create a stream that passes through the RSC render of the response.
  // It always runs to completion but at the very end, if something errored
  // or rejected anywhere in the render, we close the stream as errored.
  // This lets a CacheHandler choose to save the errored result for future
  // hits for a while to avoid unnecessary retries or not to retry.
  // We use the end of the stream for this to avoid another complicated
  // side-channel. A receiver has to consider that the stream might also
  // error for other reasons anyway such as losing connection.
  const reader = savedStream.getReader()
  const erroringSavedStream = new ReadableStream({
    pull(controller) {
      return reader.read().then(({ done, value }) => {
        if (done) {
          if (didError) {
            controller.error(firstError)
          } else {
            controller.close()
          }
          return
        }
        controller.enqueue(value)
      })
    },
    cancel(reason: any) {
      // Propagate the cancellation promise (previously dropped) so callers
      // observe completion/failure of the underlying cancel.
      return reader.cancel(reason)
    },
  })
  if (!staticGenerationStore.pendingRevalidateWrites) {
    staticGenerationStore.pendingRevalidateWrites = []
  }
  // Record the cache write so it can be awaited alongside other pending
  // revalidate writes on the store.
  const promise = cacheHandler.set(serializedCacheKey, erroringSavedStream)
  staticGenerationStore.pendingRevalidateWrites.push(promise)
  // Return the stream as we're creating it. This means that if it ends up
  // erroring we cannot return a stale-while-error version but it allows
  // streaming back the result earlier.
  return returnStream
}
/**
 * Wraps `fn` so that invocations are keyed by their serialized arguments and
 * served from / recorded into the CacheHandler registered under `kind`.
 * Returns an async wrapper with the same name as `fn`.
 *
 * @param kind - which registered CacheHandler to use (e.g. 'default').
 * @param id - identifier for this cached function, mixed into the cache key.
 * @param fn - the function whose RSC-serialized result is cached.
 * @throws if dynamicIO is not enabled, the handler kind is unknown, or the
 *   wrapper is called without a StaticGenerationStore in scope.
 */
export function cache(kind: string, id: string, fn: any) {
  if (!process.env.__NEXT_DYNAMIC_IO) {
    throw new Error(
      '"use cache" is only available with the experimental.dynamicIO config.'
    )
  }
  const cacheHandler = cacheHandlerMap.get(kind)
  if (cacheHandler === undefined) {
    throw new Error('Unknown cache handler: ' + kind)
  }
  // Computed-key object literal so the wrapper function picks up the original
  // function's name.
  const name = fn.name
  const cachedFn = {
    [name]: async function (...args: any[]) {
      const staticGenerationStore = staticGenerationAsyncStorage.getStore()
      if (staticGenerationStore === undefined) {
        throw new Error(
          '"use cache" cannot be used outside of App Router. Expected a StaticGenerationStore.'
        )
      }
      // Because the Action ID is not yet unique per implementation of that Action we can't
      // safely reuse the results across builds yet. In the meantime we add the buildId to the
      // arguments as a seed to ensure they're not reused. Remove this once Action IDs hash
      // the implementation.
      const buildId = staticGenerationStore.buildId
      const temporaryReferences = createClientTemporaryReferenceSet()
      // Serialize the key inputs with the RSC reply encoding; produces a plain
      // string for simple inputs or FormData when binary/complex data is involved.
      const encodedArguments: FormData | string = await encodeReply(
        [buildId, id, args],
        {
          temporaryReferences,
        }
      )
      const serializedCacheKey =
        typeof encodedArguments === 'string'
          ? // Fast path for the simple case for simple inputs. We let the CacheHandler
            // Convert it to an ArrayBuffer if it wants to.
            encodedArguments
          : // The FormData might contain binary data that is not valid UTF-8 so this
            // cannot be a string in this case. I.e. .text() is not valid here and it
            // is not valid to use TextDecoder on this result.
            await new Response(encodedArguments).arrayBuffer()
      let entry: undefined | CacheEntry =
        await cacheHandler.get(serializedCacheKey)
      let stream
      if (
        entry === undefined ||
        (entry.stale && staticGenerationStore.isStaticGeneration)
      ) {
        // Miss. Generate a new result.
        // If the cache entry is stale and we're prerendering, we don't want to use the
        // stale entry since it would unnecessarily need to shorten the lifetime of the
        // prerender. We're not time constrained here so we can re-generated it now.
        // We need to run this inside a clean AsyncLocalStorage snapshot so that the cache
        // generation cannot read anything from the context we're currently executing which
        // might include request specific things like cookies() inside a React.cache().
        // Note: It is important that we await at least once before this because it lets us
        // pop out of any stack specific contexts as well - aka "Sync" Local Storage.
        // Get the clientReferenceManifestSingleton while we're still in the outer Context.
        // In case getClientReferenceManifestSingleton is implemented using AsyncLocalStorage.
        const clientReferenceManifestSingleton =
          getClientReferenceManifestSingleton()
        stream = await generateCacheEntry(
          staticGenerationStore,
          clientReferenceManifestSingleton,
          cacheHandler,
          serializedCacheKey,
          encodedArguments,
          fn
        )
      } else {
        stream = entry.value
        if (entry.stale) {
          // If this is stale, and we're not in a prerender (i.e. this is dynamic render),
          // then we should warm up the cache with a fresh revalidated entry.
          const clientReferenceManifestSingleton =
            getClientReferenceManifestSingleton()
          const ignoredStream = await generateCacheEntry(
            staticGenerationStore,
            clientReferenceManifestSingleton,
            cacheHandler,
            serializedCacheKey,
            encodedArguments,
            fn
          )
          // Only the cache write matters here; the returned stream is unused.
          await ignoredStream.cancel()
        }
      }
      // Logs are replayed even if it's a hit - to ensure we see them on the client eventually.
      // If we didn't then the client wouldn't see the logs if it was seeded from a prewarm that
      // never made it to the client. However, this also means that you see logs even when the
      // cached function isn't actually re-executed. We should instead ensure prewarms always
      // make it to the client. Another issue is that this will cause double logging in the
      // server terminal. Once while generating the cache entry and once when replaying it on
      // the server, which is required to pick it up for replaying again on the client.
      const replayConsoleLogs = true
      // TODO: We can't use the client reference manifest to resolve the modules
      // on the server side - instead they need to be recovered as the module
      // references (proxies) again.
      // For now, we'll just use an empty module map.
      const ssrModuleMap: {
        [moduleExport: string]: ManifestNode
      } = {}
      const ssrManifest = {
        // moduleLoading must be null because we don't want to trigger preloads of ClientReferences
        // to be added to the consumer. Instead, we'll wait for any ClientReference to be emitted
        // which themselves will handle the preloading.
        moduleLoading: null,
        moduleMap: ssrModuleMap,
      }
      // Deserialize the cached RSC stream back into a value for the caller.
      return createFromReadableStream(stream, {
        ssrManifest,
        temporaryReferences,
        replayConsoleLogs,
        environmentName: 'Cache',
      })
    },
  }[name]
  return cachedFn
}