// forked from vercel/next.js — fetch-cache.ts
import LRUCache from 'next/dist/compiled/lru-cache'
import { FETCH_CACHE_HEADER } from '../../../client/components/app-router-headers'
import { CACHE_ONE_YEAR } from '../../../lib/constants'
import type { CacheHandler, CacheHandlerContext, CacheHandlerValue } from './'
// Module-level so every FetchCache instance in this process shares one LRU store.
let memoryCache: LRUCache<string, CacheHandlerValue> | undefined

/**
 * Cache handler for the Next.js fetch() data cache.
 *
 * Entries are read/written against a remote suspense-cache service whose
 * endpoint is derived from the `x-vercel-sc-host` / `x-vercel-sc-basepath`
 * request headers, with an optional in-memory LRU in front of it
 * (enabled via `ctx.maxMemoryCacheSize`).
 */
export default class FetchCache implements CacheHandler {
  // Base headers sent on every request to the cache endpoint. Treated as
  // read-only after construction; per-request additions are copied locally
  // (see set()) so they cannot leak between cache operations.
  private headers: Record<string, string>
  private cacheEndpoint?: string
  private debug: boolean

  constructor(ctx: CacheHandlerContext) {
    this.debug = !!process.env.NEXT_PRIVATE_DEBUG_CACHE
    this.headers = {}
    this.headers['Content-Type'] = 'application/json'

    // Merge headers the invoker forwarded via the fetch-cache header, then
    // remove that header so it is not forwarded to the cache service itself.
    if (FETCH_CACHE_HEADER in ctx._requestHeaders) {
      const newHeaders = JSON.parse(
        ctx._requestHeaders[FETCH_CACHE_HEADER] as string
      )
      for (const k in newHeaders) {
        this.headers[k] = newHeaders[k]
      }
      delete ctx._requestHeaders[FETCH_CACHE_HEADER]
    }

    if (ctx._requestHeaders['x-vercel-sc-host']) {
      this.cacheEndpoint = `https://${ctx._requestHeaders['x-vercel-sc-host']}${
        ctx._requestHeaders['x-vercel-sc-basepath'] || ''
      }`
      if (this.debug) {
        console.log('using cache endpoint', this.cacheEndpoint)
      }
    } else if (this.debug) {
      console.log('no cache endpoint available')
    }

    if (ctx.maxMemoryCacheSize && !memoryCache) {
      if (this.debug) {
        console.log('using memory store for fetch cache')
      }
      memoryCache = new LRUCache({
        max: ctx.maxMemoryCacheSize,
        // Approximate byte size of a stored entry so `max` acts as a
        // size budget rather than an item count.
        length({ value }) {
          if (!value) {
            return 25
          } else if (value.kind === 'REDIRECT') {
            return JSON.stringify(value.props).length
          } else if (value.kind === 'IMAGE') {
            throw new Error('invariant image should not be incremental-cache')
          } else if (value.kind === 'FETCH') {
            return JSON.stringify(value.data || '').length
          } else if (value.kind === 'ROUTE') {
            return value.body.length
          }
          // rough estimate of size of cache value
          return (
            value.html.length + (JSON.stringify(value.pageData)?.length || 0)
          )
        },
      })
    } else {
      if (this.debug) {
        console.log('not using memory store for fetch cache')
      }
    }
  }

  /**
   * Look up a fetch-cache entry: memory LRU first, then the remote
   * cache endpoint. Returns null on miss, on remote failure, or when
   * `fetchCache` is falsy (handler is shared with non-fetch callers).
   */
  public async get(key: string, fetchCache?: boolean) {
    if (!fetchCache) return null
    let data = memoryCache?.get(key)

    // get data from fetch cache
    if (!data && this.cacheEndpoint) {
      try {
        const start = Date.now()
        const res = await fetch(
          `${this.cacheEndpoint}/v1/suspense-cache/${key}`,
          {
            method: 'GET',
            headers: this.headers,
            // @ts-expect-error
            next: { internal: true },
          }
        )
        if (res.status === 404) {
          if (this.debug) {
            console.log(
              `no fetch cache entry for ${key}, duration: ${
                Date.now() - start
              }ms`
            )
          }
          return null
        }
        if (!res.ok) {
          console.error(await res.text())
          throw new Error(`invalid response from cache ${res.status}`)
        }
        const cached = await res.json()
        if (!cached || cached.kind !== 'FETCH') {
          this.debug && console.log({ cached })
          throw new Error(`invalid cache value`)
        }
        const cacheState = res.headers.get('x-vercel-cache-state')
        const age = res.headers.get('age')
        data = {
          value: cached,
          // if it's already stale set it to a year in the future
          // if not derive last modified from age
          lastModified:
            cacheState === 'stale'
              ? Date.now() + CACHE_ONE_YEAR
              : Date.now() - parseInt(age || '0', 10) * 1000,
        }
        if (this.debug) {
          console.log(
            `got fetch cache entry for ${key}, duration: ${
              Date.now() - start
            }ms, size: ${Object.keys(cached).length}`
          )
        }
        if (data) {
          memoryCache?.set(key, data)
        }
      } catch (err) {
        // unable to get data from fetch-cache
        if (this.debug) {
          console.error(`Failed to get from fetch-cache`, err)
        }
      }
    }
    return data || null
  }

  /**
   * Store a fetch-cache entry in the memory LRU and, when configured,
   * POST it to the remote cache endpoint. Remote failures are swallowed
   * (best-effort write) and logged only in debug mode.
   */
  public async set(
    key: string,
    data: CacheHandlerValue['value'],
    fetchCache?: boolean
  ) {
    if (!fetchCache) return
    memoryCache?.set(key, {
      value: data,
      lastModified: Date.now(),
    })
    if (this.cacheEndpoint) {
      try {
        const start = Date.now()
        // Build per-request headers from the base set. The original code
        // mutated `this.headers` here, so an `x-vercel-revalidate` value
        // from one entry leaked into every subsequent request made by
        // this handler instance.
        const headers: Record<string, string> = { ...this.headers }
        if (data !== null && 'revalidate' in data) {
          headers['x-vercel-revalidate'] = data.revalidate.toString()
        }
        if (
          !headers['x-vercel-revalidate'] &&
          data !== null &&
          'data' in data
        ) {
          // No explicit revalidate: forward the upstream cache-control
          // so the cache service can derive a TTL.
          headers['x-vercel-cache-control'] =
            data.data.headers['cache-control']
        }
        const body = JSON.stringify(data)
        const res = await fetch(
          `${this.cacheEndpoint}/v1/suspense-cache/${key}`,
          {
            method: 'POST',
            headers,
            body: body,
            // @ts-expect-error
            next: { internal: true },
          }
        )
        if (!res.ok) {
          this.debug && console.log(await res.text())
          throw new Error(`invalid response ${res.status}`)
        }
        if (this.debug) {
          console.log(
            `successfully set to fetch-cache for ${key}, duration: ${
              Date.now() - start
            }ms, size: ${body.length}`
          )
        }
      } catch (err) {
        // unable to set to fetch-cache
        if (this.debug) {
          console.error(`Failed to update fetch cache`, err)
        }
      }
    }
    return
  }
}