Skip to content

Commit

Permalink
feat: formalize waitForRequestsIdle (experimental) (#16135)
Browse files Browse the repository at this point in the history
  • Loading branch information
patak-dev authored Mar 14, 2024
1 parent 7970aa6 commit 9888843
Show file tree
Hide file tree
Showing 6 changed files with 165 additions and 88 deletions.
12 changes: 12 additions & 0 deletions docs/guide/api-javascript.md
Original file line number Diff line number Diff line change
Expand Up @@ -183,9 +183,21 @@ interface ViteDevServer {
* Bind CLI shortcuts
*/
bindCLIShortcuts(options?: BindCLIShortcutsOptions<ViteDevServer>): void
/**
* Calling `await server.waitForRequestsIdle(id)` will wait until all static imports
* are processed. If called from a load or transform plugin hook, the id needs to be
* passed as a parameter to avoid deadlocks. Calling this function after the first
* static imports section of the module graph has been processed will resolve immediately.
* @experimental
*/
waitForRequestsIdle: (ignoredId?: string) => Promise<void>
}
```
:::info
`waitForRequestsIdle` is meant to be used as an escape hatch to improve DX for features that can't be implemented following the on-demand nature of the Vite dev server. It can be used during startup by tools like Tailwind to delay generating the app CSS classes until the app code has been seen, avoiding flashes of style changes. When this function is used in a load or transform hook, and the default HTTP1 server is used, one of the six http channels will be blocked until the server processes all static imports. Vite's dependency optimizer currently uses this function to avoid full-page reloads on missing dependencies by delaying loading of pre-bundled dependencies until all imported dependencies have been collected from statically imported sources. Vite may switch to a different strategy in a future major release, setting `optimizeDeps.crawlUntilStaticImports: false` by default to avoid the performance hit in large applications during cold start.
:::
## `build`
**Type Signature:**
Expand Down
1 change: 0 additions & 1 deletion packages/vite/src/node/optimizer/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -59,7 +59,6 @@ export interface DepsOptimizer {
isOptimizedDepFile: (id: string) => boolean
isOptimizedDepUrl: (url: string) => boolean
getOptimizedDepId: (depInfo: OptimizedDepInfo) => string
delayDepsOptimizerUntil: (id: string, done: () => Promise<any>) => void

close: () => Promise<void>

Expand Down
103 changes: 17 additions & 86 deletions packages/vite/src/node/optimizer/optimizer.ts
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ export function getDepsOptimizer(

export async function initDepsOptimizer(
config: ResolvedConfig,
server?: ViteDevServer,
server: ViteDevServer,
): Promise<void> {
if (!getDepsOptimizer(config, false)) {
await createDepsOptimizer(config, server)
Expand Down Expand Up @@ -78,7 +78,7 @@ export async function initDevSsrDepsOptimizer(

async function createDepsOptimizer(
config: ResolvedConfig,
server?: ViteDevServer,
server: ViteDevServer,
): Promise<void> {
const { logger } = config
const ssr = false
Expand All @@ -105,7 +105,6 @@ async function createDepsOptimizer(
isOptimizedDepUrl: createIsOptimizedDepUrl(config),
getOptimizedDepId: (depInfo: OptimizedDepInfo) =>
`${depInfo.file}?v=${depInfo.browserHash}`,
delayDepsOptimizerUntil,
close,
options,
}
Expand Down Expand Up @@ -167,9 +166,10 @@ async function createDepsOptimizer(
// from the first request before resolving to minimize full page reloads.
// On warm start or after the first optimization is run, we use a simpler
// debounce strategy each time a new dep is discovered.
let crawlEndFinder: CrawlEndFinder | undefined
let waitingForCrawlEnd = false
if (!cachedMetadata) {
crawlEndFinder = setupOnCrawlEnd(onCrawlEnd)
server._onCrawlEnd(onCrawlEnd)
waitingForCrawlEnd = true
}

let optimizationResult:
Expand All @@ -188,7 +188,6 @@ async function createDepsOptimizer(

async function close() {
closed = true
crawlEndFinder?.cancel()
await Promise.allSettled([
discover?.cancel(),
depsOptimizer.scanProcessing,
Expand Down Expand Up @@ -271,7 +270,7 @@ async function createDepsOptimizer(
optimizationResult.result.then((result) => {
// Check if the crawling of static imports has already finished. In that
// case, the result is handled by the onCrawlEnd callback
if (!crawlEndFinder) return
if (!waitingForCrawlEnd) return

optimizationResult = undefined // signal that we'll be using the result

Expand Down Expand Up @@ -535,17 +534,15 @@ async function createDepsOptimizer(
}

function fullReload() {
if (server) {
// Cached transform results have stale imports (resolved to
// old locations) so they need to be invalidated before the page is
// reloaded.
server.moduleGraph.invalidateAll()

server.hot.send({
type: 'full-reload',
path: '*',
})
}
// Cached transform results have stale imports (resolved to
// old locations) so they need to be invalidated before the page is
// reloaded.
server.moduleGraph.invalidateAll()

server.hot.send({
type: 'full-reload',
path: '*',
})
}

async function rerun() {
Expand Down Expand Up @@ -594,7 +591,7 @@ async function createDepsOptimizer(
// we can get a list of every missing dependency before giving to the
// browser a dependency that may be outdated, thus avoiding full page reloads

if (!crawlEndFinder) {
if (!waitingForCrawlEnd) {
// Debounced rerun, let other missing dependencies be discovered before
// the running next optimizeDeps
debouncedProcessing()
Expand Down Expand Up @@ -649,7 +646,7 @@ async function createDepsOptimizer(
// be crawled if the browser requests them right away).
async function onCrawlEnd() {
// switch after this point to a simple debounce strategy
crawlEndFinder = undefined
waitingForCrawlEnd = false

debug?.(colors.green(`✨ static imports crawl ended`))
if (closed) {
Expand Down Expand Up @@ -757,71 +754,6 @@ async function createDepsOptimizer(
debouncedProcessing(0)
}
}

// Gate the first optimizer run on the processing of module `id`: `done` is
// the pending transform result for that module. Already-optimized dep files
// never gate the crawl, and once `crawlEndFinder` is cleared (the crawl
// ended) this becomes a no-op.
function delayDepsOptimizerUntil(id: string, done: () => Promise<any>) {
if (crawlEndFinder && !depsOptimizer.isOptimizedDepFile(id)) {
crawlEndFinder.delayDepsOptimizerUntil(id, done)
}
}
}

// Quiet period (ms) with no in-flight registered requests before the crawl
// of static imports is declared finished.
const callCrawlEndIfIdleAfterMs = 50

// Tracks in-flight module processing during the initial static-imports crawl.
interface CrawlEndFinder {
// Register module `id`; crawl end waits for its `done` promise to settle.
delayDepsOptimizerUntil: (id: string, done: () => Promise<any>) => void
// Stop tracking and prevent the onCrawlEnd callback from ever firing.
cancel: () => void
}

/**
 * Creates a tracker that detects the end of the initial static-imports crawl.
 * `onCrawlEnd` is invoked at most once, after every registered module's
 * processing has settled and no new registrations arrive for
 * `callCrawlEndIfIdleAfterMs` milliseconds.
 */
function setupOnCrawlEnd(onCrawlEnd: () => void): CrawlEndFinder {
// ids whose processing is still pending (gates crawl end)
const registeredIds = new Set<string>()
// every id ever registered; prevents re-registering an id after it completed
const seenIds = new Set<string>()
// pending debounce timer; reset whenever the tracker goes idle again
let timeoutHandle: NodeJS.Timeout | undefined

let cancelled = false
function cancel() {
cancelled = true
}

// ensure onCrawlEnd fires at most once and never after cancel()
let crawlEndCalled = false
function callOnCrawlEnd() {
if (!cancelled && !crawlEndCalled) {
crawlEndCalled = true
onCrawlEnd()
}
}

// Track module `id` until its `done` promise settles; errors are swallowed
// here because failures are surfaced through the request itself, not the
// crawl tracker.
function delayDepsOptimizerUntil(id: string, done: () => Promise<any>): void {
if (!seenIds.has(id)) {
seenIds.add(id)
registeredIds.add(id)
done()
.catch(() => {})
.finally(() => markIdAsDone(id))
}
}
function markIdAsDone(id: string): void {
registeredIds.delete(id)
checkIfCrawlEndAfterTimeout()
}

// Debounced idle check: (re)arm the timer each time the set of in-flight
// ids drains, so a burst of new requests keeps pushing crawl end back.
function checkIfCrawlEndAfterTimeout() {
if (cancelled || registeredIds.size > 0) return

if (timeoutHandle) clearTimeout(timeoutHandle)
timeoutHandle = setTimeout(
callOnCrawlEndWhenIdle,
callCrawlEndIfIdleAfterMs,
)
}
// Fires after the quiet period; re-checks idleness in case new ids were
// registered while the timer was pending.
async function callOnCrawlEndWhenIdle() {
if (cancelled || registeredIds.size > 0) return
callOnCrawlEnd()
}

return {
delayDepsOptimizerUntil,
cancel,
}
}

async function createDevSsrDepsOptimizer(
Expand All @@ -844,7 +776,6 @@ async function createDevSsrDepsOptimizer(
// noop, there is no scanning during dev SSR
// the optimizer blocks the server start
run: () => {},
delayDepsOptimizerUntil: (id: string, done: () => Promise<any>) => {},

close: async () => {},
options: config.ssr.optimizeDeps,
Expand Down
114 changes: 114 additions & 0 deletions packages/vite/src/node/server/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@ import {
isParentDirectory,
mergeConfig,
normalizePath,
promiseWithResolvers,
resolveHostname,
resolveServerUrls,
} from '../utils'
Expand Down Expand Up @@ -344,6 +345,22 @@ export interface ViteDevServer {
* Open browser
*/
openBrowser(): void
/**
* Calling `await server.waitForRequestsIdle(id)` will wait until all static imports
* are processed. If called from a load or transform plugin hook, the id needs to be
* passed as a parameter to avoid deadlocks. Calling this function after the first
* static imports section of the module graph has been processed will resolve immediately.
* @experimental
*/
waitForRequestsIdle: (ignoredId?: string) => Promise<void>
/**
* @internal
*/
_registerRequestProcessing: (id: string, done: () => Promise<unknown>) => void
/**
* @internal
*/
_onCrawlEnd(cb: () => void): void
/**
* @internal
*/
Expand Down Expand Up @@ -459,6 +476,20 @@ export async function _createServer(

const devHtmlTransformFn = createDevHtmlTransformFn(config)

// Server-owned crawl-end tracker: fans out a single crawl-end event to every
// subscriber registered via _onCrawlEnd (e.g. the deps optimizer).
const onCrawlEndCallbacks: (() => void)[] = []
const crawlEndFinder = setupOnCrawlEnd(() => {
onCrawlEndCallbacks.forEach((cb) => cb())
})
// Public (experimental) API: resolves once all static imports are processed.
// `ignoredId` excludes the caller's own module to avoid deadlocks when called
// from a load/transform hook for that module.
function waitForRequestsIdle(ignoredId?: string): Promise<void> {
return crawlEndFinder.waitForRequestsIdle(ignoredId)
}
// Internal: register an in-flight request whose `done` promise gates crawl end.
function _registerRequestProcessing(id: string, done: () => Promise<any>) {
crawlEndFinder.registerRequestProcessing(id, done)
}
// Internal: subscribe a callback to the one-time crawl-end event.
function _onCrawlEnd(cb: () => void) {
onCrawlEndCallbacks.push(cb)
}

let server: ViteDevServer = {
config,
middlewares,
Expand Down Expand Up @@ -590,6 +621,7 @@ export async function _createServer(
watcher.close(),
hot.close(),
container.close(),
crawlEndFinder?.cancel(),
getDepsOptimizer(server.config)?.close(),
getDepsOptimizer(server.config, true)?.close(),
closeHttpServer(),
Expand Down Expand Up @@ -638,6 +670,10 @@ export async function _createServer(
return server._restartPromise
},

waitForRequestsIdle,
_registerRequestProcessing,
_onCrawlEnd,

_setInternalServer(_server: ViteDevServer) {
// Rebind internal the server variable so functions reference the user
// server instance after a restart
Expand Down Expand Up @@ -1133,3 +1169,81 @@ export async function restartServerWithUrls(
server.printUrls()
}
}

// Quiet period (ms) with no in-flight registered requests before the crawl
// of static imports is declared finished.
const callCrawlEndIfIdleAfterMs = 50

// Tracks in-flight request processing during the initial static-imports crawl.
interface CrawlEndFinder {
// Register request `id`; crawl end waits for its `done` promise to settle.
registerRequestProcessing: (id: string, done: () => Promise<any>) => void
// Resolves once the crawl has ended; `ignoredId` excludes the caller's own
// module so a load/transform hook can wait without deadlocking on itself.
waitForRequestsIdle: (ignoredId?: string) => Promise<void>
// Stop tracking; prevents the onCrawlEnd callback from firing.
cancel: () => void
}

/**
 * Creates a tracker that detects the end of the initial static-imports crawl.
 * `onCrawlEnd` is invoked at most once, after every registered request has
 * settled and no new registrations arrive for `callCrawlEndIfIdleAfterMs`
 * milliseconds. A shared deferred promise lets any number of callers await
 * crawl end via waitForRequestsIdle.
 */
function setupOnCrawlEnd(onCrawlEnd: () => void): CrawlEndFinder {
// ids whose processing is still pending (gates crawl end)
const registeredIds = new Set<string>()
// every id ever registered; prevents re-registering an id after it completed
const seenIds = new Set<string>()
// single deferred resolved at crawl end; shared by all waitForRequestsIdle callers
const onCrawlEndPromiseWithResolvers = promiseWithResolvers<void>()

// pending debounce timer; reset whenever the tracker goes idle again
let timeoutHandle: NodeJS.Timeout | undefined

let cancelled = false
function cancel() {
cancelled = true
}

// ensure onCrawlEnd fires at most once and never after cancel()
let crawlEndCalled = false
function callOnCrawlEnd() {
if (!cancelled && !crawlEndCalled) {
crawlEndCalled = true
onCrawlEnd()
}
// resolve unconditionally (even when cancelled) so pending waiters
// are never left hanging
onCrawlEndPromiseWithResolvers.resolve()
}

// Track request `id` until its `done` promise settles; errors are swallowed
// here because failures are surfaced through the request itself, not the
// crawl tracker.
function registerRequestProcessing(
id: string,
done: () => Promise<any>,
): void {
if (!seenIds.has(id)) {
seenIds.add(id)
registeredIds.add(id)
done()
.catch(() => {})
.finally(() => markIdAsDone(id))
}
}

// Immediately retire `ignoredId` (the caller's own module) so a load or
// transform hook awaiting idleness does not deadlock on itself.
function waitForRequestsIdle(ignoredId?: string): Promise<void> {
if (ignoredId) {
seenIds.add(ignoredId)
markIdAsDone(ignoredId)
}
return onCrawlEndPromiseWithResolvers.promise
}

function markIdAsDone(id: string): void {
if (registeredIds.has(id)) {
registeredIds.delete(id)
checkIfCrawlEndAfterTimeout()
}
}

// Debounced idle check: (re)arm the timer each time the set of in-flight
// ids drains, so a burst of new requests keeps pushing crawl end back.
function checkIfCrawlEndAfterTimeout() {
if (cancelled || registeredIds.size > 0) return

if (timeoutHandle) clearTimeout(timeoutHandle)
timeoutHandle = setTimeout(
callOnCrawlEndWhenIdle,
callCrawlEndIfIdleAfterMs,
)
}
// Fires after the quiet period; re-checks idleness in case new ids were
// registered while the timer was pending.
async function callOnCrawlEndWhenIdle() {
if (cancelled || registeredIds.size > 0) return
callOnCrawlEnd()
}

return {
registerRequestProcessing,
waitForRequestsIdle,
cancel,
}
}
5 changes: 4 additions & 1 deletion packages/vite/src/node/server/transformRequest.ts
Original file line number Diff line number Diff line change
Expand Up @@ -181,7 +181,10 @@ async function doTransform(
resolved,
)

getDepsOptimizer(config, ssr)?.delayDepsOptimizerUntil(id, () => result)
const depsOptimizer = getDepsOptimizer(config, ssr)
if (!depsOptimizer?.isOptimizedDepFile(id)) {
server._registerRequestProcessing(id, () => result)
}

return result
}
Expand Down
Loading

0 comments on commit 9888843

Please sign in to comment.