feat(optimizer): holdUntilCrawlEnd option (#15244)
patak-dev authored Jan 23, 2024
1 parent aa7916a commit b7c6629
Showing 4 changed files with 124 additions and 13 deletions.
8 changes: 8 additions & 0 deletions docs/config/dep-optimization-options.md
@@ -62,6 +62,14 @@ Certain options are omitted since changing them would not be compatible with Vite

Set to `true` to force dependency pre-bundling, ignoring previously cached optimized dependencies.

## optimizeDeps.holdUntilCrawlEnd

- **Experimental**
- **Type:** `boolean`
- **Default:** `true`

When enabled, it will hold the first optimized deps results until all static imports are crawled on cold start. This avoids the need for full-page reloads when new dependencies are discovered and they trigger the generation of new common chunks. If all dependencies are found by the scanner plus the explicitly defined ones in `include`, it is better to disable this option to let the browser process more requests in parallel.

## optimizeDeps.disabled

- **Deprecated**
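The docs entry above suggests disabling the option when `optimizeDeps.include` already covers every dependency. A minimal sketch of what that opt-out might look like in a user config (the `include` entries are placeholders, not part of this commit):

```ts
// vite.config.ts
import { defineConfig } from 'vite'

export default defineConfig({
  optimizeDeps: {
    // hypothetical entries: list the deps your app actually imports
    include: ['react', 'react-dom'],
    // release the first optimize result as soon as the scanner finishes,
    // letting the browser process more requests in parallel
    holdUntilCrawlEnd: false,
  },
})
```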
1 change: 1 addition & 0 deletions packages/vite/src/node/config.ts
@@ -766,6 +766,7 @@ export async function resolveConfig(
packageCache,
createResolver,
optimizeDeps: {
holdUntilCrawlEnd: true,
...optimizeDeps,
esbuildOptions: {
preserveSymlinks: resolveOptions.preserveSymlinks,
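The one-line change above seeds the default before spreading the user's `optimizeDeps`, so an explicit user value always wins. A standalone sketch of that merge pattern (hypothetical names, not Vite internals):

```ts
interface OptimizeDepsUserConfig {
  holdUntilCrawlEnd?: boolean
}

// Defaults listed before the spread are overridden by any user-provided keys.
function withDefaults(user: OptimizeDepsUserConfig) {
  return {
    holdUntilCrawlEnd: true, // seeded default
    ...user, // user config spread last, so it takes precedence
  }
}

console.log(withDefaults({})) // { holdUntilCrawlEnd: true }
console.log(withDefaults({ holdUntilCrawlEnd: false })) // { holdUntilCrawlEnd: false }
```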
11 changes: 11 additions & 0 deletions packages/vite/src/node/optimizer/index.ts
@@ -132,6 +132,17 @@ export interface DepOptimizationConfig {
* @experimental
*/
noDiscovery?: boolean
/**
* When enabled, it will hold the first optimized deps results until all static
* imports are crawled on cold start. This avoids the need for full-page reloads
* when new dependencies are discovered and they trigger the generation of new
* common chunks. If all dependencies are found by the scanner plus the explicitly
* defined ones in `include`, it is better to disable this option to let the
* browser process more requests in parallel.
* @default true
* @experimental
*/
holdUntilCrawlEnd?: boolean
}

export type DepOptimizationOptions = DepOptimizationConfig & {
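The optimizer changes below hand out promises whose resolution is controlled from outside, via a `promiseWithResolvers` helper. A plausible minimal implementation, assuming it mirrors the `Promise.withResolvers()` pattern (the exact internal utility may differ):

```ts
interface PromiseWithResolvers<T> {
  promise: Promise<T>
  resolve: (value: T | PromiseLike<T>) => void
  reject: (reason?: unknown) => void
}

function promiseWithResolvers<T>(): PromiseWithResolvers<T> {
  let resolve!: (value: T | PromiseLike<T>) => void
  let reject!: (reason?: unknown) => void
  const promise = new Promise<T>((res, rej) => {
    resolve = res
    reject = rej
  })
  return { promise, resolve, reject }
}
```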
117 changes: 104 additions & 13 deletions packages/vite/src/node/optimizer/optimizer.ts
@@ -93,6 +93,10 @@ async function createDepsOptimizer(
let metadata =
cachedMetadata || initDepsOptimizerMetadata(config, ssr, sessionTimestamp)

const options = getDepOptimizationConfig(config, ssr)

const { noDiscovery, holdUntilCrawlEnd } = options

const depsOptimizer: DepsOptimizer = {
metadata,
registerMissingImport,
@@ -103,7 +107,7 @@
`${depInfo.file}?v=${depInfo.browserHash}`,
delayDepsOptimizerUntil,
close,
- options: getDepOptimizationConfig(config, ssr),
+ options,
}

depsOptimizerMap.set(config, depsOptimizer)
@@ -126,6 +130,23 @@
}
}

let discoveredDepsWhileScanning: string[] = []
const logDiscoveredDepsWhileScanning = () => {
if (discoveredDepsWhileScanning.length) {
config.logger.info(
colors.green(
`✨ discovered while scanning: ${depsLogString(
discoveredDepsWhileScanning,
)}`,
),
{
timestamp: true,
},
)
discoveredDepsWhileScanning = []
}
}

let depOptimizationProcessing = promiseWithResolvers<void>()
let depOptimizationProcessingQueue: PromiseWithResolvers<void>[] = []
const resolveEnqueuedProcessingPromises = () => {
@@ -140,6 +161,7 @@
let currentlyProcessing = false

let firstRunCalled = !!cachedMetadata
let warnAboutMissedDependencies = false

// If this is a cold run, we wait for static imports discovered
// from the first request before resolving to minimize full page reloads.
@@ -180,25 +202,25 @@

// Initialize discovered deps with manually added optimizeDeps.include info

- const deps: Record<string, string> = {}
- await addManuallyIncludedOptimizeDeps(deps, config, ssr)
+ const manuallyIncludedDeps: Record<string, string> = {}
+ await addManuallyIncludedOptimizeDeps(manuallyIncludedDeps, config, ssr)

- const discovered = toDiscoveredDependencies(
+ const manuallyIncludedDepsInfo = toDiscoveredDependencies(
config,
- deps,
+ manuallyIncludedDeps,
ssr,
sessionTimestamp,
)

- for (const depInfo of Object.values(discovered)) {
+ for (const depInfo of Object.values(manuallyIncludedDepsInfo)) {
addOptimizedDepInfo(metadata, 'discovered', {
...depInfo,
processing: depOptimizationProcessing.promise,
})
newDepsDiscovered = true
}

- if (config.optimizeDeps.noDiscovery) {
+ if (noDiscovery) {
// We don't need to scan for dependencies or wait for the static crawl to end
// Run the first optimization run immediately
runOptimizer()
@@ -214,6 +236,13 @@
const deps = await discover.result
discover = undefined

const manuallyIncluded = Object.keys(manuallyIncludedDepsInfo)
discoveredDepsWhileScanning.push(
...Object.keys(metadata.discovered).filter(
(dep) => !deps[dep] && !manuallyIncluded.includes(dep),
),
)

// Add these dependencies to the discovered list, as these are currently
// used by the preAliasPlugin to support aliased and optimized deps.
// This is also used by the CJS externalization heuristics in legacy mode
@@ -224,12 +253,31 @@
}

const knownDeps = prepareKnownDeps()
startNextDiscoveredBatch()

// For dev, we run the scanner and the first optimization
- // run on the background, but we wait until crawling has ended
- // to decide if we send this result to the browser or we need to
- // do another optimize step
+ // run on the background
optimizationResult = runOptimizeDeps(config, knownDeps, ssr)

// If the holdUntilCrawlEnd strategy is used, we wait until crawling has
// ended to decide if we send this result to the browser or if we need to
// do another optimize step
if (!holdUntilCrawlEnd) {
// If not, we release the result to the browser as soon as the scanner
// is done. If the scanner missed any dependency, and a new dependency
// is discovered while crawling static imports, then there will be a
// full-page reload if new common chunks are generated between the old
// and new optimized deps.
optimizationResult.result.then((result) => {
// Check if the crawling of static imports has already finished. In that
// case, the result is handled by the onCrawlEnd callback
if (!crawlEndFinder) return

optimizationResult = undefined // signal that we'll be using the result

runOptimizer(result)
})
}
} catch (e) {
logger.error(e.stack || e.message)
} finally {
@@ -394,6 +442,16 @@
newDepsToLogHandle = setTimeout(() => {
newDepsToLogHandle = undefined
logNewlyDiscoveredDeps()
if (warnAboutMissedDependencies) {
logDiscoveredDepsWhileScanning()
config.logger.info(
colors.magenta(
`❗ add these dependencies to optimizeDeps.include to speed up cold start`,
),
{ timestamp: true },
)
warnAboutMissedDependencies = false
}
}, 2 * debounceMs)
} else {
debug(
@@ -426,6 +484,16 @@
if (newDepsToLogHandle) clearTimeout(newDepsToLogHandle)
newDepsToLogHandle = undefined
logNewlyDiscoveredDeps()
if (warnAboutMissedDependencies) {
logDiscoveredDepsWhileScanning()
config.logger.info(
colors.magenta(
`❗ add these dependencies to optimizeDeps.include to avoid a full page reload during cold start`,
),
{ timestamp: true },
)
warnAboutMissedDependencies = false
}
}

logger.info(
@@ -562,7 +630,7 @@

function debouncedProcessing(timeout = debounceMs) {
// Debounced rerun, let other missing dependencies be discovered before
- // the running next optimizeDeps
+ // the next optimizeDeps run
enqueuedRerun = undefined
if (debounceProcessingHandle) clearTimeout(debounceProcessingHandle)
if (newDepsToLogHandle) clearTimeout(newDepsToLogHandle)
@@ -593,8 +661,17 @@
await depsOptimizer.scanProcessing

if (optimizationResult && !config.optimizeDeps.noDiscovery) {
- const result = await optimizationResult.result
- optimizationResult = undefined
+ // In the holdUntilCrawlEnd strategy, we don't release the result of the
+ // post-scanner optimize step to the browser until we reach this point.
+ // If there are new dependencies, we do another optimize run; if not, we
+ // use the post-scanner optimize result.
+ // If holdUntilCrawlEnd is false and we reach here, it means that the
+ // scan+optimize step finished after crawl end. We follow the same
+ // process as in the holdUntilCrawlEnd case.
+ const afterScanResult = optimizationResult.result
+ optimizationResult = undefined // signal that we'll be using the result
+
+ const result = await afterScanResult
currentlyProcessing = false

const crawlDeps = Object.keys(metadata.discovered)
@@ -649,6 +726,20 @@
startNextDiscoveredBatch()
runOptimizer(result)
}
} else if (!holdUntilCrawlEnd) {
// The post-scanner optimize result has been released to the browser
// If new deps have been discovered, issue a regular rerun of the
// optimizer. A full page reload may still be avoided if the new
// optimize result is compatible in this case
if (newDepsDiscovered) {
debug?.(
colors.green(
`✨ new dependencies were found while crawling static imports, re-running optimizer`,
),
)
warnAboutMissedDependencies = true
debouncedProcessing(0)
}
} else {
const crawlDeps = Object.keys(metadata.discovered)
currentlyProcessing = false
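Taken together, the two code paths amount to a race between the post-scanner optimize run and the crawl of static imports. A simplified, self-contained sketch of the control flow (all names and timings are illustrative, not Vite code):

```ts
// Model: `scanOptimize` stands in for the scanner + first optimize run,
// `crawl` for crawling static imports on cold start.
async function coldStart(holdUntilCrawlEnd: boolean): Promise<void> {
  let released = false
  const release = (label: string) => {
    if (!released) {
      released = true
      console.log(`result released to the browser after: ${label}`)
    }
  }

  const scanOptimize = new Promise<void>((r) => setTimeout(r, 50))
  const crawl = new Promise<void>((r) => setTimeout(r, 100))

  if (!holdUntilCrawlEnd) {
    // Eager strategy: release as soon as scan+optimize is done. If the
    // crawl later discovers a missed dep, a rerun (and possibly a full
    // page reload) follows.
    void scanOptimize.then(() => release('scan+optimize'))
  }

  await Promise.all([crawl, scanOptimize])
  // Hold strategy: only release once crawling has ended, so newly
  // discovered deps can't invalidate already-served common chunks.
  release('crawl end')
}

coldStart(true) // logs: released after: crawl end
  .then(() => coldStart(false)) // logs: released after: scan+optimize
```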
