Skip to content

Commit

Permalink
Merge pull request webpack#11168 from webpack/bugfix/split-chunks
Browse files Browse the repository at this point in the history
splitChunks improvements
  • Loading branch information
sokra committed Jul 15, 2020
2 parents dfd2021 + f0568ec commit a41994a
Show file tree
Hide file tree
Showing 4 changed files with 99 additions and 53 deletions.
8 changes: 8 additions & 0 deletions declarations/WebpackOptions.d.ts
Expand Up @@ -927,6 +927,10 @@ export interface OptimizationSplitChunksOptions {
* Ignore minimum size, minimum chunks and maximum requests and always create chunks for this cache group
*/
enforce?: boolean;
/**
* Size threshold at which splitting is enforced and other restrictions (maxAsyncRequests, maxInitialRequests) are ignored.
*/
enforceSizeThreshold?: number;
/**
* Sets the template for the filename for created chunks (Only works for initial chunks)
*/
Expand Down Expand Up @@ -973,6 +977,10 @@ export interface OptimizationSplitChunksOptions {
* Select chunks for determining shared modules (defaults to "async", "initial" and "all" requires adding these chunks to the HTML)
*/
chunks?: ("initial" | "async" | "all") | Function;
/**
* Size threshold at which splitting is enforced and other restrictions (maxAsyncRequests, maxInitialRequests) are ignored.
*/
enforceSizeThreshold?: number;
/**
* Options for modules not selected by any other cache group
*/
Expand Down
134 changes: 82 additions & 52 deletions lib/optimize/SplitChunksPlugin.js
Expand Up @@ -75,12 +75,15 @@ const compareEntries = (a, b) => {
const bSizeReduce = b.size * (b.chunks.size - 1);
const diffSizeReduce = aSizeReduce - bSizeReduce;
if (diffSizeReduce) return diffSizeReduce;
// 4. by number of modules (to be able to compare by identifier)
// 4. by cache group index
const indexDiff = a.cacheGroupIndex - b.cacheGroupIndex;
if (indexDiff) return indexDiff;
// 5. by number of modules (to be able to compare by identifier)
const modulesA = a.modules;
const modulesB = b.modules;
const diff = modulesA.size - modulesB.size;
if (diff) return diff;
// 5. by module identifiers
// 6. by module identifiers
modulesA.sort();
modulesB.sort();
const aI = modulesA[Symbol.iterator]();
Expand Down Expand Up @@ -114,6 +117,7 @@ module.exports = class SplitChunksPlugin {
options.chunks || "all"
),
minSize: options.minSize || 0,
enforceSizeThreshold: options.enforceSizeThreshold || 0,
maxSize: options.maxSize || 0,
minChunks: options.minChunks || 1,
maxAsyncRequests: options.maxAsyncRequests || 1,
Expand Down Expand Up @@ -286,6 +290,7 @@ module.exports = class SplitChunksPlugin {
),
enforce: option.enforce,
minSize: option.minSize,
enforceSizeThreshold: option.enforceSizeThreshold,
maxSize: option.maxSize,
minChunks: option.minChunks,
maxAsyncRequests: option.maxAsyncRequests,
Expand Down Expand Up @@ -458,8 +463,8 @@ module.exports = class SplitChunksPlugin {
* @typedef {Object} ChunksInfoItem
* @property {SortableSet} modules
* @property {TODO} cacheGroup
* @property {number} cacheGroupIndex
* @property {string} name
* @property {boolean} validateSize
* @property {number} size
* @property {Set<Chunk>} chunks
* @property {Set<Chunk>} reuseableChunks
Expand All @@ -473,13 +478,15 @@ module.exports = class SplitChunksPlugin {

/**
* @param {TODO} cacheGroup the current cache group
 * @param {number} cacheGroupIndex the index of the cache group for ordering
* @param {Chunk[]} selectedChunks chunks selected for this module
* @param {string} selectedChunksKey a key of selectedChunks
* @param {Module} module the current module
* @returns {void}
*/
const addModuleToChunksInfoMap = (
cacheGroup,
cacheGroupIndex,
selectedChunks,
selectedChunksKey,
module
Expand Down Expand Up @@ -507,8 +514,8 @@ module.exports = class SplitChunksPlugin {
(info = {
modules: new SortableSet(undefined, sortByIdentifier),
cacheGroup,
cacheGroupIndex,
name,
validateSize: cacheGroup.minSize > 0,
size: 0,
chunks: new Set(),
reuseableChunks: new Set(),
Expand All @@ -517,9 +524,7 @@ module.exports = class SplitChunksPlugin {
);
}
info.modules.add(module);
if (info.validateSize) {
info.size += module.size();
}
info.size += module.size();
if (!info.chunksKeys.has(selectedChunksKey)) {
info.chunksKeys.add(selectedChunksKey);
for (const chunk of selectedChunks) {
Expand All @@ -544,22 +549,31 @@ module.exports = class SplitChunksPlugin {
combinationsCache.set(chunksKey, combs);
}

let cacheGroupIndex = 0;
for (const cacheGroupSource of cacheGroups) {
const minSize =
cacheGroupSource.minSize !== undefined
? cacheGroupSource.minSize
: cacheGroupSource.enforce
? 0
: this.options.minSize;
const enforceSizeThreshold =
cacheGroupSource.enforceSizeThreshold !== undefined
? cacheGroupSource.enforceSizeThreshold
: cacheGroupSource.enforce
? 0
: this.options.enforceSizeThreshold;
const cacheGroup = {
key: cacheGroupSource.key,
priority: cacheGroupSource.priority || 0,
chunksFilter:
cacheGroupSource.chunksFilter || this.options.chunksFilter,
minSize:
cacheGroupSource.minSize !== undefined
? cacheGroupSource.minSize
: cacheGroupSource.enforce
? 0
: this.options.minSize,
minSize,
minSizeForMaxSize:
cacheGroupSource.minSize !== undefined
? cacheGroupSource.minSize
: this.options.minSize,
enforceSizeThreshold,
maxSize:
cacheGroupSource.maxSize !== undefined
? cacheGroupSource.maxSize
Expand Down Expand Up @@ -596,7 +610,9 @@ module.exports = class SplitChunksPlugin {
cacheGroupSource.automaticNameDelimiter !== undefined
? cacheGroupSource.automaticNameDelimiter
: this.options.automaticNameDelimiter,
reuseExistingChunk: cacheGroupSource.reuseExistingChunk
reuseExistingChunk: cacheGroupSource.reuseExistingChunk,
_validateSize: minSize > 0,
_conditionalEnforce: enforceSizeThreshold > 0
};
// For all combination of chunk selection
for (const chunkCombination of combs) {
Expand All @@ -613,18 +629,23 @@ module.exports = class SplitChunksPlugin {

addModuleToChunksInfoMap(
cacheGroup,
cacheGroupIndex,
selectedChunks,
selectedChunksKey,
module
);
}
cacheGroupIndex++;
}
}

// Filter items where size < minSize
for (const pair of chunksInfoMap) {
const info = pair[1];
if (info.validateSize && info.size < info.cacheGroup.minSize) {
if (
info.cacheGroup._validateSize &&
info.size < info.cacheGroup.minSize
) {
chunksInfoMap.delete(pair[0]);
}
}
Expand Down Expand Up @@ -684,24 +705,30 @@ module.exports = class SplitChunksPlugin {
}
// Check if maxRequests condition can be fulfilled

const usedChunks = Array.from(item.chunks).filter(chunk => {
const selectedChunks = Array.from(item.chunks).filter(chunk => {
// skip if we address ourselves
return (
(!chunkName || chunk.name !== chunkName) && chunk !== newChunk
);
});

const enforced =
item.cacheGroup._conditionalEnforce &&
item.size >= item.cacheGroup.enforceSizeThreshold;

// Skip when no chunk selected
if (usedChunks.length === 0) continue;
if (selectedChunks.length === 0) continue;

let validChunks = usedChunks;
const usedChunks = new Set(selectedChunks);

// Check if maxRequests condition can be fulfilled
if (
Number.isFinite(item.cacheGroup.maxInitialRequests) ||
Number.isFinite(item.cacheGroup.maxAsyncRequests)
!enforced &&
(Number.isFinite(item.cacheGroup.maxInitialRequests) ||
Number.isFinite(item.cacheGroup.maxAsyncRequests))
) {
validChunks = validChunks.filter(chunk => {
// respect max requests when not enforced
for (const chunk of usedChunks) {
// respect max requests
const maxRequests = chunk.isOnlyInitial()
? item.cacheGroup.maxInitialRequests
: chunk.canBeInitial()
Expand All @@ -710,26 +737,33 @@ module.exports = class SplitChunksPlugin {
item.cacheGroup.maxAsyncRequests
)
: item.cacheGroup.maxAsyncRequests;
return (
!isFinite(maxRequests) || getRequests(chunk) < maxRequests
);
});
if (
isFinite(maxRequests) &&
getRequests(chunk) >= maxRequests
) {
usedChunks.delete(chunk);
}
}
}

validChunks = validChunks.filter(chunk => {
outer: for (const chunk of usedChunks) {
for (const module of item.modules) {
if (chunk.containsModule(module)) return true;
if (chunk.containsModule(module)) continue outer;
}
return false;
});
usedChunks.delete(chunk);
}

if (validChunks.length < usedChunks.length) {
if (validChunks.length >= item.cacheGroup.minChunks) {
// Were some (invalid) chunks removed from usedChunks?
// => readd all modules to the queue, as things could have been changed
if (usedChunks.size < selectedChunks.length) {
if (usedChunks.size >= item.cacheGroup.minChunks) {
const chunksArr = Array.from(usedChunks);
for (const module of item.modules) {
addModuleToChunksInfoMap(
item.cacheGroup,
validChunks,
getKey(validChunks),
item.cacheGroupIndex,
chunksArr,
getKey(usedChunks),
module
);
}
Expand Down Expand Up @@ -819,28 +853,24 @@ module.exports = class SplitChunksPlugin {

// remove all modules from other entries and update size
for (const [key, info] of chunksInfoMap) {
if (isOverlap(info.chunks, item.chunks)) {
if (info.validateSize) {
// update modules and total size
// may remove it from the map when < minSize
const oldSize = info.modules.size;
for (const module of item.modules) {
info.modules.delete(module);
}
if (isOverlap(info.chunks, usedChunks)) {
// update modules and total size
// may remove it from the map when < minSize
const oldSize = info.modules.size;
for (const module of item.modules) {
info.modules.delete(module);
}
if (info.modules.size !== oldSize) {
if (info.modules.size === 0) {
chunksInfoMap.delete(key);
continue;
}
if (info.modules.size !== oldSize) {
info.size = getModulesSize(info.modules);
if (info.size < info.cacheGroup.minSize) {
chunksInfoMap.delete(key);
}
}
} else {
// only update the modules
for (const module of item.modules) {
info.modules.delete(module);
info.size = getModulesSize(info.modules);
if (
info.cacheGroup._validateSize &&
info.size < info.cacheGroup.minSize
) {
chunksInfoMap.delete(key);
}
if (info.modules.size === 0) {
chunksInfoMap.delete(key);
Expand Down
8 changes: 8 additions & 0 deletions schemas/WebpackOptions.json
Expand Up @@ -612,6 +612,10 @@
"description": "Ignore minimum size, minimum chunks and maximum requests and always create chunks for this cache group",
"type": "boolean"
},
"enforceSizeThreshold": {
"description": "Size threshold at which splitting is enforced and other restrictions (maxAsyncRequests, maxInitialRequests) are ignored.",
"type": "number"
},
"filename": {
"description": "Sets the template for the filename for created chunks (Only works for initial chunks)",
"type": "string",
Expand Down Expand Up @@ -722,6 +726,10 @@
}
]
},
"enforceSizeThreshold": {
"description": "Size threshold at which splitting is enforced and other restrictions (maxAsyncRequests, maxInitialRequests) are ignored.",
"type": "number"
},
"fallbackCacheGroup": {
"description": "Options for modules not selected by any other cache group",
"type": "object",
Expand Down
2 changes: 1 addition & 1 deletion test/Validation.test.js
Expand Up @@ -429,7 +429,7 @@ describe("Validation", () => {
test: ...
}
}
object { <key>: false | function | string | RegExp | object { automaticNameDelimiter?, automaticNameMaxLength?, automaticNamePrefix?, chunks?, enforce?, filename?, maxAsyncRequests?, maxInitialRequests?, maxSize?, minChunks?, minSize?, name?, priority?, reuseExistingChunk?, test? } }
object { <key>: false | function | string | RegExp | object { automaticNameDelimiter?, automaticNameMaxLength?, automaticNamePrefix?, chunks?, enforce?, enforceSizeThreshold?, filename?, maxAsyncRequests?, maxInitialRequests?, maxSize?, minChunks?, minSize?, name?, priority?, reuseExistingChunk?, test? } }
-> Assign modules to a cache group (modules from different cache groups are tried to keep in separate chunks)"
`)
);
Expand Down

0 comments on commit a41994a

Please sign in to comment.