Skip to content

Commit

Permalink
enforce doesn't affect minSize for maxSize
Browse files Browse the repository at this point in the history
enforce now works as documented
emit warning for minSize > maxSize configuration
performance improvements
bugfix when multiple cacheGroups have the same name

fixes #8407
  • Loading branch information
sokra committed Dec 3, 2018
1 parent 72a8a1f commit 3b46b48
Show file tree
Hide file tree
Showing 4 changed files with 262 additions and 82 deletions.
29 changes: 29 additions & 0 deletions lib/optimize/MinMaxSizeWarning.js
@@ -0,0 +1,29 @@
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";

const WebpackError = require("../WebpackError");
const SizeFormatHelpers = require("../SizeFormatHelpers");

class MinMaxSizeWarning extends WebpackError {
	/**
	 * Warning emitted when a splitChunks cache group is configured with
	 * minSize > maxSize, which makes the maxSize constraint unsatisfiable.
	 * @param {string[]|undefined} keys cache group keys that produced the
	 * conflicting sizes; undefined (or empty) means the fallback cache group
	 * @param {number} minSize configured minSize in bytes
	 * @param {number} maxSize configured maxSize in bytes
	 */
	constructor(keys, minSize, maxSize) {
		let keysMessage = "Fallback cache group";
		// Guard on length too: an empty array must not produce "Cache group undefined"
		if (keys && keys.length > 0) {
			keysMessage =
				keys.length > 1
					? // copy before sorting — Array#sort mutates the caller's array
					  `Cache groups ${keys.slice().sort().join(", ")}`
					: `Cache group ${keys[0]}`;
		}
		super(
			`SplitChunksPlugin\n` +
				`${keysMessage}\n` +
				`Configured minSize (${SizeFormatHelpers.formatSize(minSize)}) is ` +
				`bigger than maxSize (${SizeFormatHelpers.formatSize(maxSize)}).\n` +
				// fixed typos: "seem"/"a invalid"/"optimiziation" — the real config
				// key is `optimization.splitChunks`
				"This seems to be an invalid optimization.splitChunks configuration."
		);
	}
}

module.exports = MinMaxSizeWarning;
201 changes: 122 additions & 79 deletions lib/optimize/SplitChunksPlugin.js
Expand Up @@ -9,6 +9,7 @@ const SortableSet = require("../util/SortableSet");
const GraphHelpers = require("../GraphHelpers");
const { isSubset } = require("../util/SetHelpers");
const deterministicGrouping = require("../util/deterministicGrouping");
const MinMaxSizeWarning = require("./MinMaxSizeWarning");
const contextify = require("../util/identifier").contextify;

/** @typedef {import("../Compiler")} Compiler */
Expand Down Expand Up @@ -96,6 +97,8 @@ const compareEntries = (a, b) => {
}
};

// Ascending numeric comparator for Array#sort (the default sort compares
// elements as strings, which misorders numbers like [1, 10, 2]).
const compareNumbers = (first, second) => first - second;

/** Matches chunks that can be loaded as part of the initial page load. */
const INITIAL_CHUNK_FILTER = chunk => {
	return chunk.canBeInitial();
};
/** Matches chunks that are only ever loaded on demand (async). */
const ASYNC_CHUNK_FILTER = chunk => {
	return !chunk.canBeInitial();
};
/** Matches every chunk unconditionally. */
const ALL_CHUNK_FILTER = chunk => true;
Expand Down Expand Up @@ -340,7 +343,7 @@ module.exports = class SplitChunksPlugin {
}
const getKey = chunks => {
return Array.from(chunks, c => indexMap.get(c))
.sort()
.sort(compareNumbers)
.join();
};
/** @type {Map<string, Set<Chunk>>} */
Expand Down Expand Up @@ -436,6 +439,7 @@ module.exports = class SplitChunksPlugin {
* @property {SortableSet} modules
* @property {TODO} cacheGroup
* @property {string} name
* @property {boolean} validateSize
* @property {number} size
* @property {Set<Chunk>} chunks
* @property {Set<Chunk>} reuseableChunks
Expand All @@ -461,7 +465,11 @@ module.exports = class SplitChunksPlugin {
module
) => {
// Break if minimum number of chunks is not reached
if (selectedChunks.length < cacheGroup.minChunks) return;
if (
!cacheGroup.enforce &&
selectedChunks.length < cacheGroup.minChunks
)
return;
// Determine name for split chunk
const name = cacheGroup.getName(
module,
Expand All @@ -473,8 +481,8 @@ module.exports = class SplitChunksPlugin {
// Elsewise we create the key from chunks and cache group key
// This automatically merges equal names
const key =
(name && `name:${name}`) ||
`chunks:${selectedChunksKey} key:${cacheGroup.key}`;
cacheGroup.key +
(name ? ` name:${name}` : ` chunks:${selectedChunksKey}`);
// Add module to maps
let info = chunksInfoMap.get(key);
if (info === undefined) {
Expand All @@ -484,21 +492,18 @@ module.exports = class SplitChunksPlugin {
modules: new SortableSet(undefined, sortByIdentifier),
cacheGroup,
name,
validateSize: !cacheGroup.enforce && cacheGroup.minSize > 0,
size: 0,
chunks: new Set(),
reuseableChunks: new Set(),
chunksKeys: new Set()
})
);
} else {
if (info.cacheGroup !== cacheGroup) {
if (info.cacheGroup.priority < cacheGroup.priority) {
info.cacheGroup = cacheGroup;
}
}
}
info.modules.add(module);
info.size += module.size();
if (info.validateSize) {
info.size += module.size();
}
if (!info.chunksKeys.has(selectedChunksKey)) {
info.chunksKeys.add(selectedChunksKey);
for (const chunk of selectedChunks) {
Expand Down Expand Up @@ -529,36 +534,27 @@ module.exports = class SplitChunksPlugin {
priority: cacheGroupSource.priority || 0,
chunksFilter:
cacheGroupSource.chunksFilter || this.options.chunksFilter,
enforce: cacheGroupSource.enforce,
minSize:
cacheGroupSource.minSize !== undefined
? cacheGroupSource.minSize
: cacheGroupSource.enforce
? 0
: this.options.minSize,
: this.options.minSize,
maxSize:
cacheGroupSource.maxSize !== undefined
? cacheGroupSource.maxSize
: cacheGroupSource.enforce
? 0
: this.options.maxSize,
: this.options.maxSize,
minChunks:
cacheGroupSource.minChunks !== undefined
? cacheGroupSource.minChunks
: cacheGroupSource.enforce
? 1
: this.options.minChunks,
: this.options.minChunks,
maxAsyncRequests:
cacheGroupSource.maxAsyncRequests !== undefined
? cacheGroupSource.maxAsyncRequests
: cacheGroupSource.enforce
? Infinity
: this.options.maxAsyncRequests,
: this.options.maxAsyncRequests,
maxInitialRequests:
cacheGroupSource.maxInitialRequests !== undefined
? cacheGroupSource.maxInitialRequests
: cacheGroupSource.enforce
? Infinity
: this.options.maxInitialRequests,
: this.options.maxInitialRequests,
getName:
cacheGroupSource.getName !== undefined
? cacheGroupSource.getName
Expand All @@ -576,7 +572,11 @@ module.exports = class SplitChunksPlugin {
// For all combination of chunk selection
for (const chunkCombination of combs) {
// Break if minimum number of chunks is not reached
if (chunkCombination.size < cacheGroup.minChunks) continue;
if (
!cacheGroup.enforce &&
chunkCombination.size < cacheGroup.minChunks
)
continue;
// Select chunks by configuration
const {
chunks: selectedChunks,
Expand All @@ -586,17 +586,27 @@ module.exports = class SplitChunksPlugin {
cacheGroup.chunksFilter
);

addModuleToChunksInfoMap(
cacheGroup,
selectedChunks,
selectedChunksKey,
module
);
if (selectedChunks.length > 0) {
addModuleToChunksInfoMap(
cacheGroup,
selectedChunks,
selectedChunksKey,
module
);
}
}
}
}

/** @type {Map<Chunk, {minSize: number, maxSize: number, automaticNameDelimiter: string}>} */
// Filter out items where size < minSize
for (const pair of chunksInfoMap) {
const info = pair[1];
if (info.validateSize && info.size < info.cacheGroup.minSize) {
chunksInfoMap.delete(pair[0]);
}
}

/** @type {Map<Chunk, {minSize: number, maxSize: number, automaticNameDelimiter: string, keys: string[]}>} */
const maxSizeQueueMap = new Map();

while (chunksInfoMap.size > 0) {
Expand All @@ -606,20 +616,15 @@ module.exports = class SplitChunksPlugin {
for (const pair of chunksInfoMap) {
const key = pair[0];
const info = pair[1];
if (info.size >= info.cacheGroup.minSize) {
if (bestEntry === undefined) {
bestEntry = info;
bestEntryKey = key;
} else if (compareEntries(bestEntry, info) < 0) {
bestEntry = info;
bestEntryKey = key;
}
if (bestEntry === undefined) {
bestEntry = info;
bestEntryKey = key;
} else if (compareEntries(bestEntry, info) < 0) {
bestEntry = info;
bestEntryKey = key;
}
}

// No suitable item left
if (bestEntry === undefined) break;

const item = bestEntry;
chunksInfoMap.delete(bestEntryKey);

Expand Down Expand Up @@ -666,29 +671,41 @@ module.exports = class SplitChunksPlugin {
// Skip when no chunk selected
if (usedChunks.length === 0) continue;

const chunkInLimit = usedChunks.filter(chunk => {
// respect max requests when not enforced
const maxRequests = chunk.isOnlyInitial()
? item.cacheGroup.maxInitialRequests
: chunk.canBeInitial()
? Math.min(
item.cacheGroup.maxInitialRequests,
item.cacheGroup.maxAsyncRequests
)
: item.cacheGroup.maxAsyncRequests;
return !isFinite(maxRequests) || getRequests(chunk) < maxRequests;
});

if (chunkInLimit.length < usedChunks.length) {
for (const module of item.modules) {
addModuleToChunksInfoMap(
item.cacheGroup,
chunkInLimit,
getKey(chunkInLimit),
module
if (
!item.cacheGroup.enforce &&
(Number.isFinite(item.cacheGroup.maxInitialRequests) ||
Number.isFinite(item.cacheGroup.maxAsyncRequests))
) {
const chunkInLimit = usedChunks.filter(chunk => {
// respect max requests when not enforced
const maxRequests = chunk.isOnlyInitial()
? item.cacheGroup.maxInitialRequests
: chunk.canBeInitial()
? Math.min(
item.cacheGroup.maxInitialRequests,
item.cacheGroup.maxAsyncRequests
)
: item.cacheGroup.maxAsyncRequests;
return (
!isFinite(maxRequests) || getRequests(chunk) < maxRequests
);
});

if (chunkInLimit.length < usedChunks.length) {
// We do not need to check enforce here as it was
// already checked above.
if (chunkInLimit.length >= item.cacheGroup.minChunks) {
for (const module of item.modules) {
addModuleToChunksInfoMap(
item.cacheGroup,
chunkInLimit,
getKey(chunkInLimit),
module
);
}
}
continue;
}
continue;
}

// Create the new chunk if not reusing one
Expand Down Expand Up @@ -764,36 +781,62 @@ module.exports = class SplitChunksPlugin {
oldMaxSizeSettings ? oldMaxSizeSettings.maxSize : Infinity,
item.cacheGroup.maxSize
),
automaticNameDelimiter: item.cacheGroup.automaticNameDelimiter
automaticNameDelimiter: item.cacheGroup.automaticNameDelimiter,
keys: oldMaxSizeSettings
? oldMaxSizeSettings.keys.concat(item.cacheGroup.key)
: [item.cacheGroup.key]
});
}

// remove all modules from other entries and update size
for (const [key, info] of chunksInfoMap) {
if (isOverlap(info.chunks, item.chunks)) {
const oldSize = info.modules.size;
for (const module of item.modules) {
info.modules.delete(module);
}
if (info.modules.size === 0) {
chunksInfoMap.delete(key);
continue;
}
if (info.modules.size !== oldSize) {
info.size = getModulesSize(info.modules);
if (info.size < info.cacheGroup.minSize) {
if (info.validateSize) {
// update modules and total size
// may remove it from the map when < minSize
const oldSize = info.modules.size;
for (const module of item.modules) {
info.modules.delete(module);
}
if (info.modules.size === 0) {
chunksInfoMap.delete(key);
continue;
}
if (info.modules.size !== oldSize) {
info.size = getModulesSize(info.modules);
if (info.size < info.cacheGroup.minSize) {
chunksInfoMap.delete(key);
}
}
} else {
// only update the modules
for (const module of item.modules) {
info.modules.delete(module);
}
if (info.modules.size === 0) {
chunksInfoMap.delete(key);
}
}
}
}
}

const incorrectMinMaxSizeSet = new Set();

// Make sure that maxSize is fulfilled
for (const chunk of compilation.chunks.slice()) {
const { minSize, maxSize, automaticNameDelimiter } =
const { minSize, maxSize, automaticNameDelimiter, keys } =
maxSizeQueueMap.get(chunk) || this.options.fallbackCacheGroup;
if (!maxSize) continue;
if (minSize > maxSize) {
const warningKey = `${keys && keys.join()} ${minSize} ${maxSize}`;
if (!incorrectMinMaxSizeSet.has(warningKey)) {
incorrectMinMaxSizeSet.add(warningKey);
compilation.warnings.push(
new MinMaxSizeWarning(keys, minSize, maxSize)
);
}
}
const results = deterministicGroupingForModules({
maxSize: Math.max(minSize, maxSize),
minSize,
Expand Down

0 comments on commit 3b46b48

Please sign in to comment.