Skip to content

Commit

Permalink
Improve performance of LimitChunkCountPlugin a lot
Browse files Browse the repository at this point in the history
  • Loading branch information
sokra committed Sep 24, 2019
1 parent adfa484 commit 6f1c685
Show file tree
Hide file tree
Showing 3 changed files with 429 additions and 37 deletions.
7 changes: 6 additions & 1 deletion lib/Chunk.js
Expand Up @@ -385,7 +385,7 @@ class Chunk {
/**
*
* @param {Chunk} otherChunk the chunk to integrate with
* @param {ModuleReason} reason reason why the module is being integrated
* @param {string} reason reason why the module is being integrated
* @returns {boolean} returns true or false if integration succeeds or fails
*/
integrate(otherChunk, reason) {
Expand Down Expand Up @@ -462,6 +462,11 @@ class Chunk {
return false;
}

/**
* @param {Chunk} a chunk
* @param {Chunk} b chunk
* @returns {boolean} true, if a is always available when b is reached
*/
const isAvailable = (a, b) => {
const queue = new Set(b.groupsIterable);
for (const chunkGroup of queue) {
Expand Down
224 changes: 188 additions & 36 deletions lib/optimize/LimitChunkCountPlugin.js
Expand Up @@ -6,8 +6,33 @@

const validateOptions = require("schema-utils");
const schema = require("../../schemas/plugins/optimize/LimitChunkCountPlugin.json");
const LazyBucketSortedSet = require("../util/LazyBucketSortedSet");

/** @typedef {import("../../declarations/plugins/optimize/LimitChunkCountPlugin").LimitChunkCountPluginOptions} LimitChunkCountPluginOptions */
/** @typedef {import("../Chunk")} Chunk */
/** @typedef {import("../Compiler")} Compiler */

/**
* @typedef {Object} ChunkCombination
* @property {boolean} deleted this is set to true when combination was removed
* @property {number} sizeDiff
* @property {number} integratedSize
* @property {Chunk} a
* @property {Chunk} b
* @property {number} aIdx
* @property {number} bIdx
* @property {number} aSize
* @property {number} bSize
*/

/**
 * Adds `value` to the Set stored under `key` in `map`,
 * lazily creating the Set on first use.
 * @param {Map<any, Set<any>>} map mapping from key to set of values
 * @param {any} key the key to add the value under
 * @param {any} value the value to add
 * @returns {void}
 */
const addToSetMap = (map, key, value) => {
	const existing = map.get(key);
	if (existing !== undefined) {
		existing.add(value);
	} else {
		map.set(key, new Set([value]));
	}
};

class LimitChunkCountPlugin {
/**
Expand All @@ -19,6 +44,11 @@ class LimitChunkCountPlugin {
validateOptions(schema, options, "Limit Chunk Count Plugin");
this.options = options;
}

/**
* @param {Compiler} compiler the webpack compiler
* @returns {void}
*/
apply(compiler) {
const options = this.options;
compiler.hooks.compilation.tap("LimitChunkCountPlugin", compilation => {
Expand All @@ -30,47 +60,169 @@ class LimitChunkCountPlugin {
if (maxChunks < 1) return;
if (chunks.length <= maxChunks) return;

let remainingChunksToMerge = chunks.length - maxChunks;

// order chunks in a deterministic way
const orderedChunks = chunks.slice().sort((a, b) => a.compareTo(b));

const sortedExtendedPairCombinations = orderedChunks
.reduce((combinations, a, idx) => {
// create combination pairs
for (let i = 0; i < idx; i++) {
const b = orderedChunks[i];
combinations.push([b, a]);
}
return combinations;
}, [])
.map(pair => {
// extend combination pairs with size and integrated size
const a = pair[0].size(options);
const b = pair[1].size(options);
const ab = pair[0].integratedSize(pair[1], options);
return [a + b - ab, ab, pair[0], pair[1], a, b];
})
.filter(extendedPair => {
// create a lazy sorted data structure to keep all combinations
// this is large. Size = chunks * (chunks - 1) / 2
// It uses a multi layer bucket sort plus normal sort in the last layer
// It's also lazy so only accessed buckets are sorted
const combinations = new LazyBucketSortedSet(
// Layer 1: ordered by largest size benefit
c => c.sizeDiff,
(a, b) => b - a,
// Layer 2: ordered by smallest combined size
c => c.integratedSize,
(a, b) => a - b,
// Layer 3: ordered by position difference in orderedChunk (-> to be deterministic)
c => c.bIdx - c.aIdx,
(a, b) => a - b,
// Layer 4: ordered by position in orderedChunk (-> to be deterministic)
(a, b) => a.bIdx - b.bIdx
);

// we keep a mapping from chunk to all combinations
// but this mapping is not kept up-to-date with deletions
// so the `deleted` flag needs to be considered when iterating this
/** @type {Map<Chunk, Set<ChunkCombination>>} */
const combinationsByChunk = new Map();

orderedChunks.forEach((b, bIdx) => {
// create combination pairs with size and integrated size
for (let aIdx = 0; aIdx < bIdx; aIdx++) {
const a = orderedChunks[aIdx];
const integratedSize = a.integratedSize(b, options);

// filter pairs that do not have an integratedSize
// meaning they can NOT be integrated!
return extendedPair[1] !== false;
})
.sort((a, b) => {
// sadly javascript does an inplace sort here
// sort them by size
const diff1 = b[0] - a[0];
if (diff1 !== 0) return diff1;
const diff2 = a[1] - b[1];
if (diff2 !== 0) return diff2;
const diff3 = a[2].compareTo(b[2]);
if (diff3 !== 0) return diff3;
return a[3].compareTo(b[3]);
});

const pair = sortedExtendedPairCombinations[0];

if (pair && pair[2].integrate(pair[3], "limit")) {
chunks.splice(chunks.indexOf(pair[3]), 1);
return true;
if (integratedSize === false) continue;

const aSize = a.size(options);
const bSize = b.size(options);
const c = {
deleted: false,
sizeDiff: aSize + bSize - integratedSize,
integratedSize,
a,
b,
aIdx,
bIdx,
aSize,
bSize
};
combinations.add(c);
addToSetMap(combinationsByChunk, a, c);
addToSetMap(combinationsByChunk, b, c);
}
return combinations;
});

// list of modified chunks during this run
// combinations affected by this change are skipped to allow
// further optimizations
/** @type {Set<Chunk>} */
const modifiedChunks = new Set();

let changed = false;
// eslint-disable-next-line no-constant-condition
loop: while (true) {
const combination = combinations.popFirst();
if (combination === undefined) break;

combination.deleted = true;
const { a, b, integratedSize } = combination;

// skip over pair when
// one of the already merged chunks is a parent of one of the chunks
if (modifiedChunks.size > 0) {
const queue = new Set(a.groupsIterable);
for (const group of b.groupsIterable) {
queue.add(group);
}
for (const group of queue) {
for (const mChunk of modifiedChunks) {
if (mChunk !== a && mChunk !== b && mChunk.isInGroup(group)) {
// This is a potential pair which needs recalculation
// We can't do that now, but it may merge before following pairs,
// so we leave space for it and consider the chunks as modified,
// just for the worst case
remainingChunksToMerge--;
if (remainingChunksToMerge <= 0) break loop;
modifiedChunks.add(a);
modifiedChunks.add(b);
continue loop;
}
}
for (const parent of group.parentsIterable) {
queue.add(parent);
}
}
}

// merge the chunks
if (a.integrate(b, "limit")) {
chunks.splice(chunks.indexOf(b), 1);

// flag chunk a as modified as further optimization are possible for all children here
modifiedChunks.add(a);

changed = true;
remainingChunksToMerge--;
if (remainingChunksToMerge <= 0) break;

// Update all affected combinations
// delete all combination with the removed chunk
// we will use combinations with the kept chunk instead
for (const combination of combinationsByChunk.get(b)) {
if (combination.deleted) continue;
combination.deleted = true;
combinations.delete(combination);
}

// Update combinations with the kept chunk with new sizes
for (const combination of combinationsByChunk.get(a)) {
if (combination.deleted) continue;
if (combination.a === a) {
// Update size
const newIntegratedSize = a.integratedSize(
combination.b,
options
);
if (newIntegratedSize === false) {
combination.deleted = true;
combinations.delete(combination);
continue;
}
const finishUpdate = combinations.startUpdate(combination);
combination.integratedSize = newIntegratedSize;
combination.aSize = integratedSize;
combination.sizeDiff =
combination.bSize + integratedSize - newIntegratedSize;
finishUpdate();
} else if (combination.b === a) {
// Update size
const newIntegratedSize = combination.a.integratedSize(
a,
options
);
if (newIntegratedSize === false) {
combination.deleted = true;
combinations.delete(combination);
continue;
}
const finishUpdate = combinations.startUpdate(combination);
combination.integratedSize = newIntegratedSize;
combination.bSize = integratedSize;
combination.sizeDiff =
integratedSize + combination.aSize - newIntegratedSize;
finishUpdate();
}
}
}
}
if (changed) return true;
}
);
});
Expand Down

0 comments on commit 6f1c685

Please sign in to comment.