/*
	MIT License http://www.opensource.org/licenses/mit-license.php
	Author Tobias Koppers @sokra
*/

"use strict";

const { STAGE_ADVANCED } = require("../OptimizationStages");
const LazyBucketSortedSet = require("../util/LazyBucketSortedSet");
const { compareChunks } = require("../util/comparators");
const createSchemaValidation = require("../util/create-schema-validation");

/** @typedef {import("../../declarations/plugins/optimize/LimitChunkCountPlugin").LimitChunkCountPluginOptions} LimitChunkCountPluginOptions */
/** @typedef {import("../Chunk")} Chunk */
/** @typedef {import("../Compiler")} Compiler */

const validate = createSchemaValidation(
	require("../../schemas/plugins/optimize/LimitChunkCountPlugin.check.js"),
	() => require("../../schemas/plugins/optimize/LimitChunkCountPlugin.json"),
	{
		name: "Limit Chunk Count Plugin",
		baseDataPath: "options"
	}
);

/**
 * @typedef {Object} ChunkCombination
 * @property {boolean} deleted this is set to true when the combination was removed
 * @property {number} sizeDiff size saved by merging `a` and `b` (aSize + bSize - integratedSize)
 * @property {number} integratedSize size of the chunk that would result from merging `a` and `b`
 * @property {Chunk} a
 * @property {Chunk} b
 * @property {number} aIdx index of `a` in the deterministically ordered chunk list
 * @property {number} bIdx index of `b` in the deterministically ordered chunk list
 * @property {number} aSize size of `a` alone
 * @property {number} bSize size of `b` alone
 */
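// Illustrative (hypothetical) numbers: if chunk `a` has size 120, chunk `b` has size 90,
// and their integrated size is 160, then sizeDiff = 120 + 90 - 160 = 50, i.e. the
// size saved by merging this pair.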
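/**
 * Adds `value` to the Set stored under `key`, creating the Set on first use.
 * @param {Map<Chunk, Set<ChunkCombination>>} map map of chunk to combinations
 * @param {Chunk} key the chunk used as key
 * @param {ChunkCombination} value the combination to add
 * @returns {void}
 */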
const addToSetMap = (map, key, value) => {
	const set = map.get(key);
	if (set === undefined) {
		map.set(key, new Set([value]));
	} else {
		set.add(value);
	}
};

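// Usage sketch (a minimal example, assuming the plugin is consumed through the
// public `webpack.optimize` namespace of a regular webpack configuration):
//
//   // webpack.config.js
//   const webpack = require("webpack");
//   module.exports = {
//     plugins: [
//       // merge chunks until at most 5 chunks remain
//       new webpack.optimize.LimitChunkCountPlugin({ maxChunks: 5 })
//     ]
//   };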
class LimitChunkCountPlugin {
	/**
	 * @param {LimitChunkCountPluginOptions=} options options object
	 */
	constructor(options) {
		validate(options);
		this.options = options;
	}

	/**
	 * @param {Compiler} compiler the webpack compiler
	 * @returns {void}
	 */
	apply(compiler) {
		const options = this.options;
		compiler.hooks.compilation.tap("LimitChunkCountPlugin", compilation => {
			compilation.hooks.optimizeChunks.tap(
				{
					name: "LimitChunkCountPlugin",
					stage: STAGE_ADVANCED
				},
				chunks => {
					const chunkGraph = compilation.chunkGraph;
					const maxChunks = options.maxChunks;
					if (!maxChunks) return;
					if (maxChunks < 1) return;
					if (compilation.chunks.size <= maxChunks) return;

					let remainingChunksToMerge = compilation.chunks.size - maxChunks;

					// order chunks in a deterministic way
					const compareChunksWithGraph = compareChunks(chunkGraph);
					const orderedChunks = Array.from(chunks).sort(compareChunksWithGraph);

					// create a lazy sorted data structure to keep all combinations
					// this is large. Size = chunks * (chunks - 1) / 2
					// It uses a multi-layer bucket sort plus a normal sort in the last layer
					// It's also lazy, so only accessed buckets are sorted
					const combinations = new LazyBucketSortedSet(
						// Layer 1: ordered by largest size benefit
						c => c.sizeDiff,
						(a, b) => b - a,
						// Layer 2: ordered by smallest combined size
						c => c.integratedSize,
						(a, b) => a - b,
						// Layer 3: ordered by position difference in orderedChunks (-> to be deterministic)
						c => c.bIdx - c.aIdx,
						(a, b) => a - b,
						// Layer 4: ordered by position in orderedChunks (-> to be deterministic)
						(a, b) => a.bIdx - b.bIdx
					);
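					// With this ordering, popFirst() below always yields the remaining pair whose
					// merge saves the most size; the later layers only break ties deterministically.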

					// we keep a mapping from chunk to all combinations
					// but this mapping is not kept up-to-date with deletions
					// so the `deleted` flag needs to be considered when iterating this
					/** @type {Map<Chunk, Set<ChunkCombination>>} */
					const combinationsByChunk = new Map();

					orderedChunks.forEach((b, bIdx) => {
						// create combination pairs with size and integrated size
						for (let aIdx = 0; aIdx < bIdx; aIdx++) {
							const a = orderedChunks[aIdx];
							// filter pairs that cannot be integrated!
							if (!chunkGraph.canChunksBeIntegrated(a, b)) continue;

							const integratedSize = chunkGraph.getIntegratedChunksSize(
								a,
								b,
								options
							);

							const aSize = chunkGraph.getChunkSize(a, options);
							const bSize = chunkGraph.getChunkSize(b, options);
							const c = {
								deleted: false,
								sizeDiff: aSize + bSize - integratedSize,
								integratedSize,
								a,
								b,
								aIdx,
								bIdx,
								aSize,
								bSize
							};
							combinations.add(c);
							addToSetMap(combinationsByChunk, a, c);
							addToSetMap(combinationsByChunk, b, c);
						}
					});

					// list of modified chunks during this run
					// combinations affected by this change are skipped to allow
					// further optimizations
					/** @type {Set<Chunk>} */
					const modifiedChunks = new Set();

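					// Greedily pop the best remaining combination and merge it,
					// until enough chunks have been merged or no combination is left.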
					let changed = false;
					// eslint-disable-next-line no-constant-condition
					loop: while (true) {
						const combination = combinations.popFirst();
						if (combination === undefined) break;

						combination.deleted = true;
						const { a, b, integratedSize } = combination;

						// skip over this pair when one of the already merged chunks
						// is a parent of one of the chunks
						if (modifiedChunks.size > 0) {
							const queue = new Set(a.groupsIterable);
							for (const group of b.groupsIterable) {
								queue.add(group);
							}
							for (const group of queue) {
								for (const mChunk of modifiedChunks) {
									if (mChunk !== a && mChunk !== b && mChunk.isInGroup(group)) {
										// This is a potential pair which needs recalculation
										// We can't do that now, but it may be merged before the following pairs,
										// so we leave space for it and consider the chunks as modified,
										// just for the worst case
										remainingChunksToMerge--;
										if (remainingChunksToMerge <= 0) break loop;
										modifiedChunks.add(a);
										modifiedChunks.add(b);
										continue loop;
									}
								}
								for (const parent of group.parentsIterable) {
									queue.add(parent);
								}
							}
						}

						// merge the chunks
						if (chunkGraph.canChunksBeIntegrated(a, b)) {
							chunkGraph.integrateChunks(a, b);
							compilation.chunks.delete(b);

							// flag chunk a as modified as further optimizations are possible for all children here
							modifiedChunks.add(a);

							changed = true;
							remainingChunksToMerge--;
							if (remainingChunksToMerge <= 0) break;

							// Update all affected combinations
							// drop the stale combinations that were recorded for the kept chunk;
							// the removed chunk's combinations are re-pointed to the kept chunk
							// below and will be used instead
							for (const combination of combinationsByChunk.get(a)) {
								if (combination.deleted) continue;
								combination.deleted = true;
								combinations.delete(combination);
							}

							// Re-point the removed chunk's combinations to the kept chunk and update their sizes
							for (const combination of combinationsByChunk.get(b)) {
								if (combination.deleted) continue;
								if (combination.a === b) {
									if (!chunkGraph.canChunksBeIntegrated(a, combination.b)) {
										combination.deleted = true;
										combinations.delete(combination);
										continue;
									}
									// Update size
									const newIntegratedSize = chunkGraph.getIntegratedChunksSize(
										a,
										combination.b,
										options
									);
									const finishUpdate = combinations.startUpdate(combination);
									combination.a = a;
									combination.integratedSize = newIntegratedSize;
									combination.aSize = integratedSize;
									combination.sizeDiff =
										combination.bSize + integratedSize - newIntegratedSize;
									finishUpdate();
								} else if (combination.b === b) {
									if (!chunkGraph.canChunksBeIntegrated(combination.a, a)) {
										combination.deleted = true;
										combinations.delete(combination);
										continue;
									}
									// Update size
									const newIntegratedSize = chunkGraph.getIntegratedChunksSize(
										combination.a,
										a,
										options
									);

									const finishUpdate = combinations.startUpdate(combination);
									combination.b = a;
									combination.integratedSize = newIntegratedSize;
									combination.bSize = integratedSize;
									combination.sizeDiff =
										integratedSize + combination.aSize - newIntegratedSize;
									finishUpdate();
								}
							}
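							// the kept chunk takes over the removed chunk's (re-pointed) combination set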
							combinationsByChunk.set(a, combinationsByChunk.get(b));
							combinationsByChunk.delete(b);
						}
					}
					if (changed) return true;
				}
			);
		});
	}
}
module.exports = LimitChunkCountPlugin;