/*
	MIT License http://www.opensource.org/licenses/mit-license.php
	Author Tobias Koppers @sokra
*/

"use strict";
const { STAGE_BASIC } = require("../OptimizationStages");
const createSchemaValidation = require("../util/create-schema-validation");
const { runtimeEqual } = require("../util/runtime");

/** @typedef {import("../../declarations/plugins/optimize/MergeDuplicateChunksPlugin").MergeDuplicateChunksPluginOptions} MergeDuplicateChunksPluginOptions */
/** @typedef {import("../Compiler")} Compiler */

// Validator for the plugin options, backed by the precompiled schema check
// (the JSON schema itself is only loaded lazily when validation fails).
const validate = createSchemaValidation(
	require("../../schemas/plugins/optimize/MergeDuplicateChunksPlugin.check.js"),
	() =>
		require("../../schemas/plugins/optimize/MergeDuplicateChunksPlugin.json"),
	{
		name: "Merge Duplicate Chunks Plugin",
		baseDataPath: "options"
	}
);
/**
 * Plugin that merges chunks which contain the exact same set of modules
 * during the `optimizeChunks` phase, so identical output files are not
 * emitted twice.
 */
class MergeDuplicateChunksPlugin {
	/**
	 * @param {MergeDuplicateChunksPluginOptions} options options object
	 */
	constructor(options = { stage: STAGE_BASIC }) {
		validate(options);
		this.options = options;
	}

	/**
	 * Taps `compilation.hooks.optimizeChunks` (at the configured stage) and
	 * integrates duplicate chunks into one another.
	 * @param {Compiler} compiler the compiler
	 * @returns {void}
	 */
	apply(compiler) {
		compiler.hooks.compilation.tap(
			"MergeDuplicateChunksPlugin",
			compilation => {
				compilation.hooks.optimizeChunks.tap(
					{
						name: "MergeDuplicateChunksPlugin",
						stage: this.options.stage
					},
					chunks => {
						const { chunkGraph, moduleGraph } = compilation;

						// remember already tested chunks for performance
						const notDuplicates = new Set();

						// for each chunk
						for (const chunk of chunks) {
							// track a Set of all chunks that could be duplicates of `chunk`
							let possibleDuplicates;
							for (const module of chunkGraph.getChunkModulesIterable(chunk)) {
								if (possibleDuplicates === undefined) {
									// when possibleDuplicates is not yet set,
									// create a new Set from chunks of the current module
									// including only chunks with the same number of modules
									for (const dup of chunkGraph.getModuleChunksIterable(
										module
									)) {
										if (
											dup !== chunk &&
											chunkGraph.getNumberOfChunkModules(chunk) ===
												chunkGraph.getNumberOfChunkModules(dup) &&
											!notDuplicates.has(dup)
										) {
											// delay allocating the new Set until here, reduce memory pressure
											if (possibleDuplicates === undefined) {
												possibleDuplicates = new Set();
											}
											possibleDuplicates.add(dup);
										}
									}
									// when no chunk is possible we can break here
									if (possibleDuplicates === undefined) break;
								} else {
									// validate existing possible duplicates
									for (const dup of possibleDuplicates) {
										// remove possible duplicate when module is not contained
										if (!chunkGraph.isModuleInChunk(module, dup)) {
											possibleDuplicates.delete(dup);
										}
									}
									// when all candidates have been removed we can break here
									if (possibleDuplicates.size === 0) break;
								}
							}

							// when we found duplicates
							if (
								possibleDuplicates !== undefined &&
								possibleDuplicates.size > 0
							) {
								outer: for (const otherChunk of possibleDuplicates) {
									// never merge a runtime chunk with a non-runtime chunk
									if (otherChunk.hasRuntime() !== chunk.hasRuntime()) continue;
									// entry chunks must stay separate
									if (chunkGraph.getNumberOfEntryModules(chunk) > 0) continue;
									if (chunkGraph.getNumberOfEntryModules(otherChunk) > 0)
										continue;
									// with differing runtimes, merging is only safe when every
									// module's exports are used identically in both runtimes
									if (!runtimeEqual(chunk.runtime, otherChunk.runtime)) {
										for (const module of chunkGraph.getChunkModulesIterable(
											chunk
										)) {
											const exportsInfo = moduleGraph.getExportsInfo(module);
											if (
												!exportsInfo.isEquallyUsed(
													chunk.runtime,
													otherChunk.runtime
												)
											) {
												continue outer;
											}
										}
									}
									// merge them
									if (chunkGraph.canChunksBeIntegrated(chunk, otherChunk)) {
										chunkGraph.integrateChunks(chunk, otherChunk);
										compilation.chunks.delete(otherChunk);
									}
								}
							}

							// don't check already processed chunks twice
							notDuplicates.add(chunk);
						}
					}
				);
			}
		);
	}
}
module.exports = MergeDuplicateChunksPlugin;