/*
	MIT License http://www.opensource.org/licenses/mit-license.php
	Author Tobias Koppers @sokra
*/

"use strict";

const { STAGE_BASIC } = require("../OptimizationStages");
const { runtimeEqual } = require("../util/runtime");

/** @typedef {import("../Compiler")} Compiler */

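/**
 * Finds chunks that contain exactly the same set of modules and merges each
 * group of duplicates into a single chunk, provided the chunks can safely be
 * integrated (no entry modules, matching runtime presence and export usage).
 */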
class MergeDuplicateChunksPlugin {
	/**
	 * @param {Compiler} compiler the compiler
	 * @returns {void}
	 */
	apply(compiler) {
		compiler.hooks.compilation.tap(
			"MergeDuplicateChunksPlugin",
			compilation => {
				compilation.hooks.optimizeChunks.tap(
					{
						name: "MergeDuplicateChunksPlugin",
						stage: STAGE_BASIC
					},
					chunks => {
						const { chunkGraph, moduleGraph } = compilation;

						// remember already tested chunks for performance
						const notDuplicates = new Set();

						// for each chunk
						for (const chunk of chunks) {
							// track a Set of all chunks that could be duplicates
							let possibleDuplicates;
							for (const module of chunkGraph.getChunkModulesIterable(chunk)) {
								if (possibleDuplicates === undefined) {
									// when possibleDuplicates is not yet set,
									// create a new Set from chunks of the current module
									// including only chunks with the same number of modules
									for (const dup of chunkGraph.getModuleChunksIterable(
										module
									)) {
										if (
											dup !== chunk &&
											chunkGraph.getNumberOfChunkModules(chunk) ===
												chunkGraph.getNumberOfChunkModules(dup) &&
											!notDuplicates.has(dup)
										) {
											// delay allocating the new Set until here, reduce memory pressure
											if (possibleDuplicates === undefined) {
												possibleDuplicates = new Set();
											}
											possibleDuplicates.add(dup);
										}
									}
									// when no chunk is possible we can break here
									if (possibleDuplicates === undefined) break;
								} else {
									// validate existing possible duplicates
									for (const dup of possibleDuplicates) {
										// remove possible duplicate when module is not contained
										if (!chunkGraph.isModuleInChunk(module, dup)) {
											possibleDuplicates.delete(dup);
										}
									}
									// when all chunks have been removed we can break here
									if (possibleDuplicates.size === 0) break;
								}
							}

							// when we found duplicates
							if (
								possibleDuplicates !== undefined &&
								possibleDuplicates.size > 0
							) {
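								// only merge a candidate when neither chunk contains entry
								// modules, both agree on having a runtime, and, if the
								// runtimes differ, every module's exports are used
								// identically in both runtimes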
								outer: for (const otherChunk of possibleDuplicates) {
									if (otherChunk.hasRuntime() !== chunk.hasRuntime()) continue;
									if (chunkGraph.getNumberOfEntryModules(chunk) > 0) continue;
									if (chunkGraph.getNumberOfEntryModules(otherChunk) > 0)
										continue;
									if (!runtimeEqual(chunk.runtime, otherChunk.runtime)) {
										for (const module of chunkGraph.getChunkModulesIterable(
											chunk
										)) {
											const exportsInfo = moduleGraph.getExportsInfo(module);
											if (
												!exportsInfo.isEquallyUsed(
													chunk.runtime,
													otherChunk.runtime
												)
											) {
												continue outer;
											}
										}
									}
									// merge them
									if (chunkGraph.canChunksBeIntegrated(chunk, otherChunk)) {
										chunkGraph.integrateChunks(chunk, otherChunk);
										compilation.chunks.delete(otherChunk);
									}
								}
							}

							// don't check already processed chunks twice
							notDuplicates.add(chunk);
						}
					}
				);
			}
		);
	}
}
module.exports = MergeDuplicateChunksPlugin;
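
// Usage sketch: webpack normally applies this plugin on its own when
// `optimization.mergeDuplicateChunks` is enabled (the default), so manual
// registration is rarely needed. A minimal config-based setup could look like
// the following, assuming the class is resolvable at this file's path inside
// the published webpack package:
//
// // webpack.config.js
// const MergeDuplicateChunksPlugin = require("webpack/lib/optimize/MergeDuplicateChunksPlugin");
//
// module.exports = {
// 	entry: "./src/index.js",
// 	optimization: { mergeDuplicateChunks: false }, // avoid applying the plugin twice
// 	plugins: [new MergeDuplicateChunksPlugin()]
// };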