source: trip-planner-front/node_modules/webpack/lib/optimize/RealContentHashPlugin.js @ eed0bf8

Last change on this file since eed0bf8 was 6a3a178, checked in by Ema <ema_spirova@…>, 3 years ago

initial commit

  • Property mode set to 100644
File size: 12.0 KB
/*
	MIT License http://www.opensource.org/licenses/mit-license.php
	Author Tobias Koppers @sokra
*/

"use strict";

const { SyncBailHook } = require("tapable");
const { RawSource, CachedSource, CompatSource } = require("webpack-sources");
const Compilation = require("../Compilation");
const WebpackError = require("../WebpackError");
const { compareSelect, compareStrings } = require("../util/comparators");
const createHash = require("../util/createHash");

/** @typedef {import("webpack-sources").Source} Source */
/** @typedef {import("../Compilation").AssetInfo} AssetInfo */
/** @typedef {import("../Compiler")} Compiler */

const EMPTY_SET = new Set();

const addToList = (itemOrItems, list) => {
	if (Array.isArray(itemOrItems)) {
		for (const item of itemOrItems) {
			list.add(item);
		}
	} else if (itemOrItems) {
		list.add(itemOrItems);
	}
};

/**
 * @template T
 * @param {T[]} input list
 * @param {function(T): Buffer} fn map function
 * @returns {Buffer[]} buffers without duplicates
 */
const mapAndDeduplicateBuffers = (input, fn) => {
	// Buffer.equals compares size first so this should be efficient enough
	// If it becomes a performance problem we can use a map and group by size
	// instead of looping over all assets.
	const result = [];
	outer: for (const value of input) {
		const buf = fn(value);
		for (const other of result) {
			if (buf.equals(other)) continue outer;
		}
		result.push(buf);
	}
	return result;
};

/**
 * Escapes regular expression metacharacters
 * @param {string} str String to quote
 * @returns {string} Escaped string
 */
const quoteMeta = str => {
	return str.replace(/[-[\]\\/{}()*+?.^$|]/g, "\\$&");
};

const cachedSourceMap = new WeakMap();

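// Memoize CachedSource wrappers per Source (via the WeakMap above) so repeated
// source()/buffer() reads during analysis and hashing are only computed once.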
const toCachedSource = source => {
	if (source instanceof CachedSource) {
		return source;
	}
	const entry = cachedSourceMap.get(source);
	if (entry !== undefined) return entry;
	const newSource = new CachedSource(CompatSource.from(source));
	cachedSourceMap.set(source, newSource);
	return newSource;
};

/**
 * @typedef {Object} AssetInfoForRealContentHash
 * @property {string} name
 * @property {AssetInfo} info
 * @property {Source} source
 * @property {RawSource | undefined} newSource
 * @property {RawSource | undefined} newSourceWithoutOwn
 * @property {string} content
 * @property {Set<string>} ownHashes
 * @property {Promise} contentComputePromise
 * @property {Promise} contentComputeWithoutOwnPromise
 * @property {Set<string>} referencedHashes
 * @property {Set<string>} hashes
 */

/**
 * @typedef {Object} CompilationHooks
 * @property {SyncBailHook<[Buffer[], string], string>} updateHash
 */

/** @type {WeakMap<Compilation, CompilationHooks>} */
const compilationHooksMap = new WeakMap();

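/**
 * Recomputes content hashes from the final asset contents at the
 * OPTIMIZE_HASH stage: every hash recorded in asset info is located in asset
 * names and contents, re-hashed from the actual emitted bytes (in dependency
 * order), and replaced, so a content hash only changes when content changes.
 */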
class RealContentHashPlugin {
	/**
	 * @param {Compilation} compilation the compilation
	 * @returns {CompilationHooks} the attached hooks
	 */
	static getCompilationHooks(compilation) {
		if (!(compilation instanceof Compilation)) {
			throw new TypeError(
				"The 'compilation' argument must be an instance of Compilation"
			);
		}
		let hooks = compilationHooksMap.get(compilation);
		if (hooks === undefined) {
			hooks = {
				updateHash: new SyncBailHook(["content", "oldHash"])
			};
			compilationHooksMap.set(compilation, hooks);
		}
		return hooks;
	}

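	/**
	 * @param {Object} options options object
	 * @param {string | function} options.hashFunction the hash function to use
	 * @param {string} options.hashDigest the hash digest to use
	 */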
	constructor({ hashFunction, hashDigest }) {
		this._hashFunction = hashFunction;
		this._hashDigest = hashDigest;
	}

	/**
	 * Apply the plugin
	 * @param {Compiler} compiler the compiler instance
	 * @returns {void}
	 */
	apply(compiler) {
		compiler.hooks.compilation.tap("RealContentHashPlugin", compilation => {
			const cacheAnalyse = compilation.getCache(
				"RealContentHashPlugin|analyse"
			);
			const cacheGenerate = compilation.getCache(
				"RealContentHashPlugin|generate"
			);
			const hooks = RealContentHashPlugin.getCompilationHooks(compilation);
			compilation.hooks.processAssets.tapPromise(
				{
					name: "RealContentHashPlugin",
					stage: Compilation.PROCESS_ASSETS_STAGE_OPTIMIZE_HASH
				},
				async () => {
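					// Collect all assets, remember the content hashes recorded in their
					// asset info, and index the assets by each hash they carry.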
					const assets = compilation.getAssets();
					/** @type {AssetInfoForRealContentHash[]} */
					const assetsWithInfo = [];
					const hashToAssets = new Map();
					for (const { source, info, name } of assets) {
						const cachedSource = toCachedSource(source);
						const content = cachedSource.source();
						/** @type {Set<string>} */
						const hashes = new Set();
						addToList(info.contenthash, hashes);
						const data = {
							name,
							info,
							source: cachedSource,
							/** @type {RawSource | undefined} */
							newSource: undefined,
							/** @type {RawSource | undefined} */
							newSourceWithoutOwn: undefined,
							content,
							/** @type {Set<string>} */
							ownHashes: undefined,
							contentComputePromise: undefined,
							contentComputeWithoutOwnPromise: undefined,
							/** @type {Set<string>} */
							referencedHashes: undefined,
							hashes
						};
						assetsWithInfo.push(data);
						for (const hash of hashes) {
							const list = hashToAssets.get(hash);
							if (list === undefined) {
								hashToAssets.set(hash, [data]);
							} else {
								list.push(data);
							}
						}
					}
					if (hashToAssets.size === 0) return;
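					// One global regexp matching any known content hash (each hash is
					// regexp-escaped via quoteMeta).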
					const hashRegExp = new RegExp(
						Array.from(hashToAssets.keys(), quoteMeta).join("|"),
						"g"
					);
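					// Analyse phase: scan each textual asset for hash occurrences and split
					// them into the asset's own hashes vs. hashes referencing other assets.
					// Binary assets are skipped; results are cached by source and hash list.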
					await Promise.all(
						assetsWithInfo.map(async asset => {
							const { name, source, content, hashes } = asset;
							if (Buffer.isBuffer(content)) {
								asset.referencedHashes = EMPTY_SET;
								asset.ownHashes = EMPTY_SET;
								return;
							}
							const etag = cacheAnalyse.mergeEtags(
								cacheAnalyse.getLazyHashedEtag(source),
								Array.from(hashes).join("|")
							);
							[asset.referencedHashes, asset.ownHashes] =
								await cacheAnalyse.providePromise(name, etag, () => {
									const referencedHashes = new Set();
									let ownHashes = new Set();
									const inContent = content.match(hashRegExp);
									if (inContent) {
										for (const hash of inContent) {
											if (hashes.has(hash)) {
												ownHashes.add(hash);
												continue;
											}
											referencedHashes.add(hash);
										}
									}
									return [referencedHashes, ownHashes];
								});
						})
					);
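					// Dependencies of a hash: for every asset carrying it, all hashes that
					// asset references, plus the asset's own hashes unless the looked-up
					// hash is itself one of them. A hash with no matching asset indicates
					// a caching problem and is reported as a compilation error.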
					const getDependencies = hash => {
						const assets = hashToAssets.get(hash);
						if (!assets) {
							const referencingAssets = assetsWithInfo.filter(asset =>
								asset.referencedHashes.has(hash)
							);
							const err = new WebpackError(`RealContentHashPlugin
Some kind of unexpected caching problem occurred.
An asset was cached with a reference to another asset (${hash}) that's not in the compilation anymore.
Either the asset was incorrectly cached, or the referenced asset should also be restored from cache.
Referenced by:
${referencingAssets
	.map(a => {
		const match = new RegExp(`.{0,20}${quoteMeta(hash)}.{0,20}`).exec(
			a.content
		);
		return ` - ${a.name}: ...${match ? match[0] : "???"}...`;
	})
	.join("\n")}`);
							compilation.errors.push(err);
							return undefined;
						}
						const hashes = new Set();
						for (const { referencedHashes, ownHashes } of assets) {
							if (!ownHashes.has(hash)) {
								for (const hash of ownHashes) {
									hashes.add(hash);
								}
							}
							for (const hash of referencedHashes) {
								hashes.add(hash);
							}
						}
						return hashes;
					};
					const hashInfo = hash => {
						const assets = hashToAssets.get(hash);
						return `${hash} (${Array.from(assets, a => a.name)})`;
					};
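					// Order hashes so dependencies come before dependents (depth-first
					// traversal with a stack-based circular dependency check).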
					const hashesInOrder = new Set();
					for (const hash of hashToAssets.keys()) {
						const add = (hash, stack) => {
							const deps = getDependencies(hash);
							if (!deps) return;
							stack.add(hash);
							for (const dep of deps) {
								if (hashesInOrder.has(dep)) continue;
								if (stack.has(dep)) {
									throw new Error(
										`Circular hash dependency ${Array.from(
											stack,
											hashInfo
										).join(" -> ")} -> ${hashInfo(dep)}`
									);
								}
								add(dep, stack);
							}
							hashesInOrder.add(hash);
							stack.delete(hash);
						};
						if (hashesInOrder.has(hash)) continue;
						add(hash, new Set());
					}
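					// hashToNewHash maps old hashes to their recomputed values; the helpers
					// below rewrite an asset's content with the new hashes, the "without own"
					// variant additionally blanking out the asset's own hashes so an asset's
					// new hash does not depend on itself.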
					const hashToNewHash = new Map();
					const getEtag = asset =>
						cacheGenerate.mergeEtags(
							cacheGenerate.getLazyHashedEtag(asset.source),
							Array.from(asset.referencedHashes, hash =>
								hashToNewHash.get(hash)
							).join("|")
						);
					const computeNewContent = asset => {
						if (asset.contentComputePromise) return asset.contentComputePromise;
						return (asset.contentComputePromise = (async () => {
							if (
								asset.ownHashes.size > 0 ||
								Array.from(asset.referencedHashes).some(
									hash => hashToNewHash.get(hash) !== hash
								)
							) {
								const identifier = asset.name;
								const etag = getEtag(asset);
								asset.newSource = await cacheGenerate.providePromise(
									identifier,
									etag,
									() => {
										const newContent = asset.content.replace(hashRegExp, hash =>
											hashToNewHash.get(hash)
										);
										return new RawSource(newContent);
									}
								);
							}
						})());
					};
					const computeNewContentWithoutOwn = asset => {
						if (asset.contentComputeWithoutOwnPromise)
							return asset.contentComputeWithoutOwnPromise;
						return (asset.contentComputeWithoutOwnPromise = (async () => {
							if (
								asset.ownHashes.size > 0 ||
								Array.from(asset.referencedHashes).some(
									hash => hashToNewHash.get(hash) !== hash
								)
							) {
								const identifier = asset.name + "|without-own";
								const etag = getEtag(asset);
								asset.newSourceWithoutOwn = await cacheGenerate.providePromise(
									identifier,
									etag,
									() => {
										const newContent = asset.content.replace(
											hashRegExp,
											hash => {
												if (asset.ownHashes.has(hash)) {
													return "";
												}
												return hashToNewHash.get(hash);
											}
										);
										return new RawSource(newContent);
									}
								);
							}
						})());
					};
					const comparator = compareSelect(a => a.name, compareStrings);
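					// Recompute each hash in dependency order from the (rewritten) contents
					// of the assets carrying it; the updateHash hook may supply a custom
					// hash, otherwise the configured hash function is applied and the digest
					// truncated to the old hash's length.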
					for (const oldHash of hashesInOrder) {
						const assets = hashToAssets.get(oldHash);
						assets.sort(comparator);
						const hash = createHash(this._hashFunction);
						await Promise.all(
							assets.map(asset =>
								asset.ownHashes.has(oldHash)
									? computeNewContentWithoutOwn(asset)
									: computeNewContent(asset)
							)
						);
						const assetsContent = mapAndDeduplicateBuffers(assets, asset => {
							if (asset.ownHashes.has(oldHash)) {
								return asset.newSourceWithoutOwn
									? asset.newSourceWithoutOwn.buffer()
									: asset.source.buffer();
							} else {
								return asset.newSource
									? asset.newSource.buffer()
									: asset.source.buffer();
							}
						});
						let newHash = hooks.updateHash.call(assetsContent, oldHash);
						if (!newHash) {
							for (const content of assetsContent) {
								hash.update(content);
							}
							const digest = hash.digest(this._hashDigest);
							newHash = /** @type {string} */ (digest.slice(0, oldHash.length));
						}
						hashToNewHash.set(oldHash, newHash);
					}
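					// Apply the results: swap in rewritten sources, update the contenthash
					// asset info, and rename assets whose file names contain an old hash.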
					await Promise.all(
						assetsWithInfo.map(async asset => {
							await computeNewContent(asset);
							const newName = asset.name.replace(hashRegExp, hash =>
								hashToNewHash.get(hash)
							);

							const infoUpdate = {};
							const hash = asset.info.contenthash;
							infoUpdate.contenthash = Array.isArray(hash)
								? hash.map(hash => hashToNewHash.get(hash))
								: hashToNewHash.get(hash);

							if (asset.newSource !== undefined) {
								compilation.updateAsset(
									asset.name,
									asset.newSource,
									infoUpdate
								);
							} else {
								compilation.updateAsset(asset.name, asset.source, infoUpdate);
							}

							if (asset.name !== newName) {
								compilation.renameAsset(asset.name, newName);
							}
						})
					);
				}
			);
		});
	}
}

module.exports = RealContentHashPlugin;
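
// Usage sketch (not part of the upstream webpack source): webpack normally
// creates this plugin itself when `optimization.realContentHash` is enabled,
// wiring it to the output hash settings. A manual setup would look roughly
// like the following; the plugin name "MyHashOverridePlugin" is an
// illustrative assumption, not an upstream name.
//
//   const RealContentHashPlugin = require("webpack/lib/optimize/RealContentHashPlugin");
//
//   new RealContentHashPlugin({
//     hashFunction: compiler.options.output.hashFunction,
//     hashDigest: compiler.options.output.hashDigest
//   }).apply(compiler);
//
// Another plugin can take over the hash computation via the bail hook exposed
// by getCompilationHooks; returning undefined falls back to the createHash
// based computation above:
//
//   compiler.hooks.compilation.tap("MyHashOverridePlugin", compilation => {
//     const hooks = RealContentHashPlugin.getCompilationHooks(compilation);
//     hooks.updateHash.tap("MyHashOverridePlugin", (assetContents, oldHash) => {
//       return undefined; // or a replacement hash string for these contents
//     });
//   });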