source: imaps-frontend/node_modules/webpack/lib/serialization/FileMiddleware.js

/*
	MIT License http://www.opensource.org/licenses/mit-license.php
*/

"use strict";

const { constants } = require("buffer");
const { pipeline } = require("stream");
const {
	createBrotliCompress,
	createBrotliDecompress,
	createGzip,
	createGunzip,
	constants: zConstants
} = require("zlib");
const createHash = require("../util/createHash");
const { dirname, join, mkdirp } = require("../util/fs");
const memoize = require("../util/memoize");
const SerializerMiddleware = require("./SerializerMiddleware");

/** @typedef {typeof import("../util/Hash")} Hash */
/** @typedef {import("../util/fs").IStats} IStats */
/** @typedef {import("../util/fs").IntermediateFileSystem} IntermediateFileSystem */
/** @typedef {import("./types").BufferSerializableType} BufferSerializableType */
/*
Format:

File -> Header Section*

Version -> u32
AmountOfSections -> u32
SectionSize -> i32 (if less than zero represents lazy value)

Header -> Version AmountOfSections SectionSize*

Buffer -> n bytes
Section -> Buffer

*/
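/*
Illustrative example of the format above: a file with two plain sections of
10 and 20 bytes plus one lazy section whose pointer buffer is 24 bytes long
starts with the header

u32 VERSION | u32 3 | i32 10 | i32 20 | i32 -24

followed by 10 + 20 bytes of section data and the 24-byte lazy pointer
(u64 size + file name).
*/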

// "wpc" + 1 in little-endian
const VERSION = 0x01637077;
const WRITE_LIMIT_TOTAL = 0x7fff0000;
const WRITE_LIMIT_CHUNK = 511 * 1024 * 1024;
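// WRITE_LIMIT_TOTAL stays just below 2 GiB per queued batch of writes and
// WRITE_LIMIT_CHUNK just below 512 MiB per single write() call; presumably
// this keeps every fs write within int32 / platform write-size limits.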

/**
 * @param {Buffer[]} buffers buffers
 * @param {string | Hash} hashFunction hash function to use
 * @returns {string} hash
 */
const hashForName = (buffers, hashFunction) => {
	const hash = createHash(hashFunction);
	for (const buf of buffers) hash.update(buf);
	return /** @type {string} */ (hash.digest("hex"));
};

const COMPRESSION_CHUNK_SIZE = 100 * 1024 * 1024;
const DECOMPRESSION_CHUNK_SIZE = 100 * 1024 * 1024;

/** @type {function(Buffer, number, number): void} */
const writeUInt64LE = Buffer.prototype.writeBigUInt64LE
	? (buf, value, offset) => {
			buf.writeBigUInt64LE(BigInt(value), offset);
		}
	: (buf, value, offset) => {
			const low = value % 0x100000000;
			const high = (value - low) / 0x100000000;
			buf.writeUInt32LE(low, offset);
			buf.writeUInt32LE(high, offset + 4);
		};

/** @type {function(Buffer, number): void} */
const readUInt64LE = Buffer.prototype.readBigUInt64LE
	? (buf, offset) => Number(buf.readBigUInt64LE(offset))
	: (buf, offset) => {
			const low = buf.readUInt32LE(offset);
			const high = buf.readUInt32LE(offset + 4);
			return high * 0x100000000 + low;
		};
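// The fallbacks split the 64-bit value into two 32-bit halves; e.g. the value
// 0x100000002 round-trips as low = 0x00000002, high = 0x00000001, which is
// byte-identical to the BigInt variants for all safe integers.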

/**
 * @typedef {object} SerializeResult
 * @property {string | false} name
 * @property {number} size
 * @property {Promise<any>=} backgroundJob
 */

/**
 * @param {FileMiddleware} middleware this
 * @param {BufferSerializableType[] | Promise<BufferSerializableType[]>} data data to be serialized
 * @param {string | boolean} name file base name
 * @param {function(string | false, Buffer[], number): Promise<void>} writeFile writes a file
 * @param {string | Hash} hashFunction hash function to use
 * @returns {Promise<SerializeResult>} resulting file pointer and promise
 */
const serialize = async (
	middleware,
	data,
	name,
	writeFile,
	hashFunction = "md4"
) => {
	/** @type {(Buffer[] | Buffer | SerializeResult | Promise<SerializeResult>)[]} */
	const processedData = [];
	/** @type {WeakMap<SerializeResult, function(): any | Promise<any>>} */
	const resultToLazy = new WeakMap();
	/** @type {Buffer[] | undefined} */
	let lastBuffers;
	for (const item of await data) {
		if (typeof item === "function") {
			if (!SerializerMiddleware.isLazy(item))
				throw new Error("Unexpected function");
			if (!SerializerMiddleware.isLazy(item, middleware)) {
				throw new Error(
					"Unexpected lazy value with non-this target (can't pass through lazy values)"
				);
			}
			lastBuffers = undefined;
			const serializedInfo = SerializerMiddleware.getLazySerializedValue(item);
			if (serializedInfo) {
				if (typeof serializedInfo === "function") {
					throw new Error(
						"Unexpected lazy value with non-this target (can't pass through lazy values)"
					);
				} else {
					processedData.push(serializedInfo);
				}
			} else {
				const content = item();
				if (content) {
					const options = SerializerMiddleware.getLazyOptions(item);
					processedData.push(
						serialize(
							middleware,
							content,
							(options && options.name) || true,
							writeFile,
							hashFunction
						).then(result => {
							/** @type {any} */ (item).options.size = result.size;
							resultToLazy.set(result, item);
							return result;
						})
					);
				} else {
					throw new Error(
						"Unexpected falsy value returned by lazy value function"
					);
				}
			}
		} else if (item) {
			if (lastBuffers) {
				lastBuffers.push(item);
			} else {
				lastBuffers = [item];
				processedData.push(lastBuffers);
			}
		} else {
			throw new Error("Unexpected falsy value in items array");
		}
	}
	/** @type {Promise<any>[]} */
	const backgroundJobs = [];
	const resolvedData = (
		await Promise.all(
			/** @type {Promise<Buffer[] | Buffer | SerializeResult>[]} */
			(processedData)
		)
	).map(item => {
		if (Array.isArray(item) || Buffer.isBuffer(item)) return item;

		backgroundJobs.push(item.backgroundJob);
		// create pointer buffer from size and name
		const name = /** @type {string} */ (item.name);
		const nameBuffer = Buffer.from(name);
		const buf = Buffer.allocUnsafe(8 + nameBuffer.length);
		writeUInt64LE(buf, item.size, 0);
		nameBuffer.copy(buf, 8, 0);
		const lazy = resultToLazy.get(item);
		SerializerMiddleware.setLazySerializedValue(lazy, buf);
		return buf;
	});
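	// Encode each section's size for the header: a plain Buffer[] section
	// becomes one or more non-negative i32 values (split at 0x7fffffff so each
	// fits an i32), while a lazy pointer section is stored as the negated
	// length of its pointer buffer.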
	/** @type {number[]} */
	const lengths = [];
	for (const item of resolvedData) {
		if (Array.isArray(item)) {
			let l = 0;
			for (const b of item) l += b.length;
			while (l > 0x7fffffff) {
				lengths.push(0x7fffffff);
				l -= 0x7fffffff;
			}
			lengths.push(l);
		} else if (item) {
			lengths.push(-item.length);
		} else {
			throw new Error(`Unexpected falsy value in resolved data ${item}`);
		}
	}
	const header = Buffer.allocUnsafe(8 + lengths.length * 4);
	header.writeUInt32LE(VERSION, 0);
	header.writeUInt32LE(lengths.length, 4);
	for (let i = 0; i < lengths.length; i++) {
		header.writeInt32LE(lengths[i], 8 + i * 4);
	}
	/** @type {Buffer[]} */
	const buf = [header];
	for (const item of resolvedData) {
		if (Array.isArray(item)) {
			for (const b of item) buf.push(b);
		} else if (item) {
			buf.push(item);
		}
	}
	if (name === true) {
		name = hashForName(buf, hashFunction);
	}
	let size = 0;
	for (const b of buf) size += b.length;
	backgroundJobs.push(writeFile(name, buf, size));
	return {
		size,
		name,
		backgroundJob:
			backgroundJobs.length === 1
				? backgroundJobs[0]
				: Promise.all(backgroundJobs)
	};
};

/**
 * @param {FileMiddleware} middleware this
 * @param {string | false} name filename
 * @param {function(string | false): Promise<Buffer[]>} readFile read content of a file
 * @returns {Promise<BufferSerializableType[]>} deserialized data
 */
const deserialize = async (middleware, name, readFile) => {
	const contents = await readFile(name);
	if (contents.length === 0) throw new Error(`Empty file ${name}`);
	let contentsIndex = 0;
	let contentItem = contents[0];
	let contentItemLength = contentItem.length;
	let contentPosition = 0;
	if (contentItemLength === 0) throw new Error(`Empty file ${name}`);
	const nextContent = () => {
		contentsIndex++;
		contentItem = contents[contentsIndex];
		contentItemLength = contentItem.length;
		contentPosition = 0;
	};
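	// ensureData(n) guarantees that the next n bytes can be read from
	// contentItem at contentPosition, concatenating across chunk boundaries of
	// `contents` when a value was split between file reads.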
	/**
	 * @param {number} n number of bytes to ensure
	 */
	const ensureData = n => {
		if (contentPosition === contentItemLength) {
			nextContent();
		}
		while (contentItemLength - contentPosition < n) {
			const remaining = contentItem.slice(contentPosition);
			let lengthFromNext = n - remaining.length;
			const buffers = [remaining];
			for (let i = contentsIndex + 1; i < contents.length; i++) {
				const l = contents[i].length;
				if (l > lengthFromNext) {
					buffers.push(contents[i].slice(0, lengthFromNext));
					contents[i] = contents[i].slice(lengthFromNext);
					lengthFromNext = 0;
					break;
				} else {
					buffers.push(contents[i]);
					contentsIndex = i;
					lengthFromNext -= l;
				}
			}
			if (lengthFromNext > 0) throw new Error("Unexpected end of data");
			contentItem = Buffer.concat(buffers, n);
			contentItemLength = n;
			contentPosition = 0;
		}
	};
	/**
	 * @returns {number} value value
	 */
	const readUInt32LE = () => {
		ensureData(4);
		const value = contentItem.readUInt32LE(contentPosition);
		contentPosition += 4;
		return value;
	};
	/**
	 * @returns {number} value value
	 */
	const readInt32LE = () => {
		ensureData(4);
		const value = contentItem.readInt32LE(contentPosition);
		contentPosition += 4;
		return value;
	};
	/**
	 * @param {number} l length
	 * @returns {Buffer} buffer
	 */
	const readSlice = l => {
		ensureData(l);
		if (contentPosition === 0 && contentItemLength === l) {
			const result = contentItem;
			if (contentsIndex + 1 < contents.length) {
				nextContent();
			} else {
				contentPosition = l;
			}
			return result;
		}
		const result = contentItem.slice(contentPosition, contentPosition + l);
		contentPosition += l;
		// we clone the buffer here to allow the original content to be garbage collected
		return l * 2 < contentItem.buffer.byteLength ? Buffer.from(result) : result;
	};
	const version = readUInt32LE();
	if (version !== VERSION) {
		throw new Error("Invalid file version");
	}
	const sectionCount = readUInt32LE();
	const lengths = [];
	let lastLengthPositive = false;
	for (let i = 0; i < sectionCount; i++) {
		const value = readInt32LE();
		const valuePositive = value >= 0;
		if (lastLengthPositive && valuePositive) {
			lengths[lengths.length - 1] += value;
		} else {
			lengths.push(value);
			lastLengthPositive = valuePositive;
		}
	}
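	// Consecutive non-negative sizes were split by the writer to fit into an
	// i32 and are merged back here: e.g. the on-disk sizes [0x7fffffff, 5]
	// describe a single section of 0x80000004 bytes.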
	const result = [];
	for (let length of lengths) {
		if (length < 0) {
			const slice = readSlice(-length);
			const size = Number(readUInt64LE(slice, 0));
			const nameBuffer = slice.slice(8);
			const name = nameBuffer.toString();
			result.push(
				SerializerMiddleware.createLazy(
					memoize(() => deserialize(middleware, name, readFile)),
					middleware,
					{
						name,
						size
					},
					slice
				)
			);
		} else {
			if (contentPosition === contentItemLength) {
				nextContent();
			} else if (contentPosition !== 0) {
				if (length <= contentItemLength - contentPosition) {
					result.push(
						Buffer.from(
							contentItem.buffer,
							contentItem.byteOffset + contentPosition,
							length
						)
					);
					contentPosition += length;
					length = 0;
				} else {
					const l = contentItemLength - contentPosition;
					result.push(
						Buffer.from(
							contentItem.buffer,
							contentItem.byteOffset + contentPosition,
							l
						)
					);
					length -= l;
					contentPosition = contentItemLength;
				}
			} else if (length >= contentItemLength) {
				result.push(contentItem);
				length -= contentItemLength;
				contentPosition = contentItemLength;
			} else {
				result.push(
					Buffer.from(contentItem.buffer, contentItem.byteOffset, length)
				);
				contentPosition += length;
				length = 0;
			}
			while (length > 0) {
				nextContent();
				if (length >= contentItemLength) {
					result.push(contentItem);
					length -= contentItemLength;
					contentPosition = contentItemLength;
				} else {
					result.push(
						Buffer.from(contentItem.buffer, contentItem.byteOffset, length)
					);
					contentPosition += length;
					length = 0;
				}
			}
		}
	}
	return result;
};
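// Lazy sections are restored as lazy values that only carry { name, size };
// the memoized deserialize call runs the first time such a value is accessed,
// reading the referenced file on demand.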

/** @typedef {{ filename: string, extension?: string }} FileMiddlewareContext */

/**
 * @typedef {BufferSerializableType[]} DeserializedType
 * @typedef {true} SerializedType
 * @extends {SerializerMiddleware<DeserializedType, SerializedType>}
 */
class FileMiddleware extends SerializerMiddleware {
	/**
	 * @param {IntermediateFileSystem} fs filesystem
	 * @param {string | Hash} hashFunction hash function to use
	 */
	constructor(fs, hashFunction = "md4") {
		super();
		this.fs = fs;
		this._hashFunction = hashFunction;
	}

	/**
	 * @param {DeserializedType} data data
	 * @param {object} context context object
	 * @returns {SerializedType|Promise<SerializedType>} serialized data
	 */
	serialize(data, context) {
		const { filename, extension = "" } = context;
		return new Promise((resolve, reject) => {
			mkdirp(this.fs, dirname(this.fs, filename), err => {
				if (err) return reject(err);

				// It's important that we don't touch existing files during serialization
				// because serialize may read existing files (when deserializing)
				const allWrittenFiles = new Set();
				/**
				 * @param {string | false} name name
				 * @param {Buffer[]} content content
				 * @param {number} size size
				 * @returns {Promise<void>}
				 */
				const writeFile = async (name, content, size) => {
					const file = name
						? join(this.fs, filename, `../${name}${extension}`)
						: filename;
					await new Promise(
						/**
						 * @param {(value?: undefined) => void} resolve resolve
						 * @param {(reason?: Error | null) => void} reject reject
						 */
						(resolve, reject) => {
							let stream = this.fs.createWriteStream(`${file}_`);
							let compression;
							if (file.endsWith(".gz")) {
								compression = createGzip({
									chunkSize: COMPRESSION_CHUNK_SIZE,
									level: zConstants.Z_BEST_SPEED
								});
							} else if (file.endsWith(".br")) {
								compression = createBrotliCompress({
									chunkSize: COMPRESSION_CHUNK_SIZE,
									params: {
										[zConstants.BROTLI_PARAM_MODE]: zConstants.BROTLI_MODE_TEXT,
										[zConstants.BROTLI_PARAM_QUALITY]: 2,
										[zConstants.BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING]: true,
										[zConstants.BROTLI_PARAM_SIZE_HINT]: size
									}
								});
							}
							if (compression) {
								pipeline(compression, stream, reject);
								stream = compression;
								stream.on("finish", () => resolve());
							} else {
								stream.on("error", err => reject(err));
								stream.on("finish", () => resolve());
							}
							// split into chunks for WRITE_LIMIT_CHUNK size
							/** @type {TODO[]} */
							const chunks = [];
							for (const b of content) {
								if (b.length < WRITE_LIMIT_CHUNK) {
									chunks.push(b);
								} else {
									for (let i = 0; i < b.length; i += WRITE_LIMIT_CHUNK) {
										chunks.push(b.slice(i, i + WRITE_LIMIT_CHUNK));
									}
								}
							}

							const len = chunks.length;
							let i = 0;
							/**
							 * @param {(Error | null)=} err err
							 */
							const batchWrite = err => {
								// will be handled in "on" error handler
								if (err) return;

								if (i === len) {
									stream.end();
									return;
								}

								// queue up a batch of chunks up to the write limit
								// end is exclusive
								let end = i;
								let sum = chunks[end++].length;
								while (end < len) {
									sum += chunks[end].length;
									if (sum > WRITE_LIMIT_TOTAL) break;
									end++;
								}
								while (i < end - 1) {
									stream.write(chunks[i++]);
								}
								stream.write(chunks[i++], batchWrite);
							};
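							// Each batch queues up to WRITE_LIMIT_TOTAL bytes; the callback on
							// the batch's final write re-invokes batchWrite, so the next batch
							// waits until the stream has flushed the previous one.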
							batchWrite();
						}
					);
					if (name) allWrittenFiles.add(file);
				};

				resolve(
					serialize(this, data, false, writeFile, this._hashFunction).then(
						async ({ backgroundJob }) => {
							await backgroundJob;

							// Rename the index file to disallow access during inconsistent file state
							await new Promise(
								/**
								 * @param {(value?: undefined) => void} resolve resolve
								 */
								resolve => {
									this.fs.rename(filename, `${filename}.old`, err => {
										resolve();
									});
								}
							);

							// update all written files
							await Promise.all(
								Array.from(
									allWrittenFiles,
									file =>
										new Promise(
											/**
											 * @param {(value?: undefined) => void} resolve resolve
											 * @param {(reason?: Error | null) => void} reject reject
											 * @returns {void}
											 */
											(resolve, reject) => {
												this.fs.rename(`${file}_`, file, err => {
													if (err) return reject(err);
													resolve();
												});
											}
										)
								)
							);

							// As final step automatically update the index file to have a consistent pack again
							await new Promise(
								/**
								 * @param {(value?: undefined) => void} resolve resolve
								 * @returns {void}
								 */
								resolve => {
									this.fs.rename(`${filename}_`, filename, err => {
										if (err) return reject(err);
										resolve();
									});
								}
							);
							return /** @type {true} */ (true);
						}
					)
				);
			});
		});
	}

	/**
	 * @param {SerializedType} data data
	 * @param {object} context context object
	 * @returns {DeserializedType|Promise<DeserializedType>} deserialized data
	 */
	deserialize(data, context) {
		const { filename, extension = "" } = context;
		/**
		 * @param {string | boolean} name name
		 * @returns {Promise<TODO>} result
		 */
		const readFile = name =>
			new Promise((resolve, reject) => {
				const file = name
					? join(this.fs, filename, `../${name}${extension}`)
					: filename;
				this.fs.stat(file, (err, stats) => {
					if (err) {
						reject(err);
						return;
					}
					let remaining = /** @type {IStats} */ (stats).size;
					/** @type {Buffer | undefined} */
					let currentBuffer;
					/** @type {number | undefined} */
					let currentBufferUsed;
					/** @type {any[]} */
					const buf = [];
					/** @type {import("zlib").Zlib & import("stream").Transform | undefined} */
					let decompression;
					if (file.endsWith(".gz")) {
						decompression = createGunzip({
							chunkSize: DECOMPRESSION_CHUNK_SIZE
						});
					} else if (file.endsWith(".br")) {
						decompression = createBrotliDecompress({
							chunkSize: DECOMPRESSION_CHUNK_SIZE
						});
					}
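					// When decompressing, the outer promise is resolved early with a
					// combined promise: it waits both for the raw file read (via the
					// swapped-in newResolve/newReject) and for the decompression stream
					// to end, then yields the decompressed chunks collected in `buf`.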
					if (decompression) {
						let newResolve;
						let newReject;
						resolve(
							Promise.all([
								new Promise((rs, rj) => {
									newResolve = rs;
									newReject = rj;
								}),
								new Promise((resolve, reject) => {
									decompression.on("data", chunk => buf.push(chunk));
									decompression.on("end", () => resolve());
									decompression.on("error", err => reject(err));
								})
							]).then(() => buf)
						);
						resolve = newResolve;
						reject = newReject;
					}
					this.fs.open(file, "r", (err, _fd) => {
						if (err) {
							reject(err);
							return;
						}
						const fd = /** @type {number} */ (_fd);
						const read = () => {
							if (currentBuffer === undefined) {
								currentBuffer = Buffer.allocUnsafeSlow(
									Math.min(
										constants.MAX_LENGTH,
										remaining,
										decompression ? DECOMPRESSION_CHUNK_SIZE : Infinity
									)
								);
								currentBufferUsed = 0;
							}
							let readBuffer = currentBuffer;
							let readOffset = /** @type {number} */ (currentBufferUsed);
							let readLength =
								currentBuffer.length -
								/** @type {number} */ (currentBufferUsed);
							// values passed to fs.read must be valid int32 values
							if (readOffset > 0x7fffffff) {
								readBuffer = currentBuffer.slice(readOffset);
								readOffset = 0;
							}
							if (readLength > 0x7fffffff) {
								readLength = 0x7fffffff;
							}
							this.fs.read(
								fd,
								readBuffer,
								readOffset,
								readLength,
								null,
								(err, bytesRead) => {
									if (err) {
										this.fs.close(fd, () => {
											reject(err);
										});
										return;
									}
									/** @type {number} */
									(currentBufferUsed) += bytesRead;
									remaining -= bytesRead;
									if (
										currentBufferUsed ===
										/** @type {Buffer} */ (currentBuffer).length
									) {
										if (decompression) {
											decompression.write(currentBuffer);
										} else {
											buf.push(currentBuffer);
										}
										currentBuffer = undefined;
										if (remaining === 0) {
											if (decompression) {
												decompression.end();
											}
											this.fs.close(fd, err => {
												if (err) {
													reject(err);
													return;
												}
												resolve(buf);
											});
											return;
										}
									}
									read();
								}
							);
						};
						read();
					});
				});
			});
		return deserialize(this, false, readFile);
	}
}

module.exports = FileMiddleware;
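
// Minimal usage sketch (hypothetical, not part of this file): webpack's pack
// file cache chains a FileMiddleware last in its serializer pipeline, so lazy
// sections end up as separate files next to the index file. The names
// `intermediateFs` and `bufferItems` below are placeholders:
//
//   const middleware = new FileMiddleware(intermediateFs, "md4");
//   await middleware.serialize(bufferItems, { filename: "/cache/index.pack" });
//   const items = await middleware.deserialize(true, {
//     filename: "/cache/index.pack"
//   });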