/*
	MIT License http://www.opensource.org/licenses/mit-license.php
*/

"use strict";

const { constants } = require("buffer");
const { pipeline } = require("stream");
const {
	createBrotliCompress,
	createBrotliDecompress,
	createGzip,
	createGunzip,
	constants: zConstants
} = require("zlib");
const createHash = require("../util/createHash");
const { dirname, join, mkdirp } = require("../util/fs");
const memoize = require("../util/memoize");
const SerializerMiddleware = require("./SerializerMiddleware");

/** @typedef {import("../util/fs").IntermediateFileSystem} IntermediateFileSystem */
/** @typedef {import("./types").BufferSerializableType} BufferSerializableType */

/*
Format:

File -> Header Section*

Version -> u32
AmountOfSections -> u32
SectionSize -> i32 (if less than zero represents lazy value)

Header -> Version AmountOfSections SectionSize*

Buffer -> n bytes
Section -> Buffer

*/
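/*
Worked example (illustrative): a pack containing one plain 16-byte buffer section
followed by one lazy section whose pointer buffer is 24 bytes (8-byte size plus a
16-character file name) would start with this header:

	u32 0x01637077   Version ("wpc" + 1)
	u32 2            AmountOfSections
	i32 16           section 1: plain buffer data, 16 bytes
	i32 -24          section 2: lazy pointer buffer, 24 bytes

Positive sizes above 0x7fffffff are split across several consecutive SectionSize
entries and merged back together when the header is read.
*/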

// "wpc" + 1 in little-endian
const VERSION = 0x01637077;
const hashForName = buffers => {
	const hash = createHash("md4");
	for (const buf of buffers) hash.update(buf);
	return /** @type {string} */ (hash.digest("hex"));
};

const COMPRESSION_CHUNK_SIZE = 100 * 1024 * 1024;
const DECOMPRESSION_CHUNK_SIZE = 100 * 1024 * 1024;

const writeUInt64LE = Buffer.prototype.writeBigUInt64LE
	? (buf, value, offset) => {
			buf.writeBigUInt64LE(BigInt(value), offset);
		}
	: (buf, value, offset) => {
			const low = value % 0x100000000;
			const high = (value - low) / 0x100000000;
			buf.writeUInt32LE(low, offset);
			buf.writeUInt32LE(high, offset + 4);
		};

const readUInt64LE = Buffer.prototype.readBigUInt64LE
	? (buf, offset) => {
			return Number(buf.readBigUInt64LE(offset));
		}
	: (buf, offset) => {
			const low = buf.readUInt32LE(offset);
			const high = buf.readUInt32LE(offset + 4);
			return high * 0x100000000 + low;
		};
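
// The fallbacks cover Node.js versions without Buffer's BigInt 64-bit accessors:
// the value is stored as two little-endian 32-bit halves (low word first), and
// high * 0x100000000 + low reconstructs it. The values stored here are byte counts,
// assumed to stay within Number.MAX_SAFE_INTEGER.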

/**
 * @typedef {Object} SerializeResult
 * @property {string | false} name
 * @property {number} size
 * @property {Promise=} backgroundJob
 */

/**
 * @param {FileMiddleware} middleware this
 * @param {BufferSerializableType[] | Promise<BufferSerializableType[]>} data data to be serialized
 * @param {string | boolean} name file base name
 * @param {function(string | false, Buffer[]): Promise<void>} writeFile writes a file
 * @returns {Promise<SerializeResult>} resulting file pointer and promise
 */
const serialize = async (middleware, data, name, writeFile) => {
	/** @type {(Buffer[] | Buffer | SerializeResult | Promise<SerializeResult>)[]} */
	const processedData = [];
	/** @type {WeakMap<SerializeResult, function(): any | Promise<any>>} */
	const resultToLazy = new WeakMap();
	/** @type {Buffer[]} */
	let lastBuffers = undefined;
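	// Split the incoming items into sections:
	// - consecutive plain Buffers are collected into one section per run
	// - lazy values are written to separate files via a recursive serialize() call and
	//   are later replaced by pointer buffers (size + file name)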
	for (const item of await data) {
		if (typeof item === "function") {
			if (!SerializerMiddleware.isLazy(item))
				throw new Error("Unexpected function");
			if (!SerializerMiddleware.isLazy(item, middleware)) {
				throw new Error(
					"Unexpected lazy value with non-this target (can't pass through lazy values)"
				);
			}
			lastBuffers = undefined;
			const serializedInfo = SerializerMiddleware.getLazySerializedValue(item);
			if (serializedInfo) {
				if (typeof serializedInfo === "function") {
					throw new Error(
						"Unexpected lazy value with non-this target (can't pass through lazy values)"
					);
				} else {
					processedData.push(serializedInfo);
				}
			} else {
				const content = item();
				if (content) {
					const options = SerializerMiddleware.getLazyOptions(item);
					processedData.push(
						serialize(
							middleware,
							content,
							(options && options.name) || true,
							writeFile
						).then(result => {
							/** @type {any} */ (item).options.size = result.size;
							resultToLazy.set(result, item);
							return result;
						})
					);
				} else {
					throw new Error(
						"Unexpected falsy value returned by lazy value function"
					);
				}
			}
		} else if (item) {
			if (lastBuffers) {
				lastBuffers.push(item);
			} else {
				lastBuffers = [item];
				processedData.push(lastBuffers);
			}
		} else {
			throw new Error("Unexpected falsy value in items array");
		}
	}
	/** @type {Promise<any>[]} */
	const backgroundJobs = [];
	const resolvedData = (
		await Promise.all(
			/** @type {Promise<Buffer[] | Buffer | SerializeResult>[]} */ (
				processedData
			)
		)
	).map(item => {
		if (Array.isArray(item) || Buffer.isBuffer(item)) return item;

		backgroundJobs.push(item.backgroundJob);
		// create pointer buffer from size and name
		const name = /** @type {string} */ (item.name);
		const nameBuffer = Buffer.from(name);
		const buf = Buffer.allocUnsafe(8 + nameBuffer.length);
		writeUInt64LE(buf, item.size, 0);
		nameBuffer.copy(buf, 8, 0);
		const lazy = resultToLazy.get(item);
		SerializerMiddleware.setLazySerializedValue(lazy, buf);
		return buf;
	});
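	// Compute the SectionSize header entries:
	// - a run of merged buffers yields positive entries, split at 0x7fffffff so that
	//   each entry fits into a signed 32-bit integer
	// - the pointer buffer of a lazy section yields a negative entry (its own length)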
	const lengths = [];
	for (const item of resolvedData) {
		if (Array.isArray(item)) {
			let l = 0;
			for (const b of item) l += b.length;
			while (l > 0x7fffffff) {
				lengths.push(0x7fffffff);
				l -= 0x7fffffff;
			}
			lengths.push(l);
		} else if (item) {
			lengths.push(-item.length);
		} else {
			throw new Error("Unexpected falsy value in resolved data " + item);
		}
	}
	const header = Buffer.allocUnsafe(8 + lengths.length * 4);
	header.writeUInt32LE(VERSION, 0);
	header.writeUInt32LE(lengths.length, 4);
	for (let i = 0; i < lengths.length; i++) {
		header.writeInt32LE(lengths[i], 8 + i * 4);
	}
	const buf = [header];
	for (const item of resolvedData) {
		if (Array.isArray(item)) {
			for (const b of item) buf.push(b);
		} else if (item) {
			buf.push(item);
		}
	}
	if (name === true) {
		name = hashForName(buf);
	}
	backgroundJobs.push(writeFile(name, buf));
	let size = 0;
	for (const b of buf) size += b.length;
	return {
		size,
		name,
		backgroundJob:
			backgroundJobs.length === 1
				? backgroundJobs[0]
				: Promise.all(backgroundJobs)
	};
};

/**
 * @param {FileMiddleware} middleware this
 * @param {string | false} name filename
 * @param {function(string | false): Promise<Buffer[]>} readFile read content of a file
 * @returns {Promise<BufferSerializableType[]>} deserialized data
 */
const deserialize = async (middleware, name, readFile) => {
	const contents = await readFile(name);
	if (contents.length === 0) throw new Error("Empty file " + name);
	let contentsIndex = 0;
	let contentItem = contents[0];
	let contentItemLength = contentItem.length;
	let contentPosition = 0;
	if (contentItemLength === 0) throw new Error("Empty file " + name);
	const nextContent = () => {
		contentsIndex++;
		contentItem = contents[contentsIndex];
		contentItemLength = contentItem.length;
		contentPosition = 0;
	};
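	// Ensure that at least n bytes are readable at contentPosition; if the current
	// chunk is too short, the remainder is concatenated with bytes from the following
	// chunks into a temporary buffer.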
	const ensureData = n => {
		if (contentPosition === contentItemLength) {
			nextContent();
		}
		while (contentItemLength - contentPosition < n) {
			const remaining = contentItem.slice(contentPosition);
			let lengthFromNext = n - remaining.length;
			const buffers = [remaining];
			for (let i = contentsIndex + 1; i < contents.length; i++) {
				const l = contents[i].length;
				if (l > lengthFromNext) {
					buffers.push(contents[i].slice(0, lengthFromNext));
					contents[i] = contents[i].slice(lengthFromNext);
					lengthFromNext = 0;
					break;
				} else {
					buffers.push(contents[i]);
					contentsIndex = i;
					lengthFromNext -= l;
				}
			}
			if (lengthFromNext > 0) throw new Error("Unexpected end of data");
			contentItem = Buffer.concat(buffers, n);
			contentItemLength = n;
			contentPosition = 0;
		}
	};
	const readUInt32LE = () => {
		ensureData(4);
		const value = contentItem.readUInt32LE(contentPosition);
		contentPosition += 4;
		return value;
	};
	const readInt32LE = () => {
		ensureData(4);
		const value = contentItem.readInt32LE(contentPosition);
		contentPosition += 4;
		return value;
	};
	const readSlice = l => {
		ensureData(l);
		if (contentPosition === 0 && contentItemLength === l) {
			const result = contentItem;
			if (contentsIndex + 1 < contents.length) {
				nextContent();
			} else {
				contentPosition = l;
			}
			return result;
		}
		const result = contentItem.slice(contentPosition, contentPosition + l);
		contentPosition += l;
		// we clone the buffer here to allow the original content to be garbage collected
		return l * 2 < contentItem.buffer.byteLength ? Buffer.from(result) : result;
	};
	const version = readUInt32LE();
	if (version !== VERSION) {
		throw new Error("Invalid file version");
	}
	const sectionCount = readUInt32LE();
	const lengths = [];
	let lastLengthPositive = false;
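	// Merge consecutive positive section sizes back together: they were split at
	// 0x7fffffff during serialization so that each header entry fits into an i32.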
	for (let i = 0; i < sectionCount; i++) {
		const value = readInt32LE();
		const valuePositive = value >= 0;
		if (lastLengthPositive && valuePositive) {
			lengths[lengths.length - 1] += value;
		} else {
			lengths.push(value);
			lastLengthPositive = valuePositive;
		}
	}
	const result = [];
	for (let length of lengths) {
		if (length < 0) {
			const slice = readSlice(-length);
			const size = Number(readUInt64LE(slice, 0));
			const nameBuffer = slice.slice(8);
			const name = nameBuffer.toString();
			result.push(
				SerializerMiddleware.createLazy(
					memoize(() => deserialize(middleware, name, readFile)),
					middleware,
					{
						name,
						size
					},
					slice
				)
			);
		} else {
			if (contentPosition === contentItemLength) {
				nextContent();
			} else if (contentPosition !== 0) {
				if (length <= contentItemLength - contentPosition) {
					result.push(
						Buffer.from(
							contentItem.buffer,
							contentItem.byteOffset + contentPosition,
							length
						)
					);
					contentPosition += length;
					length = 0;
				} else {
					const l = contentItemLength - contentPosition;
					result.push(
						Buffer.from(
							contentItem.buffer,
							contentItem.byteOffset + contentPosition,
							l
						)
					);
					length -= l;
					contentPosition = contentItemLength;
				}
			} else {
				if (length >= contentItemLength) {
					result.push(contentItem);
					length -= contentItemLength;
					contentPosition = contentItemLength;
				} else {
					result.push(
						Buffer.from(contentItem.buffer, contentItem.byteOffset, length)
					);
					contentPosition += length;
					length = 0;
				}
			}
			while (length > 0) {
				nextContent();
				if (length >= contentItemLength) {
					result.push(contentItem);
					length -= contentItemLength;
					contentPosition = contentItemLength;
				} else {
					result.push(
						Buffer.from(contentItem.buffer, contentItem.byteOffset, length)
					);
					contentPosition += length;
					length = 0;
				}
			}
		}
	}
	return result;
};

/**
 * @typedef {BufferSerializableType[]} DeserializedType
 * @typedef {true} SerializedType
 * @extends {SerializerMiddleware<DeserializedType, SerializedType>}
 */
class FileMiddleware extends SerializerMiddleware {
	/**
	 * @param {IntermediateFileSystem} fs filesystem
	 */
	constructor(fs) {
		super();
		this.fs = fs;
	}
	/**
	 * @param {DeserializedType} data data
	 * @param {Object} context context object
	 * @returns {SerializedType|Promise<SerializedType>} serialized data
	 */
	serialize(data, context) {
		const { filename, extension = "" } = context;
		return new Promise((resolve, reject) => {
			mkdirp(this.fs, dirname(this.fs, filename), err => {
				if (err) return reject(err);

				// It's important that we don't touch existing files during serialization
				// because serialize may read existing files (when deserializing)
				const allWrittenFiles = new Set();
				const writeFile = async (name, content) => {
					const file = name
						? join(this.fs, filename, `../${name}${extension}`)
						: filename;
					await new Promise((resolve, reject) => {
						let stream = this.fs.createWriteStream(file + "_");
						let compression;
						if (file.endsWith(".gz")) {
							compression = createGzip({
								chunkSize: COMPRESSION_CHUNK_SIZE,
								level: zConstants.Z_BEST_SPEED
							});
						} else if (file.endsWith(".br")) {
							compression = createBrotliCompress({
								chunkSize: COMPRESSION_CHUNK_SIZE,
								params: {
									[zConstants.BROTLI_PARAM_MODE]: zConstants.BROTLI_MODE_TEXT,
									[zConstants.BROTLI_PARAM_QUALITY]: 2,
									[zConstants.BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING]: true,
									[zConstants.BROTLI_PARAM_SIZE_HINT]: content.reduce(
										(size, b) => size + b.length,
										0
									)
								}
							});
						}
						if (compression) {
							pipeline(compression, stream, reject);
							stream = compression;
							stream.on("finish", () => resolve());
						} else {
							stream.on("error", err => reject(err));
							stream.on("finish", () => resolve());
						}
						for (const b of content) stream.write(b);
						stream.end();
					});
					if (name) allWrittenFiles.add(file);
				};
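
				// Commit protocol: every file is first written to "<file>_". After all
				// background jobs finish, the old index file is moved aside, the "_" files
				// are renamed into place, and the index file is renamed last, so readers
				// that go through the index never see a partially written pack.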

				resolve(
					serialize(this, data, false, writeFile).then(
						async ({ backgroundJob }) => {
							await backgroundJob;

							// Rename the index file to disallow access during inconsistent file state
							await new Promise(resolve =>
								this.fs.rename(filename, filename + ".old", err => {
									resolve();
								})
							);

							// update all written files
							await Promise.all(
								Array.from(
									allWrittenFiles,
									file =>
										new Promise((resolve, reject) => {
											this.fs.rename(file + "_", file, err => {
												if (err) return reject(err);
												resolve();
											});
										})
								)
							);

							// As final step automatically update the index file to have a consistent pack again
							await new Promise(resolve => {
								this.fs.rename(filename + "_", filename, err => {
									if (err) return reject(err);
									resolve();
								});
							});
							return /** @type {true} */ (true);
						}
					)
				);
			});
		});
	}

	/**
	 * @param {SerializedType} data data
	 * @param {Object} context context object
	 * @returns {DeserializedType|Promise<DeserializedType>} deserialized data
	 */
	deserialize(data, context) {
		const { filename, extension = "" } = context;
		const readFile = name =>
			new Promise((resolve, reject) => {
				const file = name
					? join(this.fs, filename, `../${name}${extension}`)
					: filename;
				this.fs.stat(file, (err, stats) => {
					if (err) {
						reject(err);
						return;
					}
					let remaining = /** @type {number} */ (stats.size);
					let currentBuffer;
					let currentBufferUsed;
					const buf = [];
					let decompression;
					if (file.endsWith(".gz")) {
						decompression = createGunzip({
							chunkSize: DECOMPRESSION_CHUNK_SIZE
						});
					} else if (file.endsWith(".br")) {
						decompression = createBrotliDecompress({
							chunkSize: DECOMPRESSION_CHUNK_SIZE
						});
					}
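					// When the pack is compressed, swap resolve/reject: the outer promise
					// resolves with the decompressed chunks collected from the stream below,
					// while the raw file-read loop resolves an inner promise once it has fed
					// the whole file into the decompression stream.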
					if (decompression) {
						let newResolve, newReject;
						resolve(
							Promise.all([
								new Promise((rs, rj) => {
									newResolve = rs;
									newReject = rj;
								}),
								new Promise((resolve, reject) => {
									decompression.on("data", chunk => buf.push(chunk));
									decompression.on("end", () => resolve());
									decompression.on("error", err => reject(err));
								})
							]).then(() => buf)
						);
						resolve = newResolve;
						reject = newReject;
					}
					this.fs.open(file, "r", (err, fd) => {
						if (err) {
							reject(err);
							return;
						}
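						// Read the file in chunks of at most min(buffer MAX_LENGTH, remaining
						// bytes, DECOMPRESSION_CHUNK_SIZE): each filled buffer is either pushed
						// to the result list or fed to the decompression stream, and read()
						// recurses until the whole file has been consumed.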
						const read = () => {
							if (currentBuffer === undefined) {
								currentBuffer = Buffer.allocUnsafeSlow(
									Math.min(
										constants.MAX_LENGTH,
										remaining,
										decompression ? DECOMPRESSION_CHUNK_SIZE : Infinity
									)
								);
								currentBufferUsed = 0;
							}
							let readBuffer = currentBuffer;
							let readOffset = currentBufferUsed;
							let readLength = currentBuffer.length - currentBufferUsed;
							// values passed to fs.read must be valid int32 values
							if (readOffset > 0x7fffffff) {
								readBuffer = currentBuffer.slice(readOffset);
								readOffset = 0;
							}
							if (readLength > 0x7fffffff) {
								readLength = 0x7fffffff;
							}
							this.fs.read(
								fd,
								readBuffer,
								readOffset,
								readLength,
								null,
								(err, bytesRead) => {
									if (err) {
										this.fs.close(fd, () => {
											reject(err);
										});
										return;
									}
									currentBufferUsed += bytesRead;
									remaining -= bytesRead;
									if (currentBufferUsed === currentBuffer.length) {
										if (decompression) {
											decompression.write(currentBuffer);
										} else {
											buf.push(currentBuffer);
										}
										currentBuffer = undefined;
										if (remaining === 0) {
											if (decompression) {
												decompression.end();
											}
											this.fs.close(fd, err => {
												if (err) {
													reject(err);
													return;
												}
												resolve(buf);
											});
											return;
										}
									}
									read();
								}
							);
						};
						read();
					});
				});
			});
		return deserialize(this, false, readFile);
	}
}

module.exports = FileMiddleware;
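
// Illustrative usage sketch, assuming a serializer pipeline that supplies
// { filename, extension } as context and an IntermediateFileSystem instance `ifs`
// (the instance name and path below are hypothetical):
//
//   const middleware = new FileMiddleware(ifs);
//   const context = { filename: "/cache/default.pack", extension: ".pack" };
//   await middleware.serialize([Buffer.from("hello")], context); // resolves to true
//   const buffers = await middleware.deserialize(true, context); // buffer sections read back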