source: trip-planner-front/node_modules/streamroller/test/RollingFileStream-test.js @ 6a3a178

require("should");

const fs = require("fs-extra"),
  path = require("path"),
  util = require("util"),
  zlib = require("zlib"),
  streams = require("stream"),
  RollingFileStream = require("../lib").RollingFileStream;

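// Test helpers: promisified gunzip, paths resolved relative to this directory,
// fixture-file creation, and cleanup that ignores errors (e.g. missing files).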
const gunzip = util.promisify(zlib.gunzip);
const fullPath = f => path.join(__dirname, f);
const remove = filename => fs.unlink(fullPath(filename)).catch(() => {});
const create = filename => fs.writeFile(fullPath(filename), "test file");

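// Promisified stream.write: resolves once the chunk has been handled,
// rejects if the write callback reports an error.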
const write = (stream, data) => {
  return new Promise((resolve, reject) => {
    stream.write(data, "utf8", e => {
      if (e) {
        reject(e);
      } else {
        resolve();
      }
    });
  });
};

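// Writes each message in order, newline-terminated, then ends the stream
// and resolves once the stream has finished.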
const writeInSequence = async (stream, messages) => {
  for (let i = 0; i < messages.length; i += 1) {
    await write(stream, messages[i] + "\n");
  }
  return new Promise(resolve => {
    stream.end(resolve);
  });
};

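// Ends a stream, resolving when it has finished or rejecting on error.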
const close = async (stream) => new Promise(
  (resolve, reject) => stream.end(e => e ? reject(e) : resolve())
);

describe("RollingFileStream", function() {
  describe("arguments", function() {
    let stream;

    before(async function() {
      await remove("test-rolling-file-stream");
      stream = new RollingFileStream(
        path.join(__dirname, "test-rolling-file-stream"),
        1024,
        5
      );
    });

    after(async function() {
      await close(stream);
      await remove("test-rolling-file-stream");
    });

    it("should take a filename, file size (bytes), no. backups, return Writable", function() {
      stream.should.be.an.instanceOf(streams.Writable);
      stream.filename.should.eql(
        path.join(__dirname, "test-rolling-file-stream")
      );
      stream.size.should.eql(1024);
      stream.backups.should.eql(5);
    });

    it("should apply default settings to the underlying stream", function() {
      stream.theStream.mode.should.eql(420);
      stream.theStream.flags.should.eql("a");
    });
  });

  describe("with stream arguments", function() {
    let stream;
    it("should pass them to the underlying stream", function() {
      stream = new RollingFileStream(
        path.join(__dirname, "test-rolling-file-stream"),
        1024,
        5,
        { mode: parseInt("0666", 8) }
      );
      stream.theStream.mode.should.eql(parseInt("0666", 8));
    });

    after(async function() {
      await close(stream);
      await remove("test-rolling-file-stream");
    });
  });

  describe("without size", function() {
    let stream;
    it("should default to max int size", function() {
      stream = new RollingFileStream(
        path.join(__dirname, "test-rolling-file-stream")
      );
      stream.size.should.eql(Number.MAX_SAFE_INTEGER);
    });

    after(async function() {
      await close(stream);
      await remove("test-rolling-file-stream");
    });
  });

  describe("without number of backups", function() {
    let stream;
    it("should default to 1 backup", function() {
      stream = new RollingFileStream(
        path.join(__dirname, "test-rolling-file-stream"),
        1024
      );
      stream.backups.should.eql(1);
    });

    after(async function() {
      await close(stream);
      await remove("test-rolling-file-stream");
    });
  });

  describe("writing less than the file size", function() {
    before(async function() {
      await remove("test-rolling-file-stream-write-less");
      const stream = new RollingFileStream(
        path.join(__dirname, "test-rolling-file-stream-write-less"),
        100
      );
      await writeInSequence(stream, ["cheese"]);
    });

    after(async function() {
      await remove("test-rolling-file-stream-write-less");
    });

    it("should write to the file", async function() {
      const contents = await fs.readFile(
        path.join(__dirname, "test-rolling-file-stream-write-less"),
        "utf8"
      );
      contents.should.eql("cheese\n");
    });

    it("should write one file", async function() {
      const files = await fs.readdir(__dirname);
      files
        .filter(
          file => file.indexOf("test-rolling-file-stream-write-less") > -1
        )
        .should.have.length(1);
    });
  });

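  // Each "<n>.cheese\n" entry is 9 bytes, so the 45-byte limit rolls the file
  // after five entries: messages 0-4 end up in the ".1" backup and 5-6 stay
  // in the live file.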
  describe("writing more than the file size", function() {
    before(async function() {
      await remove("test-rolling-file-stream-write-more");
      await remove("test-rolling-file-stream-write-more.1");
      const stream = new RollingFileStream(
        path.join(__dirname, "test-rolling-file-stream-write-more"),
        45
      );
      await writeInSequence(
        stream,
        [0, 1, 2, 3, 4, 5, 6].map(i => i + ".cheese")
      );
    });

    after(async function() {
      await remove("test-rolling-file-stream-write-more");
      await remove("test-rolling-file-stream-write-more.1");
    });

    it("should write two files", async function() {
      const files = await fs.readdir(__dirname);
      files
        .filter(
          file => file.indexOf("test-rolling-file-stream-write-more") > -1
        )
        .should.have.length(2);
    });

    it("should write the last two log messages to the first file", async function() {
      const contents = await fs.readFile(
        path.join(__dirname, "test-rolling-file-stream-write-more"),
        "utf8"
      );
      contents.should.eql("5.cheese\n6.cheese\n");
    });

    it("should write the first five log messages to the second file", async function() {
      const contents = await fs.readFile(
        path.join(__dirname, "test-rolling-file-stream-write-more.1"),
        "utf8"
      );
      contents.should.eql("0.cheese\n1.cheese\n2.cheese\n3.cheese\n4.cheese\n");
    });
  });

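  // Every message is longer than the 30-byte limit, so each write after the
  // first triggers a roll; with two backups kept, the oldest message is
  // discarded and both backups are gzipped.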
  describe("with options.compress = true", function() {
    before(async function() {
      const stream = new RollingFileStream(
        path.join(__dirname, "compressed-backups.log"),
        30, // 30 bytes max size
        2, // two backup files to keep
        { compress: true }
      );
      const messages = [
        "This is the first log message.",
        "This is the second log message.",
        "This is the third log message.",
        "This is the fourth log message."
      ];
      await writeInSequence(stream, messages);
    });

    it("should produce three files, with the backups compressed", async function() {
      const files = await fs.readdir(__dirname);
      const testFiles = files
        .filter(f => f.indexOf("compressed-backups.log") > -1)
        .sort();

      testFiles.length.should.eql(3);
      testFiles.should.eql([
        "compressed-backups.log",
        "compressed-backups.log.1.gz",
        "compressed-backups.log.2.gz"
      ]);

      let contents = await fs.readFile(
        path.join(__dirname, testFiles[0]),
        "utf8"
      );
      contents.should.eql("This is the fourth log message.\n");

      let gzipped = await fs.readFile(path.join(__dirname, testFiles[1]));
      contents = await gunzip(gzipped);
      contents.toString("utf8").should.eql("This is the third log message.\n");

      gzipped = await fs.readFile(path.join(__dirname, testFiles[2]));
      contents = await gunzip(gzipped);
      contents.toString("utf8").should.eql("This is the second log message.\n");
    });

    after(function() {
      return Promise.all([
        remove("compressed-backups.log"),
        remove("compressed-backups.log.1.gz"),
        remove("compressed-backups.log.2.gz")
      ]);
    });
  });

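  // With keepFileExt, the roll index is inserted before the extension, giving
  // "extKept-backups.1.log" rather than "extKept-backups.log.1".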
  describe("with options.keepFileExt = true", function() {
    before(async function() {
      const stream = new RollingFileStream(
        path.join(__dirname, "extKept-backups.log"),
        30, // 30 bytes max size
        2, // two backup files to keep
        { keepFileExt: true }
      );
      const messages = [
        "This is the first log message.",
        "This is the second log message.",
        "This is the third log message.",
        "This is the fourth log message."
      ];
      await writeInSequence(stream, messages);
    });

    it("should produce three files, with the file-extension kept", async function() {
      const files = await fs.readdir(__dirname);
      const testFiles = files
        .filter(f => f.indexOf("extKept-backups") > -1)
        .sort();

      testFiles.length.should.eql(3);
      testFiles.should.eql([
        "extKept-backups.1.log",
        "extKept-backups.2.log",
        "extKept-backups.log"
      ]);

      let contents = await fs.readFile(
        path.join(__dirname, testFiles[0]),
        "utf8"
      );
      contents.should.eql("This is the third log message.\n");

      contents = await fs.readFile(path.join(__dirname, testFiles[1]), "utf8");
      contents.should.eql("This is the second log message.\n");
      contents = await fs.readFile(path.join(__dirname, testFiles[2]), "utf8");
      contents.should.eql("This is the fourth log message.\n");
    });

    after(function() {
      return Promise.all([
        remove("extKept-backups.log"),
        remove("extKept-backups.1.log"),
        remove("extKept-backups.2.log")
      ]);
    });
  });

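  // Combining compress and keepFileExt yields backups named
  // "compressed-backups.<n>.log.gz" while the live file keeps its original name.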
  describe("with options.compress = true and keepFileExt = true", function() {
    before(async function() {
      const stream = new RollingFileStream(
        path.join(__dirname, "compressed-backups.log"),
        30, // 30 bytes max size
        2, // two backup files to keep
        { compress: true, keepFileExt: true }
      );
      const messages = [
        "This is the first log message.",
        "This is the second log message.",
        "This is the third log message.",
        "This is the fourth log message."
      ];
      await writeInSequence(stream, messages);
    });

    it("should produce three files, with the backups compressed", async function() {
      const files = await fs.readdir(__dirname);
      const testFiles = files
        .filter(f => f.indexOf("compressed-backups") > -1)
        .sort();

      testFiles.length.should.eql(3);
      testFiles.should.eql([
        "compressed-backups.1.log.gz",
        "compressed-backups.2.log.gz",
        "compressed-backups.log"
      ]);

      let contents = await fs.readFile(
        path.join(__dirname, testFiles[2]),
        "utf8"
      );
      contents.should.eql("This is the fourth log message.\n");

      let gzipped = await fs.readFile(path.join(__dirname, testFiles[1]));
      contents = await gunzip(gzipped);
      contents.toString("utf8").should.eql("This is the second log message.\n");
      gzipped = await fs.readFile(path.join(__dirname, testFiles[0]));
      contents = await gunzip(gzipped);
      contents.toString("utf8").should.eql("This is the third log message.\n");
    });

    after(function() {
      return Promise.all([
        remove("compressed-backups.log"),
        remove("compressed-backups.1.log.gz"),
        remove("compressed-backups.2.log.gz")
      ]);
    });
  });

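  // Pre-seed the directory with files carrying unusual suffixes (.11, .20,
  // .-1, .1.1) to check that rolling copes with indices that already exist.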
  describe("when many files already exist", function() {
    before(async function() {
      await Promise.all([
        remove("test-rolling-stream-with-existing-files.11"),
        remove("test-rolling-stream-with-existing-files.20"),
        remove("test-rolling-stream-with-existing-files.-1"),
        remove("test-rolling-stream-with-existing-files.1.1"),
        remove("test-rolling-stream-with-existing-files.1")
      ]);
      await Promise.all([
        create("test-rolling-stream-with-existing-files.11"),
        create("test-rolling-stream-with-existing-files.20"),
        create("test-rolling-stream-with-existing-files.-1"),
        create("test-rolling-stream-with-existing-files.1.1"),
        create("test-rolling-stream-with-existing-files.1")
      ]);

      const stream = new RollingFileStream(
        path.join(__dirname, "test-rolling-stream-with-existing-files"),
        18,
        5
      );

      await writeInSequence(
        stream,
        [0, 1, 2, 3, 4, 5, 6].map(i => i + ".cheese")
      );
    });

    after(function() {
      return Promise.all(
        [
          "test-rolling-stream-with-existing-files.-1",
          "test-rolling-stream-with-existing-files",
          "test-rolling-stream-with-existing-files.1.1",
          "test-rolling-stream-with-existing-files.0",
          "test-rolling-stream-with-existing-files.1",
          "test-rolling-stream-with-existing-files.2",
          "test-rolling-stream-with-existing-files.3",
          "test-rolling-stream-with-existing-files.4",
          "test-rolling-stream-with-existing-files.5",
          "test-rolling-stream-with-existing-files.6",
          "test-rolling-stream-with-existing-files.11",
          "test-rolling-stream-with-existing-files.20"
        ].map(remove)
      );
    });

    it("should roll the files, removing the highest indices", async function() {
      const files = await fs.readdir(__dirname);
      files.should.containEql("test-rolling-stream-with-existing-files");
      files.should.containEql("test-rolling-stream-with-existing-files.1");
      files.should.containEql("test-rolling-stream-with-existing-files.2");
      files.should.containEql("test-rolling-stream-with-existing-files.3");
      files.should.containEql("test-rolling-stream-with-existing-files.4");
    });
  });

  // In Windows, you can't delete a directory if there is an open file handle
  if (process.platform !== "win32") {
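    // Removing the directory out from under an open stream should not crash
    // it; a later write should recreate the directory and the file.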
    describe("when the directory gets deleted", function() {
      let stream;
      before(function(done) {
        stream = new RollingFileStream(
          path.join("subdir", "test-rolling-file-stream"),
          5,
          5
        );
        stream.write("initial", "utf8", done);
      });

      after(async () => {
        await fs.unlink(path.join("subdir", "test-rolling-file-stream"));
        await fs.rmdir("subdir");
      });

      it("handles directory deletion gracefully", async function() {
        stream.theStream.on("error", e => {
          throw e;
        });

        await fs.unlink(path.join("subdir", "test-rolling-file-stream"));
        await fs.rmdir("subdir");
        await new Promise(resolve => stream.write("rollover", "utf8", resolve));
        await close(stream);
        (await fs.readFile(
          path.join("subdir", "test-rolling-file-stream"),
          "utf8"
        )).should.eql("rollover");
      });
    });
  }
});