'use strict'

// tar -r
const hlo = require('./high-level-opt.js')
const Pack = require('./pack.js')
const fs = require('fs')
const fsm = require('fs-minipass')
const t = require('./list.js')
const path = require('path')

// Starting at the head of the file, read a Header.
// If the checksum is invalid, that's our position to start writing.
// If it is valid, jump forward by the entry's size (rounded up to a
// multiple of 512) and try again.
// Write the new Pack stream starting there.

const Header = require('./header.js')

module.exports = (opt_, files, cb) => {
  const opt = hlo(opt_)

  if (!opt.file)
    throw new TypeError('file is required')

  if (opt.gzip)
    throw new TypeError('cannot append to compressed archives')

  if (!files || !Array.isArray(files) || !files.length)
    throw new TypeError('no files or directories specified')

  files = Array.from(files)

  return opt.sync ? replaceSync(opt, files)
    : replace(opt, files, cb)
}
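
// A minimal usage sketch, assuming this module is exposed as tar.replace
// (aliased tar.r), as in node-tar's public API:
//
//   const tar = require('tar')
//   // append file1.js, overwriting the archive's old EOF blocks
//   tar.r({ file: 'archive.tar' }, ['file1.js'])
//     .then(() => console.log('appended'))
//
// With { sync: true } the call blocks until done; if a callback is given,
// it is invoked on completion or with the error.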

const replaceSync = (opt, files) => {
  const p = new Pack.Sync(opt)

  let threw = true
  let fd
  let position

  try {
    try {
      fd = fs.openSync(opt.file, 'r+')
    } catch (er) {
      if (er.code === 'ENOENT')
        fd = fs.openSync(opt.file, 'w+')
      else
        throw er
    }

    const st = fs.fstatSync(fd)
    const headBuf = Buffer.alloc(512)

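    // Scan forward one 512-byte header block at a time.  A gzip magic
    // number (0x1f 0x8b) at offset 0 means a compressed archive we must
    // refuse; a short read, a bad checksum, or a body that would run
    // past EOF means we've found where to start writing.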
    POSITION: for (position = 0; position < st.size; position += 512) {
      for (let bufPos = 0, bytes = 0; bufPos < 512; bufPos += bytes) {
        bytes = fs.readSync(
          fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos
        )

        if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b)
          throw new Error('cannot append to compressed archives')

        if (!bytes)
          break POSITION
      }

      const h = new Header(headBuf)
      if (!h.cksumValid)
        break
      const entryBlockSize = 512 * Math.ceil(h.size / 512)
      if (position + entryBlockSize + 512 > st.size)
        break
      // jump ahead over all the blocks for the body; the loop increment
      // adds the 512 for the header we just parsed
      position += entryBlockSize
      if (opt.mtimeCache)
        opt.mtimeCache.set(h.path, h.mtime)
    }
    threw = false

    streamSync(opt, p, position, fd, files)
  } finally {
    if (threw) {
      try {
        fs.closeSync(fd)
      } catch (er) {}
    }
  }
}

const streamSync = (opt, p, position, fd, files) => {
  const stream = new fsm.WriteStreamSync(opt.file, {
    fd: fd,
    start: position,
  })
  p.pipe(stream)
  addFilesSync(p, files)
}
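
// The write stream reuses the already-open fd and starts at the computed
// position, so the new entries (and the Pack stream's trailing null
// blocks) overwrite the old end-of-archive marker in place.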

const replace = (opt, files, cb) => {
  files = Array.from(files)
  const p = new Pack(opt)

  const getPos = (fd, size, cb_) => {
    const cb = (er, pos) => {
      if (er)
        fs.close(fd, _ => cb_(er))
      else
        cb_(null, pos)
    }

    let position = 0
    if (size === 0)
      return cb(null, 0)

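    // onread re-enters itself until a full 512-byte header has been read
    // (or EOF); it is the async counterpart of the POSITION loop above.
    // The cb wrapper above ensures the fd is closed before any scan
    // error is reported.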
    let bufPos = 0
    const headBuf = Buffer.alloc(512)
    const onread = (er, bytes) => {
      if (er)
        return cb(er)
      bufPos += bytes
      if (bufPos < 512 && bytes) {
        return fs.read(
          fd, headBuf, bufPos, headBuf.length - bufPos,
          position + bufPos, onread
        )
      }

      if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b)
        return cb(new Error('cannot append to compressed archives'))

      // truncated header
      if (bufPos < 512)
        return cb(null, position)

      const h = new Header(headBuf)
      if (!h.cksumValid)
        return cb(null, position)

      const entryBlockSize = 512 * Math.ceil(h.size / 512)
      if (position + entryBlockSize + 512 > size)
        return cb(null, position)

      position += entryBlockSize + 512
      if (position >= size)
        return cb(null, position)

      if (opt.mtimeCache)
        opt.mtimeCache.set(h.path, h.mtime)
      bufPos = 0
      fs.read(fd, headBuf, 0, 512, position, onread)
    }
    fs.read(fd, headBuf, 0, 512, position, onread)
  }

  const promise = new Promise((resolve, reject) => {
    p.on('error', reject)
    let flag = 'r+'
    const onopen = (er, fd) => {
      if (er && er.code === 'ENOENT' && flag === 'r+') {
        flag = 'w+'
        return fs.open(opt.file, flag, onopen)
      }

      if (er)
        return reject(er)

      fs.fstat(fd, (er, st) => {
        if (er)
          return fs.close(fd, () => reject(er))

        getPos(fd, st.size, (er, position) => {
          if (er)
            return reject(er)
          const stream = new fsm.WriteStream(opt.file, {
            fd: fd,
            start: position,
          })
          p.pipe(stream)
          stream.on('error', reject)
          stream.on('close', resolve)
          addFilesAsync(p, files)
        })
      })
    }
    fs.open(opt.file, flag, onopen)
  })

  return cb ? promise.then(cb, cb) : promise
}
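
// A file argument starting with '@' names another tar archive whose
// entries are copied into this one (like `tar -r @old.tar`), e.g.
// (assuming the tar.r alias sketched above):
//
//   tar.r({ file: 'combined.tar' }, ['@old.tar', 'extra.txt'])
//
// list.js re-parses that archive and hands each entry to the Pack stream.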

const addFilesSync = (p, files) => {
  files.forEach(file => {
    if (file.charAt(0) === '@') {
      t({
        file: path.resolve(p.cwd, file.substr(1)),
        sync: true,
        noResume: true,
        onentry: entry => p.add(entry),
      })
    } else
      p.add(file)
  })
  p.end()
}

const addFilesAsync = (p, files) => {
  while (files.length) {
    const file = files.shift()
    if (file.charAt(0) === '@') {
      return t({
        file: path.resolve(p.cwd, file.substr(1)),
        noResume: true,
        onentry: entry => p.add(entry),
      }).then(_ => addFilesAsync(p, files))
    } else
      p.add(file)
  }
  p.end()
}