'use strict'

// A readable tar stream creator
// Technically, this is a transform stream that you write paths into,
// and tar format comes out of it.
// The `add()` method is like `write()` but returns this,
// and `end()` returns `this` as well, so you can
// do `new Pack(opt).add('files').add('dir').end().pipe(output)`
// You could also do something like:
// streamOfPaths().pipe(new Pack()).pipe(new fs.WriteStream('out.tar'))
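//
// A rough usage sketch (the paths and output file name here are made up
// for illustration):
//
//   const fs = require('fs')
//   const Pack = require('./pack.js')
//   new Pack({ cwd: '/some/project', gzip: true })
//     .add('src')
//     .add('README.md')
//     .end()
//     .pipe(fs.createWriteStream('project.tgz'))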

class PackJob {
  constructor (path, absolute) {
    this.path = path || './'
    this.absolute = absolute
    this.entry = null
    this.stat = null
    this.readdir = null
    this.pending = false
    this.ignore = false
    this.piped = false
  }
}

const MiniPass = require('minipass')
const zlib = require('minizlib')
const ReadEntry = require('./read-entry.js')
const WriteEntry = require('./write-entry.js')
const WriteEntrySync = WriteEntry.Sync
const WriteEntryTar = WriteEntry.Tar
const Yallist = require('yallist')
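// A tar archive is terminated by two 512-byte blocks of zeroes.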
const EOF = Buffer.alloc(1024)
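// Symbols used as keys for the internal methods and state of Pack, so
// they don't collide with (or show up as) public properties.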
const ONSTAT = Symbol('onStat')
const ENDED = Symbol('ended')
const QUEUE = Symbol('queue')
const CURRENT = Symbol('current')
const PROCESS = Symbol('process')
const PROCESSING = Symbol('processing')
const PROCESSJOB = Symbol('processJob')
const JOBS = Symbol('jobs')
const JOBDONE = Symbol('jobDone')
const ADDFSENTRY = Symbol('addFSEntry')
const ADDTARENTRY = Symbol('addTarEntry')
const STAT = Symbol('stat')
const READDIR = Symbol('readdir')
const ONREADDIR = Symbol('onreaddir')
const PIPE = Symbol('pipe')
const ENTRY = Symbol('entry')
const ENTRYOPT = Symbol('entryOpt')
const WRITEENTRYCLASS = Symbol('writeEntryClass')
const WRITE = Symbol('write')
const ONDRAIN = Symbol('ondrain')

const fs = require('fs')
const path = require('path')
const warner = require('./warn-mixin.js')
const normPath = require('./normalize-windows-path.js')

const Pack = warner(class Pack extends MiniPass {
  constructor (opt) {
    super(opt)
    opt = opt || Object.create(null)
    this.opt = opt
    this.file = opt.file || ''
    this.cwd = opt.cwd || process.cwd()
    this.maxReadSize = opt.maxReadSize
    this.preservePaths = !!opt.preservePaths
    this.strict = !!opt.strict
    this.noPax = !!opt.noPax
    this.prefix = normPath(opt.prefix || '')
    this.linkCache = opt.linkCache || new Map()
    this.statCache = opt.statCache || new Map()
    this.readdirCache = opt.readdirCache || new Map()

    this[WRITEENTRYCLASS] = WriteEntry
    if (typeof opt.onwarn === 'function')
      this.on('warn', opt.onwarn)

    this.portable = !!opt.portable
    this.zip = null
    if (opt.gzip) {
      if (typeof opt.gzip !== 'object')
        opt.gzip = {}
      if (this.portable)
        opt.gzip.portable = true
      this.zip = new zlib.Gzip(opt.gzip)
      this.zip.on('data', chunk => super.write(chunk))
      this.zip.on('end', _ => super.end())
      this.zip.on('drain', _ => this[ONDRAIN]())
      this.on('resume', _ => this.zip.resume())
    } else
      this.on('drain', this[ONDRAIN])

    this.noDirRecurse = !!opt.noDirRecurse
    this.follow = !!opt.follow
    this.noMtime = !!opt.noMtime
    this.mtime = opt.mtime || null

    this.filter = typeof opt.filter === 'function' ? opt.filter : _ => true

    this[QUEUE] = new Yallist()
    this[JOBS] = 0
    this.jobs = +opt.jobs || 4
    this[PROCESSING] = false
    this[ENDED] = false
  }

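  // Write a chunk directly to the underlying stream, bypassing the
  // path-queueing write() override below.  PackSync's [PIPE] uses this.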
  [WRITE] (chunk) {
    return super.write(chunk)
  }

  add (path) {
    this.write(path)
    return this
  }

  end (path) {
    if (path)
      this.write(path)
    this[ENDED] = true
    this[PROCESS]()
    return this
  }

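  // Paths written to the stream become queued jobs.  A ReadEntry (an
  // entry from another tar archive) is re-packed as-is; anything else is
  // treated as a filesystem path.  Returns this.flowing for backpressure.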
  write (path) {
    if (this[ENDED])
      throw new Error('write after end')

    if (path instanceof ReadEntry)
      this[ADDTARENTRY](path)
    else
      this[ADDFSENTRY](path)
    return this.flowing
  }

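  // Queue a job for an entry that came from another tar stream.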
  [ADDTARENTRY] (p) {
    const absolute = normPath(path.resolve(this.cwd, p.path))
    // in this case, we don't have to wait for the stat
    if (!this.filter(p.path, p))
      p.resume()
    else {
      const job = new PackJob(p.path, absolute, false)
      job.entry = new WriteEntryTar(p, this[ENTRYOPT](job))
      job.entry.on('end', _ => this[JOBDONE](job))
      this[JOBS] += 1
      this[QUEUE].push(job)
    }

    this[PROCESS]()
  }

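  // Queue a job for a path on the filesystem; the stat and readdir
  // happen later, from [PROCESSJOB].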
  [ADDFSENTRY] (p) {
    const absolute = normPath(path.resolve(this.cwd, p))
    this[QUEUE].push(new PackJob(p, absolute))
    this[PROCESS]()
  }

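  // Asynchronously stat the job's path (lstat by default, stat when
  // following symlinks), then hand the result to [ONSTAT], which caches
  // it and applies the filter.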
  [STAT] (job) {
    job.pending = true
    this[JOBS] += 1
    const stat = this.follow ? 'stat' : 'lstat'
    fs[stat](job.absolute, (er, stat) => {
      job.pending = false
      this[JOBS] -= 1
      if (er)
        this.emit('error', er)
      else
        this[ONSTAT](job, stat)
    })
  }

  [ONSTAT] (job, stat) {
    this.statCache.set(job.absolute, stat)
    job.stat = stat

    // now we have the stat, we can filter it.
    if (!this.filter(job.path, stat))
      job.ignore = true

    this[PROCESS]()
  }

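  // Asynchronously read a directory's entries; [ONREADDIR] caches the
  // result so each directory is only listed once.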
  [READDIR] (job) {
    job.pending = true
    this[JOBS] += 1
    fs.readdir(job.absolute, (er, entries) => {
      job.pending = false
      this[JOBS] -= 1
      if (er)
        return this.emit('error', er)
      this[ONREADDIR](job, entries)
    })
  }

  [ONREADDIR] (job, entries) {
    this.readdirCache.set(job.absolute, entries)
    job.readdir = entries
    this[PROCESS]()
  }

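  // Walk the queue, advancing jobs until up to this.jobs of them are in
  // flight.  Jobs that get filtered out along the way are removed.  Once
  // the stream has ended and the queue is empty, write the terminating
  // EOF blocks (through the gzip stream if there is one).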
  [PROCESS] () {
    if (this[PROCESSING])
      return

    this[PROCESSING] = true
    for (let w = this[QUEUE].head;
      w !== null && this[JOBS] < this.jobs;
      w = w.next) {
      this[PROCESSJOB](w.value)
      if (w.value.ignore) {
        const p = w.next
        this[QUEUE].removeNode(w)
        w.next = p
      }
    }

    this[PROCESSING] = false

    if (this[ENDED] && !this[QUEUE].length && this[JOBS] === 0) {
      if (this.zip)
        this.zip.end(EOF)
      else {
        super.write(EOF)
        super.end()
      }
    }
  }

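  // The job at the head of the queue; its entry is the one currently
  // being piped to the output.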
  get [CURRENT] () {
    return this[QUEUE] && this[QUEUE].head && this[QUEUE].head.value
  }

  [JOBDONE] (job) {
    this[QUEUE].shift()
    this[JOBS] -= 1
    this[PROCESS]()
  }

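  // Advance a single job as far as it can go right now: stat it, read
  // its directory if recursing, create its write entry, and start piping
  // once it reaches the head of the queue.  Bails out early while the
  // job is still waiting on an async stat or readdir.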
  [PROCESSJOB] (job) {
    if (job.pending)
      return

    if (job.entry) {
      if (job === this[CURRENT] && !job.piped)
        this[PIPE](job)
      return
    }

    if (!job.stat) {
      if (this.statCache.has(job.absolute))
        this[ONSTAT](job, this.statCache.get(job.absolute))
      else
        this[STAT](job)
    }
    if (!job.stat)
      return

    // filtered out!
    if (job.ignore)
      return

    if (!this.noDirRecurse && job.stat.isDirectory() && !job.readdir) {
      if (this.readdirCache.has(job.absolute))
        this[ONREADDIR](job, this.readdirCache.get(job.absolute))
      else
        this[READDIR](job)
      if (!job.readdir)
        return
    }

    // we know it doesn't have an entry, because that got checked above
    job.entry = this[ENTRY](job)
    if (!job.entry) {
      job.ignore = true
      return
    }

    if (job === this[CURRENT] && !job.piped)
      this[PIPE](job)
  }

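  // The options object passed to each WriteEntry constructed for a job.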
  [ENTRYOPT] (job) {
    return {
      onwarn: (code, msg, data) => this.warn(code, msg, data),
      noPax: this.noPax,
      cwd: this.cwd,
      absolute: job.absolute,
      preservePaths: this.preservePaths,
      maxReadSize: this.maxReadSize,
      strict: this.strict,
      portable: this.portable,
      linkCache: this.linkCache,
      statCache: this.statCache,
      noMtime: this.noMtime,
      mtime: this.mtime,
      prefix: this.prefix,
    }
  }

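  // Create the WriteEntry for a job.  When the entry ends, the job is
  // done; errors (including synchronous construction errors) are
  // re-emitted on the Pack stream rather than thrown.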
  [ENTRY] (job) {
    this[JOBS] += 1
    try {
      return new this[WRITEENTRYCLASS](job.path, this[ENTRYOPT](job))
        .on('end', () => this[JOBDONE](job))
        .on('error', er => this.emit('error', er))
    } catch (er) {
      this.emit('error', er)
    }
  }

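  // When the output (or the gzip stream) drains, resume reading from the
  // entry that is currently being piped.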
  [ONDRAIN] () {
    if (this[CURRENT] && this[CURRENT].entry)
      this[CURRENT].entry.resume()
  }

  // like .pipe() but using super, because our write() is special
  [PIPE] (job) {
    job.piped = true

    if (job.readdir) {
      job.readdir.forEach(entry => {
        const p = job.path
        const base = p === './' ? '' : p.replace(/\/*$/, '/')
        this[ADDFSENTRY](base + entry)
      })
    }

    const source = job.entry
    const zip = this.zip

    if (zip) {
      source.on('data', chunk => {
        if (!zip.write(chunk))
          source.pause()
      })
    } else {
      source.on('data', chunk => {
        if (!super.write(chunk))
          source.pause()
      })
    }
  }

  pause () {
    if (this.zip)
      this.zip.pause()
    return super.pause()
  }
})

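// Synchronous variant: uses WriteEntrySync for entries and does all of
// the stat, readdir, and piping work in the same tick.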
class PackSync extends Pack {
  constructor (opt) {
    super(opt)
    this[WRITEENTRYCLASS] = WriteEntrySync
  }

  // pause/resume are no-ops in sync streams.
  pause () {}
  resume () {}

  [STAT] (job) {
    const stat = this.follow ? 'statSync' : 'lstatSync'
    this[ONSTAT](job, fs[stat](job.absolute))
  }

  [READDIR] (job, stat) {
    this[ONREADDIR](job, fs.readdirSync(job.absolute))
  }

  // gotta get it all in this tick
  [PIPE] (job) {
    const source = job.entry
    const zip = this.zip

    if (job.readdir) {
      job.readdir.forEach(entry => {
        const p = job.path
        const base = p === './' ? '' : p.replace(/\/*$/, '/')
        this[ADDFSENTRY](base + entry)
      })
    }

    if (zip) {
      source.on('data', chunk => {
        zip.write(chunk)
      })
    } else {
      source.on('data', chunk => {
        super[WRITE](chunk)
      })
    }
  }
}

Pack.Sync = PackSync

module.exports = Pack