pack.js

'use strict'
const Buffer = require('./buffer.js')

// A readable tar stream creator
// Technically, this is a transform stream that you write paths into,
// and tar format comes out of.
// The `add()` method is like `write()` but returns this,
// and `end()` returns `this` as well, so you can
// do `new Pack(opt).add('files').add('dir').end().pipe(output)`
// You could also do something like:
// streamOfPaths().pipe(new Pack()).pipe(new fs.WriteStream('out.tar'))
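
// A single queued path: tracks the stat/readdir results, the generated
// write entry, and the pending/ignore/piped state for one path as it
// moves through the pack queue.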
class PackJob {
  constructor (path, absolute) {
    this.path = path || './'
    this.absolute = absolute
    this.entry = null
    this.stat = null
    this.readdir = null
    this.pending = false
    this.ignore = false
    this.piped = false
  }
}

const MiniPass = require('minipass')
const zlib = require('minizlib')
const ReadEntry = require('./read-entry.js')
const WriteEntry = require('./write-entry.js')
const WriteEntrySync = WriteEntry.Sync
const WriteEntryTar = WriteEntry.Tar
const Yallist = require('yallist')

// two zero-filled 512-byte blocks: the tar end-of-archive marker
const EOF = Buffer.alloc(1024)
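
// Symbols used as private method and state keys on the Pack classes below.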
const ONSTAT = Symbol('onStat')
const ENDED = Symbol('ended')
const QUEUE = Symbol('queue')
const CURRENT = Symbol('current')
const PROCESS = Symbol('process')
const PROCESSING = Symbol('processing')
const PROCESSJOB = Symbol('processJob')
const JOBS = Symbol('jobs')
const JOBDONE = Symbol('jobDone')
const ADDFSENTRY = Symbol('addFSEntry')
const ADDTARENTRY = Symbol('addTarEntry')
const STAT = Symbol('stat')
const READDIR = Symbol('readdir')
const ONREADDIR = Symbol('onreaddir')
const PIPE = Symbol('pipe')
const ENTRY = Symbol('entry')
const ENTRYOPT = Symbol('entryOpt')
const WRITEENTRYCLASS = Symbol('writeEntryClass')
const WRITE = Symbol('write')
const ONDRAIN = Symbol('ondrain')

const fs = require('fs')
const path = require('path')
const warner = require('./warn-mixin.js')
const normPath = require('./normalize-windows-path.js')
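
// Pack: a MiniPass stream (with the warn mixin) that turns a queue of
// PackJobs into tar output. Paths written in become jobs; stat and
// readdir results are cached; up to `jobs` filesystem operations run at
// once, but entry data is always emitted in queue order. If opt.gzip is
// set, output is passed through a minizlib Gzip stream first.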
const Pack = warner(class Pack extends MiniPass {
  constructor (opt) {
    super(opt)
    opt = opt || Object.create(null)
    this.opt = opt
    this.cwd = opt.cwd || process.cwd()
    this.maxReadSize = opt.maxReadSize
    this.preservePaths = !!opt.preservePaths
    this.strict = !!opt.strict
    this.noPax = !!opt.noPax
    this.prefix = normPath(opt.prefix || '')
    this.linkCache = opt.linkCache || new Map()
    this.statCache = opt.statCache || new Map()
    this.readdirCache = opt.readdirCache || new Map()
    this[WRITEENTRYCLASS] = WriteEntry
    if (typeof opt.onwarn === 'function')
      this.on('warn', opt.onwarn)

    this.zip = null
    if (opt.gzip) {
      if (typeof opt.gzip !== 'object')
        opt.gzip = {}
      this.zip = new zlib.Gzip(opt.gzip)
      this.zip.on('data', chunk => super.write(chunk))
      this.zip.on('end', _ => super.end())
      this.zip.on('drain', _ => this[ONDRAIN]())
      this.on('resume', _ => this.zip.resume())
    } else
      this.on('drain', this[ONDRAIN])

    this.portable = !!opt.portable
    this.noDirRecurse = !!opt.noDirRecurse
    this.follow = !!opt.follow
    this.noMtime = !!opt.noMtime
    this.mtime = opt.mtime || null

    this.filter = typeof opt.filter === 'function' ? opt.filter : _ => true

    this[QUEUE] = new Yallist()
    this[JOBS] = 0
    this.jobs = +opt.jobs || 4
    this[PROCESSING] = false
    this[ENDED] = false
  }

  [WRITE] (chunk) {
    return super.write(chunk)
  }

  add (path) {
    this.write(path)
    return this
  }

  end (path) {
    if (path)
      this.write(path)
    this[ENDED] = true
    this[PROCESS]()
    return this
  }

  write (path) {
    if (this[ENDED])
      throw new Error('write after end')

    if (path instanceof ReadEntry)
      this[ADDTARENTRY](path)
    else
      this[ADDFSENTRY](path)
    return this.flowing
  }

  [ADDTARENTRY] (p) {
    const absolute = normPath(path.resolve(this.cwd, p.path))
    // in this case, we don't have to wait for the stat
    if (!this.filter(p.path, p))
      p.resume()
    else {
      const job = new PackJob(p.path, absolute, false)
      job.entry = new WriteEntryTar(p, this[ENTRYOPT](job))
      job.entry.on('end', _ => this[JOBDONE](job))
      this[JOBS] += 1
      this[QUEUE].push(job)
    }

    this[PROCESS]()
  }

  [ADDFSENTRY] (p) {
    const absolute = normPath(path.resolve(this.cwd, p))
    this[QUEUE].push(new PackJob(p, absolute))
    this[PROCESS]()
  }

  [STAT] (job) {
    job.pending = true
    this[JOBS] += 1
    const stat = this.follow ? 'stat' : 'lstat'
    fs[stat](job.absolute, (er, stat) => {
      job.pending = false
      this[JOBS] -= 1
      if (er)
        this.emit('error', er)
      else
        this[ONSTAT](job, stat)
    })
  }

  [ONSTAT] (job, stat) {
    this.statCache.set(job.absolute, stat)
    job.stat = stat

    // now we have the stat, we can filter it.
    if (!this.filter(job.path, stat))
      job.ignore = true

    this[PROCESS]()
  }

  [READDIR] (job) {
    job.pending = true
    this[JOBS] += 1
    fs.readdir(job.absolute, (er, entries) => {
      job.pending = false
      this[JOBS] -= 1
      if (er)
        return this.emit('error', er)
      this[ONREADDIR](job, entries)
    })
  }

  [ONREADDIR] (job, entries) {
    this.readdirCache.set(job.absolute, entries)
    job.readdir = entries
    this[PROCESS]()
  }

  [PROCESS] () {
    if (this[PROCESSING])
      return

    this[PROCESSING] = true
    for (let w = this[QUEUE].head;
      w !== null && this[JOBS] < this.jobs;
      w = w.next) {
      this[PROCESSJOB](w.value)
      if (w.value.ignore) {
        const p = w.next
        this[QUEUE].removeNode(w)
        w.next = p
      }
    }

    this[PROCESSING] = false

    if (this[ENDED] && !this[QUEUE].length && this[JOBS] === 0) {
      if (this.zip)
        this.zip.end(EOF)
      else {
        super.write(EOF)
        super.end()
      }
    }
  }

  get [CURRENT] () {
    return this[QUEUE] && this[QUEUE].head && this[QUEUE].head.value
  }

  [JOBDONE] (job) {
    this[QUEUE].shift()
    this[JOBS] -= 1
    this[PROCESS]()
  }

  [PROCESSJOB] (job) {
    if (job.pending)
      return

    if (job.entry) {
      if (job === this[CURRENT] && !job.piped)
        this[PIPE](job)
      return
    }

    if (!job.stat) {
      if (this.statCache.has(job.absolute))
        this[ONSTAT](job, this.statCache.get(job.absolute))
      else
        this[STAT](job)
    }
    if (!job.stat)
      return

    // filtered out!
    if (job.ignore)
      return

    if (!this.noDirRecurse && job.stat.isDirectory() && !job.readdir) {
      if (this.readdirCache.has(job.absolute))
        this[ONREADDIR](job, this.readdirCache.get(job.absolute))
      else
        this[READDIR](job)
      if (!job.readdir)
        return
    }

    // we know it doesn't have an entry, because that got checked above
    job.entry = this[ENTRY](job)
    if (!job.entry) {
      job.ignore = true
      return
    }

    if (job === this[CURRENT] && !job.piped)
      this[PIPE](job)
  }

  [ENTRYOPT] (job) {
    return {
      onwarn: (msg, data) => {
        this.warn(msg, data)
      },
      noPax: this.noPax,
      cwd: this.cwd,
      absolute: job.absolute,
      preservePaths: this.preservePaths,
      maxReadSize: this.maxReadSize,
      strict: this.strict,
      portable: this.portable,
      linkCache: this.linkCache,
      statCache: this.statCache,
      noMtime: this.noMtime,
      mtime: this.mtime,
      prefix: this.prefix,
    }
  }

  [ENTRY] (job) {
    this[JOBS] += 1
    try {
      return new this[WRITEENTRYCLASS](job.path, this[ENTRYOPT](job))
        .on('end', () => this[JOBDONE](job))
        .on('error', er => this.emit('error', er))
    } catch (er) {
      this.emit('error', er)
    }
  }

  [ONDRAIN] () {
    if (this[CURRENT] && this[CURRENT].entry)
      this[CURRENT].entry.resume()
  }

  // like .pipe() but using super, because our write() is special
  [PIPE] (job) {
    job.piped = true

    if (job.readdir)
      job.readdir.forEach(entry => {
        const p = job.path
        const base = p === './' ? '' : p.replace(/\/*$/, '/')
        this[ADDFSENTRY](base + entry)
      })

    const source = job.entry
    const zip = this.zip

    if (zip)
      source.on('data', chunk => {
        if (!zip.write(chunk))
          source.pause()
      })
    else
      source.on('data', chunk => {
        if (!super.write(chunk))
          source.pause()
      })
  }

  pause () {
    if (this.zip)
      this.zip.pause()
    return super.pause()
  }
})
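
// Synchronous variant: the same queue logic, but stat, readdir, and entry
// piping all happen synchronously, so the whole archive is produced in a
// single tick.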
class PackSync extends Pack {
  constructor (opt) {
    super(opt)
    this[WRITEENTRYCLASS] = WriteEntrySync
  }

  // pause/resume are no-ops in sync streams.
  pause () {}
  resume () {}

  [STAT] (job) {
    const stat = this.follow ? 'statSync' : 'lstatSync'
    this[ONSTAT](job, fs[stat](job.absolute))
  }

  [READDIR] (job, stat) {
    this[ONREADDIR](job, fs.readdirSync(job.absolute))
  }

  // gotta get it all in this tick
  [PIPE] (job) {
    const source = job.entry
    const zip = this.zip

    if (job.readdir)
      job.readdir.forEach(entry => {
        const p = job.path
        const base = p === './' ? '' : p.replace(/\/*$/, '/')
        this[ADDFSENTRY](base + entry)
      })

    if (zip)
      source.on('data', chunk => {
        zip.write(chunk)
      })
    else
      source.on('data', chunk => {
        super[WRITE](chunk)
      })
  }
}

Pack.Sync = PackSync

module.exports = Pack
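
// A minimal usage sketch (not part of this module), following the chained
// add/end/pipe API described in the header comment above. The input path
// 'some-dir' and the output filename are illustrative only.
//
//   const fs = require('fs')
//   const Pack = require('./pack.js')
//
//   new Pack({ cwd: process.cwd(), gzip: true })
//     .add('some-dir')
//     .end()
//     .pipe(fs.createWriteStream('out.tar.gz'))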