// replace.js
  1. 'use strict'
  2. const Buffer = require('./buffer.js')
  3. // tar -r
  4. const hlo = require('./high-level-opt.js')
  5. const Pack = require('./pack.js')
  6. const Parse = require('./parse.js')
  7. const fs = require('fs')
  8. const fsm = require('fs-minipass')
  9. const t = require('./list.js')
  10. const path = require('path')
  11. // starting at the head of the file, read a Header
  12. // If the checksum is invalid, that's our position to start writing
// If it is valid, jump forward by the entry's size (rounded up to 512)
  14. // and try again.
  15. // Write the new Pack stream starting there.
  16. const Header = require('./header.js')
  17. const r = module.exports = (opt_, files, cb) => {
  18. const opt = hlo(opt_)
  19. if (!opt.file)
  20. throw new TypeError('file is required')
  21. if (opt.gzip)
  22. throw new TypeError('cannot append to compressed archives')
  23. if (!files || !Array.isArray(files) || !files.length)
  24. throw new TypeError('no files or directories specified')
  25. files = Array.from(files)
  26. return opt.sync ? replaceSync(opt, files)
  27. : replace(opt, files, cb)
  28. }
// Synchronous append: open the archive, scan forward header-by-header to
// find where the valid entries end, then write the new Pack stream there.
const replaceSync = (opt, files) => {
  const p = new Pack.Sync(opt)

  // Track whether we exited via an exception so the finally block knows
  // to close the fd (on success, ownership passes to streamSync).
  let threw = true
  let fd
  let position

  try {
    try {
      fd = fs.openSync(opt.file, 'r+')
    } catch (er) {
      // Missing archive: create it and append from offset 0.
      if (er.code === 'ENOENT')
        fd = fs.openSync(opt.file, 'w+')
      else
        throw er
    }

    const st = fs.fstatSync(fd)
    const headBuf = Buffer.alloc(512)

    // Walk the archive one 512-byte header block at a time.  The loop
    // header's `+= 512` accounts for the header block itself; the body
    // adds the entry's data blocks on top of that.
    POSITION: for (position = 0; position < st.size; position += 512) {
      // readSync may return short reads; loop until the full 512-byte
      // header is buffered or EOF is hit.
      for (let bufPos = 0, bytes = 0; bufPos < 512; bufPos += bytes) {
        bytes = fs.readSync(
          fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos
        )

        // gzip magic bytes at offset 0: refuse to append.
        if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b)
          throw new Error('cannot append to compressed archives')

        // EOF mid-header: truncated archive, write from here.
        if (!bytes)
          break POSITION
      }

      let h = new Header(headBuf)
      // Invalid checksum marks the end of valid entries (e.g. the
      // null-block archive trailer) -- this is our write position.
      if (!h.cksumValid)
        break

      // Entry body size rounded up to whole 512-byte blocks.
      let entryBlockSize = 512 * Math.ceil(h.size / 512)
      // Entry claims more data than the file holds: truncated, stop here.
      if (position + entryBlockSize + 512 > st.size)
        break

      // the 512 for the header we just parsed will be added as well
      // also jump ahead all the blocks for the body
      position += entryBlockSize

      if (opt.mtimeCache)
        opt.mtimeCache.set(h.path, h.mtime)
    }

    threw = false

    streamSync(opt, p, position, fd, files)
  } finally {
    if (threw)
      // Best-effort close on the error path; the original error is what
      // propagates, not any close failure.
      try { fs.closeSync(fd) } catch (er) {}
  }
}
  74. const streamSync = (opt, p, position, fd, files) => {
  75. const stream = new fsm.WriteStreamSync(opt.file, {
  76. fd: fd,
  77. start: position
  78. })
  79. p.pipe(stream)
  80. addFilesSync(p, files)
  81. }
// Asynchronous append: returns a Promise (or invokes cb) that settles when
// the new entries have been written to the archive at opt.file.
const replace = (opt, files, cb) => {
  files = Array.from(files)
  const p = new Pack(opt)

  // Scan forward from offset 0 to find where the valid entries end.
  // Invokes cb_(er) after closing fd on error, or cb_(null, position).
  const getPos = (fd, size, cb_) => {
    // Wrapper ensures the fd is closed before reporting a scan error;
    // on success the fd stays open for the WriteStream below.
    const cb = (er, pos) => {
      if (er)
        fs.close(fd, _ => cb_(er))
      else
        cb_(null, pos)
    }

    let position = 0
    // Empty (just-created) file: append from the start.
    if (size === 0)
      return cb(null, 0)

    let bufPos = 0
    const headBuf = Buffer.alloc(512)

    // Re-entrant read callback: accumulates a full 512-byte header,
    // validates it, then either stops or advances to the next header.
    const onread = (er, bytes) => {
      if (er)
        return cb(er)
      bufPos += bytes

      // Short read with data remaining: keep filling the header buffer.
      if (bufPos < 512 && bytes)
        return fs.read(
          fd, headBuf, bufPos, headBuf.length - bufPos,
          position + bufPos, onread
        )

      // gzip magic bytes at offset 0: refuse to append.
      if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b)
        return cb(new Error('cannot append to compressed archives'))

      // truncated header
      if (bufPos < 512)
        return cb(null, position)

      const h = new Header(headBuf)
      // Invalid checksum marks the end of valid entries -- write here.
      if (!h.cksumValid)
        return cb(null, position)

      // Entry body size rounded up to whole 512-byte blocks.
      const entryBlockSize = 512 * Math.ceil(h.size / 512)
      // Entry claims more data than the file holds: truncated, stop here.
      if (position + entryBlockSize + 512 > size)
        return cb(null, position)

      // Skip past this header and its body to the next header.
      position += entryBlockSize + 512
      if (position >= size)
        return cb(null, position)

      if (opt.mtimeCache)
        opt.mtimeCache.set(h.path, h.mtime)
      bufPos = 0
      fs.read(fd, headBuf, 0, 512, position, onread)
    }
    fs.read(fd, headBuf, 0, 512, position, onread)
  }

  const promise = new Promise((resolve, reject) => {
    p.on('error', reject)

    // Open read+write; if the archive doesn't exist yet, retry once with
    // 'w+' to create it (the flag check prevents an infinite retry loop).
    let flag = 'r+'
    const onopen = (er, fd) => {
      if (er && er.code === 'ENOENT' && flag === 'r+') {
        flag = 'w+'
        return fs.open(opt.file, flag, onopen)
      }
      if (er)
        return reject(er)
      fs.fstat(fd, (er, st) => {
        if (er)
          return fs.close(fd, () => reject(er))
        getPos(fd, st.size, (er, position) => {
          if (er)
            return reject(er)
          // Write the new pack data starting at the found offset; the
          // WriteStream takes ownership of the open fd.
          const stream = new fsm.WriteStream(opt.file, {
            fd: fd,
            start: position
          })
          p.pipe(stream)
          stream.on('error', reject)
          stream.on('close', resolve)
          addFilesAsync(p, files)
        })
      })
    }
    fs.open(opt.file, flag, onopen)
  })

  // Callback style if cb given (called for both outcomes), else Promise.
  return cb ? promise.then(cb, cb) : promise
}
  158. const addFilesSync = (p, files) => {
  159. files.forEach(file => {
  160. if (file.charAt(0) === '@')
  161. t({
  162. file: path.resolve(p.cwd, file.substr(1)),
  163. sync: true,
  164. noResume: true,
  165. onentry: entry => p.add(entry)
  166. })
  167. else
  168. p.add(file)
  169. })
  170. p.end()
  171. }
  172. const addFilesAsync = (p, files) => {
  173. while (files.length) {
  174. const file = files.shift()
  175. if (file.charAt(0) === '@')
  176. return t({
  177. file: path.resolve(p.cwd, file.substr(1)),
  178. noResume: true,
  179. onentry: entry => p.add(entry)
  180. }).then(_ => addFilesAsync(p, files))
  181. else
  182. p.add(file)
  183. }
  184. p.end()
  185. }