'use strict'

// tar -r
const hlo = require('./high-level-opt.js')
const Pack = require('./pack.js')
const fs = require('fs')
const fsm = require('fs-minipass')
const t = require('./list.js')
const path = require('path')

// starting at the head of the file, read a Header
// If the checksum is invalid, that's our position to start writing
// If it is valid, jump forward by the entry's size (rounded up to 512)
// and try again.
// Write the new Pack stream starting there.
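// For example (illustrative sizes): a 100-byte entry spans one 512-byte
// header block plus one 512-byte body block, so the scan advances 1024
// bytes past it; a 600-byte entry would advance 512 + 1024 = 1536 bytes.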

const Header = require('./header.js')

module.exports = (opt_, files, cb) => {
  const opt = hlo(opt_)

  if (!opt.file) {
    throw new TypeError('file is required')
  }

  if (opt.gzip) {
    throw new TypeError('cannot append to compressed archives')
  }

  if (!files || !Array.isArray(files) || !files.length) {
    throw new TypeError('no files or directories specified')
  }

  files = Array.from(files)

  return opt.sync ? replaceSync(opt, files)
    : replace(opt, files, cb)
}
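
// A minimal usage sketch (hypothetical file names):
//
//   const replace = require('./replace.js')
//   // async: returns a promise, or settles into cb when one is given
//   replace({ file: 'archive.tar' }, ['added.txt']).then(() => {})
//   // sync: blocks until the new entries are flushed
//   replace({ file: 'archive.tar', sync: true }, ['added.txt'])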

const replaceSync = (opt, files) => {
  const p = new Pack.Sync(opt)

  let threw = true
  let fd
  let position

  try {
    try {
      fd = fs.openSync(opt.file, 'r+')
    } catch (er) {
      if (er.code === 'ENOENT') {
        fd = fs.openSync(opt.file, 'w+')
      } else {
        throw er
      }
    }

    const st = fs.fstatSync(fd)
    const headBuf = Buffer.alloc(512)

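    // scan the archive one 512-byte header block at a time; the labeled
    // loop lets the inner read loop abandon the whole scan at EOF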
    POSITION: for (position = 0; position < st.size; position += 512) {
      for (let bufPos = 0, bytes = 0; bufPos < 512; bufPos += bytes) {
        bytes = fs.readSync(
          fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos
        )

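        // 0x1f 0x8b is the gzip magic number; a gzipped archive cannot
        // be appended to in place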
        if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b) {
          throw new Error('cannot append to compressed archives')
        }

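        // a zero-byte read means EOF fell inside this header slot, so
        // this is where the new entries should start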
        if (!bytes) {
          break POSITION
        }
      }

      const h = new Header(headBuf)
      if (!h.cksumValid) {
        break
      }
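      // entry bodies are padded out to whole 512-byte blocks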
      const entryBlockSize = 512 * Math.ceil(h.size / 512)
      if (position + entryBlockSize + 512 > st.size) {
        break
      }
      // jump ahead over all the blocks for the body; the loop increment
      // adds the 512 for the header we just parsed
      position += entryBlockSize
      if (opt.mtimeCache) {
        opt.mtimeCache.set(h.path, h.mtime)
      }
    }
    threw = false

    streamSync(opt, p, position, fd, files)
  } finally {
    if (threw) {
      try {
        fs.closeSync(fd)
      } catch (er) {}
    }
  }
}

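// pipe the Pack output into the file at the computed offset, reusing the
// already-open file descriptor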
const streamSync = (opt, p, position, fd, files) => {
  const stream = new fsm.WriteStreamSync(opt.file, {
    fd: fd,
    start: position,
  })
  p.pipe(stream)
  addFilesSync(p, files)
}

const replace = (opt, files, cb) => {
  files = Array.from(files)
  const p = new Pack(opt)

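  // async twin of the header scan in replaceSync: walk valid headers
  // until the last complete entry ends, then report that offset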
  const getPos = (fd, size, cb_) => {
    const cb = (er, pos) => {
      if (er) {
        fs.close(fd, _ => cb_(er))
      } else {
        cb_(null, pos)
      }
    }

    let position = 0
    if (size === 0) {
      return cb(null, 0)
    }

    let bufPos = 0
    const headBuf = Buffer.alloc(512)
    const onread = (er, bytes) => {
      if (er) {
        return cb(er)
      }
      bufPos += bytes
      if (bufPos < 512 && bytes) {
        return fs.read(
          fd, headBuf, bufPos, headBuf.length - bufPos,
          position + bufPos, onread
        )
      }

      if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b) {
        return cb(new Error('cannot append to compressed archives'))
      }

      // truncated header
      if (bufPos < 512) {
        return cb(null, position)
      }

      const h = new Header(headBuf)
      if (!h.cksumValid) {
        return cb(null, position)
      }

      const entryBlockSize = 512 * Math.ceil(h.size / 512)
      if (position + entryBlockSize + 512 > size) {
        return cb(null, position)
      }

      position += entryBlockSize + 512
      if (position >= size) {
        return cb(null, position)
      }

      if (opt.mtimeCache) {
        opt.mtimeCache.set(h.path, h.mtime)
      }
      bufPos = 0
      fs.read(fd, headBuf, 0, 512, position, onread)
    }
    fs.read(fd, headBuf, 0, 512, position, onread)
  }

  const promise = new Promise((resolve, reject) => {
    p.on('error', reject)
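    // open the archive for read+write; if it doesn't exist yet, retry
    // with 'w+' to create it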
    let flag = 'r+'
    const onopen = (er, fd) => {
      if (er && er.code === 'ENOENT' && flag === 'r+') {
        flag = 'w+'
        return fs.open(opt.file, flag, onopen)
      }

      if (er) {
        return reject(er)
      }

      fs.fstat(fd, (er, st) => {
        if (er) {
          return fs.close(fd, () => reject(er))
        }

        getPos(fd, st.size, (er, position) => {
          if (er) {
            return reject(er)
          }
          const stream = new fsm.WriteStream(opt.file, {
            fd: fd,
            start: position,
          })
          p.pipe(stream)
          stream.on('error', reject)
          stream.on('close', resolve)
          addFilesAsync(p, files)
        })
      })
    }
    fs.open(opt.file, flag, onopen)
  })

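  // hybrid API: settle into the callback when one was given (the error,
  // if any, arrives as its argument); otherwise hand back the promise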
  return cb ? promise.then(cb, cb) : promise
}

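// an argument starting with '@' names an existing archive whose entries
// are listed out and re-added to the new Pack stream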
const addFilesSync = (p, files) => {
  files.forEach(file => {
    if (file.charAt(0) === '@') {
      t({
        file: path.resolve(p.cwd, file.slice(1)),
        sync: true,
        noResume: true,
        onentry: entry => p.add(entry),
      })
    } else {
      p.add(file)
    }
  })
  p.end()
}

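// same as addFilesSync, except each @archive listing is awaited before
// the remaining files are added, preserving argument order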
const addFilesAsync = (p, files) => {
  while (files.length) {
    const file = files.shift()
    if (file.charAt(0) === '@') {
      return t({
        file: path.resolve(p.cwd, file.slice(1)),
        noResume: true,
        onentry: entry => p.add(entry),
      }).then(_ => addFilesAsync(p, files))
    } else {
      p.add(file)
    }
  }
  p.end()
}