'use strict'

const Buffer = require('./buffer.js')

// XXX: This shares a lot in common with extract.js
// maybe some DRY opportunity here?

// tar -t
const hlo = require('./high-level-opt.js')
const Parser = require('./parse.js')
const fs = require('fs')
const fsm = require('fs-minipass')
const path = require('path')

const t = module.exports = (opt_, files, cb) => {
  // normalize the (opt, files, cb) signature: any of the three may be
  // omitted, so shift arguments into place before doing anything else
  if (typeof opt_ === 'function')
    cb = opt_, files = null, opt_ = {}
  else if (Array.isArray(opt_))
    files = opt_, opt_ = {}

  if (typeof files === 'function')
    cb = files, files = null

  if (!files)
    files = []
  else
    files = Array.from(files)

  const opt = hlo(opt_)

  if (opt.sync && typeof cb === 'function')
    throw new TypeError('callback not supported for sync tar functions')

  if (!opt.file && typeof cb === 'function')
    throw new TypeError('callback only supported with file option')

  if (files.length)
    filesFilter(opt, files)

  if (!opt.noResume)
    onentryFunction(opt)

  return opt.file && opt.sync ? listFileSync(opt)
    : opt.file ? listFile(opt, cb)
    : list(opt)
}

// wrap the user's onentry handler (if any) so that every entry is
// resumed; otherwise an unconsumed entry would stall the parse stream
const onentryFunction = opt => {
  const onentry = opt.onentry
  opt.onentry = onentry ? e => {
    onentry(e)
    e.resume()
  } : e => e.resume()
}

// construct a filter that limits the file entries listed
// include child entries if a dir is included
const filesFilter = (opt, files) => {
  const map = new Map(files.map(f => [f.replace(/\/+$/, ''), true]))
  const filter = opt.filter

  // walk up from the entry path toward the root, caching the answer for
  // each ancestor so repeated lookups stay cheap
  const mapHas = (file, r) => {
    const root = r || path.parse(file).root || '.'
    const ret = file === root ? false
      : map.has(file) ? map.get(file)
      : mapHas(path.dirname(file), root)

    map.set(file, ret)
    return ret
  }

  opt.filter = filter
    ? (file, entry) => filter(file, entry) && mapHas(file.replace(/\/+$/, ''))
    : file => mapHas(file.replace(/\/+$/, ''))
}

// read opt.file synchronously and feed it to the parser, either in one
// readFileSync call for small archives or in maxReadSize chunks
const listFileSync = opt => {
  const p = list(opt)
  const file = opt.file
  let threw = true
  let fd
  try {
    const stat = fs.statSync(file)
    const readSize = opt.maxReadSize || 16*1024*1024
    if (stat.size < readSize) {
      p.end(fs.readFileSync(file))
    } else {
      let pos = 0
      const buf = Buffer.allocUnsafe(readSize)
      fd = fs.openSync(file, 'r')
      while (pos < stat.size) {
        const bytesRead = fs.readSync(fd, buf, 0, readSize, pos)
        pos += bytesRead
        p.write(buf.slice(0, bytesRead))
      }
      p.end()
      // close on the success path as well; the finally block only closes
      // the descriptor when an error was thrown
      fs.closeSync(fd)
    }
    threw = false
  } finally {
    if (threw && fd)
      try { fs.closeSync(fd) } catch (er) {}
  }
}

// read opt.file with a fs-minipass ReadStream and pipe it to the parser,
// resolving (or calling cb) when parsing ends
const listFile = (opt, cb) => {
  const parse = new Parser(opt)
  const readSize = opt.maxReadSize || 16*1024*1024

  const file = opt.file
  const p = new Promise((resolve, reject) => {
    parse.on('error', reject)
    parse.on('end', resolve)

    fs.stat(file, (er, stat) => {
      if (er)
        reject(er)
      else {
        const stream = new fsm.ReadStream(file, {
          readSize: readSize,
          size: stat.size
        })
        stream.on('error', reject)
        stream.pipe(parse)
      }
    })
  })
  return cb ? p.then(cb, cb) : p
}

// no file option: return the Parser stream for the caller to write into
const list = opt => new Parser(opt)
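
// Illustrative usage (a sketch, not part of the module): how the exported
// function might be reached, assuming the package's main entry exposes it
// as require('tar').t and that 'archive.tar' is a hypothetical archive path.
//
//   const tar = require('tar')
//
//   // async, file-based: returns a Promise that resolves when parsing ends
//   tar.t({
//     file: 'archive.tar',
//     onentry: entry => console.log(entry.path)
//   })
//
//   // sync, limited to one directory (child entries are included too)
//   tar.t({
//     file: 'archive.tar',
//     sync: true,
//     onentry: entry => console.log(entry.path)
//   }, ['some/dir'])
//
//   // no file option: returns the Parser; write archive data into it
//   const parser = tar.t({ onentry: entry => console.log(entry.path) })
//   require('fs').createReadStream('archive.tar').pipe(parser)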