'use strict'

// npm pack <pkg>
// Packs the specified package into a .tgz file, which can then
// be installed.

// Set this early to avoid issues with circular dependencies.
module.exports = pack

const BB = require('bluebird')

const byteSize = require('byte-size')
const cacache = require('cacache')
const columnify = require('columnify')
const cp = require('child_process')
const deprCheck = require('./utils/depr-check')
const fpm = require('./fetch-package-metadata')
const fs = require('graceful-fs')
const install = require('./install')
const lifecycle = BB.promisify(require('./utils/lifecycle'))
const log = require('npmlog')
const move = require('move-concurrently')
const npm = require('./npm')
const npmConfig = require('./config/figgy-config.js')
const output = require('./utils/output')
const pacote = require('pacote')
const path = require('path')
const PassThrough = require('stream').PassThrough
const pathIsInside = require('path-is-inside')
const pipe = BB.promisify(require('mississippi').pipe)
const prepublishWarning = require('./utils/warn-deprecated')('prepublish-on-install')
const pinflight = require('promise-inflight')
const readJson = BB.promisify(require('read-package-json'))
const tar = require('tar')
const packlist = require('npm-packlist')
const ssri = require('ssri')

pack.usage = 'npm pack [[<@scope>/]<pkg>...] [--dry-run]'

// if it can be installed, it can be packed.
pack.completion = install.completion

// Command entry point: pack one or more package specs into tarballs.
//
// args   - array of package specs; defaults to ['.'] (the cwd) when empty.
// silent - when truthy, suppress all output; optional — if `cb` is omitted,
//          the second argument is treated as the callback instead.
// cb     - node-style callback, invoked via bluebird's .nodeify().
//
// Output mode: with --json, prints the contents records as JSON; otherwise
// logs a human-readable summary per tarball plus the relative filenames.
function pack (args, silent, cb) {
  const cwd = process.cwd()
  // Support pack(args, cb) — shift arguments when `silent` is the callback.
  if (typeof cb !== 'function') {
    cb = silent
    silent = false
  }

  if (args.length === 0) args = ['.']

  // Pack all requested specs concurrently; each yields a "contents" record
  // (see getContents) describing the produced tarball.
  BB.all(
    args.map((arg) => pack_(arg, cwd))
  ).then((tarballs) => {
    if (!silent && npm.config.get('json')) {
      output(JSON.stringify(tarballs, null, 2))
    } else if (!silent) {
      tarballs.forEach(logContents)
      output(tarballs.map((f) => path.relative(cwd, f.filename)).join('\n'))
    }
    return tarballs
  }).nodeify(cb)
}

// Pack a single spec. Resolves the spec's metadata, derives the target
// tarball name (`<name>-<version>.tgz`, scoped `@s/p` flattened to `s-p`),
// then packs either from a local directory (running lifecycle scripts) or
// by fetching the package via pacote. Returns a promise for the contents
// record. Work is deduplicated per target filename via promise-inflight.
function pack_ (pkg, dir) {
  return BB.fromNode((cb) => fpm(pkg, dir, cb)).then((mani) => {
    let name = mani.name[0] === '@'
      // scoped packages get special treatment
      ? mani.name.substr(1).replace(/\//g, '-')
      : mani.name
    const target = `${name}-${mani.version}.tgz`
    return pinflight(target, () => {
      const dryRun = npm.config.get('dry-run')
      if (mani._requested.type === 'directory') {
        // Local directory: run prepublish/prepare first, then tar it up.
        return prepareDirectory(mani._resolved)
          .then(() => {
            return packDirectory(mani, mani._resolved, target, target, true, dryRun)
          })
      } else if (dryRun) {
        // Dry run on a remote spec: pack into a throwaway tmp dir so
        // nothing is written into the cwd.
        log.verbose('pack', '--dry-run mode enabled. Skipping write.')
        return cacache.tmp.withTmp(npm.tmp, {tmpPrefix: 'packing'}, (tmp) => {
          const tmpTarget = path.join(tmp, path.basename(target))
          return packFromPackage(pkg, tmpTarget, target)
        })
      } else {
        return packFromPackage(pkg, target, target)
      }
    })
  })
}

// Pack a non-directory spec (registry, git, remote tarball, …): let pacote
// write the tarball to `target`, then extract it into a tmp dir just long
// enough to read its package.json, and build the contents record from that.
//
// arg      - the original package spec string.
// target   - filesystem path the tarball is written to.
// filename - display/record filename (may differ from target in dry-run).
function packFromPackage (arg, target, filename) {
  const opts = npmConfig()
  return pacote.tarball.toFile(arg, target, opts)
    .then(() => cacache.tmp.withTmp(npm.tmp, {tmpPrefix: 'unpacking'}, (tmp) => {
      const tmpTarget = path.join(tmp, filename)
      return pacote.extract(arg, tmpTarget, opts)
        .then(() => readJson(path.join(tmpTarget, 'package.json')))
    }))
    .then((pkg) => getContents(pkg, target, filename))
}

module.exports.prepareDirectory = prepareDirectory
// Validate a package directory and run its pre-pack lifecycle scripts.
// Throws if package.json lacks a name or version. Lifecycle scripts
// (deprecated `prepublish`, then `prepare`) are skipped for directories
// inside npm's tmp area — those are already-prepared staging copies.
// Resolves to the parsed package.json.
function prepareDirectory (dir) {
  return readJson(path.join(dir, 'package.json')).then((pkg) => {
    if (!pkg.name) {
      throw new Error('package.json requires a "name" field')
    }
    if (!pkg.version) {
      throw new Error('package.json requires a valid "version" field')
    }
    if (!pathIsInside(dir, npm.tmp)) {
      if (pkg.scripts && pkg.scripts.prepublish) {
        // Warn (once per process, via warn-deprecated) about the legacy
        // prepublish script; it still runs below unless ignored.
        prepublishWarning([
          'As of npm@5, `prepublish` scripts are deprecated.',
          'Use `prepare` for build steps and `prepublishOnly` for upload-only.',
          'See the deprecation note in `npm help scripts` for more information.'
        ])
      }
      if (npm.config.get('ignore-prepublish')) {
        return lifecycle(pkg, 'prepare', dir).then(() => pkg)
      } else {
        // prepublish runs first for backwards compatibility, then prepare.
        return lifecycle(pkg, 'prepublish', dir).then(() => {
          return lifecycle(pkg, 'prepare', dir)
        }).then(() => pkg)
      }
    }
    return pkg
  })
}

module.exports.packDirectory = packDirectory
// Create a tarball from a package directory.
//
// mani     - resolved manifest (used for deprecation checking).
// dir      - directory to pack.
// target   - final tarball path; written via a tmp file + atomic-ish move.
// filename - display filename recorded in the contents record.
// logIt    - forwarded to getContents as its 4th argument.
//            NOTE(review): getContents names that parameter `silent`, which
//            reads as the opposite sense — confirm intent against callers.
// dryRun   - when truthy, skip moving the tarball into place.
//
// Lifecycle order: prepack → (re-read package.json) → tar → postpack.
// package.json is re-read after prepack because the script may rewrite it.
function packDirectory (mani, dir, target, filename, logIt, dryRun) {
  deprCheck(mani)
  return readJson(path.join(dir, 'package.json')).then((pkg) => {
    return lifecycle(pkg, 'prepack', dir)
  }).then(() => {
    return readJson(path.join(dir, 'package.json'))
  }).then((pkg) => {
    return cacache.tmp.withTmp(npm.tmp, {tmpPrefix: 'packing'}, (tmp) => {
      const tmpTarget = path.join(tmp, path.basename(target))

      const tarOpt = {
        file: tmpTarget,
        cwd: dir,
        prefix: 'package/',
        portable: true,
        // Provide a specific date in the 1980s for the benefit of zip,
        // which is confounded by files dated at the Unix epoch 0.
        mtime: new Date('1985-10-26T08:15:00.000Z'),
        gzip: true
      }

      // npm-packlist computes the file list honoring .npmignore/files rules.
      return BB.resolve(packlist({ path: dir }))
        // NOTE: node-tar does some Magic Stuff depending on prefixes for files
        // specifically with @ signs, so we just neutralize that one
        // and any such future "features" by prepending `./`
        .then((files) => tar.create(tarOpt, files.map((f) => `./${f}`)))
        .then(() => getContents(pkg, tmpTarget, filename, logIt))
        // thread the content info through
        .tap(() => {
          if (dryRun) {
            log.verbose('pack', '--dry-run mode enabled. Skipping write.')
          } else {
            // move-concurrently handles cross-device renames; graceful-fs
            // is passed in so EMFILE retries apply.
            return move(tmpTarget, target, {Promise: BB, fs})
          }
        })
        // postpack runs after the tarball is in place (or skipped on dry-run).
        .tap(() => lifecycle(pkg, 'postpack', dir))
    })
  })
}

module.exports.logContents = logContents
// Pretty-print a contents record (from getContents) to the npm log at
// `notice` level: file listing, bundled deps, and a details table with
// sizes, shasum, and a truncated integrity string.
function logContents (tarball) {
  log.notice('')
  // NOTE(review): the unicode-mode prefix string below appears to have lost
  // a glyph in transit (upstream npm uses a package emoji here) — confirm.
  log.notice('', `${npm.config.get('unicode') ? ' ' : 'package:'} ${tarball.name}@${tarball.version}`)
  log.notice('=== Tarball Contents ===')
  if (tarball.files.length) {
    log.notice('', columnify(tarball.files.map((f) => {
      const bytes = byteSize(f.size)
      return {path: f.path, size: `${bytes.value}${bytes.unit}`}
    }), {
      include: ['size', 'path'],
      showHeaders: false
    }))
  }
  if (tarball.bundled.length) {
    log.notice('=== Bundled Dependencies ===')
    tarball.bundled.forEach((name) => log.notice('', name))
  }
  log.notice('=== Tarball Details ===')
  log.notice('', columnify([
    {name: 'name:', value: tarball.name},
    {name: 'version:', value: tarball.version},
    tarball.filename && {name: 'filename:', value: tarball.filename},
    {name: 'package size:', value: byteSize(tarball.size)},
    {name: 'unpacked size:', value: byteSize(tarball.unpackedSize)},
    {name: 'shasum:', value: tarball.shasum},
    {
      // Integrity strings are long; show head + tail with the middle elided.
      name: 'integrity:',
      value: tarball.integrity.toString().substr(0, 20) + '[...]' + tarball.integrity.toString().substr(80)},
    tarball.bundled.length && {name: 'bundled deps:', value: tarball.bundled.length},
    tarball.bundled.length && {name: 'bundled files:', value: tarball.entryCount - tarball.files.length},
    tarball.bundled.length && {name: 'own files:', value: tarball.files.length},
    {name: 'total files:', value: tarball.entryCount}
  ].filter((x) => x), { // filter() drops the falsy rows disabled above
    include: ['name', 'value'],
    showHeaders: false
  }))
  log.notice('', '')
}

module.exports.getContents = getContents
// Inspect a finished tarball and build its summary record.
//
// pkg      - parsed package.json (for id/name/version and bundle deps list).
// target   - path of the tarball to scan.
// filename - display filename stored in the record.
// silent   - accepted but not referenced in this body.
//            NOTE(review): packDirectory passes `logIt` here — confirm.
//
// Walks the tar entries to collect own files (package/ prefix stripped) and
// detect which wanted bundled dependencies actually made it under
// package/node_modules/, then stats the file and computes sha1 + sha512
// via ssri. Resolves to {id, name, version, from, size, unpackedSize,
// shasum, integrity, filename, files, entryCount, bundled}.
function getContents (pkg, target, filename, silent) {
  // Either spelling of the bundled-deps field is honored.
  const bundledWanted = new Set(
    pkg.bundleDependencies ||
    pkg.bundledDependencies ||
    []
  )
  const files = []
  const bundled = new Set()
  let totalEntries = 0
  let totalEntrySize = 0
  return tar.t({
    file: target,
    onentry (entry) {
      totalEntries++
      totalEntrySize += entry.size
      const p = entry.path
      if (p.startsWith('package/node_modules/')) {
        // Extract the dependency name, including a scope segment if present.
        const name = p.match(/^package\/node_modules\/((?:@[^/]+\/)?[^/]+)/)[1]
        if (bundledWanted.has(name)) {
          bundled.add(name)
        }
      } else {
        files.push({
          path: entry.path.replace(/^package\//, ''),
          size: entry.size,
          mode: entry.mode
        })
      }
    },
    strip: 1
  })
    .then(() => BB.all([
      BB.fromNode((cb) => fs.stat(target, cb)),
      // sha1 kept for the legacy shasum field; sha512 for modern integrity.
      ssri.fromStream(fs.createReadStream(target), {
        algorithms: ['sha1', 'sha512']
      })
    ]))
    .then(([stat, integrity]) => {
      const shasum = integrity['sha1'][0].hexDigest()
      return {
        id: pkg._id,
        name: pkg.name,
        version: pkg.version,
        from: pkg._from,
        size: stat.size,
        unpackedSize: totalEntrySize,
        shasum,
        integrity: ssri.parse(integrity['sha512'][0]),
        filename,
        files,
        entryCount: totalEntries,
        bundled: Array.from(bundled)
      }
    })
}

// Config keys forwarded from the CLI to the child `npm install` spawned by
// packGitDep, so the prepare build sees the same network/auth settings.
const PASSTHROUGH_OPTS = [
  'always-auth',
  'auth-type',
  'ca',
  'cafile',
  'cert',
  'git',
  'local-address',
  'maxsockets',
  'offline',
  'prefer-offline',
  'prefer-online',
  'proxy',
  'https-proxy',
  'registry',
  'send-metrics',
  'sso-poll-frequency',
  'sso-type',
  'strict-ssl'
]

module.exports.packGitDep = packGitDep
// Pack a git dependency checkout into a tarball stream.
//
// manifest - resolved manifest for the git dep (used for spec in messages).
// dir      - checkout directory to build and pack.
//
// If the package has a `prepare` script, spawns a child `npm install`
// (dev + prod deps, no save) in the checkout so the script can run, then
// packs the directory and pipes the resulting tarball through the returned
// PassThrough stream. Errors are re-emitted as 'error' on the stream,
// since the stream is returned synchronously before the work completes.
function packGitDep (manifest, dir) {
  const stream = new PassThrough()
  readJson(path.join(dir, 'package.json')).then((pkg) => {
    if (pkg.scripts && pkg.scripts.prepare) {
      log.verbose('prepareGitDep', `${manifest._spec}: installing devDeps and running prepare script.`)
      // Only forward options the user explicitly set on the CLI.
      const cliArgs = PASSTHROUGH_OPTS.reduce((acc, opt) => {
        if (npm.config.get(opt, 'cli') != null) {
          acc.push(`--${opt}=${npm.config.get(opt)}`)
        }
        return acc
      }, [])
      // Re-invoke this same npm via its CLI entry point, honoring $NODE.
      const child = cp.spawn(process.env.NODE || process.execPath, [
        require.resolve('../bin/npm-cli.js'),
        'install',
        '--dev',
        '--prod',
        '--ignore-prepublish',
        '--no-progress',
        '--no-save'
      ].concat(cliArgs), {
        cwd: dir,
        env: process.env
      })
      // Buffer child output; it is only surfaced on failure (or at silly
      // log level on success) to keep normal output quiet.
      let errData = []
      let errDataLen = 0
      let outData = []
      let outDataLen = 0
      child.stdout.on('data', (data) => {
        outData.push(data)
        outDataLen += data.length
        log.gauge.pulse('preparing git package')
      })
      child.stderr.on('data', (data) => {
        errData.push(data)
        errDataLen += data.length
        log.gauge.pulse('preparing git package')
      })
      return BB.fromNode((cb) => {
        child.on('error', cb)
        child.on('exit', (code, signal) => {
          if (code > 0) {
            const err = new Error(`${signal}: npm exited with code ${code} while attempting to build ${manifest._requested}. Clone the repository manually and run 'npm install' in it for more information.`)
            err.code = code
            err.signal = signal
            cb(err)
          } else {
            cb()
          }
        })
      }).then(() => {
        // Success: replay captured output at silly level only.
        if (outDataLen > 0) log.silly('prepareGitDep', '1>', Buffer.concat(outData, outDataLen).toString())
        if (errDataLen > 0) log.silly('prepareGitDep', '2>', Buffer.concat(errData, errDataLen).toString())
      }, (err) => {
        // Failure: surface the child's output as errors, then rethrow.
        if (outDataLen > 0) log.error('prepareGitDep', '1>', Buffer.concat(outData, outDataLen).toString())
        if (errDataLen > 0) log.error('prepareGitDep', '2>', Buffer.concat(errData, errDataLen).toString())
        throw err
      })
    }
  }).then(() => {
    // Re-read package.json: the prepare step may have modified it.
    return readJson(path.join(dir, 'package.json'))
  }).then((pkg) => {
    return cacache.tmp.withTmp(npm.tmp, {
      tmpPrefix: 'pacote-packing'
    }, (tmp) => {
      const tmpTar = path.join(tmp, 'package.tgz')
      return packDirectory(manifest, dir, tmpTar).then(() => {
        return pipe(fs.createReadStream(tmpTar), stream)
      })
    })
  }).catch((err) => stream.emit('error', err))
  return stream
}