var fs
try {
  fs = require('graceful-fs')
} catch (er) {
  fs = require('fs')
}

var path = require('path')

var glob = require('glob')
var normalizeData = require('normalize-package-data')
var safeJSON = require('json-parse-better-errors')
var util = require('util')
var normalizePackageBin = require('npm-normalize-package-bin')

module.exports = readJson

// put more stuff on here to customize.
readJson.extraSet = [
  bundleDependencies,
  gypfile,
  serverjs,
  scriptpath,
  authors,
  readme,
  mans,
  bins,
  githead
]
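// Example (an illustrative sketch, not part of this module): a consumer could
// push its own step onto readJson.extraSet. Each step receives
// (file, data, cb) and calls cb(er, data) when done; the step name and the
// field it fills in below are hypothetical.
//
//   readJson.extraSet.push(function defaultLicense (file, data, cb) {
//     if (!data.license) data.license = 'UNLICENSED'
//     cb(null, data)
//   })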

var typoWarned = {}
var cache = {}

function readJson (file, log_, strict_, cb_) {
  var log, strict, cb
  for (var i = 1; i < arguments.length - 1; i++) {
    if (typeof arguments[i] === 'boolean') {
      strict = arguments[i]
    } else if (typeof arguments[i] === 'function') {
      log = arguments[i]
    }
  }

  if (!log) log = function () {}
  cb = arguments[arguments.length - 1]

  readJson_(file, log, strict, cb)
}
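// Typical usage (a sketch; the logging function and strict flag are optional
// and may appear in either order before the callback):
//
//   readJson('/path/to/package.json', console.error, false, function (er, data) {
//     if (er) return console.error('There was an error reading the file')
//     console.log('the package data is', data)
//   })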

function readJson_ (file, log, strict, cb) {
  fs.readFile(file, 'utf8', function (er, d) {
    parseJson(file, er, d, log, strict, cb)
  })
}

function stripBOM (content) {
  // Remove byte order marker. This catches EF BB BF (the UTF-8 BOM)
  // because the buffer-to-string conversion in `fs.readFileSync()`
  // translates it to FEFF, the UTF-16 BOM.
  if (content.charCodeAt(0) === 0xFEFF) content = content.slice(1)
  return content
}

// Deep-clone plain JSON data so cached results can't be mutated by callers.
function jsonClone (obj) {
  if (obj == null) {
    return obj
  } else if (Array.isArray(obj)) {
    var newarr = new Array(obj.length)
    for (var ii in obj) {
      newarr[ii] = jsonClone(obj[ii])
    }
    return newarr
  } else if (typeof obj === 'object') {
    var newobj = {}
    for (var kk in obj) {
      newobj[kk] = jsonClone(obj[kk])
    }
    return newobj
  } else {
    return obj
  }
}

function parseJson (file, er, d, log, strict, cb) {
  if (er && er.code === 'ENOENT') {
    return fs.stat(path.dirname(file), function (err, stat) {
      if (!err && stat && !stat.isDirectory()) {
        // ENOTDIR isn't used on Windows, but npm expects it.
        er = Object.create(er)
        er.code = 'ENOTDIR'
        return cb(er)
      } else {
        return indexjs(file, er, log, strict, cb)
      }
    })
  }
  if (er) return cb(er)

  if (cache[d]) return cb(null, jsonClone(cache[d]))

  var data

  try {
    data = safeJSON(stripBOM(d))
  } catch (er) {
    data = parseIndex(d)
    if (!data) return cb(parseError(er, file))
  }

  extrasCached(file, d, data, log, strict, cb)
}

function extrasCached (file, d, data, log, strict, cb) {
  extras(file, data, log, strict, function (err, data) {
    if (!err) {
      cache[d] = jsonClone(data)
    }
    cb(err, data)
  })
}

function indexjs (file, er, log, strict, cb) {
  if (path.basename(file) === 'index.js') return cb(er)

  var index = path.resolve(path.dirname(file), 'index.js')
  fs.readFile(index, 'utf8', function (er2, d) {
    if (er2) return cb(er)

    if (cache[d]) return cb(null, cache[d])

    var data = parseIndex(d)
    if (!data) return cb(er)

    extrasCached(file, d, data, log, strict, cb)
  })
}

readJson.extras = extras
function extras (file, data, log_, strict_, cb_) {
  var log, strict, cb
  for (var i = 2; i < arguments.length - 1; i++) {
    if (typeof arguments[i] === 'boolean') {
      strict = arguments[i]
    } else if (typeof arguments[i] === 'function') {
      log = arguments[i]
    }
  }

  if (!log) log = function () {}
  cb = arguments[i]

  var set = readJson.extraSet
  var n = set.length
  var errState = null
  set.forEach(function (fn) {
    fn(file, data, then)
  })

  function then (er) {
    if (errState) return
    if (er) return cb(errState = er)
    if (--n > 0) return
    final(file, data, log, strict, cb)
  }
}

function scriptpath (file, data, cb) {
  if (!data.scripts) return cb(null, data)
  var k = Object.keys(data.scripts)
  k.forEach(scriptpath_, data.scripts)
  cb(null, data)
}

function scriptpath_ (key) {
  var s = this[key]
  // This is never allowed, and only causes problems
  if (typeof s !== 'string') return delete this[key]

  var spre = /^(\.[/\\])?node_modules[/\\].bin[\\/]/
  if (s.match(spre)) {
    this[key] = this[key].replace(spre, '')
  }
}

function gypfile (file, data, cb) {
  var dir = path.dirname(file)
  var s = data.scripts || {}
  if (s.install || s.preinstall) return cb(null, data)

  glob('*.gyp', { cwd: dir }, function (er, files) {
    if (er) return cb(er)
    if (data.gypfile === false) return cb(null, data)
    gypfile_(file, data, files, cb)
  })
}

function gypfile_ (file, data, files, cb) {
  if (!files.length) return cb(null, data)
  var s = data.scripts || {}
  s.install = 'node-gyp rebuild'
  data.scripts = s
  data.gypfile = true
  return cb(null, data)
}

function serverjs (file, data, cb) {
  var dir = path.dirname(file)
  var s = data.scripts || {}
  if (s.start) return cb(null, data)
  glob('server.js', { cwd: dir }, function (er, files) {
    if (er) return cb(er)
    serverjs_(file, data, files, cb)
  })
}

function serverjs_ (file, data, files, cb) {
  if (!files.length) return cb(null, data)
  var s = data.scripts || {}
  s.start = 'node server.js'
  data.scripts = s
  return cb(null, data)
}

function authors (file, data, cb) {
  if (data.contributors) return cb(null, data)
  var af = path.resolve(path.dirname(file), 'AUTHORS')
  fs.readFile(af, 'utf8', function (er, ad) {
    // ignore error.  just checking it.
    if (er) return cb(null, data)
    authors_(file, data, ad, cb)
  })
}

function authors_ (file, data, ad, cb) {
  ad = ad.split(/\r?\n/g).map(function (line) {
    return line.replace(/^\s*#.*$/, '').trim()
  }).filter(function (line) {
    return line
  })
  data.contributors = ad
  return cb(null, data)
}

function readme (file, data, cb) {
  if (data.readme) return cb(null, data)
  var dir = path.dirname(file)
  var globOpts = { cwd: dir, nocase: true, mark: true }
  glob('{README,README.*}', globOpts, function (er, files) {
    if (er) return cb(er)
    // don't accept directories.
    files = files.filter(function (file) {
      return !file.match(/\/$/)
    })
    if (!files.length) return cb()
    var fn = preferMarkdownReadme(files)
    var rm = path.resolve(dir, fn)
    readme_(file, data, rm, cb)
  })
}

function preferMarkdownReadme (files) {
  var fallback = 0
  var re = /\.m?a?r?k?d?o?w?n?$/i
  for (var i = 0; i < files.length; i++) {
    if (files[i].match(re)) {
      return files[i]
    } else if (files[i].match(/README$/)) {
      fallback = i
    }
  }
  // prefer README.md, followed by README; otherwise, return
  // the first filename (which could be README)
  return files[fallback]
}

function readme_ (file, data, rm, cb) {
  var rmfn = path.basename(rm)
  fs.readFile(rm, 'utf8', function (er, rm) {
    // maybe not readable, or something.
    if (er) return cb()
    data.readme = rm
    data.readmeFilename = rmfn
    return cb(er, data)
  })
}

function mans (file, data, cb) {
  var m = data.directories && data.directories.man
  if (data.man || !m) return cb(null, data)
  m = path.resolve(path.dirname(file), m)
  glob('**/*.[0-9]', { cwd: m }, function (er, mans) {
    if (er) return cb(er)
    mans_(file, data, mans, cb)
  })
}

function mans_ (file, data, mans, cb) {
  var m = data.directories && data.directories.man
  data.man = mans.map(function (mf) {
    return path.resolve(path.dirname(file), m, mf)
  })
  return cb(null, data)
}

function bins (file, data, cb) {
  data = normalizePackageBin(data)

  var m = data.directories && data.directories.bin
  if (data.bin || !m) return cb(null, data)

  m = path.resolve(path.dirname(file), m)
  glob('**', { cwd: m }, function (er, bins) {
    if (er) return cb(er)
    bins_(file, data, bins, cb)
  })
}

function bins_ (file, data, bins, cb) {
  var m = (data.directories && data.directories.bin) || '.'
  data.bin = bins.reduce(function (acc, mf) {
    if (mf && mf.charAt(0) !== '.') {
      var f = path.basename(mf)
      acc[f] = path.join(m, mf)
    }
    return acc
  }, {})
  return cb(null, normalizePackageBin(data))
}

function bundleDependencies (file, data, cb) {
  var bd = 'bundleDependencies'
  var bdd = 'bundledDependencies'
  // normalize key name
  if (data[bdd] !== undefined) {
    if (data[bd] === undefined) data[bd] = data[bdd]
    delete data[bdd]
  }
  if (data[bd] === false) delete data[bd]
  else if (data[bd] === true) {
    data[bd] = Object.keys(data.dependencies || {})
  } else if (data[bd] !== undefined && !Array.isArray(data[bd])) {
    delete data[bd]
  }
  return cb(null, data)
}

function githead (file, data, cb) {
  if (data.gitHead) return cb(null, data)
  var dir = path.dirname(file)
  var head = path.resolve(dir, '.git/HEAD')
  fs.readFile(head, 'utf8', function (er, head) {
    if (er) return cb(null, data)
    githead_(file, data, dir, head, cb)
  })
}

function githead_ (file, data, dir, head, cb) {
  if (!head.match(/^ref: /)) {
    data.gitHead = head.trim()
    return cb(null, data)
  }
  var headRef = head.replace(/^ref: /, '').trim()
  var headFile = path.resolve(dir, '.git', headRef)
  fs.readFile(headFile, 'utf8', function (er, head) {
    if (er || !head) {
      var packFile = path.resolve(dir, '.git/packed-refs')
      return fs.readFile(packFile, 'utf8', function (er, refs) {
        if (er || !refs) {
          return cb(null, data)
        }
        refs = refs.split('\n')
        for (var i = 0; i < refs.length; i++) {
          var match = refs[i].match(/^([0-9a-f]{40}) (.+)$/)
          if (match && match[2].trim() === headRef) {
            data.gitHead = match[1]
            break
          }
        }
        return cb(null, data)
      })
    }
    head = head.replace(/^ref: /, '').trim()
    data.gitHead = head
    return cb(null, data)
  })
}

/**
 * Warn if the bin references don't point to anything.  This might be better in
 * normalize-package-data if it had access to the file path.
 */
function checkBinReferences_ (file, data, warn, cb) {
  if (!(data.bin instanceof Object)) return cb()

  var keys = Object.keys(data.bin)
  var keysLeft = keys.length
  if (!keysLeft) return cb()

  function handleExists (relName, result) {
    keysLeft--
    if (!result) warn('No bin file found at ' + relName)
    if (!keysLeft) cb()
  }

  keys.forEach(function (key) {
    var dirName = path.dirname(file)
    var relName = data.bin[key]
    /* istanbul ignore if - impossible, bins have been normalized */
    if (typeof relName !== 'string') {
      var msg = 'Bin filename for ' + key +
        ' is not a string: ' + util.inspect(relName)
      warn(msg)
      delete data.bin[key]
      handleExists(relName, true)
      return
    }
    var binPath = path.resolve(dirName, relName)
    fs.stat(binPath, (err) => handleExists(relName, !err))
  })
}

function final (file, data, log, strict, cb) {
  var pId = makePackageId(data)

  function warn (msg) {
    if (typoWarned[pId]) return
    if (log) log('package.json', pId, msg)
  }

  try {
    normalizeData(data, warn, strict)
  } catch (error) {
    return cb(error)
  }

  checkBinReferences_(file, data, warn, function () {
    typoWarned[pId] = true
    cb(null, data)
  })
}

function makePackageId (data) {
  var name = cleanString(data.name)
  var ver = cleanString(data.version)
  return name + '@' + ver
}

function cleanString (str) {
  return (!str || typeof (str) !== 'string') ? '' : str.trim()
}

// /**package { "name": "foo", "version": "1.2.3", ... } **/
function parseIndex (data) {
  data = data.split(/^\/\*\*package(?:\s|$)/m)

  if (data.length < 2) return null
  data = data[1]
  data = data.split(/\*\*\/$/m)

  if (data.length < 2) return null
  data = data[0]
  data = data.replace(/^\s*\*/mg, '')

  try {
    return safeJSON(data)
  } catch (er) {
    return null
  }
}
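// Example of an index.js that parseIndex can recover a manifest from
// (a sketch; inside the block, each line's leading whitespace-plus-'*'
// prefix is stripped before the remainder is parsed as JSON):
//
//   /**package
//    * { "name": "foo",
//    *   "version": "1.2.3",
//    *   "description": "just an example" }
//   **/
//   module.exports = 'hello, world'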

function parseError (ex, file) {
  var e = new Error('Failed to parse json\n' + ex.message)
  e.code = 'EJSONPARSE'
  e.file = file
  return e
}