• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1'use strict'
2
3const BB = require('bluebird')
4
5const contentPath = require('./content/path')
6const crypto = require('crypto')
7const figgyPudding = require('figgy-pudding')
8const fixOwner = require('./util/fix-owner')
9const fs = require('graceful-fs')
10const hashToSegments = require('./util/hash-to-segments')
11const ms = require('mississippi')
12const path = require('path')
13const ssri = require('ssri')
14const Y = require('./util/y.js')
15
16const indexV = require('../package.json')['cache-version'].index
17
18const appendFileAsync = BB.promisify(fs.appendFile)
19const readFileAsync = BB.promisify(fs.readFile)
20const readdirAsync = BB.promisify(fs.readdir)
21const concat = ms.concat
22const from = ms.from
23
// Error class for cache misses. Carries an fs-style `code` of 'ENOENT' so
// callers can treat a missing index entry like a missing file, plus the
// cache path and key that were looked up.
module.exports.NotFoundError = class NotFoundError extends Error {
  constructor (cache, key) {
    // `Y` is the project's message/localization tag (./util/y.js) --
    // presumably a passthrough template tag; confirm before editing the text.
    super(Y`No cache entry for \`${key}\` found in \`${cache}\``)
    this.code = 'ENOENT' // mirror fs errors so `err.code` checks work
    this.cache = cache
    this.key = key
  }
}
32
// Option parser for index operations. Only `metadata` and `size` are
// declared; figgy-pudding presumably ignores undeclared keys -- verify
// against its docs before relying on that.
const IndexOpts = figgyPudding({
  metadata: {},
  size: {}
})
37
module.exports.insert = insert
// Append a new entry for `key` to its bucket file and resolve to the
// formatted entry (null when `integrity` is falsy, i.e. a deletion marker).
// opts: { metadata, size } (filtered through IndexOpts).
function insert (cache, key, integrity, opts) {
  opts = IndexOpts(opts)
  const bucket = bucketPath(cache, key)
  const entry = {
    key,
    integrity: integrity && ssri.stringify(integrity),
    time: Date.now(),
    size: opts.size,
    metadata: opts.metadata
  }
  return fixOwner.mkdirfix(cache, path.dirname(bucket)).then(() => {
    // Bucket lines are `<sha1-of-json>\t<json>`. Concurrent appends are
    // tremendously unlikely to corrupt each other while preserving the
    // JSON's length, so readers just verify the hash and drop bad lines.
    // (Thanks to @isaacs for the whiteboarding session behind this.)
    const line = JSON.stringify(entry)
    return appendFileAsync(bucket, `\n${hashEntry(line)}\t${line}`)
  }).then(() => {
    return fixOwner.chownr(cache, bucket)
  }).catch({ code: 'ENOENT' }, () => {
    // Racing deletes (during fixOwner, or between mkdirfix and chownr)
    // can yank the file out from under us. It's a cache, so it's fine to
    // lie and claim the entry was written.
  }).then(() => {
    return formatEntry(cache, entry)
  })
}
75
module.exports.insert.sync = insertSync
// Synchronous twin of insert(): append the hashed entry line and return
// the formatted entry (null for deletion markers).
function insertSync (cache, key, integrity, opts) {
  opts = IndexOpts(opts)
  const bucket = bucketPath(cache, key)
  const entry = {
    key,
    integrity: integrity && ssri.stringify(integrity),
    time: Date.now(),
    size: opts.size,
    metadata: opts.metadata
  }
  fixOwner.mkdirfix.sync(cache, path.dirname(bucket))
  // Same `<hash>\t<json>` line format as the async path; see insert().
  const line = JSON.stringify(entry)
  fs.appendFileSync(bucket, `\n${hashEntry(line)}\t${line}`)
  try {
    fixOwner.chownr.sync(cache, bucket)
  } catch (err) {
    // A racing delete may have removed the bucket -- ignore; anything
    // else is a real failure.
    if (err.code !== 'ENOENT') {
      throw err
    }
  }
  return formatEntry(cache, entry)
}
101
module.exports.find = find
// Resolve to the newest formatted entry for `key`, or null when the
// bucket is missing, the key was never written, or its last entry is a
// deletion marker.
function find (cache, key) {
  const bucket = bucketPath(cache, key)
  return bucketEntries(bucket).then(entries => {
    // Later lines shadow earlier ones, so the last match wins. A deletion
    // marker formats to null, correctly shadowing prior real entries.
    let newest = null
    for (const entry of entries) {
      if (entry && entry.key === key) {
        newest = formatEntry(cache, entry)
      }
    }
    return newest
  }).catch(err => {
    if (err.code === 'ENOENT') {
      return null // bucket file doesn't exist: nothing ever inserted
    }
    throw err
  })
}
121
module.exports.find.sync = findSync
// Synchronous twin of find(): newest formatted entry for `key`, or null.
function findSync (cache, key) {
  const bucket = bucketPath(cache, key)
  try {
    // Last matching line wins; deletion markers format to null.
    let newest = null
    for (const entry of bucketEntriesSync(bucket)) {
      if (entry && entry.key === key) {
        newest = formatEntry(cache, entry)
      }
    }
    return newest
  } catch (err) {
    if (err.code === 'ENOENT') {
      return null // bucket file doesn't exist: nothing ever inserted
    }
    throw err
  }
}
141
module.exports.delete = del
// Deletion is just an insert with a null integrity: the tombstone line
// shadows all earlier entries for the key.
function del (cache, key, opts) {
  return insert(cache, key, null, opts)
}
146
module.exports.delete.sync = delSync
// Synchronous twin of del(): write a tombstone entry for `key`.
function delSync (cache, key, opts) {
  return insertSync(cache, key, null, opts)
}
151
module.exports.lsStream = lsStream
// Stream every live index entry as an object-mode stream. Walks the
// two-level bucket directory tree, parses each bucket file, and pushes
// the newest formatted entry per key (deletion markers format to null
// and are skipped). NOTE(review): leans on bluebird-specific
// Promise#map / Promise#reduce and predicate .catch -- plain Promises
// would not work here.
function lsStream (cache) {
  const indexDir = bucketDir(cache)
  const stream = from.obj()

  // "/cachename/*"
  readdirOrEmpty(indexDir).map(bucket => {
    // NB: this local shadows the file-level bucketPath() function.
    const bucketPath = path.join(indexDir, bucket)

    // "/cachename/<bucket 0xFF>/*"
    return readdirOrEmpty(bucketPath).map(subbucket => {
      const subbucketPath = path.join(bucketPath, subbucket)

      // "/cachename/<bucket 0xFF>/<bucket 0xFF>/*"
      return readdirOrEmpty(subbucketPath).map(entry => {
        // bucketEntries() yields a bluebird promise, so .reduce runs over
        // the eventual entry array. Map.set makes the last line per key win.
        const getKeyToEntry = bucketEntries(
          path.join(subbucketPath, entry)
        ).reduce((acc, entry) => {
          acc.set(entry.key, entry)
          return acc
        }, new Map())

        return getKeyToEntry.then(reduced => {
          for (let entry of reduced.values()) {
            // formatEntry() returns null for tombstones; don't push those.
            const formatted = formatEntry(cache, entry)
            formatted && stream.push(formatted)
          }
        }).catch({ code: 'ENOENT' }, nop) // bucket deleted mid-walk: skip
      })
    })
  }).then(() => {
    stream.push(null) // all buckets walked: end the stream
  }, err => {
    stream.emit('error', err)
  })

  return stream
}
190
module.exports.ls = ls
// Collect the whole index into a promise for a `{ key: entry }` object by
// draining lsStream() through a concat stream.
function ls (cache) {
  return BB.fromNode(cb => {
    lsStream(cache).on('error', cb).pipe(concat(entries => {
      const byKey = {}
      for (const entry of entries) {
        byKey[entry.key] = entry
      }
      cb(null, byKey)
    }))
  })
}
202
// Read a bucket file and resolve to its parsed, checksum-valid entries.
// Rejects with ENOENT when the bucket file doesn't exist.
function bucketEntries (bucket, filter) {
  return readFileAsync(bucket, 'utf8').then(function (data) {
    return _bucketEntries(data, filter)
  })
}
208
// Synchronous twin of bucketEntries(). Throws ENOENT for a missing bucket.
function bucketEntriesSync (bucket, filter) {
  return _bucketEntries(fs.readFileSync(bucket, 'utf8'), filter)
}
213
// Parse raw bucket-file text into entry objects. Each line is
// `<sha1>\t<json>`; lines that are empty, fail the checksum, or carry
// unparseable JSON are silently dropped (corruption is expected and
// harmless -- it's a cache). NOTE(review): `filter` is accepted but not
// currently used by this implementation.
function _bucketEntries (data, filter) {
  const entries = []
  for (const line of data.split('\n')) {
    if (!line) {
      continue
    }
    const [checksum, json] = line.split('\t')
    if (!json || hashEntry(json) !== checksum) {
      // Hash is no good! Corruption or malice? Doesn't matter! EJECT EJECT
      continue
    }
    let parsed
    try {
      parsed = JSON.parse(json)
    } catch (e) {
      // Entry is corrupted!
      continue
    }
    if (parsed) {
      entries.push(parsed)
    }
  }
  return entries
}
237
module.exports._bucketDir = bucketDir
// Root directory of the index, versioned by package.json's
// cache-version.index so incompatible layouts never collide.
function bucketDir (cache) {
  return path.join(cache, 'index-v' + indexV)
}
242
module.exports._bucketPath = bucketPath
// On-disk path of the bucket file for `key`: the sha256 of the key is
// split into directory segments (via hashToSegments) under bucketDir().
function bucketPath (cache, key) {
  const hashed = hashKey(key)
  // ES6 spread instead of path.join.apply(...) -- same result, clearer.
  return path.join(bucketDir(cache), ...hashToSegments(hashed))
}
250
module.exports._hashKey = hashKey
// Keys are sha256-hashed so bucket names are fixed-length and fs-safe
// regardless of what characters the key contains.
function hashKey (key) {
  return hash(key, 'sha256')
}
255
module.exports._hashEntry = hashEntry
// Per-line integrity checksum for bucket entries. sha1 is fine here: this
// guards against torn/corrupted writes, not attackers.
function hashEntry (str) {
  return hash(str, 'sha1')
}
260
// Hex digest of `str` using the named algorithm (anything node's crypto
// supports, e.g. 'sha1', 'sha256').
function hash (str, digest) {
  const hasher = crypto.createHash(digest)
  hasher.update(str)
  return hasher.digest('hex')
}
267
// Shape a raw index entry for consumers, resolving its content path.
// A null/missing integrity marks a deletion tombstone and formats to
// null, shadowing any earlier entries for the same key.
function formatEntry (cache, entry) {
  if (!entry.integrity) {
    return null
  }
  const { key, integrity, size, time, metadata } = entry
  return {
    key,
    integrity,
    path: contentPath(cache, integrity),
    size,
    time,
    metadata
  }
}
280
// readdir() that treats "doesn't exist" and "not a directory" as an
// empty listing instead of an error (bluebird predicate-function catch).
function readdirOrEmpty (dir) {
  return readdirAsync(dir).catch(
    err => err.code === 'ENOENT' || err.code === 'ENOTDIR',
    () => []
  )
}
286
// Shared do-nothing callback (used to swallow expected ENOENTs).
function nop () {}
289