// Rotating debug log file writer: buffers 'log' events and writes debug-*.log files
1const os = require('os')
2const { join, dirname, basename } = require('path')
3const { format } = require('util')
4const { glob } = require('glob')
5const { Minipass } = require('minipass')
6const fsMiniPass = require('fs-minipass')
7const fs = require('fs/promises')
8const log = require('./log-shim')
9
// Left-pad `n` with zeros to the digit width of `length` so generated
// filenames sort lexicographically (e.g. -04.log before -10.log).
const padZero = (n, length) => String(n).padStart(String(length).length, '0')
// Normalize Windows path separators to forward slashes for glob patterns
const globify = pattern => pattern.replace(/\\/g, '/')
12
// Collects 'log' events emitted on the process object and writes them to
// rotating debug-*.log files. Events are buffered in memory until load()
// supplies the log file path prefix.
class LogFiles {
  // Default to a plain minipass stream so we can buffer
  // initial writes before we know the cache location
  #logStream = null

  // We cap log files at a certain number of log events per file.
  // Note that each log event can write more than one line to the
  // file. Then we rotate log files once this number of events is reached
  #MAX_LOGS_PER_FILE = null

  // Now that we write logs continuously we need to have a backstop
  // here for infinite loops that still log. This is also partially handled
  // by the config.get('max-files') option, but this is a failsafe to
  // prevent runaway log file creation
  #MAX_FILES_PER_PROCESS = null

  // Events written to the current file; drives rotation
  #fileLogCount = 0
  // Events written across the whole process; used as the per-line prefix
  #totalLogCount = 0
  // Filename prefix for log files (filenames are appended directly to it)
  #path = null
  // Max old log files to keep during cleanup; 0 means no log file is written
  #logsMax = null
  // Paths of log files opened by this process, in creation order
  #files = []

  /**
   * @param {object} [opts]
   * @param {number} [opts.maxLogsPerFile=50_000] - log events per file before rotating
   * @param {number} [opts.maxFilesPerProcess=5] - max rotated files before logging stops
   */
  constructor ({
    maxLogsPerFile = 50_000,
    maxFilesPerProcess = 5,
  } = {}) {
    this.#MAX_LOGS_PER_FILE = maxLogsPerFile
    this.#MAX_FILES_PER_PROCESS = maxFilesPerProcess
    // Start listening immediately so early events are buffered, not lost
    this.on()
  }

  /**
   * Format one log event as newline-terminated, prefixed line(s).
   * Each line of the util.format'd message is prefixed with
   * `<count> <level>` (plus an optional `<title>`) and ends with os.EOL.
   * A multi-line message therefore produces multiple prefixed lines.
   */
  static format (count, level, title, ...args) {
    let prefix = `${count} ${level}`
    if (title) {
      prefix += ` ${title}`
    }

    // `lines += ...` both accumulates and returns the reduce value
    return format(...args)
      .split(/\r?\n/)
      .reduce((lines, line) =>
        lines += prefix + (line ? ' ' : '') + line + os.EOL,
      ''
      )
  }

  // Begin buffering events in a fresh Minipass and subscribe to
  // process 'log' events
  on () {
    this.#logStream = new Minipass()
    process.on('log', this.#logHandler)
  }

  // Unsubscribe from process 'log' events and flush/close the stream
  off () {
    process.off('log', this.#logHandler)
    this.#endStream()
  }

  /**
   * Tell the logger where to write files and start writing to disk.
   * @param {object} [opts]
   * @param {string} [opts.path] - filename prefix for the log files
   * @param {number} [opts.logsMax=Infinity] - max old log files to keep
   * @returns {Promise|undefined} the cleanup promise (awaitable in tests),
   *   or undefined if the stream has already ended
   */
  load ({ path, logsMax = Infinity } = {}) {
    // dir is user configurable and is required to exist so
    // this can error if the dir is missing or not configured correctly
    this.#path = path
    this.#logsMax = logsMax

    // Log stream has already ended
    if (!this.#logStream) {
      return
    }

    log.verbose('logfile', `logs-max:${logsMax} dir:${this.#path}`)

    // Pipe our initial stream to our new file stream and
    // set that as the new log logstream for future writes
    // if logs max is 0 then the user does not want a log file
    if (this.#logsMax > 0) {
      const initialFile = this.#openLogFile()
      if (initialFile) {
        this.#logStream = this.#logStream.pipe(initialFile)
      }
    }

    // Kickoff cleaning process, even if we aren't writing a logfile.
    // This is async but it will always ignore the current logfile
    // Return the result so it can be awaited in tests
    return this.#cleanLogs()
  }

  // Public entry point to write a log event directly, bypassing
  // the process 'log' event
  log (...args) {
    this.#logHandler(...args)
  }

  // Paths of every log file created by this process
  get files () {
    return this.#files
  }

  // True while we are still buffering in memory: load() replaces the
  // initial Minipass with the piped file stream
  get #isBuffered () {
    return this.#logStream instanceof Minipass
  }

  // End the stream, optionally writing one final chunk first, and
  // null it out so later events are ignored
  #endStream (output) {
    if (this.#logStream) {
      this.#logStream.end(output)
      this.#logStream = null
    }
  }

  // Bound as an arrow-function field so it can be passed directly to
  // process.on/process.off without losing `this`
  #logHandler = (level, ...args) => {
    // Ignore pause and resume events since we
    // write everything to the log file
    if (level === 'pause' || level === 'resume') {
      return
    }

    // If the stream is ended then do nothing
    if (!this.#logStream) {
      return
    }

    const logOutput = this.#formatLogItem(level, ...args)

    if (this.#isBuffered) {
      // Cant do anything but buffer the output if we dont
      // have a file stream yet
      this.#logStream.write(logOutput)
      return
    }

    // Open a new log file if we've written too many logs to this one
    if (this.#fileLogCount >= this.#MAX_LOGS_PER_FILE) {
      // Write last chunk to the file and close it
      this.#endStream(logOutput)
      if (this.#files.length >= this.#MAX_FILES_PER_PROCESS) {
        // but if its way too many then we just stop listening
        this.off()
      } else {
        // otherwise we are ready for a new file for the next event
        this.#logStream = this.#openLogFile()
      }
    } else {
      this.#logStream.write(logOutput)
    }
  }

  // Increment both counters and produce the formatted text for one event
  #formatLogItem (...args) {
    this.#fileLogCount += 1
    return LogFiles.format(this.#totalLogCount++, ...args)
  }

  // Build a log file path; note #path is a filename PREFIX, not a
  // directory — the name is appended directly, no separator added
  #getLogFilePath (count = '') {
    return `${this.#path}debug-${count}.log`
  }

  /**
   * Open the next log file for appending.
   * @returns {object|undefined} the sync write stream, or undefined if
   *   the file could not be created (e.g. read-only log dir)
   */
  #openLogFile () {
    // Count in filename will be 0 indexed
    const count = this.#files.length

    try {
      // Pad with zeros so that our log files are always sorted properly
      // We never want to write files ending in `-9.log` and `-10.log` because
      // log file cleaning is done by deleting the oldest so in this example
      // `-10.log` would be deleted next
      const f = this.#getLogFilePath(padZero(count, this.#MAX_FILES_PER_PROCESS))
      // Some effort was made to make the async, but we need to write logs
      // during process.on('exit') which has to be synchronous. So in order
      // to never drop log messages, it is easiest to make it sync all the time
      // and this was measured to be about 1.5% slower for 40k lines of output
      const logStream = new fsMiniPass.WriteStreamSync(f, { flags: 'a' })
      if (count > 0) {
        // Reset file log count if we are opening
        // after our first file
        this.#fileLogCount = 0
      }
      this.#files.push(logStream.path)
      return logStream
    } catch (e) {
      // If the user has a readonly logdir then we don't want to
      // warn this on every command so it should be verbose
      log.verbose('logfile', `could not be created: ${e}`)
    }
  }

  // Delete the oldest previously-written log files beyond #logsMax
  async #cleanLogs () {
    // module to clean out the old log files
    // this is a best-effort attempt.  if a rm fails, we just
    // log a message about it and move on.  We do return a
    // Promise that succeeds when we've tried to delete everything,
    // just for the benefit of testing this function properly.

    try {
      const logPath = this.#getLogFilePath()
      const logGlob = join(dirname(logPath), basename(logPath)
        // tell glob to only match digits
        .replace(/\d/g, '[0123456789]')
        // Handle the old (prior to 8.2.0) log file names which did not have a
        // counter suffix
        .replace(/-\.log$/, '*.log')
      )

      // Always ignore the currently written files
      const files = await glob(globify(logGlob), { ignore: this.#files.map(globify), silent: true })
      const toDelete = files.length - this.#logsMax

      if (toDelete <= 0) {
        return
      }

      log.silly('logfile', `start cleaning logs, removing ${toDelete} files`)

      // glob results sort oldest-numbered first, so delete from the front
      for (const file of files.slice(0, toDelete)) {
        try {
          await fs.rm(file, { force: true })
        } catch (e) {
          log.silly('logfile', 'error removing log file', file, e)
        }
      }
    } catch (e) {
      // Disable cleanup failure warnings when log writing is disabled
      if (this.#logsMax > 0) {
        log.warn('logfile', 'error cleaning log files', e)
      }
    } finally {
      log.silly('logfile', 'done cleaning log files')
    }
  }
}
235
// Expose the LogFiles class as the module's only export
module.exports = LogFiles
237