# Copyright (c) 2012 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import abc, logging, os, re, time
import subprocess

import common
from autotest_lib.client.bin import utils
from autotest_lib.client.common_lib import error
from constants import CLEANUP_LOGS_PAUSED_FILE


def strip_timestamp(msg):
    """
    Strips timestamps and PIDs from a syslog message to facilitate
    failure reason aggregation when this message is used as an autotest
    exception text.
    """
    kernel = re.search(r' kernel: \[ *\d+\.\d+\] (.*)$', msg)
    if kernel:
        return 'kernel: ' + kernel.group(1)

    user = re.match(r'[^a-z]+ [^ ]* ([^\[ ]+)(?:\[.*\])?(: .*)$', msg)
    if user:
        return user.group(1) + user.group(2)

    logging.warning('Could not parse syslog message: ' + msg)
    return msg


def extract_kernel_timestamp(msg):
    """Extract a timestamp that appears in kernel log messages and looks
    like this:
    ... kernel: [78791.721832] ...

    Returns:
        The timestamp as float in seconds since last boot.
    """

    match = re.search(r'\[\s*([0-9]+\.[0-9]+)\] ', msg)
    if match:
        return float(match.group(1))
    raise error.TestError('Could not extract timestamp from message: ' + msg)


class AbstractLogReader(object):

    def __init__(self):
        self._start_line = 1

    @abc.abstractmethod
    def read_all_logs(self):
        """Read all content from log files.

        Generator function.
        Return an iterator on the content of files.

        This generator can peek a line once (and only once!) by using
        .send(offset). Must iterate over the peeked line before you can
        peek again.
        """
        pass
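
    # A minimal sketch (not part of the original file) of the .send() peek
    # protocol described above, assuming a hypothetical `reader` whose
    # read_all_logs() honors send() (e.g. LogReader below):
    #
    #   gen = reader.read_all_logs()
    #   line = gen.next()        # normal iteration
    #   ahead = gen.send(2)      # peek: returns the line two past `line`
    #   line = gen.next()        # iteration resumes over the peeked lines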

    def set_start_by_regexp(self, index, regexp):
        """Set the start of logs based on a regular expression.

        @param index: line matching regexp to start at, earliest log at 0.
                Negative numbers indicate matches since end of log.
        @param regexp: regular expression that marks the start line.

        @return True if a matching line was found, False otherwise.
        """
        regexp_compiled = re.compile(regexp)
        starts = []
        line_number = 1
        self._start_line = 1
        for line in self.read_all_logs():
            if regexp_compiled.search(line):
                starts.append(line_number)
            line_number += 1
        if index < -len(starts):
            self._start_line = 1
            return False
        elif index >= len(starts):
            self._start_line = line_number
        else:
            self._start_line = starts[index]
        return True


    def set_start_by_reboot(self, index):
        """ Set the start of logs (must be system log) based on reboot.

        @param index: reboot to start at, earliest log at 0. Negative
                numbers indicate reboots since end of log.

        @return True if the boot message line was found, False otherwise.
        """
        # Include a 'kernel' tag to avoid matching boot messages logged by
        # crosvm: https://crbug.com/817946
        return self.set_start_by_regexp(index,
                                        r'kernel:(.*000\])? Linux version \d')


    def set_start_by_current(self, relative=0):
        """ Set start of logs based on current last line.

        @param relative: line relative to current to start at. 1 means
                to start the log after this line.
        """
        count = self._start_line + relative
        for line in self.read_all_logs():
            count += 1
        self._start_line = count


    def get_logs(self):
        """ Get logs since the start line.

        Start line is set by the set_start_* functions, or defaults to the
        start of the file if none were called.

        @return string of contents of file since start line.
        """
        logs = []
        for line in self.read_all_logs():
            logs.append(line)
        return ''.join(logs)


    def can_find(self, string):
        """ Try to find string in the logs.

        @return boolean indicating if we found the string.
        """
        return string in self.get_logs()


    def get_last_msg(self, patterns, retries=0, sleep_seconds=0.2):
        """Search the logs and return the latest occurrence of a message
        matching one of the patterns.

        Args:
            patterns: A regexp or a list of regexps to search the logs with.
                The function returns as soon as it finds any match to one of
                the patterns; it will not search for the other patterns.
            retries: Number of times to retry if none of the patterns were
                found. Default is 0, i.e. a single attempt.
            sleep_seconds: Time to sleep between retries.

        Returns:
            The last occurrence of the first matching pattern. None if none
            of the patterns matched.
        """

        if not type(patterns) in (list, tuple):
            patterns = [patterns]

        for retry in xrange(retries + 1):
            for pattern in patterns:
                regexp_compiled = re.compile(pattern)
                last_match = None
                for line in self.read_all_logs():
                    if regexp_compiled.search(line):
                        last_match = line
                if last_match:
                    return last_match
            time.sleep(sleep_seconds)

        return None


class LogRotationPauser(object):
    """
    Class to control when logs are rotated from either server or client.

    Assumes all setting of CLEANUP_LOGS_PAUSED_FILE is done by this class
    and that all calls to begin and end are properly
    nested. For instance, [ a.begin(), b.begin(), b.end(), a.end() ] is
    supported, but [ a.begin(), b.begin(), a.end(), b.end() ] is not.
    We do support redundant calls to the same class, such as
    [ a.begin(), a.begin(), a.end() ].
    """
    def __init__(self, host=None):
        self._host = host
        self._begun = False
        self._is_nested = True


    def _run(self, command, *args, **dargs):
        if self._host:
            return self._host.run(command, *args, **dargs).exit_status
        else:
            return utils.system(command, *args, **dargs)


    def begin(self):
        """Make sure that log rotation is disabled."""
        if self._begun:
            return
        self._is_nested = (self._run(('[ -r %s ]' %
                                      CLEANUP_LOGS_PAUSED_FILE),
                                     ignore_status=True) == 0)
        if self._is_nested:
            logging.info('File %s was already present' %
                         CLEANUP_LOGS_PAUSED_FILE)
        else:
            self._run('touch ' + CLEANUP_LOGS_PAUSED_FILE)
        self._begun = True


    def end(self):
        assert self._begun
        if not self._is_nested:
            self._run('rm -f ' + CLEANUP_LOGS_PAUSED_FILE)
        else:
            logging.info('Leaving existing %s file' % CLEANUP_LOGS_PAUSED_FILE)
        self._begun = False
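

# A minimal usage sketch (not part of the original file): nested use of
# LogRotationPauser around work that must not race with log rotation. The
# `host` argument is a hypothetical server-side host object; omit it to run
# the commands on the local machine.
#
#   pauser = LogRotationPauser(host)
#   pauser.begin()      # touches CLEANUP_LOGS_PAUSED_FILE, or notes it exists
#   try:
#       ...             # collect logs without rotation interfering
#   finally:
#       pauser.end()    # removes the file only if this pauser created it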


class LogReader(AbstractLogReader):
    """Class to read traditional text log files.

    By default reads all logs from /var/log/messages.
    """

    def __init__(self, filename='/var/log/messages',
                 include_rotated_logs=True):
        AbstractLogReader.__init__(self)
        self._filename = filename
        self._include_rotated_logs = include_rotated_logs
        if not os.path.exists(CLEANUP_LOGS_PAUSED_FILE):
            raise error.TestError('LogReader created without ' +
                                  CLEANUP_LOGS_PAUSED_FILE)

    def read_all_logs(self):
        log_files = []
        line_number = 0
        if self._include_rotated_logs:
            log_files.extend(utils.system_output(
                'ls -tr1 %s.*' % self._filename,
                ignore_status=True).splitlines())
        log_files.append(self._filename)
        for log_file in log_files:
            f = open(log_file)
            for line in f:
                line_number += 1
                if line_number < self._start_line:
                    continue
                peek = yield line
                if peek:
                    buf = [f.next() for _ in xrange(peek)]
                    yield buf[-1]
                    while buf:
                        yield buf.pop(0)
            f.close()


class JournalLogReader(AbstractLogReader):
    """A class to read logs stored by systemd-journald.
    """

    def read_all_logs(self):
        proc = subprocess.Popen(['journalctl'], stdout=subprocess.PIPE)
        line_number = 0
        for line in proc.stdout:
            line_number += 1
            if line_number < self._start_line:
                continue
            yield line
        proc.terminate()


class ContinuousLogReader(AbstractLogReader):
    """Reads a log file from where it left off last time.

    Useful when reading a growing log file.
    """
    def __init__(self, filename):
        self._filename = filename
        # Keeps file offset of last read.
        self._last_pos = 0

    def read_all_logs(self):
        try:
            with open(self._filename, 'r') as f:
                f.seek(self._last_pos)
                for line in f:
                    yield line
                # Note that _last_pos is recorded only if all lines in the
                # file are read. Maybe put this into a context manager so
                # it's always recorded if needed.
                self._last_pos = f.tell()
        except IOError:
            logging.error('Could not read log file %s', self._filename)


def make_system_log_reader():
    """Create a system log reader.

    This will create a JournalLogReader() or a LogReader() depending on
    whether the system is configured with systemd.
    """
    if os.path.exists("/var/log/journal"):
        return JournalLogReader()
    else:
        return LogReader()
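

# A minimal, illustrative demo (not part of the original file). It assumes an
# autotest client environment where the system log and
# CLEANUP_LOGS_PAUSED_FILE are accessible; the pattern below is only an
# example, not a real test.
if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    reader = make_system_log_reader()
    # Read from the most recent boot and report the last kernel warning seen.
    if reader.set_start_by_reboot(-1):
        warning = reader.get_last_msg(r'kernel:.*[Ww]arning')
        if warning:
            logging.info('Last warning: %s', strip_timestamp(warning))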