1# Copyright (c) 2011 The Chromium OS Authors. All rights reserved. 2# Use of this source code is governed by a BSD-style license that can be 3# found in the LICENSE file. 4 5import logging 6import os 7 8from autotest_lib.client.common_lib import log 9from autotest_lib.client.common_lib import error, utils, global_config 10from autotest_lib.client.bin import base_sysinfo, utils 11from autotest_lib.client.cros import constants 12 13get_value = global_config.global_config.get_config_value 14collect_corefiles = get_value('CLIENT', 'collect_corefiles', 15 type=bool, default=False) 16 17 18logfile = base_sysinfo.logfile 19command = base_sysinfo.command 20 21 22class logdir(base_sysinfo.loggable): 23 """Represents a log directory.""" 24 25 DEFAULT_EXCLUDES = ("**autoserv*",) 26 27 def __init__(self, directory, excludes=DEFAULT_EXCLUDES): 28 super(logdir, self).__init__(directory, log_in_keyval=False) 29 self.dir = directory 30 self._excludes = excludes 31 self._infer_old_attributes() 32 33 34 def __setstate__(self, state): 35 """Unpickle handler 36 37 When client tests are run without SSP, we pickle this object on the 38 server-side (using the version of the class deployed in the lab) and 39 unpickle it on the DUT (using the version of the class from the build). 40 This means that when adding a new attribute to this class, for a while 41 the server-side code does not populate that attribute. So, deal with 42 missing attributes in a sane way. 43 """ 44 self.__dict__ = state 45 if '_excludes' not in state: 46 self._excludes = self.DEFAULT_EXCLUDES 47 if self.additional_exclude: 48 self._excludes += tuple(self.additional_exclude) 49 50 51 def __repr__(self): 52 return "site_sysinfo.logdir(%r, %s)" % (self.dir, 53 self._excludes) 54 55 56 def __eq__(self, other): 57 if isinstance(other, logdir): 58 return (self.dir == other.dir and self._excludes == other._excludes) 59 elif isinstance(other, base_sysinfo.loggable): 60 return False 61 return NotImplemented 62 63 64 def __ne__(self, other): 65 result = self.__eq__(other) 66 if result is NotImplemented: 67 return result 68 return not result 69 70 71 def __hash__(self): 72 return hash(self.dir) + hash(self._excludes) 73 74 75 def run(self, log_dir): 76 """Copies this log directory to the specified directory. 77 78 @param log_dir: The destination log directory. 79 """ 80 from_dir = os.path.realpath(self.dir) 81 if os.path.exists(from_dir): 82 parent_dir = os.path.dirname(from_dir) 83 utils.system("mkdir -p %s%s" % (log_dir, parent_dir)) 84 85 excludes = [ 86 "--exclude=%s" % self._anchored_exclude_pattern(from_dir, x) 87 for x in self._excludes] 88 # Take source permissions and add ugo+r so files are accessible via 89 # archive server. 90 utils.system( 91 "rsync --no-perms --chmod=ugo+r -a --safe-links %s %s %s%s" 92 % (" ".join(excludes), from_dir, log_dir, parent_dir)) 93 94 95 def _anchored_exclude_pattern(self, from_dir, pattern): 96 return '/%s/%s' % (os.path.basename(from_dir), pattern) 97 98 99 def _infer_old_attributes(self): 100 """ Backwards compatibility attributes. 101 102 YOU MUST NEVER DROP / REINTERPRET THESE. 103 A logdir object is pickled on server-side and unpickled on 104 client-side. This means that, when running aginst client-side code 105 from an older build, we need to be able to unpickle an instance of 106 logdir pickled from a newer version of the class. 107 108 Some old attributes are not sanely handled via __setstate__, so we can't 109 drop them without breaking compatibility. 110 """ 111 additional_excludes = list(set(self._excludes) - 112 set(self.DEFAULT_EXCLUDES)) 113 if additional_excludes: 114 # Old API only allowed a single additional exclude. 115 # Best effort, keep the first one, throw the rest. 116 self.additional_exclude = additional_excludes[0] 117 else: 118 self.additional_exclude = None 119 120 121class file_stat(object): 122 """Store the file size and inode, used for retrieving new data in file.""" 123 def __init__(self, file_path): 124 """Collect the size and inode information of a file. 125 126 @param file_path: full path to the file. 127 128 """ 129 stat = os.stat(file_path) 130 # Start size of the file, skip that amount of bytes when do diff. 131 self.st_size = stat.st_size 132 # inode of the file. If inode is changed, treat this as a new file and 133 # copy the whole file. 134 self.st_ino = stat.st_ino 135 136 137class diffable_logdir(logdir): 138 """Represents a log directory that only new content will be copied. 139 140 An instance of this class should be added in both 141 before_iteration_loggables and after_iteration_loggables. This is to 142 guarantee the file status information is collected when run method is 143 called in before_iteration_loggables, and diff is executed when run 144 method is called in after_iteration_loggables. 145 146 """ 147 def __init__(self, directory, excludes=logdir.DEFAULT_EXCLUDES, 148 keep_file_hierarchy=True, append_diff_in_name=True): 149 """ 150 Constructor of a diffable_logdir instance. 151 152 @param directory: directory to be diffed after an iteration finished. 153 @param excludes: path patterns to exclude for rsync. 154 @param keep_file_hierarchy: True if need to preserve full path, e.g., 155 sysinfo/var/log/sysstat, v.s. sysinfo/sysstat if it's False. 156 @param append_diff_in_name: True if you want to append '_diff' to the 157 folder name to indicate it's a diff, e.g., var/log_diff. Option 158 keep_file_hierarchy must be True for this to take effect. 159 160 """ 161 super(diffable_logdir, self).__init__(directory, excludes) 162 self.keep_file_hierarchy = keep_file_hierarchy 163 self.append_diff_in_name = append_diff_in_name 164 # Init dictionary to store all file status for files in the directory. 165 self._log_stats = {} 166 167 168 def _get_init_status_of_src_dir(self, src_dir): 169 """Get initial status of files in src_dir folder. 170 171 @param src_dir: directory to be diff-ed. 172 173 """ 174 # Dictionary used to store the initial status of files in src_dir. 175 for file_path in self._get_all_files(src_dir): 176 self._log_stats[file_path] = file_stat(file_path) 177 self.file_stats_collected = True 178 179 180 def _get_all_files(self, path): 181 """Iterate through files in given path including subdirectories. 182 183 @param path: root directory. 184 @return: an iterator that iterates through all files in given path 185 including subdirectories. 186 187 """ 188 if not os.path.exists(path): 189 yield [] 190 for root, dirs, files in os.walk(path): 191 for f in files: 192 if f.startswith('autoserv'): 193 continue 194 yield os.path.join(root, f) 195 196 197 def _copy_new_data_in_file(self, file_path, src_dir, dest_dir): 198 """Copy all new data in a file to target directory. 199 200 @param file_path: full path to the file to be copied. 201 @param src_dir: source directory to do the diff. 202 @param dest_dir: target directory to store new data of src_dir. 203 204 """ 205 bytes_to_skip = 0 206 if self._log_stats.has_key(file_path): 207 prev_stat = self._log_stats[file_path] 208 new_stat = os.stat(file_path) 209 if new_stat.st_ino == prev_stat.st_ino: 210 bytes_to_skip = prev_stat.st_size 211 if new_stat.st_size == bytes_to_skip: 212 return 213 elif new_stat.st_size < prev_stat.st_size: 214 # File is modified to a smaller size, copy whole file. 215 bytes_to_skip = 0 216 try: 217 with open(file_path, 'r') as in_log: 218 if bytes_to_skip > 0: 219 in_log.seek(bytes_to_skip) 220 # Skip src_dir in path, e.g., src_dir/[sub_dir]/file_name. 221 target_path = os.path.join(dest_dir, 222 os.path.relpath(file_path, src_dir)) 223 target_dir = os.path.dirname(target_path) 224 if not os.path.exists(target_dir): 225 os.makedirs(target_dir) 226 with open(target_path, "w") as out_log: 227 out_log.write(in_log.read()) 228 except IOError as e: 229 logging.error('Diff %s failed with error: %s', file_path, e) 230 231 232 def _log_diff(self, src_dir, dest_dir): 233 """Log all of the new data in src_dir to dest_dir. 234 235 @param src_dir: source directory to do the diff. 236 @param dest_dir: target directory to store new data of src_dir. 237 238 """ 239 if self.keep_file_hierarchy: 240 dir = src_dir.lstrip('/') 241 if self.append_diff_in_name: 242 dir = dir.rstrip('/') + '_diff' 243 dest_dir = os.path.join(dest_dir, dir) 244 245 if not os.path.exists(dest_dir): 246 os.makedirs(dest_dir) 247 248 for src_file in self._get_all_files(src_dir): 249 self._copy_new_data_in_file(src_file, src_dir, dest_dir) 250 251 252 def run(self, log_dir, collect_init_status=True, collect_all=False): 253 """Copies new content from self.dir to the destination log_dir. 254 255 @param log_dir: The destination log directory. 256 @param collect_init_status: Set to True if run method is called to 257 collect the initial status of files. 258 @param collect_all: Set to True to force to collect all files. 259 260 """ 261 if collect_init_status: 262 self._get_init_status_of_src_dir(self.dir) 263 elif os.path.exists(self.dir): 264 # Always create a copy of the new logs to help debugging. 265 self._log_diff(self.dir, log_dir) 266 if collect_all: 267 logdir_temp = logdir(self.dir) 268 logdir_temp.run(log_dir) 269 270 271class purgeable_logdir(logdir): 272 """Represents a log directory that will be purged.""" 273 def __init__(self, directory, excludes=logdir.DEFAULT_EXCLUDES): 274 super(purgeable_logdir, self).__init__(directory, excludes) 275 276 def run(self, log_dir): 277 """Copies this log dir to the destination dir, then purges the source. 278 279 @param log_dir: The destination log directory. 280 """ 281 super(purgeable_logdir, self).run(log_dir) 282 283 if os.path.exists(self.dir): 284 utils.system("rm -rf %s/*" % (self.dir)) 285 286 287class site_sysinfo(base_sysinfo.base_sysinfo): 288 """Represents site system info.""" 289 def __init__(self, job_resultsdir): 290 super(site_sysinfo, self).__init__(job_resultsdir) 291 crash_exclude_string = None 292 if not collect_corefiles: 293 crash_exclude_string = "*.core" 294 295 # This is added in before and after_iteration_loggables. When run is 296 # called in before_iteration_loggables, it collects file status in 297 # the directory. When run is called in after_iteration_loggables, diff 298 # is executed. 299 # self.diffable_loggables is only initialized if the instance does not 300 # have this attribute yet. The sysinfo instance could be loaded 301 # from an earlier pickle dump, which has already initialized attribute 302 # self.diffable_loggables. 303 if not hasattr(self, 'diffable_loggables'): 304 diffable_log = diffable_logdir(constants.LOG_DIR) 305 self.diffable_loggables = set() 306 self.diffable_loggables.add(diffable_log) 307 308 # add in some extra command logging 309 self.boot_loggables.add(command("ls -l /boot", 310 "boot_file_list")) 311 self.before_iteration_loggables.add( 312 command(constants.CHROME_VERSION_COMMAND, "chrome_version")) 313 self.boot_loggables.add(command("crossystem", "crossystem")) 314 self.test_loggables.add( 315 purgeable_logdir( 316 os.path.join(constants.CRYPTOHOME_MOUNT_PT, "log"))) 317 # We only want to gather and purge crash reports after the client test 318 # runs in case a client test is checking that a crash found at boot 319 # (such as a kernel crash) is handled. 320 self.after_iteration_loggables.add( 321 purgeable_logdir( 322 os.path.join(constants.CRYPTOHOME_MOUNT_PT, "crash"), 323 excludes=logdir.DEFAULT_EXCLUDES + (crash_exclude_string,))) 324 self.after_iteration_loggables.add( 325 purgeable_logdir( 326 constants.CRASH_DIR, 327 excludes=logdir.DEFAULT_EXCLUDES + (crash_exclude_string,))) 328 self.test_loggables.add( 329 logfile(os.path.join(constants.USER_DATA_DIR, 330 ".Google/Google Talk Plugin/gtbplugin.log"))) 331 self.test_loggables.add(purgeable_logdir( 332 constants.CRASH_DIR, 333 excludes=logdir.DEFAULT_EXCLUDES + (crash_exclude_string,))) 334 # Collect files under /tmp/crash_reporter, which contain the procfs 335 # copy of those crashed processes whose core file didn't get converted 336 # into minidump. We need these additional files for post-mortem analysis 337 # of the conversion failure. 338 self.test_loggables.add( 339 purgeable_logdir(constants.CRASH_REPORTER_RESIDUE_DIR)) 340 341 342 @log.log_and_ignore_errors("pre-test sysinfo error:") 343 def log_before_each_test(self, test): 344 """Logging hook called before a test starts. 345 346 @param test: A test object. 347 """ 348 super(site_sysinfo, self).log_before_each_test(test) 349 350 for log in self.diffable_loggables: 351 log.run(log_dir=None, collect_init_status=True) 352 353 354 @log.log_and_ignore_errors("post-test sysinfo error:") 355 def log_after_each_test(self, test): 356 """Logging hook called after a test finishs. 357 358 @param test: A test object. 359 """ 360 super(site_sysinfo, self).log_after_each_test(test) 361 362 test_sysinfodir = self._get_sysinfodir(test.outputdir) 363 364 for log in self.diffable_loggables: 365 log.run(log_dir=test_sysinfodir, collect_init_status=False, 366 collect_all=not test.success) 367 368 369 def _get_chrome_version(self): 370 """Gets the Chrome version number and milestone as strings. 371 372 Invokes "chrome --version" to get the version number and milestone. 373 374 @return A tuple (chrome_ver, milestone) where "chrome_ver" is the 375 current Chrome version number as a string (in the form "W.X.Y.Z") 376 and "milestone" is the first component of the version number 377 (the "W" from "W.X.Y.Z"). If the version number cannot be parsed 378 in the "W.X.Y.Z" format, the "chrome_ver" will be the full output 379 of "chrome --version" and the milestone will be the empty string. 380 381 """ 382 version_string = utils.system_output(constants.CHROME_VERSION_COMMAND, 383 ignore_status=True) 384 return utils.parse_chrome_version(version_string) 385 386 387 def log_test_keyvals(self, test_sysinfodir): 388 keyval = super(site_sysinfo, self).log_test_keyvals(test_sysinfodir) 389 390 lsb_lines = utils.system_output( 391 "cat /etc/lsb-release", 392 ignore_status=True).splitlines() 393 lsb_dict = dict(item.split("=") for item in lsb_lines) 394 395 for lsb_key in lsb_dict.keys(): 396 # Special handling for build number 397 if lsb_key == "CHROMEOS_RELEASE_DESCRIPTION": 398 keyval["CHROMEOS_BUILD"] = ( 399 lsb_dict[lsb_key].rstrip(")").split(" ")[3]) 400 keyval[lsb_key] = lsb_dict[lsb_key] 401 402 # Get the hwid (hardware ID), if applicable. 403 try: 404 keyval["hwid"] = utils.system_output('crossystem hwid') 405 except error.CmdError: 406 # The hwid may not be available (e.g, when running on a VM). 407 # If the output of 'crossystem mainfw_type' is 'nonchrome', then 408 # we expect the hwid to not be avilable, and we can proceed in this 409 # case. Otherwise, the hwid is missing unexpectedly. 410 mainfw_type = utils.system_output('crossystem mainfw_type') 411 if mainfw_type == 'nonchrome': 412 logging.info( 413 'HWID not available; not logging it as a test keyval.') 414 else: 415 logging.exception('HWID expected but could not be identified; ' 416 'output of "crossystem mainfw_type" is "%s"', 417 mainfw_type) 418 raise 419 420 # Get the chrome version and milestone numbers. 421 keyval["CHROME_VERSION"], keyval["MILESTONE"] = ( 422 self._get_chrome_version()) 423 424 # TODO(kinaba): crbug.com/707448 Import at the head of this file. 425 # Currently a server-side script server/server_job.py is indirectly 426 # importing this file, so we cannot globaly import cryptohome that 427 # has dependency to a client-only library. 428 from autotest_lib.client.cros import cryptohome 429 # Get the dictionary attack counter. 430 keyval["TPM_DICTIONARY_ATTACK_COUNTER"] = ( 431 cryptohome.get_tpm_more_status().get( 432 'dictionary_attack_counter', 'Failed to query cryptohome')) 433 434 # Return the updated keyvals. 435 return keyval 436 437 438 def add_logdir(self, loggable): 439 """Collect files in log_path to sysinfo folder. 440 441 This method can be called from a control file for test to collect files 442 in a specified folder. autotest creates a folder [test result 443 dir]/sysinfo folder with the full path of log_path and copy all files in 444 log_path to that folder. 445 446 @param loggable: A logdir instance corresponding to the logs to collect. 447 """ 448 self.test_loggables.add(loggable) 449