# Copyright 2017 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import json
import logging
import os
import urlparse

from autotest_lib.client.common_lib import error
from autotest_lib.client.common_lib import lsbrelease_utils
from autotest_lib.client.common_lib import utils
from autotest_lib.client.common_lib.cros import dev_server
from autotest_lib.client.cros.update_engine import update_engine_event as uee
from autotest_lib.client.cros.update_engine import update_engine_util
from autotest_lib.server import autotest
from autotest_lib.server import test
from autotest_lib.server.cros.dynamic_suite import tools
from autotest_lib.server.cros.update_engine import omaha_devserver
from chromite.lib import retry_util
from datetime import datetime, timedelta


class UpdateEngineTest(test.test, update_engine_util.UpdateEngineUtil):
    """Class for comparing expected update_engine events against actual ones.

    During a rootfs update, several events are fired (e.g. download_started,
    download_finished, update_started, etc.). Each event has properties
    associated with it that need to be verified.

    In this class we build a list of expected events (a list of
    UpdateEngineEvent objects) and compare it against a "hostlog" of the
    events update_engine actually reported during the update. The hostlog is
    a json list of events, accessed via the api/hostlog URL on the devserver
    during the update.

    We can also verify the hostlog of the one-time update event that is fired
    after rebooting at the end of an update.

    During a typical autoupdate we will check both of these hostlogs.
    """
    version = 1

    # Timeout periods, given in seconds.
    _INITIAL_CHECK_TIMEOUT = 12 * 60
    _DOWNLOAD_STARTED_TIMEOUT = 4 * 60
    # See https://crbug.com/731214 before changing _DOWNLOAD_FINISHED_TIMEOUT
    _DOWNLOAD_FINISHED_TIMEOUT = 20 * 60
    _UPDATE_COMPLETED_TIMEOUT = 4 * 60
    _POST_REBOOT_TIMEOUT = 15 * 60

    # The names of the two hostlog files we will be verifying.
    _DEVSERVER_HOSTLOG_ROOTFS = 'devserver_hostlog_rootfs'
    _DEVSERVER_HOSTLOG_REBOOT = 'devserver_hostlog_reboot'

    # Version we tell the DUT it is on before the update.
    _CUSTOM_LSB_VERSION = '0.0.0.0'

    # Expected number of hostlog events during the rootfs update.
    _ROOTFS_HOSTLOG_EVENTS = 4

    _CELLULAR_BUCKET = 'gs://chromeos-throw-away-bucket/CrOSPayloads/Cellular/'


    def initialize(self, host=None, hosts=None):
        """
        Sets default variables for the test.

        @param host: The DUT we will be running on.
        @param hosts: If we are running a test with multiple DUTs (e.g. P2P)
                      we will use hosts instead of host.

        """
        self._hostlog_filename = None
        self._hostlog_events = []
        self._num_consumed_events = 0
        self._current_timestamp = None
        self._expected_events = []
        self._omaha_devserver = None
        self._host = host
        # Some AU tests use multiple DUTs.
        self._hosts = hosts

        # Define functions used in update_engine_util.
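        # These callables are assumed to be what the UpdateEngineUtil mixin
        # uses to run commands on, and fetch files from, the DUT; they remain
        # None when no host was given (e.g. until a P2P test selects a host
        # via _set_active_p2p_host below).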
        self._run = self._host.run if self._host else None
        self._get_file = self._host.get_file if self._host else None


    def cleanup(self):
        """Clean up update_engine autotests."""
        if self._omaha_devserver is not None:
            self._omaha_devserver.stop_devserver()
        if self._host:
            self._host.get_file(self._UPDATE_ENGINE_LOG, self.resultsdir)


    def _get_expected_events_for_rootfs_update(self, source_release):
        """Creates a list of expected events fired during a rootfs update.

        There are 4 events fired during a rootfs update. We will create these
        in the correct order with the correct data, timeout, and error
        condition function.
        """
        initial_check = uee.UpdateEngineEvent(
            version=source_release,
            on_error=self._error_initial_check)
        download_started = uee.UpdateEngineEvent(
            event_type=uee.EVENT_TYPE_DOWNLOAD_STARTED,
            event_result=uee.EVENT_RESULT_SUCCESS,
            version=source_release,
            on_error=self._error_incorrect_event)
        download_finished = uee.UpdateEngineEvent(
            event_type=uee.EVENT_TYPE_DOWNLOAD_FINISHED,
            event_result=uee.EVENT_RESULT_SUCCESS,
            version=source_release,
            on_error=self._error_incorrect_event)
        update_complete = uee.UpdateEngineEvent(
            event_type=uee.EVENT_TYPE_UPDATE_COMPLETE,
            event_result=uee.EVENT_RESULT_SUCCESS,
            version=source_release,
            on_error=self._error_incorrect_event)

        # There is an error message if any of them take too long to fire.
        initial_error = self._timeout_error_message(
            'an initial update check', self._INITIAL_CHECK_TIMEOUT)
        dls_error = self._timeout_error_message(
            'a download started notification',
            self._DOWNLOAD_STARTED_TIMEOUT,
            uee.EVENT_TYPE_DOWNLOAD_STARTED)
        dlf_error = self._timeout_error_message(
            'a download finished notification',
            self._DOWNLOAD_FINISHED_TIMEOUT,
            uee.EVENT_TYPE_DOWNLOAD_FINISHED)
        uc_error = self._timeout_error_message(
            'an update complete notification',
            self._UPDATE_COMPLETED_TIMEOUT,
            uee.EVENT_TYPE_UPDATE_COMPLETE)

        # Build an array of tuples (event, timeout, timeout_error_message)
        self._expected_events = [
            (initial_check, self._INITIAL_CHECK_TIMEOUT, initial_error),
            (download_started, self._DOWNLOAD_STARTED_TIMEOUT, dls_error),
            (download_finished, self._DOWNLOAD_FINISHED_TIMEOUT, dlf_error),
            (update_complete, self._UPDATE_COMPLETED_TIMEOUT, uc_error)
        ]


    def _get_expected_event_for_post_reboot_check(self, source_release,
                                                  target_release):
        """Creates the expected event fired during post-reboot update check."""
        post_reboot_check = uee.UpdateEngineEvent(
            event_type=uee.EVENT_TYPE_REBOOTED_AFTER_UPDATE,
            event_result=uee.EVENT_RESULT_SUCCESS,
            version=target_release,
            previous_version=source_release,
            on_error=self._error_reboot_after_update)
        err = self._timeout_error_message(
            'a successful reboot notification',
            self._POST_REBOOT_TIMEOUT,
            uee.EVENT_TYPE_REBOOTED_AFTER_UPDATE)

        self._expected_events = [
            (post_reboot_check, self._POST_REBOOT_TIMEOUT, err)
        ]


    def _read_hostlog_events(self):
        """Read the list of events from the hostlog json file."""
        if len(self._hostlog_events) <= self._num_consumed_events:
            try:
                with open(self._hostlog_filename, 'r') as out_log:
                    self._hostlog_events = json.loads(out_log.read())
            except Exception as e:
                raise error.TestFail('Error while reading the hostlogs '
                                     'from devserver: %s' % e)


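    # Each hostlog entry is a dict of event fields; _get_next_hostlog_event()
    # below converts every value to a string. Illustrative (assumed) shape of
    # one entry after conversion:
    #   {'timestamp': '2021-01-01 12:00:00',
    #    'event_type': '<numeric omaha event type>',
    #    'event_result': '<numeric omaha event result>',
    #    'version': '10225.0.0', 'previous_version': '0.0.0.0'}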
    def _get_next_hostlog_event(self):
        """Returns the next event from the hostlog json file.

        @return The next new event in the hostlog, or None if no such event
                was found or an error occurred.
        """
        self._read_hostlog_events()
        # Return the next new event, if one is found.
        if len(self._hostlog_events) > self._num_consumed_events:
            new_event = {
                key: str(val) for key, val
                in self._hostlog_events[self._num_consumed_events].iteritems()
            }
            self._num_consumed_events += 1
            logging.info('Consumed new event: %s', new_event)
            return new_event


    def _verify_event_with_timeout(self, expected_event, timeout, on_timeout):
        """Verifies that an expected event occurs within a given timeout.

        @param expected_event: an expected event.
        @param timeout: specified in seconds.
        @param on_timeout: A string to return if a timeout occurs, or None.

        @return None if the event complies, an error string otherwise.
        """
        actual_event = self._get_next_hostlog_event()
        if actual_event:
            # If this is the first event, use its timestamp as the current
            # time.
            if self._current_timestamp is None:
                self._current_timestamp = datetime.strptime(
                    actual_event['timestamp'], '%Y-%m-%d %H:%M:%S')

            # Get the timestamp of the current event and convert it to a
            # datetime.
            timestamp = actual_event['timestamp']
            event_timestamp = datetime.strptime(timestamp, '%Y-%m-%d %H:%M:%S')

            # Add the timeout onto the previous timestamp to get its expiry.
            event_timeout = self._current_timestamp + timedelta(
                seconds=timeout)

            # If the event happened before the timeout, compare it against
            # the expected event.
            if event_timestamp < event_timeout:
                difference = event_timestamp - self._current_timestamp
                logging.info('Event took %s seconds to fire during the '
                             'update', difference.seconds)
                result = expected_event.equals(actual_event)
                self._current_timestamp = event_timestamp
                return result

        logging.error('The expected event was not found in the hostlog: %s',
                      expected_event)
        return on_timeout


    def _error_initial_check(self, expected, actual, mismatched_attrs):
        """Error message for when an update fails at the initial check."""
        err_msg = ('The update test appears to have completed successfully '
                   'but we found a problem while verifying the hostlog of '
                   'events returned from the update. Some attributes '
                   'reported for the initial update check event are not what '
                   'we expected: %s. ' % mismatched_attrs)
        if 'version' in mismatched_attrs:
            err_msg += ('The expected version is (%s) but the reported '
                        'version was (%s). ' % (expected['version'],
                                                actual['version']))
            err_msg += ('If the reported version equals the target version, '
                        'it is likely we retried the update because the test '
                        'thought the first attempt failed when it actually '
                        'succeeded (e.g. due to an SSH disconnect, the DUT '
                        'not being reachable by hostname, or stateful '
                        'failing to apply after rootfs succeeded). The '
                        'second update attempt is then started from the '
                        'target version instead of the source version, so '
                        'our hostlog verification is invalid.')
        err_msg += ('Check the full hostlog for this update in the %s file '
                    'in the %s directory.'
                    % (self._DEVSERVER_HOSTLOG_ROOTFS,
                       dev_server.AUTO_UPDATE_LOG_DIR))
        return err_msg


    def _error_incorrect_event(self, expected, actual, mismatched_attrs):
        """Error message for when an event is not what we expect."""
        return ('The update appears to have completed successfully but '
                'when analysing the update events in the hostlog we found '
                'that one of the events is incorrect. This should never '
                'happen. The mismatched attributes are: %s. We expected %s, '
                'but got %s.' % (mismatched_attrs, expected, actual))


    def _error_reboot_after_update(self, expected, actual, mismatched_attrs):
        """Error message for problems in the post-reboot update check."""
        err_msg = ('The update completed successfully but there was a '
                   'problem with the post-reboot update check. After a '
                   'successful update, we do a final update check to parse a '
                   'unique omaha request. The mismatched attributes for this '
                   'update check were %s. ' % mismatched_attrs)
        if 'event_result' in mismatched_attrs:
            err_msg += ('The event_result was expected to be (%s:%s) but '
                        'reported (%s:%s). ' %
                        (expected['event_result'],
                         uee.get_event_result(expected['event_result']),
                         actual.get('event_result'),
                         uee.get_event_result(actual.get('event_result'))))
        if 'event_type' in mismatched_attrs:
            err_msg += ('The event_type was expected to be (%s:%s) but '
                        'reported (%s:%s). ' %
                        (expected['event_type'],
                         uee.get_event_type(expected['event_type']),
                         actual.get('event_type'),
                         uee.get_event_type(actual.get('event_type'))))
        if 'version' in mismatched_attrs:
            err_msg += ('The version was expected to be (%s) but reported '
                        '(%s). This probably means that the payload we '
                        'applied was incorrect or corrupt. ' %
                        (expected['version'], actual['version']))
        if 'previous_version' in mismatched_attrs:
            err_msg += ('The previous_version was expected to be (%s) but '
                        'reported (%s). This can happen if we retried the '
                        'update after the rootfs update completed on the '
                        'first attempt but the test still failed, or if '
                        'stateful was wiped and '
                        '/var/lib/update_engine/prefs/previous-version was '
                        'deleted. ' % (expected['previous_version'],
                                       actual['previous_version']))
        err_msg += ('You can see the full hostlog for this update check in '
                    'the %s file within the %s directory. ' %
                    (self._DEVSERVER_HOSTLOG_REBOOT,
                     dev_server.AUTO_UPDATE_LOG_DIR))
        return err_msg


    def _timeout_error_message(self, desc, timeout, event_type=None):
        """Error message for when an event takes too long to fire."""
        if event_type is not None:
            desc += ' (%s)' % uee.get_event_type(event_type)
        return ('The update completed successfully but one of the steps of '
                'the update took longer than we would like. We failed to '
                'receive %s within %d seconds.' % (desc, timeout))


    def _stage_payload_by_uri(self, payload_uri, properties_file=True):
        """Stages a payload based on its GS URI.

        This infers the build's label, filename and GS archive from the
        provided GS URI.

        @param payload_uri: The full GS URI of the payload.
        @param properties_file: If True, also stage the update payload
                                properties file.

        @return URL of the staged payload (and properties file) on the server.

        @raise error.TestError if there's a problem with staging.
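
        For example (an illustrative URI reusing the sample build name used
        elsewhere in this file): a payload_uri of
        gs://chromeos-image-archive/samus-release/R65-10225.0.0/payload.bin
        yields the build name samus-release/R65-10225.0.0, and stages
        payload.bin plus payload.bin.json when properties_file is True.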

        """
        archive_url, _, filename = payload_uri.rpartition('/')
        build_name = urlparse.urlsplit(archive_url).path.strip('/')
        filenames = [filename]
        if properties_file:
            filenames.append(filename + '.json')
        return self._stage_payload(build_name, filenames,
                                   archive_url=archive_url)


    def _stage_payload(self, build_name, filenames, archive_url=None):
        """Stages the given payload onto the devserver.

        Works for either a stateful or a full/delta test payload. Expects
        either a GS path or a combination of build_name + filenames.

        @param build_name: The build name e.g. x86-mario-release/<version>.
                           If set, assumes the default GS archive bucket and
                           requires filenames to be specified.
        @param filenames: In conjunction with build_name, these are the files
                          you are downloading.
        @param archive_url: An optional GS archive location, if not using the
                            devserver's default.

        @return URL of the staged payload (and properties file) on the server.

        @raise error.TestError if there's a problem with staging.

        """
        try:
            self._autotest_devserver.stage_artifacts(image=build_name,
                                                     files=filenames,
                                                     archive_url=archive_url)
            return (self._autotest_devserver.get_staged_file_url(f, build_name)
                    for f in filenames)
        except dev_server.DevServerException as e:
            raise error.TestError('Failed to stage payload: %s' % e)


    def _get_least_loaded_devserver(self, test_conf):
        """Finds a devserver to use.

        We first try to pick the devserver with the least load. If all
        devservers' loads are higher than the threshold, fall back to the old
        behavior of picking a devserver based on the payload URI, with which
        ImageServer.resolve will return a random devserver based on the hash
        of the URI. The picked devserver needs to respect the location of the
        host if 'prefer_local_devserver' is set to True or
        'restricted_subnets' is set.

        @param test_conf: a dictionary of test settings.
        """
        # TODO(dhaddock): Change back to using least loaded when
        # crbug.com/1010226 is resolved.
        autotest_devserver = dev_server.ImageServer.resolve(
            test_conf['target_payload_uri'], self._host.hostname)
        devserver_hostname = urlparse.urlparse(
            autotest_devserver.url()).hostname

        logging.info('Devserver chosen for this run: %s', devserver_hostname)
        return autotest_devserver


    def _get_payload_url(self, build=None, full_payload=True):
        """
        Gets the Google Storage URL of the full or delta payload for a build.

        @param build: build string e.g. samus-release/R65-10225.0.0.
        @param full_payload: True for a full payload, False for a delta.

        @returns the payload URL.
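
        For example (illustrative), with build samus-release/R65-10225.0.0
        and full_payload=True, payloads are listed with a pattern like
        <image storage server>/samus-release/R65-10225.0.0/chromeos_R65-10225.0.0*_full_*
        and the first match is returned.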

        """
        if build is None:
            if self._job_repo_url is None:
                self._job_repo_url = self._get_job_repo_url()
            ds_url, build = tools.get_devserver_build_from_package_url(
                self._job_repo_url)
            self._autotest_devserver = dev_server.ImageServer(ds_url)

        gs = dev_server._get_image_storage_server()
        if full_payload:
            # Example: chromeos_R65-10225.0.0_samus_full_dev.bin
            regex = 'chromeos_%s*_full_*' % build.rpartition('/')[2]
        else:
            # Example: chromeos_R65-10225.0.0_R65-10225.0.0_samus_delta_dev.bin
            regex = 'chromeos_%s*_delta_*' % build.rpartition('/')[2]
        payload_url_regex = gs + build + '/' + regex
        logging.debug('Trying to find payloads at %s', payload_url_regex)
        payloads = utils.gs_ls(payload_url_regex)
        if not payloads:
            raise error.TestFail('Could not find payload for %s' % build)
        logging.debug('Payloads found: %s', payloads)
        return payloads[0]


    def _get_partial_path_from_url(self, url):
        """
        Strips the image storage server prefix from a GS URL, leaving the
        partial path to the payload.

        Example: gs://chromeos-image-archive/samus-release/R77-112.0.0/bla.bin
        returns samus-release/R77-112.0.0/bla.bin.

        @param url: The Google Storage URL.

        """
        gs = dev_server._get_image_storage_server()
        staged_path = url.partition(gs)
        return staged_path[2]


    @staticmethod
    def _get_stateful_uri(build_uri):
        """Returns a complete GS URI of a stateful update given a build path."""
        return '/'.join([build_uri.rstrip('/'), 'stateful.tgz'])


    def _get_job_repo_url(self):
        """Gets the job_repo_url argument supplied to the test by the lab."""
        if self._hosts is not None:
            self._host = self._hosts[0]
        if self._host is None:
            raise error.TestFail('No host specified by AU test.')
        info = self._host.host_info_store.get()
        return info.attributes.get(self._host.job_repo_url_attribute, '')


    def _stage_payloads(self, payload_uri, archive_uri, payload_type='full'):
        """Stages a payload and its associated stateful on the devserver."""
        if payload_uri:
            staged_uri, _ = self._stage_payload_by_uri(payload_uri)
            logging.info('Staged %s payload from %s at %s.', payload_type,
                         payload_uri, staged_uri)

            # Figure out where to get the matching stateful payload.
            if archive_uri:
                stateful_uri = self._get_stateful_uri(archive_uri)
            else:
                stateful_uri = self._payload_to_stateful_uri(payload_uri)
            staged_stateful = self._stage_payload_by_uri(
                stateful_uri, properties_file=False)

            logging.info('Staged stateful from %s at %s.', stateful_uri,
                         staged_stateful)
            return staged_uri, staged_stateful

        return None, None


    def _payload_to_stateful_uri(self, payload_uri):
        """Given a payload GS URI, returns the corresponding stateful URI."""
        build_uri = payload_uri.rpartition('/')[0]
        if build_uri.endswith('payloads'):
            build_uri = build_uri.rpartition('/')[0]
        return self._get_stateful_uri(build_uri)


    def _copy_payload_to_public_bucket(self, payload_url):
        """
        Copies the payload to a public bucket and makes the link public.

        @param payload_url: Payload URL on Google Storage.

        @returns The payload URL that is now publicly accessible.
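
        For example (illustrative filename), a payload copied into
        gs://chromeos-throw-away-bucket/CrOSPayloads/Cellular/ as payload.bin
        is returned as
        https://storage.googleapis.com/chromeos-throw-away-bucket/CrOSPayloads/Cellular/payload.bin.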

        """
        payload_filename = payload_url.rpartition('/')[2]
        utils.run('gsutil cp %s* %s' % (payload_url, self._CELLULAR_BUCKET))
        new_gs_url = self._CELLULAR_BUCKET + payload_filename
        utils.run('gsutil acl ch -u AllUsers:R %s*' % new_gs_url)
        return new_gs_url.replace('gs://', 'https://storage.googleapis.com/')


    def _get_chromeos_version(self):
        """Reads the ChromeOS version from /etc/lsb-release."""
        lsb = self._host.run('cat /etc/lsb-release').stdout
        return lsbrelease_utils.get_chromeos_release_version(lsb)


    def _check_for_cellular_entries_in_update_log(self,
                                                  update_engine_log=None):
        """
        Checks update_engine.log for log entries about cellular.

        @param update_engine_log: The text of an update_engine.log file.

        """
        logging.info('Making sure we have cellular entries in update_engine '
                     'log.')
        line1 = ('Allowing updates over cellular as permission preference is '
                 'set to true.')
        line2 = 'We are connected via cellular, Updates allowed: Yes'
        for line in [line1, line2]:
            self._check_update_engine_log_for_entry(
                line, raise_error=True, update_engine_log=update_engine_log)


    def _disconnect_then_reconnect_network(self, update_url):
        """
        Disconnects the network for a couple of minutes then reconnects.

        @param update_url: A URL to use to check that we are online.

        """
        self._run_client_test_and_check_result(
            'autoupdate_DisconnectReconnectNetwork', update_url=update_url)


    def _suspend_then_resume(self):
        """Suspends and resumes the host DUT."""
        try:
            self._host.suspend(suspend_time=30)
        except error.AutoservSuspendError:
            logging.exception('Suspend did not last the entire time.')


    def _run_client_test_and_check_result(self, test_name, **kwargs):
        """
        Kicks off a client autotest and checks that it didn't fail.

        @param test_name: client test name.
        @param kwargs: key-value arguments to pass to the test.

        """
        client_at = autotest.Autotest(self._host)
        client_at.run_test(test_name, **kwargs)
        client_at._check_client_test_result(self._host, test_name)


    def _create_hostlog_files(self):
        """Creates the two hostlog files for the update.

        To ensure the update was successful we need to compare the update
        events against the expected update events. There is one hostlog for
        the rootfs update and one for the post-reboot update check.
        """
        hostlog = self._omaha_devserver.get_hostlog(self._host.ip,
                                                    wait_for_reboot_events=True)
        if hostlog is None:
            err_str = 'Timed out getting the hostlog from the devserver.'
            err_code = self._get_last_error_string()
            if err_code is not None:
                err_str = ('%s Last error in update_engine.log: %s' %
                           (err_str, err_code))
            raise error.TestError(err_str)

        logging.info('Hostlog: %s', hostlog)
        # File names to save the hostlog events to.
        rootfs_hostlog = os.path.join(self.resultsdir, 'hostlog_rootfs')
        reboot_hostlog = os.path.join(self.resultsdir, 'hostlog_reboot')

        # Each time we reboot in the middle of an update we ping omaha again
        # for each update event. So parse the list backwards to get the final
        # events.
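        # Illustrative example (assumed counts): if the final attempt produced
        # five events - initial check, download started, download finished,
        # update complete, then the post-reboot check - hostlog[-1:] below is
        # the post-reboot event and the preceding slice keeps the
        # _ROOTFS_HOSTLOG_EVENTS (4) rootfs events before it.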
        with open(reboot_hostlog, 'w') as outfile:
            json.dump(hostlog[-1:], outfile)
        with open(rootfs_hostlog, 'w') as outfile:
            json.dump(
                hostlog[len(hostlog) - 1 - self._ROOTFS_HOSTLOG_EVENTS:-1],
                outfile)

        return rootfs_hostlog, reboot_hostlog


    def _set_active_p2p_host(self, host):
        """
        Chooses which P2P host device to run commands on.

        For P2P tests with multiple DUTs we need to be able to choose which
        host within self._hosts we want to issue commands on.

        @param host: The host to run commands on.

        """
        self._create_update_engine_variables(host.run, host.get_file)


    def _change_cellular_setting_in_update_engine(self,
                                                  update_over_cellular=True):
        """
        Toggles the update_over_cellular setting in update_engine.

        @param update_over_cellular: True to enable, False to disable.

        """
        answer = 'yes' if update_over_cellular else 'no'
        cmd = 'update_engine_client --update_over_cellular=%s' % answer
        retry_util.RetryException(error.AutoservRunError, 2, self._run, cmd)


    def verify_update_events(self, source_release, hostlog_filename,
                             target_release=None):
        """Compares a hostlog file against a set of expected events.

        This is the main function of this class. It takes an expected source
        and target version along with a hostlog file location. It then
        generates the expected events based on that data and compares them
        against the events listed in the hostlog json file.
        """
        self._hostlog_events = []
        self._num_consumed_events = 0
        self._current_timestamp = None
        if target_release is not None:
            self._get_expected_event_for_post_reboot_check(source_release,
                                                           target_release)
        else:
            self._get_expected_events_for_rootfs_update(source_release)

        self._hostlog_filename = hostlog_filename
        logging.info('Checking update steps with hostlog file: %s',
                     self._hostlog_filename)

        for expected_event, timeout, on_timeout in self._expected_events:
            logging.info('Expecting %s within %s seconds', expected_event,
                         timeout)
            err_msg = self._verify_event_with_timeout(
                expected_event, timeout, on_timeout)
            if err_msg is not None:
                logging.error('Failed expected event: %s', err_msg)
                raise UpdateEngineEventMissing(err_msg)


    def get_update_url_for_test(self, job_repo_url, full_payload=True,
                                critical_update=False, public=False,
                                moblab=False):
        """
        Gets the correct update URL for autoupdate tests to use.

        There are a bunch of different update configurations required by AU
        tests. Some tests need a full payload, some need a delta payload,
        some require the omaha response to be critical or need to handle
        multiple DUTs, etc. This function returns the correct update URL for
        the test based on the input parameters.

        Ideally all updates would use an existing lab devserver to handle the
        update. However, the lab devservers' default setup does not work for
        all test needs, so we sometimes also kick off our own omaha_devserver
        for the test run.

        This function expects the test to have set self._host or self._hosts.

        @param job_repo_url: string url containing the current build.
        @param full_payload: bool, whether we want a full payload.
        @param critical_update: bool, whether we need a critical update.
        @param public: whether the url needs to be publicly accessible.
        @param moblab: True if we are running on moblab.

        @returns an update url string.

        """
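        # Overview of the branches below: a public URL is served by copying
        # the payload to a public GS bucket; a full, non-critical payload is
        # staged on and served by the lab devserver directly; everything else
        # (delta or critical updates) is staged and served by our own omaha
        # devserver instance started on the lab devserver.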
        if job_repo_url is None:
            self._job_repo_url = self._get_job_repo_url()
        else:
            self._job_repo_url = job_repo_url
        if not self._job_repo_url:
            raise error.TestFail('There was no job_repo_url so we cannot get '
                                 'a payload to use.')
        ds_url, build = tools.get_devserver_build_from_package_url(
            self._job_repo_url)

        # The lab devserver assigned to this test.
        lab_devserver = dev_server.ImageServer(ds_url)

        if public:
            # Get the google storage url of the payload. We will be copying
            # the payload to a public google storage bucket (similar location
            # to updates via the autest command).
            payload_url = self._get_payload_url(build,
                                                full_payload=full_payload)
            url = self._copy_payload_to_public_bucket(payload_url)
            logging.info('Public update URL: %s', url)
            return url

        if full_payload:
            if not critical_update:
                # Stage payloads on the lab devserver.
                self._autotest_devserver = lab_devserver
                self._autotest_devserver.stage_artifacts(build,
                                                         ['full_payload'])
                # Use the same lab devserver to also handle the update.
                url = self._autotest_devserver.get_update_url(build)
                logging.info('Full payload, non-critical update URL: %s', url)
                return url
            else:
                url_to_stage = self._get_payload_url(build, full_payload=True)
        else:
            # We need to stage the delta ourselves due to crbug.com/793434.
            url_to_stage = self._get_payload_url(build, full_payload=False)

        # Get the partial path to the payload,
        # e.g. samus-release/R77-113.0.0/blah.bin.
        payload_location = self._get_partial_path_from_url(url_to_stage)

        # We need to start our own devserver instance on the lab devserver
        # for the rest of the test scenarios.
        self._omaha_devserver = omaha_devserver.OmahaDevserver(
            lab_devserver.hostname, payload_location,
            critical_update=critical_update, moblab=moblab)
        self._omaha_devserver.start_devserver()

        # Stage the payloads on our new devserver.
        ds_url = 'http://%s' % self._omaha_devserver.get_netloc()
        self._autotest_devserver = dev_server.ImageServer(ds_url)
        self._stage_payload_by_uri(url_to_stage)
        url = self._omaha_devserver.get_update_url()
        logging.info('Update URL: %s', url)
        return url


class UpdateEngineEventMissing(error.TestFail):
    """Raised if the hostlog is missing an expected event."""
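

# Illustrative flow only (the driving test is assumed, the methods are this
# module's): a derived server-side AU test typically calls
# get_update_url_for_test() to pick and stage a payload, updates the DUT
# against that URL, then calls _create_hostlog_files() and feeds each
# resulting hostlog to verify_update_events() to check the rootfs and
# post-reboot events.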