1# Lint as: python2, python3 2# Copyright 2017 The Chromium OS Authors. All rights reserved. 3# Use of this source code is governed by a BSD-style license that can be 4# found in the LICENSE file. 5 6from __future__ import absolute_import 7from __future__ import division 8from __future__ import print_function 9 10import base64 11import json 12import logging 13import os 14import re 15import shutil 16import time 17from six.moves import zip 18from six.moves import zip_longest 19import six.moves.urllib.parse 20 21from datetime import datetime, timedelta 22from xml.etree import ElementTree 23 24from autotest_lib.client.common_lib import autotemp 25from autotest_lib.client.common_lib import autotest_enum 26from autotest_lib.client.common_lib import error 27from autotest_lib.client.common_lib import global_config 28from autotest_lib.client.common_lib import lsbrelease_utils 29from autotest_lib.client.common_lib import utils 30from autotest_lib.client.common_lib.cros import dev_server 31from autotest_lib.client.cros import constants 32from autotest_lib.client.cros.update_engine import dlc_util 33from autotest_lib.client.cros.update_engine import update_engine_event as uee 34from autotest_lib.client.cros.update_engine import update_engine_util 35from autotest_lib.server import autotest 36from autotest_lib.server import test 37from autotest_lib.server.cros import gsutil_wrapper 38from autotest_lib.server.cros.dynamic_suite import tools 39from autotest_lib.utils.frozen_chromite.lib import auto_updater 40from autotest_lib.utils.frozen_chromite.lib import auto_updater_transfer 41from autotest_lib.utils.frozen_chromite.lib import constants as chromite_constants 42from autotest_lib.utils.frozen_chromite.lib import gob_util 43from autotest_lib.utils.frozen_chromite.lib import osutils 44from autotest_lib.utils.frozen_chromite.lib import remote_access 45from autotest_lib.utils.frozen_chromite.lib import retry_util 46 47 48class UpdateEngineTest(test.test, 
                       update_engine_util.UpdateEngineUtil):
    """Base class for all autoupdate_ server tests.

    Contains useful functions shared between tests like staging payloads
    on devservers, verifying hostlogs, and launching client tests.

    """
    version = 1

    # Timeout periods, given in seconds.
    _INITIAL_CHECK_TIMEOUT = 12 * 60
    _DOWNLOAD_STARTED_TIMEOUT = 4 * 60
    _DOWNLOAD_FINISHED_TIMEOUT = 60 * 60
    _UPDATE_COMPLETED_TIMEOUT = 8 * 60
    _POST_REBOOT_TIMEOUT = 15 * 60

    # Name of the logfile generated by nebraska.py.
    _NEBRASKA_LOG = 'nebraska.log'

    # Version we tell the DUT it is on before update.
    _CUSTOM_LSB_VERSION = '0.0.0.0'

    # Public GS bucket used for payloads that must be reachable without
    # credentials (e.g. cellular tests).
    _CELLULAR_BUCKET = 'gs://chromeos-throw-away-bucket/CrOSPayloads/Cellular/'

    # Timestamp format used in hostlog event entries.
    _TIMESTAMP_FORMAT = '%Y-%m-%d %H:%M:%S'

    # Paygen.json file provides information about all builds on all channels.
    _PAYGEN_JSON_URI = 'gs://chromeos-build-release-console/paygen.json'

    # Subtest to use for logging into DUTs in a test.
    _LOGIN_TEST = 'login_LoginSuccess'
    _LOGIN_TEST_PIN = 'login_LoginPin'

    # Marker file that tells the lab infrastructure to re-provision stateful.
    _CORRUPT_STATEFUL_PATH = '/mnt/stateful_partition/.corrupt_stateful'

    _STATEFUL_ARCHIVE_NAME = 'stateful.tgz'
    _KERNEL_ARCHIVE_NAME = 'full_dev_part_KERN.bin.gz'
    _ROOTFS_ARCHIVE_NAME = 'full_dev_part_ROOT.bin.gz'

    # Kinds of update payloads this class knows how to locate/stage.
    _PAYLOAD_TYPE = autotest_enum.AutotestEnum('CROS', 'DLC', 'MINIOS')

    def initialize(self, host=None):
        """
        Sets default variables for the test.

        @param host: The DUT we will be running on.

        """
        self._current_timestamp = None
        self._host = host

        # Define functions used in update_engine_util. They stay None when no
        # host was supplied (e.g. unit-style invocations).
        self._run = self._host.run if self._host else None
        self._get_file = self._host.get_file if self._host else None

        # Utilities for DLC management
        self._dlc_util = dlc_util.DLCUtil(self._run)

        # URL pointing to the autotest package on a lab cache server. It is
        # used by lab runs to select the right board+build when finding the
        # update payloads. The cache server will also be used for downloading
        # the update.
        self._job_repo_url = None

        # The target build for the update. Uses the release builder path
        # format, ex: octopus-release/R102-14650.0.0
        self._build = None

        # Flag to indicate that the test has progressed far enough that
        # stateful should be restored on failure.
        self._should_restore_stateful = False

        self._autotest_devserver = None

    def cleanup(self):
        """Clean up update_engine autotests (save the update_engine log)."""
        if self._host:
            self._host.get_file(self._UPDATE_ENGINE_LOG, self.resultsdir)


    def _get_release_builder_path(self):
        """
        Returns the release builder path currently provisioned on the device
        which can be used to get the current board and build number.
        Ex: octopus-release/R102-14650.0.0
        """
        lsb_release_content = self._run(['cat', constants.LSB_RELEASE]).stdout
        builder_path = lsbrelease_utils.get_chromeos_release_builder_path(
                lsb_release_content)
        logging.info("Current release builder path on the DUT is %s",
                     builder_path)
        return builder_path


    def _get_expected_events_for_rootfs_update(self, source_release):
        """
        Creates a list of expected events fired during a rootfs update.

        There are 4 events fired during a rootfs update. We will create these
        in the correct order.

        @param source_release: The source build version.

        """
        return [
                uee.UpdateEngineEvent(
                        version=source_release,
                        timeout=self._INITIAL_CHECK_TIMEOUT),
                uee.UpdateEngineEvent(
                        event_type=uee.EVENT_TYPE_DOWNLOAD_STARTED,
                        event_result=uee.EVENT_RESULT_SUCCESS,
                        version=source_release,
                        timeout=self._DOWNLOAD_STARTED_TIMEOUT),
                uee.UpdateEngineEvent(
                        event_type=uee.EVENT_TYPE_DOWNLOAD_FINISHED,
                        event_result=uee.EVENT_RESULT_SUCCESS,
                        version=source_release,
                        timeout=self._DOWNLOAD_FINISHED_TIMEOUT),
                uee.UpdateEngineEvent(
                        event_type=uee.EVENT_TYPE_UPDATE_COMPLETE,
                        event_result=uee.EVENT_RESULT_SUCCESS,
                        version=source_release,
                        timeout=self._UPDATE_COMPLETED_TIMEOUT)
        ]


    def _get_expected_event_for_post_reboot_check(self, source_release,
                                                  target_release):
        """
        Creates the expected event fired during post-reboot update check.

        @param source_release: The source build version.
        @param target_release: The target build version.

        """
        return [
                uee.UpdateEngineEvent(
                        event_type=uee.EVENT_TYPE_REBOOTED_AFTER_UPDATE,
                        event_result=uee.EVENT_RESULT_SUCCESS,
                        version=target_release,
                        previous_version=source_release,
                        timeout = self._POST_REBOOT_TIMEOUT)
        ]


    def _verify_event_with_timeout(self, expected_event, actual_event):
        """
        Verify an expected event occurred before its timeout.

        @param expected_event: an expected event.
        @param actual_event: an actual event from the hostlog.

        @return None if event complies, an error string otherwise.

        """
        logging.info('Expecting %s within %s seconds', expected_event,
                     expected_event._timeout)
        if not actual_event:
            return ('No entry found for %s event.' % uee.get_event_type(
                    expected_event._expected_attrs['event_type']))
        logging.info('Consumed new event: %s', actual_event)
        # If this is the first event, set it as the current time
        if self._current_timestamp is None:
            self._current_timestamp = datetime.strptime(
                    actual_event['timestamp'], self._TIMESTAMP_FORMAT)

        # Get the time stamp for the current event and convert to datetime
        timestamp = actual_event['timestamp']
        event_timestamp = datetime.strptime(timestamp,
                                            self._TIMESTAMP_FORMAT)

        # Elapsed time between the previous event and this one; compared
        # against the expected event's timeout below.
        difference = event_timestamp - self._current_timestamp

        # We check if the difference > 7 hs. If so we assume that this is due to
        # timezone jump from local to utc caused by log format change
        # crrev.com/c/2652108 and adjust accordingly. Another assumption here is
        # that the DUT are in PST timezone. The jump will be 7 hs with DST and 8
        # hs without. This hack should be removed once reasonable time has
        # passed and we do not need to consider the old log format anymore.
        # TODO(crbug.com/c/1178930): Remove the hack below.
        if difference > timedelta(hours=7):
            logging.info(
                    'Detected a timezone jump with difference %s with event %s',
                    difference,
                    uee.get_event_type(
                            expected_event._expected_attrs['event_type']))
            # Subtract the assumed PST->UTC offset (8h without DST, 7h with).
            if difference > timedelta(hours=8):
                difference -= timedelta(hours=8)
            else:
                difference -= timedelta(hours=7)

        if difference < timedelta(seconds=expected_event._timeout):
            logging.info('Event took %s seconds to fire during the '
                         'update', difference.seconds)
            self._current_timestamp = event_timestamp
            # equals() returns None on a match, else the mismatched attrs.
            mismatched_attrs = expected_event.equals(actual_event)
            if mismatched_attrs is None:
                return None
            else:
                return self._error_incorrect_event(
                        expected_event, actual_event, mismatched_attrs)
        else:
            return self._timeout_error_message(expected_event,
                                               difference.seconds)


    def _error_incorrect_event(self, expected, actual, mismatched_attrs):
        """
        Error message for when an event is not what we expect.

        @param expected: The expected event that did not match the hostlog.
        @param actual: The actual event with the mismatched arg(s).
        @param mismatched_attrs: A list of mismatched attributes.

        """
        et = uee.get_event_type(expected._expected_attrs['event_type'])
        return ('Event %s had mismatched attributes: %s. We expected %s, but '
                'got %s.' % (et, mismatched_attrs, expected, actual))


    def _timeout_error_message(self, expected, time_taken):
        """
        Error message for when an event takes too long to fire.

        @param expected: The expected event that timed out.
        @param time_taken: How long it actually took, in seconds.

        """
        et = uee.get_event_type(expected._expected_attrs['event_type'])
        return ('Event %s should take less than %ds. It took %ds.'
                % (et, expected._timeout, time_taken))


    def _stage_payload_by_uri(self, payload_uri, properties_file=True):
        """Stage a payload based on its GS URI.

        This infers the build's label, filename and GS archive from the
        provided GS URI.

        @param payload_uri: The full GS URI of the payload.
        @param properties_file: If true, it will stage the update payload
                                properties file too.

        @return URL of the staged payload (and properties file) on the server.
                NOTE: this is a generator expression, not a tuple; callers
                unpack or iterate it.

        @raise error.TestError if there's a problem with staging.

        """
        archive_url, _, filename = payload_uri.rpartition('/')
        build_name = six.moves.urllib.parse.urlsplit(archive_url).path.strip(
                '/')
        filenames = [filename]
        if properties_file:
            filenames.append(filename + '.json')
        try:
            # Lazily resolve a devserver the first time we stage anything.
            if self._autotest_devserver is None:
                self._autotest_devserver = dev_server.ImageServer.resolve(
                        build_name, self._host.hostname)

            self._autotest_devserver.stage_artifacts(image=build_name,
                                                     files=filenames,
                                                     archive_url=archive_url)
            return (self._autotest_devserver.get_staged_file_url(f, build_name)
                    for f in filenames)
        except dev_server.DevServerException as e:
            raise error.TestError('Failed to stage payload: %s' % e)


    def _get_devserver_for_test(self, test_conf):
        """Find a devserver to use.

        We use the payload URI as the hash for ImageServer.resolve. The chosen
        devserver needs to respect the location of the host if
        'prefer_local_devserver' is set to True or 'restricted_subnets' is set.

        @param test_conf: a dictionary of test settings.

        """
        autotest_devserver = dev_server.ImageServer.resolve(
                test_conf['target_payload_uri'], self._host.hostname)
        devserver_hostname = six.moves.urllib.parse.urlparse(
                autotest_devserver.url()).hostname
        logging.info('Devserver chosen for this run: %s', devserver_hostname)
        return autotest_devserver


    def _get_payload_url(self,
                         full_payload=True,
                         payload_type=_PAYLOAD_TYPE.CROS):
        """
        Gets the GStorage URL of the full or delta payload for the target
        update version for either platform or DLC payloads.

        @param full_payload: True for full payload. False for delta.
        @param payload_type: The type of payload to get. Can be a value of the
                             _PAYLOAD_TYPE enum.

        @returns the payload URL. For example, a full payload URL looks like:
            gs://chromeos-image-archive/octopus-release/R102-14650.0.0/chromeos_R102-14650.0.0_octopus_full_dev.bin

        """
        image_path = global_config.global_config.get_config_value(
                'CROS', 'image_storage_server', type=str)
        # This forces a trailing '/'' if not there yet.
353 gs = os.path.join(image_path, '') 354 355 # Example payload names (AU): 356 # chromeos_R85-13265.0.0_eve_full_dev.bin 357 # chromeos_R85-13265.0.0_R85-13265.0.0_eve_delta_dev.bin 358 # Example payload names (DLC): 359 # dlc_sample-dlc_package_R85-13265.0.0_eve_full_dev.bin 360 # dlc_sample-dlc_package_R85-13265.0.0_R85-13265.0.0_eve_delta_dev.bin 361 # Example payload names (MiniOS): 362 # minios_R102-14667.0.0_guybrush_full_dev.bin 363 # minios_R102-14667.0.0_R102-14667.0.0_guybrush_delta_dev.bin 364 if payload_type is self._PAYLOAD_TYPE.DLC: 365 payload_prefix = 'dlc_*_%s_*.bin' 366 elif payload_type is self._PAYLOAD_TYPE.MINIOS: 367 payload_prefix = 'minios_*_%s_*.bin' 368 else: 369 payload_prefix = 'chromeos_*_%s_*.bin' 370 371 regex = payload_prefix % ('full' if full_payload else 'delta') 372 373 payload_url_regex = gs + self._build + '/' + regex 374 logging.debug('Trying to find payloads at %s', payload_url_regex) 375 payloads = utils.gs_ls(payload_url_regex) 376 if not payloads: 377 raise error.TestFail('Could not find payload for %s', self._build) 378 logging.debug('Payloads found: %s', payloads) 379 return payloads[0] 380 381 382 @staticmethod 383 def _get_stateful_uri(build_uri): 384 """Returns a complete GS URI of a stateful update given a build path.""" 385 return '/'.join([build_uri.rstrip('/'), 'stateful.tgz']) 386 387 388 def _get_job_repo_url(self, job_repo_url=None): 389 """Gets the job_repo_url argument supplied to the test by the lab.""" 390 if job_repo_url is not None: 391 return job_repo_url 392 if self._host is None: 393 raise error.TestFail('No host specified by AU test.') 394 info = self._host.host_info_store.get() 395 return info.attributes.get(self._host.job_repo_url_attribute, '') 396 397 398 def _stage_payloads(self, payload_uri, archive_uri): 399 """ 400 Stages payloads on the devserver. 401 402 @param payload_uri: URI for a GS payload to stage. 403 @param archive_uri: URI for GS folder containing payloads. 
This is used
                            to find the related stateful payload.

        @returns URI of staged payload, URI of staged stateful.

        """
        if not payload_uri:
            return None, None
        staged_uri, _ = self._stage_payload_by_uri(payload_uri)
        logging.info('Staged %s at %s.', payload_uri, staged_uri)

        # Figure out where to get the matching stateful payload.
        if archive_uri:
            stateful_uri = self._get_stateful_uri(archive_uri)
        else:
            stateful_uri = self._payload_to_stateful_uri(payload_uri)
        # NOTE(review): _stage_payload_by_uri returns a generator; with
        # properties_file=False it yields a single staged URL — confirm
        # callers expect the generator rather than the URL itself.
        staged_stateful = self._stage_payload_by_uri(stateful_uri,
                                                     properties_file=False)
        logging.info('Staged stateful from %s at %s.', stateful_uri,
                     staged_stateful)
        return staged_uri, staged_stateful



    def _payload_to_stateful_uri(self, payload_uri):
        """Given a payload GS URI, returns the corresponding stateful URI."""
        build_uri = payload_uri.rpartition('/payloads/')[0]
        return self._get_stateful_uri(build_uri)


    def _copy_payload_to_public_bucket(self,
                                       payload_url,
                                       use_globbing=True,
                                       destination_filename=None):
        """
        Copy payload and make link public (if not already there).

        @param payload_url: Payload URL on Google Storage.
        @param use_globbing: Use globbing with payload url as prefix.
        @param destination_filename: Filename of payload on public bucket if it
                should be different from the source filename. Note that gsutil
                will treat this as destination directory if `use_globbing` is
                true and resolves to multiple files.

        @returns The payload URL that is now publicly accessible.

        """
        payload_filename = payload_url.rpartition('/')[2]
        if destination_filename:
            payload_filename = destination_filename
        new_gs_url = self._CELLULAR_BUCKET + payload_filename
        # Public HTTPS form of the same object.
        public_url = new_gs_url.replace('gs://',
                                        'https://storage.googleapis.com/')

        src_url = '%s*' % payload_url if use_globbing else payload_url
        dst_url = new_gs_url if destination_filename else self._CELLULAR_BUCKET

        # Check if public bucket already has the payload.
        try:
            payloads = utils.gs_ls(new_gs_url)
            if payloads:
                logging.info(
                        'Payload already exists in public bucket. Returning '
                        'url to existing payload')
                return public_url
        except error.CmdError:
            logging.warning('No existing payload exists. Copying payload...')

        # '-n' skips the copy if the destination already exists.
        utils.run(['gsutil', 'cp', '-n', src_url, dst_url])
        utils.run(['gsutil', 'acl', 'ch', '-u', 'AllUsers:R',
                   '%s*' % new_gs_url])
        return public_url


    def _suspend_then_resume(self):
        """Suspends and resumes the host DUT."""
        try:
            self._host.suspend(suspend_time=30)
        except error.AutoservSuspendError:
            logging.exception('Suspend did not last the entire time.')


    def _run_client_test_and_check_result(self, test_name, **kwargs):
        """
        Kicks off a client autotest and checks that it didn't fail.

        @param test_name: client test name
        @param **kwargs: key-value arguments to pass to the test.

        """
        client_at = autotest.Autotest(self._host)
        client_at.run_test(test_name, **kwargs)
        client_at._check_client_test_result(self._host, test_name)


    def _extract_request_logs(self, update_engine_log, is_dlc=False):
        """
        Extracts request logs from an update_engine log.

        @param update_engine_log: The update_engine log as a string.
        @param is_dlc: True to return the request logs for the DLC updates
                       instead of the platform update.

        @returns a list object representing the platform (OS) request logs, or
                 a dictionary of lists representing DLC request logs,
                 keyed by DLC ID, if is_dlc is True.

        """
        # Looking for all request XML strings in the log.
        pattern = re.compile(r'<request.*?</request>', re.DOTALL)
        requests = pattern.findall(update_engine_log)

        # We are looking for patterns like this:
        # "2021-01-28T10:14:33.998217Z INFO update_engine: \
        # [omaha_request_action.cc(794)] Request: <?xml"
        timestamp_pattern = re.compile(
                r'(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}).* Request:.*xml')
        timestamps = [
                datetime.strptime(ts, '%Y-%m-%dT%H:%M:%S')
                for ts in timestamp_pattern.findall(update_engine_log)
        ]
        if len(timestamps) == 0:
            # We might be reading log in old format so try parsing with another regexp.
            # [0324/151230.562305:INFO:omaha_request_action.cc(501)] Request:
            timestamp_pattern_old = re.compile(
                    r'\[([0-9]+)/([0-9]+).*?\] Request:')

            # Since the old format uses local time, we want to convert it to utc time.
            is_dst = time.daylight and time.localtime().tm_isdst > 0
            utc_offset = timedelta(
                    seconds=(time.altzone if is_dst else time.timezone))
            if utc_offset > timedelta(seconds=0):
                logging.info('Parsing old log format. Adding utc offset of %s',
                             utc_offset)
            else:
                logging.warning(
                        'Local time to UTC conversion might fail. utc_offset=%s. time.altzone=%s, time.timezone=%s',
                        utc_offset, time.altzone, time.timezone)
            timestamps = [
                    # Just use the current year since the logs don't have the year
                    # value. Let's all hope tests don't start to fail on new year's
                    # eve LOL.
                    datetime(
                            datetime.now().year,
                            int(ts[0][0:2]),  # Month
                            int(ts[0][2:4]),  # Day
                            int(ts[1][0:2]),  # Hours
                            int(ts[1][2:4]),  # Minutes
                            int(ts[1][4:6])  # Seconds
                    ) + utc_offset
                    for ts in timestamp_pattern_old.findall(update_engine_log)
            ]

        # Each request must pair with exactly one timestamp.
        if len(requests) != len(timestamps):
            raise error.TestFail('Failed to properly parse the update_engine '
                                 'log file.')

        result = []
        dlc_results = {}
        for timestamp, request in zip(timestamps, requests):

            root = ElementTree.fromstring(request)

            # There may be events for multiple apps if DLCs are installed.
            # See below (trimmed) example request including DLC:
            #
            # <request requestid=...>
            #     <os version="Indy" platform=...></os>
            #     <app appid="{DB5199C7-358B-4E1F-B4F6-AF6D2DD01A38}"
            #          version="13265.0.0" track=...>
            #         <event eventtype="13" eventresult="1"></event>
            #     </app>
            #     <app appid="{DB5199C7-358B-4E1F-B4F6-AF6D2DD01A38}_sample-dlc"
            #          version="0.0.0.0" track=...>
            #         <event eventtype="13" eventresult="1"></event>
            #     </app>
            # </request>
            #
            # The first <app> section is for the platform update. The second
            # is for the DLC update.
            #
            # Example without DLC:
            # <request requestid=...>
            #     <os version="Indy" platform=...></os>
            #     <app appid="{DB5199C7-358B-4E1F-B4F6-AF6D2DD01A38}"
            #          version="13265.0.0" track=...>
            #         <event eventtype="13" eventresult="1"></event>
            #     </app>
            # </request>

            apps = root.findall('app')
            for app in apps:
                event = app.find('event')

                event_info = {
                        'version': app.attrib.get('version'),
                        'event_type': (int(event.attrib.get('eventtype'))
                                       if event is not None else None),
                        'event_result': (int(event.attrib.get('eventresult'))
                                         if event is not None else None),
                        'timestamp': timestamp.strftime(self._TIMESTAMP_FORMAT),
                }

                previous_version = (event.attrib.get('previousversion')
                                    if event is not None else None)
                if previous_version:
                    event_info['previous_version'] = previous_version

                # Check if the event is for the platform update or for a DLC
                # by checking the appid. For platform, the appid looks like:
                #     {DB5199C7-358B-4E1F-B4F6-AF6D2DD01A38}
                # For DLCs, it is the platform app ID + _ + the DLC ID:
                #     {DB5199C7-358B-4E1F-B4F6-AF6D2DD01A38}_sample-dlc
                id_segments = app.attrib.get('appid').split('_')
                if len(id_segments) > 1:
                    dlc_id = id_segments[1]
                    if dlc_id in dlc_results:
                        dlc_results[dlc_id].append(event_info)
                    else:
                        dlc_results[dlc_id] = [event_info]
                else:
                    result.append(event_info)

        if is_dlc:
            logging.info('Extracted DLC request logs: %s', dlc_results)
            return dlc_results
        else:
            logging.info('Extracted platform (OS) request log: %s', result)
            return result


    def _create_hostlog_files(self, ignore_event_rootfs=False):
        """Create the two hostlog files for the update.

        To ensure the update was successful we need to compare the update
        events against expected update events. There is a hostlog for the
        rootfs update and for the post reboot update check.

        @param ignore_event_rootfs: Ignores the last event in the rootfs
                                    log.

        """
        # Check that update logs exist for the update that just happened.
        if len(self._get_update_engine_logs()) < 2:
            err_msg = 'update_engine logs are missing. Cannot verify update.'
            raise error.TestFail(err_msg)

        # Each time we reboot in the middle of an update we ping omaha again
        # for each update event. So parse the list backwards to get the final
        # events.
        rootfs_hostlog = os.path.join(self.resultsdir, 'hostlog_rootfs')
        with open(rootfs_hostlog, 'w') as fp:
            # There are four expected hostlog events during update.
            extract_logs = self._extract_request_logs(
                    self._get_update_engine_log(1))
            if ignore_event_rootfs:
                # Drop the trailing event but still keep four entries.
                logs = extract_logs[-5:-1]
            else:
                logs = extract_logs[-4:]
            json.dump(logs, fp)

        reboot_hostlog = os.path.join(self.resultsdir, 'hostlog_reboot')
        with open(reboot_hostlog, 'w') as fp:
            # There is one expected hostlog event after reboot.
            json.dump(self._extract_request_logs(
                    self._get_update_engine_log(0))[:1], fp)

        return rootfs_hostlog, reboot_hostlog


    def _create_dlc_hostlog_files(self):
        """Create the rootfs and reboot hostlog files for DLC updates.

        Each DLC has its own set of update requests in the logs together with
        the platform update requests. To ensure the DLC update was successful
        we will compare the update events against the expected events, which
        are the same expected events as for the platform update. There is a
        hostlog for the rootfs update and the post-reboot update check for
        each DLC.

        @returns two dictionaries, one for the rootfs DLC update and one for
                 the post-reboot check. The keys are DLC IDs and the values
                 are the hostlog filenames.

        """
        dlc_rootfs_hostlogs = {}
        dlc_reboot_hostlogs = {}

        dlc_rootfs_request_logs = self._extract_request_logs(
                self._get_update_engine_log(1), is_dlc=True)

        for dlc_id in dlc_rootfs_request_logs:
            dlc_rootfs_hostlog = os.path.join(self.resultsdir,
                                              'hostlog_' + dlc_id)
            dlc_rootfs_hostlogs[dlc_id] = dlc_rootfs_hostlog
            with open(dlc_rootfs_hostlog, 'w') as fp:
                # Same number of events for DLC updates as for platform
                json.dump(dlc_rootfs_request_logs[dlc_id][-4:], fp)

        dlc_reboot_request_logs = self._extract_request_logs(
                self._get_update_engine_log(0), is_dlc=True)

        for dlc_id in dlc_reboot_request_logs:
            dlc_reboot_hostlog = os.path.join(self.resultsdir,
                                              'hostlog_' + dlc_id + '_reboot')
            dlc_reboot_hostlogs[dlc_id] = dlc_reboot_hostlog
            with open(dlc_reboot_hostlog, 'w') as fp:
                # Same number of events for DLC updates as for platform
                json.dump(dlc_reboot_request_logs[dlc_id][:1], fp)

        return dlc_rootfs_hostlogs, dlc_reboot_hostlogs


    def _set_active_p2p_host(self, host):
        """
        Choose which p2p host device to run commands on.

        For P2P tests with multiple DUTs we need to be able to choose which
        host within self._hosts we want to issue commands on.

        @param host: The host to run commands on.

        """
        self._set_util_functions(host.run, host.get_file)


    def _set_update_over_cellular_setting(self, update_over_cellular=True):
        """
        Toggles the update_over_cellular setting in update_engine.

        @param update_over_cellular: True to enable, False to disable.

        """
        answer = 'yes' if update_over_cellular else 'no'
        cmd = [self._UPDATE_ENGINE_CLIENT_CMD,
               '--update_over_cellular=%s' % answer]
        # Retry once on transient run errors.
        retry_util.RetryException(error.AutoservRunError, 2, self._run, cmd)


    def _copy_generated_nebraska_logs(self, logs_dir, identifier):
        """Copies nebraska logs from logs_dir into job results directory.

        The nebraska process on the device generates logs and stores those logs
        in a /tmp directory. The update engine generates update_engine.log
        during the auto-update which is also stored in the same /tmp directory.
        This method copies these logfiles from the /tmp directory into the job
        results directory.

        @param logs_dir: Directory containing paths to the log files generated
                         by the nebraska process.
        @param identifier: A string that is appended to the logfile when it is
                           saved so that multiple files with the same name can
                           be differentiated.
        """
        # '%s' placeholder is filled with each source filename below.
        partial_filename = '%s_%s_%s' % ('%s', self._host.hostname, identifier)
        src_files = [
                self._NEBRASKA_LOG,
                os.path.basename(self._UPDATE_ENGINE_LOG),
        ]

        for src_fname in src_files:
            source = os.path.join(logs_dir, src_fname)
            dest = os.path.join(self.resultsdir, partial_filename % src_fname)
            logging.debug('Copying logs from %s to %s', source, dest)
            try:
                shutil.copyfile(source, dest)
            except Exception as e:
                # Best effort: a missing log should not fail the test.
                logging.error('Could not copy logs from %s into %s due to '
                              'exception: %s', source, dest, e)

    @staticmethod
    def _get_update_parameters_from_uri(payload_uri):
        """Extract vars needed to update with a Google Storage payload URI.

        The two values we need are:
        (1) A build_name string e.g dev-channel/samus/9583.0.0
        (2) A filename of the exact payload file to use for the update. This
            payload needs to have already been staged on the devserver.

        @param payload_uri: Google Storage URI to extract values from

        """

        # gs://chromeos-releases/dev-channel/samus/9334.0.0/payloads/blah.bin
        # build_name = dev-channel/samus/9334.0.0
        # payload_file = payloads/blah.bin
        build_name = payload_uri[:payload_uri.index('payloads/')]
        build_name = six.moves.urllib.parse.urlsplit(build_name).path.strip(
                '/')
        payload_file = payload_uri[payload_uri.index('payloads/'):]

        logging.debug('Extracted build_name: %s, payload_file: %s from %s.',
                      build_name, payload_file, payload_uri)
        return build_name, payload_file


    def _update_stateful(self):
        # Tries to update stateful without clobbering it.
        return self._restore_stateful(clobber_stateful=False)

    def _restore_stateful(self, clobber_stateful=True, public_bucket=False):
        """
        Restore the stateful partition after a destructive test.

        @param clobber_stateful: True to wipe clobber user state.
        @param public_bucket: True to restore from a public bucket.

        """
        # Test failed before any update preparations began. No need to fix stateful.
        if not self._should_restore_stateful:
            return

        # Fallback to lab provisioning if this function fails to restore.
        self._run(['touch', self._CORRUPT_STATEFUL_PATH])

        # Stage stateful payload.
        statefuldev_url = self._stage_stateful(public_bucket)

        logging.info('Restoring stateful partition...')
        # Setup local dir.
        self._run(['mkdir', '-p', '-m', '1777', '/usr/local/tmp'])

        # Download and extract the stateful payload.
        try:
            self._download_and_extract_stateful(statefuldev_url,
                                                self._STATEFUL_MOUNT_DIR)
        except error.AutoservRunError as e:
            err_str = 'Failed to restore the stateful partition'
            raise error.TestFail('%s: %s' % (err_str, str(e)))

        # Touch a file so changes are picked up after reboot.
        update_file = '/mnt/stateful_partition/.update_available'
        if clobber_stateful:
            # NOTE(review): '>' is passed inside the argv list; this relies on
            # self._run building a shell command so redirection applies —
            # confirm echo does not receive a literal '>' argument.
            self._run(['echo', '-n', 'clobber', '>', update_file])
        else:
            self._run(['touch', update_file])
        self._host.reboot()

        # Make sure python is available again.
        try:
            self._run(['python', '--version'])
        except error.AutoservRunError as e:
            err_str = 'Python not available after restoring stateful.'
            raise error.TestFail(err_str)

        logging.info('Stateful restored successfully.')


    def _download_and_extract_stateful(self,
                                       stateful_url,
                                       destination,
                                       keep_symlinks=False,
                                       members=None):
        """
        Download and extract the stateful partition.

        @param stateful_url: The url of the stateful archive.
        @param destination: The directory that the stateful archive will be
                            extracted into.
        @param keep_symlinks: Don't overwrite symlinks in destination directory.
        @param members: When provided, they specify the names of the stateful
                archive members to be extracted else everything will be
                extracted.

        """
        # Stream the archive straight into tar on the DUT; '|' works because
        # the command runs through the DUT's shell.
        cmd = [
                'curl', '--silent', '--show-error', '--max-time', '600',
                stateful_url, '|', '/bin/tar', '--ignore-command-error',
                '--overwrite', '--directory', destination, '-xz'
        ]
        if keep_symlinks:
            cmd += ['--keep-directory-symlink']
        if members:
            # Normalize members to be a list.
            if not isinstance(members, list):
                members = [members]
            cmd += members
        self._run(cmd)

    def _stage_stateful(self, public_bucket=False):
        """
        Stage the stateful archive for download.

        @param public_bucket: True to return archive on a public bucket.

        """
        if public_bucket:
            statefuldev_url = self._get_stateful_url_on_public_bucket()
        else:
            # Stage stateful payload.
            ds_url, build = tools.get_devserver_build_from_package_url(
                    self._job_repo_url)
            self._autotest_devserver = dev_server.ImageServer(ds_url)
            self._autotest_devserver.stage_artifacts(build, ['stateful'])

            # The staged archive is served from the devserver's static/ tree
            # rather than the update/ RPC path.
            update_url = self._autotest_devserver.get_update_url(build)
            statefuldev_url = update_url.replace('update', 'static')
            statefuldev_url += '/stateful.tgz'
        return statefuldev_url

    def verify_update_events(self, source_release, hostlog_filename,
                             target_release=None):
        """Compares a hostlog file against a set of expected events.

        In this class we build a list of expected events (list of
        UpdateEngineEvent objects), and compare that against a "hostlog"
        returned from update_engine from the update. This hostlog is a json
        list of events fired during the update.

        @param source_release: The source build version.
        @param hostlog_filename: The path to a hostlog returned from nebraska.
        @param target_release: The target build version.

        @raise error.TestFail if the hostlog cannot be read or an event does
            not match what was expected.

        """
        # A target_release means we are verifying the events fired after the
        # reboot into the new version; otherwise verify the rootfs update.
        if target_release is not None:
            expected_events = self._get_expected_event_for_post_reboot_check(
                    source_release, target_release)
        else:
            expected_events = self._get_expected_events_for_rootfs_update(
                    source_release)
        logging.info('Checking update against hostlog file: %s',
                     hostlog_filename)
        try:
            with open(hostlog_filename, 'r') as fp:
                hostlog_events = json.load(fp)
        except Exception as e:
            raise error.TestFail('Error reading the hostlog file: %s' % e)

        # zip_longest pads the shorter side with None, so a missing or extra
        # event is reported as a mismatch instead of being silently skipped.
        for expected, actual in zip_longest(expected_events, hostlog_events):
            err_msg = self._verify_event_with_timeout(expected, actual)
            if err_msg is not None:
                raise error.TestFail(('Hostlog verification failed: %s ' %
                                      err_msg))


    def get_payload_url_on_public_bucket(self,
                                         job_repo_url=None,
                                         full_payload=True,
                                         payload_type=_PAYLOAD_TYPE.CROS):
        """
        Get the google storage url of the payload in a public bucket.

        We will be copying the payload to a public google storage bucket
        (similar location to updates via autest command).

        @param job_repo_url: string url containing the current build.
        @param full_payload: True for full, False for delta.
        @param payload_type: The type of payload to get. Can be a value of the
                             _PAYLOAD_TYPE enum.

        """
        if job_repo_url is not None:
            self._job_repo_url = job_repo_url
            _, build = tools.get_devserver_build_from_package_url(
                    self._job_repo_url)
            self._build = build
            self._should_restore_stateful = True

        payload_url = self._get_payload_url(full_payload=full_payload,
                                            payload_type=payload_type)
        url = self._copy_payload_to_public_bucket(payload_url)
        logging.info('Public update URL: %s', url)
        return url


    def _get_stateful_url_on_public_bucket(self):
        """
        Get the google storage URL of the payload stateful in a public bucket.

        We will be copying the payload to a public google storage bucket
        (similar location to updates via autest command).

        """
        payload_url = self._get_payload_url()
        # The stateful archive sits next to the payload in the same directory.
        stateful_url = '/'.join(
                [payload_url.rpartition('/')[0], self._STATEFUL_ARCHIVE_NAME])
        # We have a flat directory structure in the public directory. Therefore
        # we need to disambiguate the stateful archive by prepending full
        # payload name.
        stateful_filename = '_'.join([
                os.path.splitext(payload_url.rpartition('/')[2])[0],
                self._STATEFUL_ARCHIVE_NAME
        ])
        url = self._copy_payload_to_public_bucket(
                stateful_url,
                use_globbing=False,
                destination_filename=stateful_filename)
        logging.info('Public stateful URL: %s', url)
        return url

    def _get_provision_url_on_public_bucket(self, release_path):
        """
        Copy the necessary artifacts for quick-provision to the public bucket
        and return the URL pointing to them.

        This is to enable local runs of tests that need to provision a source
        version on the DUT (such as m2n tests) without requiring lab cache
        server SSH access.

        @param release_path: path to the build artifacts in
            gs://chromeos-releases. Ex: dev-channel/asurada/14515.0.0. The
            output of _get_latest_serving_stable_build matches this format.

        """
        # We have a flat directory structure in the public directory. Therefore
        # we need to disambiguate the path to the provision artifacts.
        new_gs_dir = os.path.join(self._CELLULAR_BUCKET, 'provision',
                                  release_path)
        src_gs_dir = os.path.join('gs://chromeos-releases', release_path)
        provision_artifacts = [
                self._STATEFUL_ARCHIVE_NAME, self._ROOTFS_ARCHIVE_NAME,
                self._KERNEL_ARCHIVE_NAME
        ]

        for file in provision_artifacts:
            src_url = os.path.join(src_gs_dir, file)
            dst_url = os.path.join(new_gs_dir, file)
            # -n skips objects that were already copied; '-a public-read'
            # makes the copy fetchable without credentials.
            utils.run([
                    'gsutil', 'cp', '-n', '-a', 'public-read', src_url, dst_url
            ])

        public_url = new_gs_dir.replace('gs://',
                                        'https://storage.googleapis.com/')

        return public_url

    def _copy_quick_provision_to_dut(self):
        """ Copies the quick-provision script to the DUT from googlesource."""
        tmp = autotemp.tempdir(unique_id='m2n')
        src = os.path.join(tmp.name, 'quick-provision')
        dst = '/usr/local/bin/quick-provision'
        logging.info('Downloading quick-provision from googlesource')
        qp_url_path = '%s/+/%s/%s?format=text' % (
                'chromiumos/platform/dev-util', 'refs/heads/main',
                'quick-provision/quick-provision')
        # Gitiles serves file contents base64-encoded when format=text, so
        # decode before writing the script locally.
        contents_b64 = gob_util.FetchUrl(chromite_constants.EXTERNAL_GOB_HOST,
                                         qp_url_path)
        osutils.WriteFile(src, base64.b64decode(contents_b64).decode('utf-8'))
        self._host.send_file(src, dst)
        self._run(['chown', '$USER', dst])
        self._run(['chmod', '755', dst])

    def get_payload_for_nebraska(self,
                                 job_repo_url=None,
                                 build=None,
                                 full_payload=True,
                                 public_bucket=False,
                                 payload_type=_PAYLOAD_TYPE.CROS):
        """
        Gets a platform or DLC payload URL to be used with a nebraska instance
        on the DUT.

        @param job_repo_url: string url containing the current build and cache
                             server to use.
        @param build: string containing the build to use for the update,
                      like R102-14644.0.0 (the milestone number is required).
                      Only used for the public bucket update flow.
        @param full_payload: bool whether we want a full payload.
        @param public_bucket: True to return a payload on a public bucket.
        @param payload_type: The type of payload to get. Can be a value of the
                             _PAYLOAD_TYPE enum.

        @returns string URL of a payload staged on a lab devserver.

        """
        self._job_repo_url = self._get_job_repo_url(job_repo_url)

        if self._job_repo_url:
            logging.info('Getting payload for the build in job_repo_url')
            ds_url, build = tools.get_devserver_build_from_package_url(
                    self._job_repo_url)
            self._autotest_devserver = dev_server.ImageServer(ds_url)
            self._build = build
        elif build is not None:
            logging.info('Getting payload for the build provided in test_that')
            # self._build looks like this: octopus-release/R102-14650.0.0
            # Replace the version with the one provided in test_that.
            self._build = self._get_release_builder_path().rsplit(
                    '/')[0] + '/' + build
        else:
            logging.info('Getting payload for the current build on the DUT')
            self._build = self._get_release_builder_path()

        logging.info("Getting payload for nebraska for build %s", self._build)

        if public_bucket:
            return self.get_payload_url_on_public_bucket(
                    full_payload=full_payload, payload_type=payload_type)

        payload = self._get_payload_url(full_payload=full_payload,
                                        payload_type=payload_type)
        payload_url, _ = self._stage_payload_by_uri(payload)
        logging.info('Payload URL for Nebraska: %s', payload_url)
        # A payload was staged for this test, so flag stateful to be restored
        # during cleanup.
        self._should_restore_stateful = True
        return payload_url


    def update_device(self,
                      payload_uri,
                      clobber_stateful=False,
                      tag='source',
                      ignore_appid=False,
                      m2n=False):
        """
        Updates the device.

        Used by autoupdate_EndToEndTest and autoupdate_StatefulCompatibility,
        which use auto_updater to perform updates.

        @param payload_uri: The payload with which the device should be
                            updated.
        @param clobber_stateful: Boolean that determines whether the stateful
                                 of the device should be force updated and the
                                 TPM ownership should be cleared. By default,
                                 set to False.
        @param tag: An identifier string added to each log filename.
        @param ignore_appid: True to tell Nebraska to ignore the App ID field
                             when parsing the update request. This allows
                             the target update to use a different board's
                             image, which is needed for kernelnext updates.
        @param m2n: True for an m2n update. m2n update tests don't use signed
                    payloads from gs://chromeos-releases/, so the payload paths
                    need to be parsed differently.

        @raise error.TestFail if anything goes wrong with the update.

        """
        cros_preserved_path = ('/mnt/stateful_partition/unencrypted/'
                               'preserve/cros-update')
        if m2n:
            # The payload_uri for an m2n update looks like:
            # http://100.115.220.112:8082/static/octopus-release/R102-14692.0.0/chromeos_R102-14692.0.0_octopus_full_dev.bin
            # Take everything after 'static/': the last component is the
            # payload file, the components before it form the build name.
            payload_path = payload_uri[payload_uri.index('static/'):]
            build_name = '/'.join(payload_path.split('/')[1:-1])
            payload_filename = payload_path.split('/')[-1]
        else:
            # Otherwise the payload_uri looks like:
            # gs://chromeos-releases/dev-channel/octopus/14698.0.0/payloads/chromeos_14698.0.0_octopus_dev-channel_full_test.bin-gyzdkobygyzdck3swpkou632wan55vgx
            build_name, payload_filename = self._get_update_parameters_from_uri(
                    payload_uri)

        logging.info('Installing %s on the DUT', payload_uri)
        with remote_access.ChromiumOSDeviceHandler(
                self._host.hostname, base_dir=cros_preserved_path) as device:
            updater = auto_updater.ChromiumOSUpdater(
                    device,
                    build_name,
                    build_name,
                    yes=True,
                    payload_filename=payload_filename,
                    clobber_stateful=clobber_stateful,
                    clear_tpm_owner=clobber_stateful,
                    do_stateful_update=True,
                    staging_server=self._autotest_devserver.url(),
                    transfer_class=auto_updater_transfer.
                    LabEndToEndPayloadTransfer,
                    ignore_appid=ignore_appid)

            try:
                updater.RunUpdate()
            except Exception as e:
                logging.exception('ERROR: Failed to update device.')
                raise error.TestFail(str(e))
            finally:
                # Always collect the nebraska request logs, pass or fail, so
                # a failed update can be diagnosed from the results directory.
                self._copy_generated_nebraska_logs(
                        updater.request_logs_dir, identifier=tag)

    def _get_paygen_json(self):
        """Return the paygen.json file as a json dictionary."""
        bucket, paygen_file = self._PAYGEN_JSON_URI.rsplit('/', 1)
        tmpdir = '/tmp/m2n/'
        self._host.run('mkdir -p %s' % tmpdir)
        # paygen.json lives in a private bucket; fetch it via the DUT, which
        # has the required credentials.
        gsutil_wrapper.copy_private_bucket(host=self._host,
                                           bucket=bucket,
                                           filename=paygen_file,
                                           destination=tmpdir)
        return json.loads(
                self._host.run('cat %s' % os.path.join(tmpdir, paygen_file),
                               verbose=False).stdout)

    def _paygen_json_lookup(self, board, channel, delta_type):
        """
        Filters the paygen.json file by board, channel, and payload type.

        @param board: The board name.
        @param channel: The ChromeOS channel.
        @param delta_type: OMAHA, FSI, MILESTONE, STEPPING_STONE.

        @returns json results filtered by the input params.

        """
        paygen_data = self._get_paygen_json()
        result = []
        # paygen.json stores channel names without the '-channel' suffix.
        if channel.endswith('-channel'):
            channel = channel[:-len('-channel')]
        for delta in paygen_data['delta']:
            if ((delta['board']['public_codename'] == board)
                        and (delta.get('channel', None) == channel)
                        and (delta.get('delta_type', None) == delta_type)):
                result.append(delta)
        return result

    def _get_latest_serving_stable_build(self):
        """
        Returns the latest serving stable build on Omaha for the current board.

        It will lookup the paygen.json file and return the build label that can
        be passed to quick_provision. This is useful for M2N tests to easily find
        the first build to provision.

        @returns latest stable serving omaha build.

        """
        if lsbrelease_utils.is_moblab():
            raise error.TestNA("Moblab cannot run M2N tests. See b/193438616.")
        board = self._host.get_board().split(':')[1]
        # Boards like auron_paine are auron-paine in paygen.json and builders.
        if '_' in board:
            board = board.replace('_', '-')
        channel = 'stable-channel'
        delta_type = 'OMAHA'
        stable_paygen_data = self._paygen_json_lookup(board, channel,
                                                      delta_type)
        if not stable_paygen_data:
            # Some unibuild boards can have ALL of their stable serving builds
            # also be an FSI. When this happens we will not find an OMAHA
            # payload to use because GE only publishes one for a channel+build
            # pair. So try to get the latest FSI on stable channel.
            logging.info('No OMAHA payloads found. Falling back to FSI')
            stable_paygen_data = self._paygen_json_lookup(
                    board, channel, 'FSI')
        if not stable_paygen_data:
            raise error.TestFail(
                    'No stable build found in paygen.json for %s' % board)
        # NOTE(review): max() compares chrome_os_version lexicographically as
        # a string, which mis-orders versions of different digit counts
        # (e.g. '9999.0.0' > '14515.0.0') — confirm this cannot occur within
        # a single board's serving stable builds.
        latest_stable_paygen_data = max(
                stable_paygen_data, key=(lambda key: key['chrome_os_version']))
        return os.path.join(channel, board,
                            latest_stable_paygen_data["chrome_os_version"])