• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1# Lint as: python2, python3
2# Copyright 2017 The Chromium OS Authors. All rights reserved.
3# Use of this source code is governed by a BSD-style license that can be
4# found in the LICENSE file.
5
6from __future__ import absolute_import
7from __future__ import division
8from __future__ import print_function
9
10import json
11import logging
12import os
13import re
14import shutil
15from six.moves import zip
16from six.moves import zip_longest
17import six.moves.urllib.parse
18
19from datetime import datetime, timedelta
20from xml.etree import ElementTree
21
22from autotest_lib.client.common_lib import error
23from autotest_lib.client.common_lib import utils
24from autotest_lib.client.common_lib.cros import dev_server
25from autotest_lib.client.cros.update_engine import dlc_util
26from autotest_lib.client.cros.update_engine import update_engine_event as uee
27from autotest_lib.client.cros.update_engine import update_engine_util
28from autotest_lib.server import autotest
29from autotest_lib.server import test
30from autotest_lib.server.cros.dynamic_suite import tools
31from chromite.lib import auto_updater
32from chromite.lib import auto_updater_transfer
33from chromite.lib import remote_access
34from chromite.lib import retry_util
35
36
class UpdateEngineTest(test.test, update_engine_util.UpdateEngineUtil):
    """Base class for all autoupdate_ server tests.

    Contains useful functions shared between tests like staging payloads
    on devservers, verifying hostlogs, and launching client tests.

    """
    version = 1

    # Timeout periods, given in seconds. Each bounds how long the
    # corresponding update_engine event may take to appear in the hostlog
    # (see _get_expected_events_for_rootfs_update and
    # _get_expected_event_for_post_reboot_check below).
    _INITIAL_CHECK_TIMEOUT = 12 * 60
    _DOWNLOAD_STARTED_TIMEOUT = 4 * 60
    _DOWNLOAD_FINISHED_TIMEOUT = 20 * 60
    _UPDATE_COMPLETED_TIMEOUT = 4 * 60
    _POST_REBOOT_TIMEOUT = 15 * 60

    # Name of the logfile generated by nebraska.py.
    _NEBRASKA_LOG = 'nebraska.log'

    # Version we tell the DUT it is on before update.
    _CUSTOM_LSB_VERSION = '0.0.0.0'

    # Public GS bucket used to host payloads for cellular tests (see
    # _copy_payload_to_public_bucket).
    _CELLULAR_BUCKET = 'gs://chromeos-throw-away-bucket/CrOSPayloads/Cellular/'

    # strftime/strptime format for hostlog event timestamps.
    _TIMESTAMP_FORMAT = '%Y-%m-%d %H:%M:%S'
63
64    def initialize(self, host=None, hosts=None):
65        """
66        Sets default variables for the test.
67
68        @param host: The DUT we will be running on.
69        @param hosts: If we are running a test with multiple DUTs (eg P2P)
70                      we will use hosts instead of host.
71
72        """
73        self._current_timestamp = None
74        self._host = host
75        # Some AU tests use multiple DUTs
76        self._hosts = hosts
77
78        # Define functions used in update_engine_util.
79        self._run = self._host.run if self._host else None
80        self._get_file = self._host.get_file if self._host else None
81
82        # Utilities for DLC management
83        self._dlc_util = dlc_util.DLCUtil(self._run)
84
85
86    def cleanup(self):
87        """Clean up update_engine autotests."""
88        if self._host:
89            self._host.get_file(self._UPDATE_ENGINE_LOG, self.resultsdir)
90
91
92    def _get_expected_events_for_rootfs_update(self, source_release):
93        """
94        Creates a list of expected events fired during a rootfs update.
95
96        There are 4 events fired during a rootfs update. We will create these
97        in the correct order.
98
99        @param source_release: The source build version.
100
101        """
102        return [
103            uee.UpdateEngineEvent(
104                version=source_release,
105                timeout=self._INITIAL_CHECK_TIMEOUT),
106            uee.UpdateEngineEvent(
107                event_type=uee.EVENT_TYPE_DOWNLOAD_STARTED,
108                event_result=uee.EVENT_RESULT_SUCCESS,
109                version=source_release,
110                timeout=self._DOWNLOAD_STARTED_TIMEOUT),
111            uee.UpdateEngineEvent(
112                event_type=uee.EVENT_TYPE_DOWNLOAD_FINISHED,
113                event_result=uee.EVENT_RESULT_SUCCESS,
114                version=source_release,
115                timeout=self._DOWNLOAD_FINISHED_TIMEOUT),
116            uee.UpdateEngineEvent(
117                event_type=uee.EVENT_TYPE_UPDATE_COMPLETE,
118                event_result=uee.EVENT_RESULT_SUCCESS,
119                version=source_release,
120                timeout=self._UPDATE_COMPLETED_TIMEOUT)
121        ]
122
123
124    def _get_expected_event_for_post_reboot_check(self, source_release,
125                                                  target_release):
126        """
127        Creates the expected event fired during post-reboot update check.
128
129        @param source_release: The source build version.
130        @param target_release: The target build version.
131
132        """
133        return [
134            uee.UpdateEngineEvent(
135                event_type=uee.EVENT_TYPE_REBOOTED_AFTER_UPDATE,
136                event_result=uee.EVENT_RESULT_SUCCESS,
137                version=target_release,
138                previous_version=source_release,
139                timeout = self._POST_REBOOT_TIMEOUT)
140        ]
141
142
143    def _verify_event_with_timeout(self, expected_event, actual_event):
144        """
145        Verify an expected event occurred before its timeout.
146
147        @param expected_event: an expected event.
148        @param actual_event: an actual event from the hostlog.
149
150        @return None if event complies, an error string otherwise.
151
152        """
153        logging.info('Expecting %s within %s seconds', expected_event,
154                     expected_event._timeout)
155        if not actual_event:
156            return ('No entry found for %s event.' % uee.get_event_type
157                (expected_event._expected_attrs['event_type']))
158        logging.info('Consumed new event: %s', actual_event)
159        # If this is the first event, set it as the current time
160        if self._current_timestamp is None:
161            self._current_timestamp = datetime.strptime(
162                actual_event['timestamp'], self._TIMESTAMP_FORMAT)
163
164        # Get the time stamp for the current event and convert to datetime
165        timestamp = actual_event['timestamp']
166        event_timestamp = datetime.strptime(timestamp,
167                                            self._TIMESTAMP_FORMAT)
168
169        # If the event happened before the timeout
170        difference = event_timestamp - self._current_timestamp
171        if difference < timedelta(seconds=expected_event._timeout):
172            logging.info('Event took %s seconds to fire during the '
173                         'update', difference.seconds)
174            self._current_timestamp = event_timestamp
175            mismatched_attrs = expected_event.equals(actual_event)
176            if mismatched_attrs is None:
177                return None
178            else:
179                return self._error_incorrect_event(
180                    expected_event, actual_event, mismatched_attrs)
181        else:
182            return self._timeout_error_message(expected_event,
183                                               difference.seconds)
184
185
186    def _error_incorrect_event(self, expected, actual, mismatched_attrs):
187        """
188        Error message for when an event is not what we expect.
189
190        @param expected: The expected event that did not match the hostlog.
191        @param actual: The actual event with the mismatched arg(s).
192        @param mismatched_attrs: A list of mismatched attributes.
193
194        """
195        et = uee.get_event_type(expected._expected_attrs['event_type'])
196        return ('Event %s had mismatched attributes: %s. We expected %s, but '
197                'got %s.' % (et, mismatched_attrs, expected, actual))
198
199
200    def _timeout_error_message(self, expected, time_taken):
201        """
202        Error message for when an event takes too long to fire.
203
204        @param expected: The expected event that timed out.
205        @param time_taken: How long it actually took.
206
207        """
208        et = uee.get_event_type(expected._expected_attrs['event_type'])
209        return ('Event %s should take less than %ds. It took %ds.'
210                % (et, expected._timeout, time_taken))
211
212
213    def _stage_payload_by_uri(self, payload_uri, properties_file=True):
214        """Stage a payload based on its GS URI.
215
216        This infers the build's label, filename and GS archive from the
217        provided GS URI.
218
219        @param payload_uri: The full GS URI of the payload.
220        @param properties_file: If true, it will stage the update payload
221                                properties file too.
222
223        @return URL of the staged payload (and properties file) on the server.
224
225        @raise error.TestError if there's a problem with staging.
226
227        """
228        archive_url, _, filename = payload_uri.rpartition('/')
229        build_name = six.moves.urllib.parse.urlsplit(archive_url).path.strip(
230                '/')
231        filenames = [filename]
232        if properties_file:
233            filenames.append(filename + '.json')
234        try:
235            self._autotest_devserver.stage_artifacts(image=build_name,
236                                                     files=filenames,
237                                                     archive_url=archive_url)
238            return (self._autotest_devserver.get_staged_file_url(f, build_name)
239                    for f in filenames)
240        except dev_server.DevServerException as e:
241            raise error.TestError('Failed to stage payload: %s' % e)
242
243
244    def _get_devserver_for_test(self, test_conf):
245        """Find a devserver to use.
246
247        We use the payload URI as the hash for ImageServer.resolve. The chosen
248        devserver needs to respect the location of the host if
249        'prefer_local_devserver' is set to True or 'restricted_subnets' is set.
250
251        @param test_conf: a dictionary of test settings.
252
253        """
254        autotest_devserver = dev_server.ImageServer.resolve(
255            test_conf['target_payload_uri'], self._host.hostname)
256        devserver_hostname = six.moves.urllib.parse.urlparse(
257                autotest_devserver.url()).hostname
258        logging.info('Devserver chosen for this run: %s', devserver_hostname)
259        return autotest_devserver
260
261
262    def _get_payload_url(self, build=None, full_payload=True, is_dlc=False):
263        """
264        Gets the GStorage URL of the full or delta payload for this build, for
265        either platform or DLC payloads.
266
267        @param build: build string e.g eve-release/R85-13265.0.0.
268        @param full_payload: True for full payload. False for delta.
269        @param is_dlc: True to get the payload URL for sample-dlc.
270
271        @returns the payload URL.
272
273        """
274        if build is None:
275            if self._job_repo_url is None:
276                self._job_repo_url = self._get_job_repo_url()
277            ds_url, build = tools.get_devserver_build_from_package_url(
278                self._job_repo_url)
279            self._autotest_devserver = dev_server.ImageServer(ds_url)
280
281        gs = dev_server._get_image_storage_server()
282
283        # Example payload names (AU):
284        # chromeos_R85-13265.0.0_eve_full_dev.bin
285        # chromeos_R85-13265.0.0_R85-13265.0.0_eve_delta_dev.bin
286        # Example payload names (DLC):
287        # dlc_sample-dlc_package_R85-13265.0.0_eve_full_dev.bin
288        # dlc_sample-dlc_package_R85-13265.0.0_R85-13265.0.0_eve_delta_dev.bin
289        if is_dlc:
290            payload_prefix = 'dlc_*%s*_%s_*' % (build.rpartition('/')[2], '%s')
291        else:
292            payload_prefix = 'chromeos_*_%s_*.bin'
293
294        regex = payload_prefix % ('full' if full_payload else 'delta')
295
296        payload_url_regex = gs + build + '/' + regex
297        logging.debug('Trying to find payloads at %s', payload_url_regex)
298        payloads = utils.gs_ls(payload_url_regex)
299        if not payloads:
300            raise error.TestFail('Could not find payload for %s', build)
301        logging.debug('Payloads found: %s', payloads)
302        return payloads[0]
303
304
305    @staticmethod
306    def _get_stateful_uri(build_uri):
307        """Returns a complete GS URI of a stateful update given a build path."""
308        return '/'.join([build_uri.rstrip('/'), 'stateful.tgz'])
309
310
311    def _get_job_repo_url(self, job_repo_url=None):
312        """Gets the job_repo_url argument supplied to the test by the lab."""
313        if job_repo_url is not None:
314            return job_repo_url
315        if self._hosts is not None:
316            self._host = self._hosts[0]
317        if self._host is None:
318            raise error.TestFail('No host specified by AU test.')
319        info = self._host.host_info_store.get()
320        return info.attributes.get(self._host.job_repo_url_attribute, '')
321
322
323    def _stage_payloads(self, payload_uri, archive_uri):
324        """
325        Stages payloads on the devserver.
326
327        @param payload_uri: URI for a GS payload to stage.
328        @param archive_uri: URI for GS folder containing payloads. This is used
329                            to find the related stateful payload.
330
331        @returns URI of staged payload, URI of staged stateful.
332
333        """
334        if not payload_uri:
335            return None, None
336        staged_uri, _ = self._stage_payload_by_uri(payload_uri)
337        logging.info('Staged %s at %s.', payload_uri, staged_uri)
338
339        # Figure out where to get the matching stateful payload.
340        if archive_uri:
341            stateful_uri = self._get_stateful_uri(archive_uri)
342        else:
343            stateful_uri = self._payload_to_stateful_uri(payload_uri)
344        staged_stateful = self._stage_payload_by_uri(stateful_uri,
345                                                     properties_file=False)
346        logging.info('Staged stateful from %s at %s.', stateful_uri,
347                     staged_stateful)
348        return staged_uri, staged_stateful
349
350
351
352    def _payload_to_stateful_uri(self, payload_uri):
353        """Given a payload GS URI, returns the corresponding stateful URI."""
354        build_uri = payload_uri.rpartition('/payloads/')[0]
355        return self._get_stateful_uri(build_uri)
356
357
358    def _copy_payload_to_public_bucket(self, payload_url):
359        """
360        Copy payload and make link public.
361
362        @param payload_url: Payload URL on Google Storage.
363
364        @returns The payload URL that is now publicly accessible.
365
366        """
367        payload_filename = payload_url.rpartition('/')[2]
368        utils.run(['gsutil', 'cp', '%s*' % payload_url, self._CELLULAR_BUCKET])
369        new_gs_url = self._CELLULAR_BUCKET + payload_filename
370        utils.run(['gsutil', 'acl', 'ch', '-u', 'AllUsers:R',
371                   '%s*' % new_gs_url])
372        return new_gs_url.replace('gs://', 'https://storage.googleapis.com/')
373
374
375    def _suspend_then_resume(self):
376        """Suspends and resumes the host DUT."""
377        try:
378            self._host.suspend(suspend_time=30)
379        except error.AutoservSuspendError:
380            logging.exception('Suspend did not last the entire time.')
381
382
383    def _run_client_test_and_check_result(self, test_name, **kwargs):
384        """
385        Kicks of a client autotest and checks that it didn't fail.
386
387        @param test_name: client test name
388        @param **kwargs: key-value arguments to pass to the test.
389
390        """
391        client_at = autotest.Autotest(self._host)
392        client_at.run_test(test_name, **kwargs)
393        client_at._check_client_test_result(self._host, test_name)
394
395
396    def _extract_request_logs(self, update_engine_log, is_dlc=False):
397        """
398        Extracts request logs from an update_engine log.
399
400        @param update_engine_log: The update_engine log as a string.
401        @param is_dlc: True to return the request logs for the DLC updates
402                       instead of the platform update.
403        @returns a list object representing the platform (OS) request logs, or
404                 a dictionary of lists representing DLC request logs,
405                 keyed by DLC ID, if is_dlc is True.
406
407        """
408        # Looking for all request XML strings in the log.
409        pattern = re.compile(r'<request.*?</request>', re.DOTALL)
410        requests = pattern.findall(update_engine_log)
411
412        # We are looking for patterns like this:
413        # [0324/151230.562305:INFO:omaha_request_action.cc(501)] Request:
414        timestamp_pattern = re.compile(r'\[([0-9]+)/([0-9]+).*?\] Request:')
415        timestamps = [
416            # Just use the current year since the logs don't have the year
417            # value. Let's all hope tests don't start to fail on new year's
418            # eve LOL.
419            datetime(datetime.now().year,
420                     int(ts[0][0:2]),  # Month
421                     int(ts[0][2:4]),  # Day
422                     int(ts[1][0:2]),  # Hours
423                     int(ts[1][2:4]),  # Minutes
424                     int(ts[1][4:6]))  # Seconds
425            for ts in timestamp_pattern.findall(update_engine_log)
426        ]
427
428        if len(requests) != len(timestamps):
429            raise error.TestFail('Failed to properly parse the update_engine '
430                                 'log file.')
431
432        result = []
433        dlc_results = {}
434        for timestamp, request in zip(timestamps, requests):
435
436            root = ElementTree.fromstring(request)
437
438            # There may be events for multiple apps if DLCs are installed.
439            # See below (trimmed) example request including DLC:
440            #
441            # <request requestid=...>
442            #   <os version="Indy" platform=...></os>
443            #   <app appid="{DB5199C7-358B-4E1F-B4F6-AF6D2DD01A38}"
444            #       version="13265.0.0" track=...>
445            #     <event eventtype="13" eventresult="1"></event>
446            #   </app>
447            #   <app appid="{DB5199C7-358B-4E1F-B4F6-AF6D2DD01A38}_sample-dlc"
448            #       version="0.0.0.0" track=...>
449            #     <event eventtype="13" eventresult="1"></event>
450            #   </app>
451            # </request>
452            #
453            # The first <app> section is for the platform update. The second
454            # is for the DLC update.
455            #
456            # Example without DLC:
457            # <request requestid=...>
458            #   <os version="Indy" platform=...></os>
459            #   <app appid="{DB5199C7-358B-4E1F-B4F6-AF6D2DD01A38}"
460            #       version="13265.0.0" track=...>
461            #     <event eventtype="13" eventresult="1"></event>
462            #   </app>
463            # </request>
464
465            apps = root.findall('app')
466            for app in apps:
467                event = app.find('event')
468
469                event_info = {
470                    'version': app.attrib.get('version'),
471                    'event_type': (int(event.attrib.get('eventtype'))
472                                  if event is not None else None),
473                    'event_result': (int(event.attrib.get('eventresult'))
474                                    if event is not None else None),
475                    'timestamp': timestamp.strftime(self._TIMESTAMP_FORMAT),
476                }
477
478                previous_version = (event.attrib.get('previousversion')
479                                    if event is not None else None)
480                if previous_version:
481                    event_info['previous_version'] = previous_version
482
483                # Check if the event is for the platform update or for a DLC
484                # by checking the appid. For platform, the appid looks like:
485                #     {DB5199C7-358B-4E1F-B4F6-AF6D2DD01A38}
486                # For DLCs, it is the platform app ID + _ + the DLC ID:
487                #     {DB5199C7-358B-4E1F-B4F6-AF6D2DD01A38}_sample-dlc
488                id_segments = app.attrib.get('appid').split('_')
489                if len(id_segments) > 1:
490                    dlc_id = id_segments[1]
491                    if dlc_id in dlc_results:
492                        dlc_results[dlc_id].append(event_info)
493                    else:
494                        dlc_results[dlc_id] = [event_info]
495                else:
496                    result.append(event_info)
497
498        if is_dlc:
499            logging.info('Extracted DLC request logs: %s', dlc_results)
500            return dlc_results
501        else:
502            logging.info('Extracted platform (OS) request log: %s', result)
503            return result
504
505
506    def _create_hostlog_files(self):
507        """Create the two hostlog files for the update.
508
509        To ensure the update was successful we need to compare the update
510        events against expected update events. There is a hostlog for the
511        rootfs update and for the post reboot update check.
512
513        """
514        # Check that update logs exist for the update that just happened.
515        if len(self._get_update_engine_logs()) < 2:
516            err_msg = 'update_engine logs are missing. Cannot verify update.'
517            raise error.TestFail(err_msg)
518
519        # Each time we reboot in the middle of an update we ping omaha again
520        # for each update event. So parse the list backwards to get the final
521        # events.
522        rootfs_hostlog = os.path.join(self.resultsdir, 'hostlog_rootfs')
523        with open(rootfs_hostlog, 'w') as fp:
524            # There are four expected hostlog events during update.
525            json.dump(self._extract_request_logs(
526                self._get_update_engine_log(1))[-4:], fp)
527
528        reboot_hostlog = os.path.join(self.resultsdir, 'hostlog_reboot')
529        with open(reboot_hostlog, 'w') as fp:
530            # There is one expected hostlog events after reboot.
531            json.dump(self._extract_request_logs(
532                self._get_update_engine_log(0))[:1], fp)
533
534        return rootfs_hostlog, reboot_hostlog
535
536
537    def _create_dlc_hostlog_files(self):
538        """Create the rootfs and reboot hostlog files for DLC updates.
539
540        Each DLC has its own set of update requests in the logs together with
541        the platform update requests. To ensure the DLC update was successful
542        we will compare the update events against the expected events, which
543        are the same expected events as for the platform update. There is a
544        hostlog for the rootfs update and the post-reboot update check for
545        each DLC.
546
547        @returns two dictionaries, one for the rootfs DLC update and one for
548                 the post-reboot check. The keys are DLC IDs and the values
549                 are the hostlog filenames.
550
551        """
552        dlc_rootfs_hostlogs = {}
553        dlc_reboot_hostlogs = {}
554
555        dlc_rootfs_request_logs = self._extract_request_logs(
556            self._get_update_engine_log(1), is_dlc=True)
557
558        for dlc_id in dlc_rootfs_request_logs:
559            dlc_rootfs_hostlog = os.path.join(self.resultsdir,
560                                              'hostlog_' + dlc_id)
561            dlc_rootfs_hostlogs[dlc_id] = dlc_rootfs_hostlog
562            with open(dlc_rootfs_hostlog, 'w') as fp:
563                # Same number of events for DLC updates as for platform
564                json.dump(dlc_rootfs_request_logs[dlc_id][-4:], fp)
565
566        dlc_reboot_request_logs = self._extract_request_logs(
567            self._get_update_engine_log(0), is_dlc=True)
568
569        for dlc_id in dlc_reboot_request_logs:
570            dlc_reboot_hostlog = os.path.join(self.resultsdir,
571                                              'hostlog_' + dlc_id + '_reboot')
572            dlc_reboot_hostlogs[dlc_id] = dlc_reboot_hostlog
573            with open(dlc_reboot_hostlog, 'w') as fp:
574                # Same number of events for DLC updates as for platform
575                json.dump(dlc_reboot_request_logs[dlc_id][:1], fp)
576
577        return dlc_rootfs_hostlogs, dlc_reboot_hostlogs
578
579
580    def _set_active_p2p_host(self, host):
581        """
582        Choose which p2p host device to run commands on.
583
584        For P2P tests with multiple DUTs we need to be able to choose which
585        host within self._hosts we want to issue commands on.
586
587        @param host: The host to run commands on.
588
589        """
590        self._set_util_functions(host.run, host.get_file)
591
592
593    def _set_update_over_cellular_setting(self, update_over_cellular=True):
594        """
595        Toggles the update_over_cellular setting in update_engine.
596
597        @param update_over_cellular: True to enable, False to disable.
598
599        """
600        answer = 'yes' if update_over_cellular else 'no'
601        cmd = [self._UPDATE_ENGINE_CLIENT_CMD,
602               '--update_over_cellular=%s' % answer]
603        retry_util.RetryException(error.AutoservRunError, 2, self._run, cmd)
604
605
606    def _copy_generated_nebraska_logs(self, logs_dir, identifier):
607        """Copies nebraska logs from logs_dir into job results directory.
608
609        The nebraska process on the device generates logs and stores those logs
610        in a /tmp directory. The update engine generates update_engine.log
611        during the auto-update which is also stored in the same /tmp directory.
612        This method copies these logfiles from the /tmp directory into the job
613
614        @param logs_dir: Directory containing paths to the log files generated
615                         by the nebraska process.
616        @param identifier: A string that is appended to the logfile when it is
617                           saved so that multiple files with the same name can
618                           be differentiated.
619        """
620        partial_filename = '%s_%s_%s' % ('%s', self._host.hostname, identifier)
621        src_files = [
622            self._NEBRASKA_LOG,
623            os.path.basename(self._UPDATE_ENGINE_LOG),
624        ]
625
626        for src_fname in src_files:
627            source = os.path.join(logs_dir, src_fname)
628            dest = os.path.join(self.resultsdir, partial_filename % src_fname)
629            logging.debug('Copying logs from %s to %s', source, dest)
630            try:
631                shutil.copyfile(source, dest)
632            except Exception as e:
633                logging.error('Could not copy logs from %s into %s due to '
634                              'exception: %s', source, dest, e)
635
636    @staticmethod
637    def _get_update_parameters_from_uri(payload_uri):
638        """Extract vars needed to update with a Google Storage payload URI.
639
640        The two values we need are:
641        (1) A build_name string e.g dev-channel/samus/9583.0.0
642        (2) A filename of the exact payload file to use for the update. This
643        payload needs to have already been staged on the devserver.
644
645        @param payload_uri: Google Storage URI to extract values from
646
647        """
648
649        # gs://chromeos-releases/dev-channel/samus/9334.0.0/payloads/blah.bin
650        # build_name = dev-channel/samus/9334.0.0
651        # payload_file = payloads/blah.bin
652        build_name = payload_uri[:payload_uri.index('payloads/')]
653        build_name = six.moves.urllib.parse.urlsplit(build_name).path.strip(
654                '/')
655        payload_file = payload_uri[payload_uri.index('payloads/'):]
656
657        logging.debug('Extracted build_name: %s, payload_file: %s from %s.',
658                      build_name, payload_file, payload_uri)
659        return build_name, payload_file
660
661
    def _restore_stateful(self):
        """Restore the stateful partition after a destructive test.

        Stages the stateful payload on the devserver, streams it straight
        into /mnt/stateful_partition on the DUT, marks the partition for
        clobber, and reboots. Raises error.TestFail if the download/extract
        fails or if python is unavailable afterwards.
        """
        # Stage stateful payload.
        ds_url, build = tools.get_devserver_build_from_package_url(
                self._job_repo_url)
        self._autotest_devserver = dev_server.ImageServer(ds_url)
        self._autotest_devserver.stage_artifacts(build, ['stateful'])

        logging.info('Restoring stateful partition...')
        # Setup local dir.
        self._run(['mkdir', '-p', '-m', '1777', '/usr/local/tmp'])

        # Download and extract the stateful payload.
        # The staged artifacts live under 'static' rather than 'update'.
        update_url = self._autotest_devserver.get_update_url(build)
        statefuldev_url = update_url.replace('update', 'static')
        statefuldev_url += '/stateful.tgz'
        # Stream the tarball directly into the partition; the '|' is
        # interpreted by the shell on the DUT.
        cmd = [
                'curl', '--silent', '--show-error', '--max-time', '600',
                statefuldev_url, '|', 'tar', '--ignore-command-error',
                '--overwrite', '--directory', '/mnt/stateful_partition', '-xz'
        ]
        try:
            self._run(cmd)
        except error.AutoservRunError as e:
            err_str = 'Failed to restore the stateful partition'
            raise error.TestFail('%s: %s' % (err_str, str(e)))

        # Touch a file so changes are picked up after reboot.
        update_file = '/mnt/stateful_partition/.update_available'
        self._run(['echo', '-n', 'clobber', '>', update_file])
        self._host.reboot()

        # Make sure python is available again.
        try:
            self._run(['python', '--version'])
        except error.AutoservRunError as e:
            err_str = 'Python not available after restoring stateful.'
            raise error.TestFail(err_str)

        logging.info('Stateful restored successfully.')
702
703
704    def verify_update_events(self, source_release, hostlog_filename,
705                             target_release=None):
706        """Compares a hostlog file against a set of expected events.
707
708        In this class we build a list of expected events (list of
709        UpdateEngineEvent objects), and compare that against a "hostlog"
710        returned from update_engine from the update. This hostlog is a json
711        list of events fired during the update.
712
713        @param source_release: The source build version.
714        @param hostlog_filename: The path to a hotlog returned from nebraska.
715        @param target_release: The target build version.
716
717        """
718        if target_release is not None:
719            expected_events = self._get_expected_event_for_post_reboot_check(
720                source_release, target_release)
721        else:
722            expected_events = self._get_expected_events_for_rootfs_update(
723                source_release)
724        logging.info('Checking update against hostlog file: %s',
725                     hostlog_filename)
726        try:
727            with open(hostlog_filename, 'r') as fp:
728                hostlog_events = json.load(fp)
729        except Exception as e:
730            raise error.TestFail('Error reading the hostlog file: %s' % e)
731
732        for expected, actual in zip_longest(expected_events, hostlog_events):
733            err_msg = self._verify_event_with_timeout(expected, actual)
734            if err_msg is not None:
735                raise error.TestFail(('Hostlog verification failed: %s ' %
736                                     err_msg))
737
738
739    def get_update_url_for_test(self, job_repo_url=None, full_payload=True,
740                                stateful=False):
741        """
742        Returns a devserver update URL for tests that cannot use a Nebraska
743        instance on the DUT for updating.
744
745        This expects the test to set self._host or self._hosts.
746
747        @param job_repo_url: string url containing the current build.
748        @param full_payload: bool whether we want a full payload.
749        @param stateful: bool whether we want to stage stateful payload too.
750
751        @returns a valid devserver update URL.
752
753        """
754        self._job_repo_url = self._get_job_repo_url(job_repo_url)
755        if not self._job_repo_url:
756            raise error.TestFail('There was no job_repo_url so we cannot get '
757                                 'a payload to use.')
758        ds_url, build = tools.get_devserver_build_from_package_url(
759            self._job_repo_url)
760
761        # The lab devserver assigned to this test.
762        lab_devserver = dev_server.ImageServer(ds_url)
763
764        # Stage payloads on the lab devserver.
765        self._autotest_devserver = lab_devserver
766        artifacts = ['full_payload' if full_payload else 'delta_payload']
767        if stateful:
768            artifacts.append('stateful')
769        self._autotest_devserver.stage_artifacts(build, artifacts)
770
771        # Use the same lab devserver to also handle the update.
772        url = self._autotest_devserver.get_update_url(build)
773
774        logging.info('Update URL: %s', url)
775        return url
776
777
778    def get_payload_url_on_public_bucket(self, job_repo_url=None,
779                                         full_payload=True, is_dlc=False):
780        """
781        Get the google storage url of the payload in a public bucket.
782
783        We will be copying the payload to a public google storage bucket
784        (similar location to updates via autest command).
785
786        @param job_repo_url: string url containing the current build.
787        @param full_payload: True for full, False for delta.
788        @param is_dlc: True to get the payload URL for sample-dlc.
789
790        """
791        self._job_repo_url = self._get_job_repo_url(job_repo_url)
792        payload_url = self._get_payload_url(full_payload=full_payload,
793                                            is_dlc=is_dlc)
794        url = self._copy_payload_to_public_bucket(payload_url)
795        logging.info('Public update URL: %s', url)
796        return url
797
798
799    def get_payload_for_nebraska(self, job_repo_url=None, full_payload=True,
800                                 public_bucket=False, is_dlc=False):
801        """
802        Gets a platform or DLC payload URL to be used with a nebraska instance
803        on the DUT.
804
805        @param job_repo_url: string url containing the current build.
806        @param full_payload: bool whether we want a full payload.
807        @param public_bucket: True to return a payload on a public bucket.
808        @param is_dlc: True to get the payload URL for sample-dlc.
809
810        @returns string URL of a payload staged on a lab devserver.
811
812        """
813        if public_bucket:
814            return self.get_payload_url_on_public_bucket(
815                job_repo_url, full_payload=full_payload, is_dlc=is_dlc)
816
817        self._job_repo_url = self._get_job_repo_url(job_repo_url)
818        payload = self._get_payload_url(full_payload=full_payload,
819                                        is_dlc=is_dlc)
820        payload_url, _ = self._stage_payload_by_uri(payload)
821        logging.info('Payload URL for Nebraska: %s', payload_url)
822        return payload_url
823
824
825    def update_device(self,
826                      payload_uri,
827                      clobber_stateful=False,
828                      tag='source',
829                      ignore_appid=False):
830        """
831        Updates the device.
832
833        Used by autoupdate_EndToEndTest and autoupdate_StatefulCompatibility,
834        which use auto_updater to perform updates.
835
836        @param payload_uri: The payload with which the device should be updated.
837        @param clobber_stateful: Boolean that determines whether the stateful
838                                 of the device should be force updated and the
839                                 TPM ownership should be cleared. By default,
840                                 set to False.
841        @param tag: An identifier string added to each log filename.
842        @param ignore_appid: True to tell Nebraska to ignore the App ID field
843                             when parsing the update request. This allows
844                             the target update to use a different board's
845                             image, which is needed for kernelnext updates.
846
847        @raise error.TestFail if anything goes wrong with the update.
848
849        """
850        cros_preserved_path = ('/mnt/stateful_partition/unencrypted/'
851                               'preserve/cros-update')
852        build_name, payload_filename = self._get_update_parameters_from_uri(
853            payload_uri)
854        logging.info('Installing %s on the DUT', payload_uri)
855        with remote_access.ChromiumOSDeviceHandler(
856            self._host.hostname, base_dir=cros_preserved_path) as device:
857            updater = auto_updater.ChromiumOSUpdater(
858                    device,
859                    build_name,
860                    build_name,
861                    yes=True,
862                    payload_filename=payload_filename,
863                    clobber_stateful=clobber_stateful,
864                    clear_tpm_owner=clobber_stateful,
865                    do_stateful_update=True,
866                    staging_server=self._autotest_devserver.url(),
867                    transfer_class=auto_updater_transfer.
868                    LabEndToEndPayloadTransfer,
869                    ignore_appid=ignore_appid)
870
871            try:
872                updater.RunUpdate()
873            except Exception as e:
874                logging.exception('ERROR: Failed to update device.')
875                raise error.TestFail(str(e))
876            finally:
877                self._copy_generated_nebraska_logs(
878                    updater.request_logs_dir, identifier=tag)
879