# Copyright 2018 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import logging
import os

import numpy

from autotest_lib.client.bin import utils
from autotest_lib.client.common_lib.cros import tpm_utils
from autotest_lib.server import test, autotest

CLIENT_TEST_NAME = 'platform_InitLoginPerf'
STAGE_OOBE = 0
STAGE_REGULAR = 1
STAGE_NAME = ['oobe', 'regular']
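# Benchmarks extracted from the client test's perf keyvals. For each entry:
# 'stage' is the stage that produces the value, 'name' is the key in the
# client's perf keyval, 'display' is the column header for the logged results
# table, and 'units'/'upload' control reporting via output_perf_value().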
BENCHMARKS = {
        'initial_login': {'stage': STAGE_OOBE,
                          'name': 'login-duration',
                          'display': '1stLogin',
                          'units': 'seconds',
                          'upload': True},
        'regular_login': {'stage': STAGE_REGULAR,
                          'name': 'login-duration',
                          'display': 'RegLogin',
                          'units': 'seconds',
                          'upload': True},
        'prepare_attestation': {'stage': STAGE_OOBE,
                                'name': 'attestation-duration',
                                'display': 'PrepAttn',
                                'units': 'seconds',
                                'upload': True},
        }

class platform_InitLoginPerfServer(test.test):
    """Test to exercise and gather perf data for initialization and login."""

    version = 1

    def initialize(self):
        """Run before the first iteration."""
        self.perf_results = {bmname: [] for bmname in BENCHMARKS}

    def stage_args(self, stage):
        """Build arguments for the client-side test.

        @param stage: Stage of the test to get arguments for.
        @return: Dictionary of arguments for the given stage.

        """
        if stage == STAGE_OOBE:
            return {'perform_init': True,
                    'pre_init_delay': self.pre_init_delay}
        else:
            return {'perform_init': False}

    def run_stage(self, stage):
        """Run the client-side test.

        @param stage: Stage of the test to run.

        """
        full_stage = 'iteration.%s/%s' % (self.iteration, STAGE_NAME[stage])
        logging.info('Run stage %s', full_stage)
        self.client_at.run_test(test_name=self.client_test,
                                results_dir=full_stage,
                                check_client_result=True,
                                **self.stage_args(stage))
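        # The client test writes its perf keyvals under
        # <outputdir>/<full_stage>/<client_test>/results/keyval; read them
        # back so they can be aggregated across stages and iterations.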
        client_keyval = os.path.join(self.outputdir, full_stage,
                                     self.client_test, 'results', 'keyval')
        self.client_results[stage] = utils.read_keyval(client_keyval,
                                                       type_tag='perf')

    def save_perf_data(self):
        """Extract perf data from client-side test results."""
        for bmname, bm in BENCHMARKS.items():
            try:
                self.perf_results[bmname].append(
                        self.client_results[bm['stage']][bm['name']])
            except KeyError:
                logging.warning('Failed to extract %s from client results',
                                bmname)
                # Keep a None placeholder so per-iteration rows stay aligned
                # across benchmarks.
                self.perf_results[bmname].append(None)

    def output_benchmark(self, bmname):
        """Output a benchmark.

        @param bmname: Name of the benchmark.

        """
        bm = BENCHMARKS[bmname]
        values = self.perf_results[bmname]
        if not bm.get('upload', True):
            return
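        # Upload only the values that were successfully collected; failed
        # iterations are stored as None.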
        self.output_perf_value(
                description=bmname,
                value=[x for x in values if x is not None],
                units=bm.get('units', 'seconds'),
                higher_is_better=False,
                graph=self.graph_name)

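    # display_perf_headers() and display_perf_line() iterate BENCHMARKS in
    # the same dict order, which keeps the logged table columns aligned.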
    def display_perf_headers(self):
        """Add headers for the results table to the info log."""
        hdr = "# "
        for bm in BENCHMARKS.values():
            hdr += bm['display'] + ' '
        logging.info('# Results for delay = %.2f sec', self.pre_init_delay)
        logging.info(hdr)

    def display_perf_line(self, n):
        """Add one iteration's results line to the info log.

        @param n: Index of the iteration (0-based).

        """
        line = "# "
        for bmname in BENCHMARKS:
            value = self.perf_results[bmname][n]
            if value is None:
                line += '    None '
            else:
                line += '%8.2f ' % value
        logging.info(line)

    def display_perf_stats(self, name, func):
        """Add a results statistics line to the info log.

        @param name: Name of the statistic.
        @param func: Function to reduce the list of results.

        """
        line = "# "
        for bmname in BENCHMARKS:
            # Ignore iterations that produced no value for this benchmark.
            values = [x for x in self.perf_results[bmname] if x is not None]
            line += '%8.2f ' % func(values)
        logging.info('# %s:', name)
        logging.info(line)

    def process_perf_data(self):
        """Process performance data from all iterations."""
        logging.info('Process perf data')
        logging.debug('Results: %s', self.perf_results)

        if self.upload_perf:
            for bmname in BENCHMARKS:
                self.output_benchmark(bmname)

        logging.info('##############################################')
        self.display_perf_headers()
        num_results = min(len(v) for v in self.perf_results.values())
        for n in range(num_results):
            self.display_perf_line(n)
        self.display_perf_stats('Average', numpy.mean)
        self.display_perf_stats('Min', min)
        self.display_perf_stats('Max', max)
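        # ddof=1 computes the sample (unbiased) standard deviation.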
        self.display_perf_stats('StdDev', lambda x: numpy.std(x, ddof=1))
        logging.info('##############################################')

    def run_once(self, host, pre_init_delay=0,
                 upload_perf=False, graph_name=None):
        """Run a single iteration.

        @param host: Client DUT on which to run the test.
        @param pre_init_delay: Delay in seconds before initialization during
                               first boot.
        @param upload_perf: Whether to upload the results.
        @param graph_name: Graph name to use when uploading the results.

        """
        if self.iteration is None:
            self.iteration = 1
        logging.info('Start iteration %s', self.iteration)

        self.client = host
        self.pre_init_delay = pre_init_delay
        self.upload_perf = upload_perf
        self.graph_name = graph_name
        self.client_results = {}
        self.client_test = CLIENT_TEST_NAME
        self.client_at = autotest.Autotest(self.client)

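        # Clear TPM ownership so the device goes through fresh initialization
        # and the OOBE stage measures a true first login.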
        logging.info('Clear the owner before the test')
        tpm_utils.ClearTPMOwnerRequest(self.client, wait_for_ready=False)

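        # One iteration: first login after initialization (OOBE stage),
        # reboot, then a regular login.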
        self.run_stage(STAGE_OOBE)
        self.client.reboot()
        self.run_stage(STAGE_REGULAR)
        self.save_perf_data()

    def postprocess(self):
        """Run after all iterations in case of success."""
        self.process_perf_data()

    def cleanup(self):
        """Run at the end regardless of success."""
        logging.info('Cleanup')
        tpm_utils.ClearTPMOwnerRequest(self.client, wait_for_ready=False)