# Lint as: python2, python3
# Copyright 2018 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import logging
import numpy
import os

from autotest_lib.client.bin import utils
from autotest_lib.client.common_lib.cros import tpm_utils
from autotest_lib.server import test, autotest

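# Hypothetical invocation via the ChromeOS test runner (the exact control
# file name is an assumption; check the control files shipped with this
# test):
#
#   test_that --board=${BOARD} ${DUT_IP} platform_InitLoginPerf
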
CLIENT_TEST_NAME = 'platform_InitLoginPerf'
STAGE_OOBE = 0
STAGE_REGULAR = 1
STAGE_NAME = ['oobe', 'regular']
BENCHMARKS = {
        'initial_login': {'stage': STAGE_OOBE,
                          'name': 'login-duration',
                          'display': '1stLogin',
                          'units': 'seconds',
                          'upload': True},
        'regular_login': {'stage': STAGE_REGULAR,
                          'name': 'login-duration',
                          'display': 'RegLogin',
                          'units': 'seconds',
                          'upload': True},
        'prepare_attestation': {'stage': STAGE_OOBE,
                                'name': 'attestation-duration',
                                'display': 'PrepAttn',
                                'units': 'seconds',
                                'upload': True},
        }

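# For each benchmark: 'stage' selects which client run produces it, 'name'
# is the perf keyval key reported by the client-side test, 'display' is the
# column header for the logged results table, and 'upload' controls whether
# the value goes to the perf dashboard. Illustrative (made-up values)
# client results/keyval content:
#
#   login-duration{perf}=2.31
#   attestation-duration{perf}=8.70
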
class platform_InitLoginPerfServer(test.test):
    """Test to exercise and gather perf data for initialization and login."""

    version = 1

    def initialize(self):
        """Run before the first iteration."""
        self.perf_results = {}
        for bmname in BENCHMARKS:
            self.perf_results[bmname] = []

    def stage_args(self, stage):
        """Build arguments for the client-side test.

        @param stage: Stage of the test to get arguments for.
        @return: Dictionary of arguments.

        """
        if stage == STAGE_OOBE:
            return {'perform_init': True,
                    'pre_init_delay': self.pre_init_delay}
        else:
            return {'perform_init': False}

    def run_stage(self, stage):
        """Run the client-side test.

        @param stage: Stage of the test to run.

        """
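        # Client results land under the server test's output directory:
        #   <outputdir>/iteration.<n>/<stage>/<client_test>/results/keyval
        # e.g. 'iteration.1/oobe/platform_InitLoginPerf/results/keyval'.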
        full_stage = 'iteration.%s/%s' % (self.iteration, STAGE_NAME[stage])
        logging.info('Run stage %s', full_stage)
        self.client_at.run_test(test_name=self.client_test,
                                results_dir=full_stage,
                                check_client_result=True,
                                **self.stage_args(stage))
        client_keyval = os.path.join(self.outputdir, full_stage,
                self.client_test, 'results', 'keyval')
        self.client_results[stage] = utils.read_keyval(client_keyval,
                type_tag='perf')

    def save_perf_data(self):
        """Extract perf data from client-side test results."""
        for bmname, bm in BENCHMARKS.items():
            try:
                self.perf_results[bmname].append(
                        float(self.client_results[bm['stage']][bm['name']]))
            except (KeyError, TypeError, ValueError):
                logging.warning('Failed to extract %s from client results',
                                bmname)
                self.perf_results[bmname].append(None)

    def output_benchmark(self, bmname):
        """Output a benchmark.

        @param bmname: Name of the benchmark.

        """
        bm = BENCHMARKS[bmname]
        values = self.perf_results[bmname]
        if not bm.get('upload', True):
            return
        self.output_perf_value(
                description=bmname,
                value=[x for x in values if x is not None],
                units=bm.get('units', 'seconds'),
                higher_is_better=False,
                graph=self.graph_name)

    def display_perf_headers(self):
        """Add headers for the results table to the info log."""
        hdr = "# "
        for bm in BENCHMARKS.values():
            hdr += bm['display'] + ' '
        logging.info('# Results for delay = %.2f sec', self.pre_init_delay)
        logging.info(hdr)

    def display_perf_line(self, n):
        """Add one iteration's results line to the info log.

        @param n: Index of the iteration (0-based).

        """
        line = "# "
        for bmname in BENCHMARKS:
            value = self.perf_results[bmname][n]
            if value is None:
                line += '    None '
            else:
                line += '%8.2f ' % value
        logging.info(line)

    def display_perf_stats(self, name, func):
        """Add a results statistics line to the info log.

        @param name: Name of the statistic.
        @param func: Function to reduce the list of results.

        """
        line = "# "
        for bmname in BENCHMARKS:
            # Reduce only values that were successfully extracted; show
            # 'None' if no iteration produced this benchmark.
            values = [x for x in self.perf_results[bmname] if x is not None]
            if values:
                line += '%8.2f ' % func(values)
            else:
                line += '    None '
        logging.info('# %s:', name)
        logging.info(line)

    def process_perf_data(self):
        """Process performance data from all iterations."""
        logging.info('Process perf data')
        logging.debug('Results: %s', self.perf_results)

        if self.upload_perf:
            for bmname in BENCHMARKS:
                self.output_benchmark(bmname)

        logging.info('##############################################')
        self.display_perf_headers()
        for n in range(self.iteration):
            self.display_perf_line(n)
        self.display_perf_stats('Average', numpy.mean)
        self.display_perf_stats('Min', min)
        self.display_perf_stats('Max', max)
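        # ddof=1 gives the sample (Bessel-corrected) standard deviation.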
        self.display_perf_stats('StdDev', lambda x: numpy.std(x, ddof=1))
        logging.info('##############################################')

    def run_once(self, host, pre_init_delay=0,
                 upload_perf=False, graph_name=None):
        """Run a single iteration.

        @param host: Host object representing the DUT.
        @param pre_init_delay: Delay in seconds before initialization during
                               the first boot.
        @param upload_perf: Do we need to upload the results?
        @param graph_name: Graph name to use when uploading the results.

        """
        if self.iteration is None:
            self.iteration = 1
        logging.info('Start iteration %s', self.iteration)

        self.client = host
        self.pre_init_delay = pre_init_delay
        self.upload_perf = upload_perf
        self.graph_name = graph_name
        self.client_results = {}
        self.client_test = CLIENT_TEST_NAME
        self.client_at = autotest.Autotest(self.client)

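        # Clearing TPM ownership forces the next boot through first-boot
        # initialization, so the OOBE stage measures a genuine first login.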
        logging.info('Clear the owner before the test')
        tpm_utils.ClearTPMOwnerRequest(self.client, wait_for_ready=False)

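        # First stage: login right after clearing ownership (OOBE, performs
        # initialization); then reboot and measure a regular login.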
        self.run_stage(STAGE_OOBE)
        self.client.reboot()
        self.run_stage(STAGE_REGULAR)
        self.save_perf_data()

    def postprocess(self):
        """Run after all iterations in case of success."""
        self.process_perf_data()

    def cleanup(self):
        """Run at the end regardless of success."""
        logging.info('Cleanup')
        tpm_utils.ClearTPMOwnerRequest(self.client, wait_for_ready=False)