• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1# Copyright 2016 The Chromium OS Authors. All rights reserved.
2# Use of this source code is governed by a BSD-style license that can be
3# found in the LICENSE file.
4
5"""Services relating to generating a suite timeline and report."""
6
7from __future__ import print_function
8
9import common
10import json
11
12from autotest_lib.client.common_lib import time_utils
13from autotest_lib.server import frontend
14from autotest_lib.server.lib import status_history
15from chromite.lib import cros_logging as logging
16
17
18HostJobHistory = status_history.HostJobHistory
19
# TODO: Handle other statuses like infra failures.
TKO_STATUS_MAP = {
    'ERROR': 'fail',
    'FAIL': 'fail',
    'GOOD': 'pass',
    'PASS': 'pass',
    'ABORT': 'aborted',
    'Failed': 'fail',
    'Completed': 'pass',
    'Aborted': 'aborted',
}


def parse_tko_status_string(status_string):
    """Translate a TKO or HQE status string for Cloud Datastore.

    @param status_string: A status string from TKO or HQE databases.

    @return A status string suitable for inclusion within Cloud Datastore.
    """
    try:
        return TKO_STATUS_MAP[status_string]
    except KeyError:
        # Unrecognized statuses are preserved, tagged so they stand out.
        return 'unknown:' + status_string
41
42
def make_entry(entry_id, name, status, start_time,
               finish_time=None, parent=None):
    """Generate an event log entry to be stored in Cloud Datastore.

    @param entry_id: A (Kind, id) tuple representing the key.
    @param name: A string identifying the event
    @param status: A string identifying the status of the event.
    @param start_time: A unix timestamp of the start of the event.
    @param finish_time: A unix timestamp of the finish of the event.
    @param parent: A (Kind, id) tuple representing the parent key.

    @return A dictionary representing the entry suitable for dumping via JSON.
    """
    entry = {
        'id': entry_id,
        'name': name,
        'status': status,
        'start_time': start_time,
    }
    # Optional fields are included only when the caller supplied them.
    optional = (('finish_time', finish_time), ('parent', parent))
    entry.update({key: value for key, value in optional if value is not None})
    return entry
67
68
def find_start_finish_times(statuses):
    """Determines the start and finish times for a list of statuses.

    @param statuses: A list of job test statuses.

    @return (start_time, finish_time) tuple of seconds past epoch.  If either
            cannot be determined, None for that time.
    """
    # TKO stores missing timestamps as the literal string 'None'.
    started = [s.test_started_time for s in statuses
               if s.test_started_time != 'None']
    finished = [s.test_finished_time for s in statuses
                if s.test_finished_time != 'None']
    start_time = (min(int(time_utils.to_epoch_time(t)) for t in started)
                  if started else None)
    finish_time = (max(int(time_utils.to_epoch_time(t)) for t in finished)
                   if finished else None)
    return start_time, finish_time
84
85
def make_job_entry(tko, job, parent=None, suite_job=False, job_entries=None):
    """Generate a Suite or HWTest event log entry.

    @param tko: TKO database handle.
    @param job: A frontend.Job to generate an entry for.
    @param parent: A (Kind, id) tuple representing the parent key.
    @param suite_job: A boolean indicating whether this represents a suite job.
    @param job_entries: A dictionary mapping job id to earlier job entries.

    @return A dictionary representing the entry suitable for dumping via JSON.
    """
    # Guard against the default: job_entries.get() below would raise
    # AttributeError on None whenever a retried job is encountered.
    if job_entries is None:
        job_entries = {}
    statuses = tko.get_job_test_statuses_from_db(job.id)
    status = 'pass'
    dut = None
    for s in statuses:
        parsed_status = parse_tko_status_string(s.status)
        # TODO: Improve this generation of status.
        if parsed_status != 'pass':
            status = parsed_status
        if s.hostname:
            dut = s.hostname
        if s.test_started_time == 'None' or s.test_finished_time == 'None':
            # warn() is a deprecated alias for warning(); use lazy %-args.
            logging.warning('TKO entry for %d missing time: %s', job.id, s)
    start_time, finish_time = find_start_finish_times(statuses)
    entry = make_entry(('Suite' if suite_job else 'HWTest', int(job.id)),
                       job.name.split('/')[-1], status, start_time,
                       finish_time=finish_time, parent=parent)

    entry['job_id'] = int(job.id)
    if dut:
        entry['dut'] = dut
    if job.shard:
        entry['shard'] = job.shard
    # Determine the try of this job by looking back through what the
    # original job id is.
    if 'retry_original_job_id' in job.keyvals:
        original_job_id = int(job.keyvals['retry_original_job_id'])
        original_job = job_entries.get(original_job_id, None)
        if original_job:
            entry['try'] = original_job['try'] + 1
        else:
            # The original job wasn't seen in this run; try count unknown.
            entry['try'] = 0
    else:
        entry['try'] = 1
    entry['gs_url'] = status_history.get_job_gs_url(job)
    return entry
132
133
def make_hqe_entry(hostname, hqe, hqe_statuses, parent=None):
    """Generate a HQE event log entry.

    @param hostname: A string of the hostname.
    @param hqe: A host history to generate an event for.
    @param hqe_statuses: A dictionary mapping HQE ids to job status.
    @param parent: A (Kind, id) tuple representing the parent key.

    @return A dictionary representing the entry suitable for dumping via JSON.
    """
    # Prefer the suite-derived status; fall back to the HQE's own job status.
    fallback_status = parse_tko_status_string(hqe.job_status)
    status = hqe_statuses.get(hqe.id, fallback_status)
    entry = make_entry(('HQE', int(hqe.id)), hostname, status,
                       hqe.start_time, finish_time=hqe.end_time, parent=parent)

    entry.update({
        'task_name': hqe.name.split('/')[-1],
        'in_suite': hqe.id in hqe_statuses,
        'job_url': hqe.job_url,
        'gs_url': hqe.gs_url,
    })
    if hqe.job_id is not None:
        entry['job_id'] = hqe.job_id
    entry['is_special'] = hqe.is_special
    return entry
157
158
def generate_suite_report(suite_job_id, afe=None, tko=None):
    """Generate a list of events corresponding to a single suite job.

    @param suite_job_id: The AFE id of the suite job.
    @param afe: AFE database handle.
    @param tko: TKO database handle.

    @return A list of entries suitable for dumping via JSON.
    """
    if afe is None:
        afe = frontend.AFE()
    if tko is None:
        tko = frontend.TKO()

    # Retrieve the main suite job and seed the event list with it.
    suite_job = afe.get_jobs(id=suite_job_id)[0]
    suite_entry = make_job_entry(tko, suite_job, suite_job=True)
    entries = [suite_entry]

    # Retrieve the child jobs and cache all their statuses.
    logging.debug('Fetching child jobs...')
    children = afe.get_jobs(parent_job_id=suite_job_id)
    logging.debug('... fetched %s child jobs.', len(children))
    job_statuses = {}
    job_entries = {}
    for child in children:
        child_entry = make_job_entry(tko, child, suite_entry['id'],
                                     job_entries=job_entries)
        entries.append(child_entry)
        job_statuses[child.id] = child_entry['status']
        job_entries[child.id] = child_entry

    # Retrieve the HQEs from all the child jobs, record statuses from
    # job statuses.
    child_job_ids = {child.id for child in children}
    logging.debug('Fetching HQEs...')
    hqes = afe.get_host_queue_entries(job_id__in=list(child_job_ids))
    logging.debug('... fetched %s HQEs.', len(hqes))
    hqe_statuses = {hqe.id: job_statuses.get(hqe.job.id, None) for hqe in hqes}

    # Generate the set of hosts that actually ran something.
    hostnames = {hqe.host.hostname for hqe in hqes if hqe.host}
    logging.debug('%s distinct hosts participated in the suite.',
                  len(hostnames))

    # Retrieve histories for the time of the suite for all associated hosts.
    # TODO: Include all hosts in the pool.
    if suite_entry['start_time'] and suite_entry['finish_time']:
        for hostname in sorted(hostnames):
            history = HostJobHistory.get_host_history(
                afe, hostname, suite_entry['start_time'],
                suite_entry['finish_time'])
            for hqe in history:
                entries.append(make_hqe_entry(history.hostname, hqe,
                                              hqe_statuses, suite_entry['id']))

    return entries
218
def dump_entries_as_json(entries, output_file):
    """Dump event log entries as json to a file.

    @param entries: A list of event log entries to dump.
    @param output_file: The file to write to.
    """
    # One JSON object per line, keys sorted for stable output.
    logging.debug('Dumping %d entries' % len(entries))
    for entry in entries:
        output_file.write(json.dumps(entry, sort_keys=True))
        output_file.write('\n')
230