#!/usr/bin/env python3
#
# Copyright 2017 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.


import argparse
import json
import os
import sys


def merge_shard_results(summary_json, jsons_to_merge):
  """Reads JSON test output from all shards and combines them into one.

  Returns a dict with the merged test output; raises if the summary JSON
  cannot be loaded. Emits annotations.
  """
  try:
    with open(summary_json) as f:
      summary = json.load(f)
  except (IOError, ValueError):
    # TODO(crbug.com/40196155): Re-enable this check after the recipe module
    # chromium_swarming can run it with py3
    # pylint: disable=raise-missing-from
    raise Exception('Summary json cannot be loaded.')

  # Merge all JSON files together. Keep track of missing shards.
  merged = {
    'all_tests': set(),
    'disabled_tests': set(),
    'global_tags': set(),
    'missing_shards': [],
    'per_iteration_data': [],
    'swarming_summary': summary,
    'links': set()
  }
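  # Assumption inferred from the loop below (not from a documented API):
  # summary['shards'] holds one entry per shard, and each entry is either
  # None (the shard never reported back) or a Swarming task-result dict that
  # carries at least 'state' and 'task_id'.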
  for index, result in enumerate(summary['shards']):
    if result is None:
      merged['missing_shards'].append(index)
      continue

    # Author note: this code path doesn't trigger convert_to_old_format() in
    # client/swarming.py, which means the state enum is saved in its string
    # name form, not in the number form.
    state = result.get('state')
    if state == 'BOT_DIED':
      print(
          'Shard #%d had a Swarming internal failure' % index, file=sys.stderr)
    elif state == 'EXPIRED':
      print('There wasn\'t enough capacity to run your test', file=sys.stderr)
    elif state == 'TIMED_OUT':
      print('Test runtime exceeded allocated time\n'
            'Either it ran for too long (hard timeout) or it didn\'t produce '
            'I/O for an extended period of time (I/O timeout)',
            file=sys.stderr)
    elif state != 'COMPLETED':
      print('Invalid Swarming task state: %s' % state, file=sys.stderr)

    json_data, err_msg = load_shard_json(index, result.get('task_id'),
                                         jsons_to_merge)
    if json_data:
      # Set-like fields.
      for key in ('all_tests', 'disabled_tests', 'global_tags', 'links'):
        merged[key].update(json_data.get(key, []))

      # 'per_iteration_data' is a list of dicts. Dicts should be merged
      # together, not the 'per_iteration_data' list itself.
      merged['per_iteration_data'] = merge_list_of_dicts(
          merged['per_iteration_data'], json_data.get('per_iteration_data', []))
    else:
      merged['missing_shards'].append(index)
      print('No result was found: %s' % err_msg, file=sys.stderr)

  # If some shards are missing, make it known. Continue parsing anyway. The
  # step should be red anyway, since swarming.py returns a non-zero exit code
  # in that case.
  if merged['missing_shards']:
    as_str = ', '.join([str(shard) for shard in merged['missing_shards']])
    print('some shards did not complete: %s' % as_str, file=sys.stderr)
    # Not all tests ran, so the combined JSON summary cannot be trusted.
    merged['global_tags'].add('UNRELIABLE_RESULTS')

  # Convert sets to sorted lists so the result is a JSON-serializable dict.
  for key in ('all_tests', 'disabled_tests', 'global_tags', 'links'):
    merged[key] = sorted(merged[key])
  return merged


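# Sketch of the dict merge_shard_results() returns (field values are
# illustrative only; the exact gtest JSON schema is assumed, not taken from
# this file). Set-like fields come back as sorted lists, and
# 'per_iteration_data' remains a list of per-iteration dicts:
#   {
#     'all_tests': ['Foo.Bar', 'Foo.Baz'],
#     'disabled_tests': [],
#     'global_tags': ['UNRELIABLE_RESULTS'],
#     'missing_shards': [1],
#     'per_iteration_data': [{...}],
#     'swarming_summary': {...},
#     'links': []
#   }
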
OUTPUT_JSON_SIZE_LIMIT = 100 * 1024 * 1024  # 100 MB


def load_shard_json(index, task_id, jsons_to_merge):
  """Reads JSON output of the specified shard.

  Args:
    index: The index of the shard to load data for; used by the old API.
    task_id: The task id of the shard to load data for (matched against the
      output directory name); used by the new API.
    jsons_to_merge: Paths to the candidate JSON output files.

  Returns: A tuple containing:
    * The contents of the matched output.json, deserialized into a Python
      object.
    * An error string.
    (exactly one of the tuple elements will be non-None).
  """
  matching_json_files = [
      j for j in jsons_to_merge
      if (os.path.basename(j) == 'output.json' and
          (os.path.basename(os.path.dirname(j)) == str(index) or
           os.path.basename(os.path.dirname(j)) == task_id))]

  if not matching_json_files:
    print('shard %s test output missing' % index, file=sys.stderr)
    return (None, 'shard %s test output was missing' % index)
  if len(matching_json_files) > 1:
    print('duplicate test output for shard %s' % index, file=sys.stderr)
    return (None, 'shard %s test output was duplicated' % index)

  path = matching_json_files[0]

  try:
    filesize = os.stat(path).st_size
    if filesize > OUTPUT_JSON_SIZE_LIMIT:
      print(
          'output.json is %d bytes. Max size is %d' % (filesize,
                                                       OUTPUT_JSON_SIZE_LIMIT),
          file=sys.stderr)
      return (None, 'shard %s test output exceeded the size limit' % index)

    with open(path) as f:
      return (json.load(f), None)
  except (IOError, ValueError, OSError) as e:
    print('Missing or invalid gtest JSON file: %s' % path, file=sys.stderr)
    print('%s: %s' % (type(e).__name__, e), file=sys.stderr)

    return (None, 'shard %s test output was missing or invalid' % index)


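# Note on file layout, inferred from load_shard_json() above rather than from
# any documented contract: each entry of jsons_to_merge is expected to look
# like .../<shard index>/output.json (old API) or .../<task id>/output.json
# (new API); any other path is ignored.
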
def merge_list_of_dicts(left, right):
  """Merges dicts left[0] with right[0], left[1] with right[1], etc."""
  output = []
  for i in range(max(len(left), len(right))):
    left_dict = left[i] if i < len(left) else {}
    right_dict = right[i] if i < len(right) else {}
    merged_dict = left_dict.copy()
    merged_dict.update(right_dict)
    output.append(merged_dict)
  return output


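# Worked example for merge_list_of_dicts (values made up for illustration):
# merging [{'a': 1}] with [{'a': 2, 'b': 3}, {'c': 4}] yields
# [{'a': 2, 'b': 3}, {'c': 4}]; entries are merged pairwise and, on key
# collisions, the right-hand dict wins.
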
def standard_gtest_merge(
    output_json, summary_json, jsons_to_merge):
  """Merges the shard results and writes the combined JSON to output_json."""
  output = merge_shard_results(summary_json, jsons_to_merge)
  with open(output_json, 'w') as f:
    json.dump(output, f)

  return 0


def main(raw_args):
  parser = argparse.ArgumentParser()
  parser.add_argument('--summary-json')
  parser.add_argument('-o', '--output-json', required=True)
  parser.add_argument('jsons_to_merge', nargs='*')

  args = parser.parse_args(raw_args)

  return standard_gtest_merge(
      args.output_json, args.summary_json, args.jsons_to_merge)


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
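
# Example invocation (paths and the script name are hypothetical; only the
# flags defined in main() above are assumed):
#   python3 standard_gtest_merge.py \
#       --summary-json /tmp/task/summary.json \
#       -o /tmp/merged_output.json \
#       /tmp/task/0/output.json /tmp/task/1/output.json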