• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1#!/usr/bin/env python
2# Copyright 2017 The Chromium Authors
3# Use of this source code is governed by a BSD-style license that can be
4# found in the LICENSE file.
5
6import json
7import os
8import sys
9
10import merge_api
11
# Explanation attached (via emit_warning) to the merged results when one or
# more shards produced no output.json; %s is the comma-separated shard indices.
MISSING_SHARDS_MSG = r"""Missing results from the following shard(s): %s

This can happen in following cases:
  * Test failed to start (missing *.dll/*.so dependency for example)
  * Test crashed or hung
  * Task expired because there are not enough bots available and are all used
  * Swarming service experienced problems

Please examine logs to figure out what happened.
"""
22
23
def emit_warning(title, log=None):
  """Prints buildbot warning annotations, optionally attaching a named log.

  Args:
    title: Warning title; also (rstripped) used as the annotation log name.
    log: Optional multi-line text emitted line by line as a step log.
  """
  print('@@@STEP_WARNINGS@@@')
  print(title)
  if not log:
    return
  # The log name must not carry trailing whitespace, hence the rstrip.
  log_name = title.rstrip()
  for log_line in log.splitlines():
    print('@@@STEP_LOG_LINE@%s@%s@@@' % (log_name, log_line.rstrip()))
  print('@@@STEP_LOG_END@%s@@@' % log_name)
32
33
def merge_shard_results(summary_json, jsons_to_merge):
  """Reads JSON test output from all shards and combines them into one.

  Args:
    summary_json: Path to the swarming-produced summary.json; its 'shards'
      list drives the merge (one entry per shard, None for a missing shard).
    jsons_to_merge: Paths to the per-shard gtest output.json files.

  Returns dict with merged test output on success or None on failure. Emits
  annotations.
  """
  # summary.json is produced by swarming client itself. We are mostly interested
  # in the number of shards.
  try:
    with open(summary_json, encoding='utf-8') as f:
      summary = json.load(f)
  except (IOError, ValueError):
    emit_warning(
        'summary.json is missing or can not be read',
        'Something is seriously wrong with swarming client or the bot.')
    return None

  # Merge all JSON files together. Keep track of missing shards.
  # Set-valued fields are accumulated as sets for de-duplication and are
  # converted to sorted lists at the end so the result is JSON-serializable
  # and deterministic.
  merged = {
      'all_tests': set(),
      'disabled_tests': set(),
      'global_tags': set(),
      'missing_shards': [],
      'per_iteration_data': [],
      'swarming_summary': summary,
      'test_locations': {},
  }
  for index, result in enumerate(summary['shards']):
    if result is None:
      merged['missing_shards'].append(index)
      continue

    # Author note: this code path doesn't trigger convert_to_old_format() in
    # client/swarming.py, which means the state enum is saved in its string
    # name form, not in the number form.
    state = result.get('state')
    if state == u'BOT_DIED':
      emit_warning('Shard #%d had a Swarming internal failure' % index)
    elif state == u'EXPIRED':
      emit_warning("There wasn't enough capacity to run your test")
    elif state == u'TIMED_OUT':
      emit_warning(
          'Test runtime exceeded allocated time',
          "Either it ran for too long (hard timeout) or it didn't produce "
          'I/O for an extended period of time (I/O timeout)')
    elif state != u'COMPLETED':
      emit_warning('Invalid Swarming task state: %s' % state)

    # Even for abnormal states above we still try to load the shard's output;
    # a partially-run shard may have produced usable results.
    json_data, err_msg = load_shard_json(index, result.get('task_id'),
                                         jsons_to_merge)
    if json_data:
      # Set-like fields.
      for key in ('all_tests', 'disabled_tests', 'global_tags'):
        merged[key].update(json_data.get(key, []))

      # Dict-like fields.
      for key in ('test_locations', ):
        merged[key].update(json_data.get(key, {}))

      # 'per_iteration_data' is a list of dicts. Dicts should be merged
      # together, not the 'per_iteration_data' list itself.
      merged['per_iteration_data'] = merge_list_of_dicts(
          merged['per_iteration_data'], json_data.get('per_iteration_data', []))
    else:
      merged['missing_shards'].append(index)
      emit_warning('No result was found: %s' % err_msg)

  # If some shards are missing, make it known. Continue parsing anyway. Step
  # should be red anyway, since swarming.py return non-zero exit code in that
  # case.
  if merged['missing_shards']:
    as_str = ', '.join(map(str, merged['missing_shards']))
    emit_warning('some shards did not complete: %s' % as_str,
                 MISSING_SHARDS_MSG % as_str)
    # Not all tests run, combined JSON summary can not be trusted.
    merged['global_tags'].add('UNRELIABLE_RESULTS')

  # Convert to jsonish dict.
  for key in ('all_tests', 'disabled_tests', 'global_tags'):
    merged[key] = sorted(merged[key])
  return merged
115
116
117OUTPUT_JSON_SIZE_LIMIT = 100 * 1024 * 1024  # 100 MB
118
119
def load_shard_json(index, task_id, jsons_to_merge):
  """Reads JSON output of the specified shard.

  Args:
    index: The index of the shard to load data for; candidate output files
      living in a directory named after it match (old api layout).
    task_id: The task id of the shard to load data for; candidate output
      files living in a directory named after it match (new api layout).
      May be None.
    jsons_to_merge: Candidate paths to per-shard JSON output files.

  Returns: A tuple containing:
    * The contents of path, deserialized into a python object.
    * An error string.
    (exactly one of the tuple elements will be non-None).
  """
  # 'output.json' is set in swarming/api.py, gtest_task method. A candidate
  # belongs to this shard when its parent directory is either the shard index
  # (old api) or the task id (new api).
  matching_json_files = [
      j for j in jsons_to_merge if (os.path.basename(j) == 'output.json' and (
          os.path.basename(os.path.dirname(j)) == str(index)
          or os.path.basename(os.path.dirname(j)) == task_id))
  ]

  if not matching_json_files:
    print('shard %s test output missing' % index, file=sys.stderr)
    return (None, 'shard %s test output was missing' % index)
  if len(matching_json_files) > 1:
    print('duplicate test output for shard %s' % index, file=sys.stderr)
    return (None, 'shard %s test output was duplicated' % index)

  path = matching_json_files[0]

  try:
    # Check the size before parsing: loading a pathologically large file
    # could exhaust memory on the merging bot.
    filesize = os.stat(path).st_size
    if filesize > OUTPUT_JSON_SIZE_LIMIT:
      print('output.json is %d bytes. Max size is %d' %
            (filesize, OUTPUT_JSON_SIZE_LIMIT),
            file=sys.stderr)
      return (None, 'shard %s test output exceeded the size limit' % index)

    with open(path, encoding='utf-8') as f:
      return (json.load(f), None)
  except (ValueError, OSError) as e:
    # IOError is an alias of OSError on Python 3, so (ValueError, OSError)
    # covers unreadable files and malformed JSON alike.
    print('Missing or invalid gtest JSON file: %s' % path, file=sys.stderr)
    print('%s: %s' % (type(e).__name__, e), file=sys.stderr)

    return (None, 'shard %s test output was missing or invalid' % index)
164
165
def merge_list_of_dicts(left, right):
  """Merges dicts left[0] with right[0], left[1] with right[1], etc.

  The shorter list is treated as if padded with empty dicts; on key
  collisions the value from `right` wins. Neither input is modified.
  """
  length = max(len(left), len(right))
  padded_left = left + [{}] * (length - len(left))
  padded_right = right + [{}] * (length - len(right))
  return [{**l, **r} for l, r in zip(padded_left, padded_right)]
176
177
def standard_gtest_merge(output_json, summary_json, jsons_to_merge):
  """Merges per-shard gtest results and writes the combined JSON file.

  Args:
    output_json: Path the merged JSON is written to.
    summary_json: Path to the swarming summary.json driving the merge.
    jsons_to_merge: Paths to the per-shard output.json files.

  Returns:
    0 always; missing shards are reported inside the merged output itself.
  """
  merged = merge_shard_results(summary_json, jsons_to_merge)
  with open(output_json, 'w', encoding='utf-8') as out_file:
    json.dump(merged, out_file)
  return 0
185
186
def main(raw_args):
  """Parses merge-script arguments and runs the standard gtest merge.

  Args:
    raw_args: Command-line arguments, without the program name.

  Returns:
    The exit code from standard_gtest_merge.
  """
  args = merge_api.ArgumentParser().parse_args(raw_args)
  return standard_gtest_merge(args.output_json, args.summary_json,
                              args.jsons_to_merge)
194
195
# Script entry point: forward CLI args (minus argv[0]) to main() and
# propagate its return value as the process exit code.
if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
198