• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1# Copyright 2018 the V8 project authors. All rights reserved.
2# Use of this source code is governed by a BSD-style license that can be
3# found in the LICENSE file.
4
5import json
6import os
7import sys
8import time
9
10from . import base
11from ..local import junit_output
12
13
14def print_failure_header(test):
15  if test.output_proc.negative:
16    negative_marker = '[negative] '
17  else:
18    negative_marker = ''
19  print "=== %(label)s %(negative)s===" % {
20    'label': test,
21    'negative': negative_marker,
22  }
23
24
25class TestsCounter(base.TestProcObserver):
26  def __init__(self):
27    super(TestsCounter, self).__init__()
28    self.total = 0
29
30  def _on_next_test(self, test):
31    self.total += 1
32
33
34class ResultsTracker(base.TestProcObserver):
35  def __init__(self):
36    super(ResultsTracker, self).__init__()
37    self._requirement = base.DROP_OUTPUT
38
39    self.failed = 0
40    self.remaining = 0
41    self.total = 0
42
43  def _on_next_test(self, test):
44    self.total += 1
45    self.remaining += 1
46
47  def _on_result_for(self, test, result):
48    self.remaining -= 1
49    if result.has_unexpected_output:
50      self.failed += 1
51
52
53class ProgressIndicator(base.TestProcObserver):
54  def finished(self):
55    pass
56
57
58class SimpleProgressIndicator(ProgressIndicator):
59  def __init__(self):
60    super(SimpleProgressIndicator, self).__init__()
61    self._requirement = base.DROP_PASS_OUTPUT
62
63    self._failed = []
64    self._total = 0
65
66  def _on_next_test(self, test):
67    self._total += 1
68
69  def _on_result_for(self, test, result):
70    # TODO(majeski): Support for dummy/grouped results
71    if result.has_unexpected_output:
72      self._failed.append((test, result))
73
74  def finished(self):
75    crashed = 0
76    print
77    for test, result in self._failed:
78      print_failure_header(test)
79      if result.output.stderr:
80        print "--- stderr ---"
81        print result.output.stderr.strip()
82      if result.output.stdout:
83        print "--- stdout ---"
84        print result.output.stdout.strip()
85      print "Command: %s" % result.cmd.to_string()
86      if result.output.HasCrashed():
87        print "exit code: %d" % result.output.exit_code
88        print "--- CRASHED ---"
89        crashed += 1
90      if result.output.HasTimedOut():
91        print "--- TIMEOUT ---"
92    if len(self._failed) == 0:
93      print "==="
94      print "=== All tests succeeded"
95      print "==="
96    else:
97      print
98      print "==="
99      print "=== %i tests failed" % len(self._failed)
100      if crashed > 0:
101        print "=== %i tests CRASHED" % crashed
102      print "==="
103
104
105class VerboseProgressIndicator(SimpleProgressIndicator):
106  def __init__(self):
107    super(VerboseProgressIndicator, self).__init__()
108    self._last_printed_time = time.time()
109
110  def _print(self, text):
111    print text
112    sys.stdout.flush()
113    self._last_printed_time = time.time()
114
115  def _on_result_for(self, test, result):
116    super(VerboseProgressIndicator, self)._on_result_for(test, result)
117    # TODO(majeski): Support for dummy/grouped results
118    if result.has_unexpected_output:
119      if result.output.HasCrashed():
120        outcome = 'CRASH'
121      else:
122        outcome = 'FAIL'
123    else:
124      outcome = 'pass'
125    self._print('Done running %s: %s' % (test, outcome))
126
127  def _on_heartbeat(self):
128    if time.time() - self._last_printed_time > 30:
129      # Print something every 30 seconds to not get killed by an output
130      # timeout.
131      self._print('Still working...')
132
133
134class DotsProgressIndicator(SimpleProgressIndicator):
135  def __init__(self):
136    super(DotsProgressIndicator, self).__init__()
137    self._count = 0
138
139  def _on_result_for(self, test, result):
140    # TODO(majeski): Support for dummy/grouped results
141    self._count += 1
142    if self._count > 1 and self._count % 50 == 1:
143      sys.stdout.write('\n')
144    if result.has_unexpected_output:
145      if result.output.HasCrashed():
146        sys.stdout.write('C')
147        sys.stdout.flush()
148      elif result.output.HasTimedOut():
149        sys.stdout.write('T')
150        sys.stdout.flush()
151      else:
152        sys.stdout.write('F')
153        sys.stdout.flush()
154    else:
155      sys.stdout.write('.')
156      sys.stdout.flush()
157
158
159class CompactProgressIndicator(ProgressIndicator):
160  def __init__(self, templates):
161    super(CompactProgressIndicator, self).__init__()
162    self._requirement = base.DROP_PASS_OUTPUT
163
164    self._templates = templates
165    self._last_status_length = 0
166    self._start_time = time.time()
167
168    self._total = 0
169    self._passed = 0
170    self._failed = 0
171
172  def _on_next_test(self, test):
173    self._total += 1
174
175  def _on_result_for(self, test, result):
176    # TODO(majeski): Support for dummy/grouped results
177    if result.has_unexpected_output:
178      self._failed += 1
179    else:
180      self._passed += 1
181
182    self._print_progress(str(test))
183    if result.has_unexpected_output:
184      output = result.output
185      stdout = output.stdout.strip()
186      stderr = output.stderr.strip()
187
188      self._clear_line(self._last_status_length)
189      print_failure_header(test)
190      if len(stdout):
191        print self._templates['stdout'] % stdout
192      if len(stderr):
193        print self._templates['stderr'] % stderr
194      print "Command: %s" % result.cmd
195      if output.HasCrashed():
196        print "exit code: %d" % output.exit_code
197        print "--- CRASHED ---"
198      if output.HasTimedOut():
199        print "--- TIMEOUT ---"
200
201  def finished(self):
202    self._print_progress('Done')
203    print
204
205  def _print_progress(self, name):
206    self._clear_line(self._last_status_length)
207    elapsed = time.time() - self._start_time
208    if not self._total:
209      progress = 0
210    else:
211      progress = (self._passed + self._failed) * 100 // self._total
212    status = self._templates['status_line'] % {
213      'passed': self._passed,
214      'progress': progress,
215      'failed': self._failed,
216      'test': name,
217      'mins': int(elapsed) / 60,
218      'secs': int(elapsed) % 60
219    }
220    status = self._truncate(status, 78)
221    self._last_status_length = len(status)
222    print status,
223    sys.stdout.flush()
224
225  def _truncate(self, string, length):
226    if length and len(string) > (length - 3):
227      return string[:(length - 3)] + "..."
228    else:
229      return string
230
231  def _clear_line(self, last_length):
232    raise NotImplementedError()
233
234
235class ColorProgressIndicator(CompactProgressIndicator):
236  def __init__(self):
237    templates = {
238      'status_line': ("[%(mins)02i:%(secs)02i|"
239                      "\033[34m%%%(progress) 4d\033[0m|"
240                      "\033[32m+%(passed) 4d\033[0m|"
241                      "\033[31m-%(failed) 4d\033[0m]: %(test)s"),
242      'stdout': "\033[1m%s\033[0m",
243      'stderr': "\033[31m%s\033[0m",
244    }
245    super(ColorProgressIndicator, self).__init__(templates)
246
247  def _clear_line(self, last_length):
248    print "\033[1K\r",
249
250
251class MonochromeProgressIndicator(CompactProgressIndicator):
252  def __init__(self):
253    templates = {
254      'status_line': ("[%(mins)02i:%(secs)02i|%%%(progress) 4d|"
255                      "+%(passed) 4d|-%(failed) 4d]: %(test)s"),
256      'stdout': '%s',
257      'stderr': '%s',
258    }
259    super(MonochromeProgressIndicator, self).__init__(templates)
260
261  def _clear_line(self, last_length):
262    print ("\r" + (" " * last_length) + "\r"),
263
264
265class JUnitTestProgressIndicator(ProgressIndicator):
266  def __init__(self, junitout, junittestsuite):
267    super(JUnitTestProgressIndicator, self).__init__()
268    self._requirement = base.DROP_PASS_STDOUT
269
270    self.outputter = junit_output.JUnitTestOutput(junittestsuite)
271    if junitout:
272      self.outfile = open(junitout, "w")
273    else:
274      self.outfile = sys.stdout
275
276  def _on_result_for(self, test, result):
277    # TODO(majeski): Support for dummy/grouped results
278    fail_text = ""
279    output = result.output
280    if result.has_unexpected_output:
281      stdout = output.stdout.strip()
282      if len(stdout):
283        fail_text += "stdout:\n%s\n" % stdout
284      stderr = output.stderr.strip()
285      if len(stderr):
286        fail_text += "stderr:\n%s\n" % stderr
287      fail_text += "Command: %s" % result.cmd.to_string()
288      if output.HasCrashed():
289        fail_text += "exit code: %d\n--- CRASHED ---" % output.exit_code
290      if output.HasTimedOut():
291        fail_text += "--- TIMEOUT ---"
292    self.outputter.HasRunTest(
293        test_name=str(test),
294        test_cmd=result.cmd.to_string(relative=True),
295        test_duration=output.duration,
296        test_failure=fail_text)
297
298  def finished(self):
299    self.outputter.FinishAndWrite(self.outfile)
300    if self.outfile != sys.stdout:
301      self.outfile.close()
302
303
304class JsonTestProgressIndicator(ProgressIndicator):
305  def __init__(self, json_test_results, arch, mode):
306    super(JsonTestProgressIndicator, self).__init__()
307    # We want to drop stdout/err for all passed tests on the first try, but we
308    # need to get outputs for all runs after the first one. To accommodate that,
309    # reruns are set to keep the result no matter what requirement says, i.e.
310    # keep_output set to True in the RerunProc.
311    self._requirement = base.DROP_PASS_STDOUT
312
313    self.json_test_results = json_test_results
314    self.arch = arch
315    self.mode = mode
316    self.results = []
317    self.tests = []
318
319  def _on_result_for(self, test, result):
320    if result.is_rerun:
321      self.process_results(test, result.results)
322    else:
323      self.process_results(test, [result])
324
325  def process_results(self, test, results):
326    for run, result in enumerate(results):
327      # TODO(majeski): Support for dummy/grouped results
328      output = result.output
329      # Buffer all tests for sorting the durations in the end.
330      # TODO(machenbach): Running average + buffer only slowest 20 tests.
331      self.tests.append((test, output.duration, result.cmd))
332
333      # Omit tests that run as expected on the first try.
334      # Everything that happens after the first run is included in the output
335      # even if it flakily passes.
336      if not result.has_unexpected_output and run == 0:
337        continue
338
339      self.results.append({
340        "name": str(test),
341        "flags": result.cmd.args,
342        "command": result.cmd.to_string(relative=True),
343        "run": run + 1,
344        "stdout": output.stdout,
345        "stderr": output.stderr,
346        "exit_code": output.exit_code,
347        "result": test.output_proc.get_outcome(output),
348        "expected": test.expected_outcomes,
349        "duration": output.duration,
350        "random_seed": test.random_seed,
351        "target_name": test.get_shell(),
352        "variant": test.variant,
353      })
354
355  def finished(self):
356    complete_results = []
357    if os.path.exists(self.json_test_results):
358      with open(self.json_test_results, "r") as f:
359        # Buildbot might start out with an empty file.
360        complete_results = json.loads(f.read() or "[]")
361
362    duration_mean = None
363    if self.tests:
364      # Get duration mean.
365      duration_mean = (
366          sum(duration for (_, duration, cmd) in self.tests) /
367          float(len(self.tests)))
368
369    # Sort tests by duration.
370    self.tests.sort(key=lambda (_, duration, cmd): duration, reverse=True)
371    slowest_tests = [
372      {
373        "name": str(test),
374        "flags": cmd.args,
375        "command": cmd.to_string(relative=True),
376        "duration": duration,
377        "marked_slow": test.is_slow,
378      } for (test, duration, cmd) in self.tests[:20]
379    ]
380
381    complete_results.append({
382      "arch": self.arch,
383      "mode": self.mode,
384      "results": self.results,
385      "slowest_tests": slowest_tests,
386      "duration_mean": duration_mean,
387      "test_total": len(self.tests),
388    })
389
390    with open(self.json_test_results, "w") as f:
391      f.write(json.dumps(complete_results))
392