1 // Copyright 2013 The Chromium Authors
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #ifdef UNSAFE_BUFFERS_BUILD
6 // TODO(crbug.com/40284755): Remove this and spanify to fix the errors.
7 #pragma allow_unsafe_buffers
8 #endif
9
10 #include "base/test/launcher/test_results_tracker.h"
11
12 #include <stddef.h>
13
14 #include <memory>
15 #include <utility>
16
17 #include "base/base64.h"
18 #include "base/check.h"
19 #include "base/command_line.h"
20 #include "base/containers/span.h"
21 #include "base/files/file.h"
22 #include "base/files/file_path.h"
23 #include "base/files/file_util.h"
24 #include "base/format_macros.h"
25 #include "base/i18n/time_formatting.h"
26 #include "base/json/json_writer.h"
27 #include "base/json/string_escape.h"
28 #include "base/logging.h"
29 #include "base/strings/strcat.h"
30 #include "base/strings/string_util.h"
31 #include "base/strings/stringprintf.h"
32 #include "base/test/gtest_util.h"
33 #include "base/test/launcher/test_launcher.h"
34 #include "base/test/test_switches.h"
35 #include "base/time/time.h"
36 #include "base/values.h"
37 #include "build/build_config.h"
38 #include "third_party/icu/source/i18n/unicode/timezone.h"
39
40 namespace base {
41
42 namespace {
43
// The default output file for XML output.
const FilePath::CharType kDefaultOutputFile[] = FILE_PATH_LITERAL(
    "test_detail.xml");

// Formats |time| as an ISO 8601 date-time string ("yyyy-MM-ddTHH:mm:ss"),
// in GMT and without timezone or fractional-second information.
// TODO(pkasting): Consider using `TimeFormatAsIso8601()`, possibly modified.
std::string FormatTimeAsIso8601(Time time) {
  return base::UnlocalizedTimeFormatWithPattern(time, "yyyy-MM-dd'T'HH:mm:ss",
                                                icu::TimeZone::getGMT());
}
55
56 struct TestSuiteResultsAggregator {
TestSuiteResultsAggregatorbase::__anona92547950111::TestSuiteResultsAggregator57 TestSuiteResultsAggregator()
58 : tests(0), failures(0), disabled(0), errors(0) {}
59
Addbase::__anona92547950111::TestSuiteResultsAggregator60 void Add(const TestResult& result) {
61 tests++;
62 elapsed_time += result.elapsed_time;
63
64 switch (result.status) {
65 case TestResult::TEST_SUCCESS:
66 break;
67 case TestResult::TEST_FAILURE:
68 failures++;
69 break;
70 case TestResult::TEST_EXCESSIVE_OUTPUT:
71 case TestResult::TEST_FAILURE_ON_EXIT:
72 case TestResult::TEST_TIMEOUT:
73 case TestResult::TEST_CRASH:
74 case TestResult::TEST_UNKNOWN:
75 case TestResult::TEST_NOT_RUN:
76 errors++;
77 break;
78 case TestResult::TEST_SKIPPED:
79 disabled++;
80 break;
81 }
82 }
83
84 int tests;
85 int failures;
86 int disabled;
87 int errors;
88
89 TimeDelta elapsed_time;
90 };
91
92 } // namespace
93
// Starts with no iteration in progress (-1) and no XML output file opened.
TestResultsTracker::TestResultsTracker() : iteration_(-1), out_(nullptr) {}
95
// On destruction, writes the results of the *last* iteration to the XML
// output file (if one was opened in Init()) in gtest-compatible format,
// then closes the file.
TestResultsTracker::~TestResultsTracker() {
  CHECK(thread_checker_.CalledOnValidThread());

  // No --gtest_output=xml:... flag was given; nothing to write.
  if (!out_)
    return;

  // |out_| is only set by Init(), and results require at least one iteration.
  CHECK_GE(iteration_, 0);

  // Maps test case names to test results.
  typedef std::map<std::string, std::vector<TestResult> > TestCaseMap;
  TestCaseMap test_case_map;

  TestSuiteResultsAggregator all_tests_aggregator;
  for (const PerIterationData::ResultsMap::value_type& i
       : per_iteration_data_[iteration_].results) {
    // Use the last test result as the final one.
    TestResult result = i.second.test_results.back();
    test_case_map[result.GetTestCaseName()].push_back(result);
    all_tests_aggregator.Add(result);
  }

  // Emit a gtest-style XML document: one <testsuites> root aggregating all
  // tests, one <testsuite> per test case, one <testcase> per test.
  fprintf(out_.get(), "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n");
  fprintf(out_.get(),
          "<testsuites name=\"AllTests\" tests=\"%d\" failures=\"%d\""
          " disabled=\"%d\" errors=\"%d\" time=\"%.3f\" timestamp=\"%s\">\n",
          all_tests_aggregator.tests, all_tests_aggregator.failures,
          all_tests_aggregator.disabled, all_tests_aggregator.errors,
          all_tests_aggregator.elapsed_time.InSecondsF(),
          FormatTimeAsIso8601(Time::Now()).c_str());

  for (const TestCaseMap::value_type& i : test_case_map) {
    const std::string testsuite_name = i.first;
    const std::vector<TestResult>& results = i.second;

    TestSuiteResultsAggregator aggregator;
    for (const TestResult& result : results) {
      aggregator.Add(result);
    }
    fprintf(out_.get(),
            "  <testsuite name=\"%s\" tests=\"%d\" "
            "failures=\"%d\" disabled=\"%d\" errors=\"%d\" time=\"%.3f\" "
            "timestamp=\"%s\">\n",
            testsuite_name.c_str(), aggregator.tests, aggregator.failures,
            aggregator.disabled, aggregator.errors,
            aggregator.elapsed_time.InSecondsF(),
            FormatTimeAsIso8601(Time::Now()).c_str());

    for (const TestResult& result : results) {
      // The per-test timestamp attribute is only emitted when the result
      // recorded one.  The StrCat temporary lives until the end of the
      // fprintf full-expression, so the .c_str() pointer stays valid.
      fprintf(out_.get(),
              "    <testcase name=\"%s\" status=\"run\" time=\"%.3f\""
              "%s classname=\"%s\">\n",
              result.GetTestName().c_str(), result.elapsed_time.InSecondsF(),
              (result.timestamp
                   ? StrCat({" timestamp=\"",
                             FormatTimeAsIso8601(*result.timestamp), "\""})
                         .c_str()
                   : ""),
              result.GetTestCaseName().c_str());
      if (result.status != TestResult::TEST_SUCCESS) {
        // The actual failure message is not propagated up to here, as it's too
        // much work to escape it properly, and in case of failure, almost
        // always one needs to look into full log anyway.
        fprintf(out_.get(),
                "      <failure message=\"\" type=\"\"></failure>\n");
      }
      fprintf(out_.get(), "    </testcase>\n");
    }
    fprintf(out_.get(), "  </testsuite>\n");
  }

  fprintf(out_.get(), "</testsuites>\n");
  fclose(out_);
}
169
Init(const CommandLine & command_line)170 bool TestResultsTracker::Init(const CommandLine& command_line) {
171 CHECK(thread_checker_.CalledOnValidThread());
172
173 // Prevent initializing twice.
174 CHECK(!out_);
175
176 print_temp_leaks_ =
177 command_line.HasSwitch(switches::kTestLauncherPrintTempLeaks);
178
179 if (!command_line.HasSwitch(kGTestOutputFlag))
180 return true;
181
182 std::string flag = command_line.GetSwitchValueASCII(kGTestOutputFlag);
183 size_t colon_pos = flag.find(':');
184 FilePath path;
185 if (colon_pos != std::string::npos) {
186 FilePath flag_path =
187 command_line.GetSwitchValuePath(kGTestOutputFlag);
188 FilePath::StringType path_string = flag_path.value();
189 path = FilePath(path_string.substr(colon_pos + 1));
190 // If the given path ends with '/', consider it is a directory.
191 // Note: This does NOT check that a directory (or file) actually exists
192 // (the behavior is same as what gtest does).
193 if (path.EndsWithSeparator()) {
194 FilePath executable = command_line.GetProgram().BaseName();
195 path = path.Append(executable.ReplaceExtension(
196 FilePath::StringType(FILE_PATH_LITERAL("xml"))));
197 }
198 }
199 if (path.value().empty())
200 path = FilePath(kDefaultOutputFile);
201 FilePath dir_name = path.DirName();
202 if (!DirectoryExists(dir_name)) {
203 LOG(WARNING) << "The output directory does not exist. "
204 << "Creating the directory: " << dir_name.value();
205 // Create the directory if necessary (because the gtest does the same).
206 if (!CreateDirectory(dir_name)) {
207 LOG(ERROR) << "Failed to created directory " << dir_name.value();
208 return false;
209 }
210 }
211 out_ = OpenFile(path, "w");
212 if (!out_) {
213 LOG(ERROR) << "Cannot open output file: "
214 << path.value() << ".";
215 return false;
216 }
217
218 return true;
219 }
220
OnTestIterationStarting()221 void TestResultsTracker::OnTestIterationStarting() {
222 CHECK(thread_checker_.CalledOnValidThread());
223
224 // Start with a fresh state for new iteration.
225 iteration_++;
226 per_iteration_data_.push_back(PerIterationData());
227 }
228
// Registers |test_name| as part of the full set of tests known to this run.
void TestResultsTracker::AddTest(const std::string& test_name) {
  // Record disabled test names without DISABLED_ prefix so that they are easy
  // to compare with regular test names, e.g. before or after disabling.
  all_tests_.insert(TestNameWithoutDisabledPrefix(test_name));
}
234
// Registers |test_name| as a disabled test.
void TestResultsTracker::AddDisabledTest(const std::string& test_name) {
  // Record disabled test names without DISABLED_ prefix so that they are easy
  // to compare with regular test names, e.g. before or after disabling.
  disabled_tests_.insert(TestNameWithoutDisabledPrefix(test_name));
}
240
AddTestLocation(const std::string & test_name,const std::string & file,int line)241 void TestResultsTracker::AddTestLocation(const std::string& test_name,
242 const std::string& file,
243 int line) {
244 test_locations_.insert(std::make_pair(
245 TestNameWithoutDisabledPrefix(test_name), CodeLocation(file, line)));
246 }
247
// Marks |test_name| as expected to run, so GeneratePlaceholderIteration()
// can later synthesize a NOT_RUN result for it.
void TestResultsTracker::AddTestPlaceholder(const std::string& test_name) {
  test_placeholders_.insert(test_name);
}
251
// Records |result| for the current iteration.  Handles two special flows:
// a failed PRE_ test's result is inserted in its corresponding main test's
// slot, and a NOT_RUN placeholder is replaced by the first real result.
void TestResultsTracker::AddTestResult(const TestResult& result) {
  CHECK(thread_checker_.CalledOnValidThread());
  CHECK_GE(iteration_, 0);

  PerIterationData::ResultsMap& results_map =
      per_iteration_data_[iteration_].results;
  std::string test_name_without_disabled_prefix =
      TestNameWithoutDisabledPrefix(result.full_name);
  auto it = results_map.find(test_name_without_disabled_prefix);

  // NOTE(review): |it| is dereferenced without an end() check here (and again
  // after the find() below) — this assumes an entry already exists for every
  // test, presumably created by GeneratePlaceholderIteration(); confirm that
  // callers guarantee this.
  AggregateTestResult& aggregate_test_result = it->second;

  // If the current test_result is a PRE test and it failed, insert its result
  // in the corresponding non-PRE test's place.
  std::string test_name_without_pre_prefix(test_name_without_disabled_prefix);
  ReplaceSubstringsAfterOffset(&test_name_without_pre_prefix, 0, "PRE_", "");
  if (test_name_without_pre_prefix != test_name_without_disabled_prefix) {
    if (result.status != TestResult::TEST_SUCCESS) {
      it = results_map.find(test_name_without_pre_prefix);
      if (!it->second.test_results.empty() &&
          it->second.test_results.back().status == TestResult::TEST_NOT_RUN) {
        // Also need to remove the non-PRE test's placeholder.
        it->second.test_results.pop_back();
      }
      it->second.test_results.push_back(result);
    }
    // We quit early here and let the non-PRE test detect this result and
    // modify its result appropriately.
    return;
  }

  // If the last test result is a placeholder, then get rid of it now that we
  // have real results.
  if (!aggregate_test_result.test_results.empty() &&
      aggregate_test_result.test_results.back().status ==
          TestResult::TEST_NOT_RUN) {
    aggregate_test_result.test_results.pop_back();
  }

  // Store the result under the DISABLED_-stripped name.
  TestResult result_to_add = result;
  result_to_add.full_name = test_name_without_disabled_prefix;
  if (!aggregate_test_result.test_results.empty()) {
    TestResult prev_result = aggregate_test_result.test_results.back();
    if (prev_result.full_name != test_name_without_disabled_prefix) {
      // Some other test's result is in our place! It must be our failed PRE
      // test. Modify our own result if it failed and we succeeded so we don't
      // end up silently swallowing PRE-only failures.
      std::string prev_result_name(prev_result.full_name);
      ReplaceSubstringsAfterOffset(&prev_result_name, 0, "PRE_", "");
      CHECK_EQ(prev_result_name, test_name_without_disabled_prefix);

      if (result.status == TestResult::TEST_SUCCESS) {
        TestResult modified_result(prev_result);
        modified_result.full_name = test_name_without_disabled_prefix;
        result_to_add = modified_result;
      }
      aggregate_test_result.test_results.pop_back();
    }
  }
  aggregate_test_result.test_results.push_back(result_to_add);
}
315
// Records that |count| files/directories were left behind in the temporary
// directory by one (or more) of |test_names| during the current iteration.
void TestResultsTracker::AddLeakedItems(
    int count,
    const std::vector<std::string>& test_names) {
  // A zero-count report would be meaningless.
  DCHECK(count);
  per_iteration_data_.back().leaked_temp_items.emplace_back(count, test_names);
}
322
// Seeds the current iteration with a TEST_NOT_RUN placeholder result for
// every test registered via AddTestPlaceholder(), so tests that never run
// still appear in the output.
void TestResultsTracker::GeneratePlaceholderIteration() {
  CHECK(thread_checker_.CalledOnValidThread());

  for (auto& full_test_name : test_placeholders_) {
    std::string test_name = TestNameWithoutDisabledPrefix(full_test_name);

    TestResult test_result;
    test_result.full_name = test_name;
    test_result.status = TestResult::TEST_NOT_RUN;

    // There shouldn't be any existing results when we generate placeholder
    // results.
    CHECK(
        per_iteration_data_[iteration_].results[test_name].test_results.empty())
        << test_name;
    per_iteration_data_[iteration_].results[test_name].test_results.push_back(
        test_result);
  }
}
342
PrintSummaryOfCurrentIteration() const343 void TestResultsTracker::PrintSummaryOfCurrentIteration() const {
344 TestStatusMap tests_by_status(GetTestStatusMapForCurrentIteration());
345
346 PrintTests(tests_by_status[TestResult::TEST_FAILURE].begin(),
347 tests_by_status[TestResult::TEST_FAILURE].end(),
348 "failed");
349 PrintTests(tests_by_status[TestResult::TEST_FAILURE_ON_EXIT].begin(),
350 tests_by_status[TestResult::TEST_FAILURE_ON_EXIT].end(),
351 "failed on exit");
352 PrintTests(tests_by_status[TestResult::TEST_EXCESSIVE_OUTPUT].begin(),
353 tests_by_status[TestResult::TEST_EXCESSIVE_OUTPUT].end(),
354 "produced excessive output");
355 PrintTests(tests_by_status[TestResult::TEST_TIMEOUT].begin(),
356 tests_by_status[TestResult::TEST_TIMEOUT].end(),
357 "timed out");
358 PrintTests(tests_by_status[TestResult::TEST_CRASH].begin(),
359 tests_by_status[TestResult::TEST_CRASH].end(),
360 "crashed");
361 PrintTests(tests_by_status[TestResult::TEST_SKIPPED].begin(),
362 tests_by_status[TestResult::TEST_SKIPPED].end(),
363 "skipped");
364 PrintTests(tests_by_status[TestResult::TEST_UNKNOWN].begin(),
365 tests_by_status[TestResult::TEST_UNKNOWN].end(),
366 "had unknown result");
367 PrintTests(tests_by_status[TestResult::TEST_NOT_RUN].begin(),
368 tests_by_status[TestResult::TEST_NOT_RUN].end(), "not run");
369
370 if (print_temp_leaks_) {
371 for (const auto& leaking_tests :
372 per_iteration_data_.back().leaked_temp_items) {
373 PrintLeaks(leaking_tests.first, leaking_tests.second);
374 }
375 }
376 }
377
PrintSummaryOfAllIterations() const378 void TestResultsTracker::PrintSummaryOfAllIterations() const {
379 CHECK(thread_checker_.CalledOnValidThread());
380
381 TestStatusMap tests_by_status(GetTestStatusMapForAllIterations());
382
383 fprintf(stdout, "Summary of all test iterations:\n");
384 fflush(stdout);
385
386 PrintTests(tests_by_status[TestResult::TEST_FAILURE].begin(),
387 tests_by_status[TestResult::TEST_FAILURE].end(),
388 "failed");
389 PrintTests(tests_by_status[TestResult::TEST_FAILURE_ON_EXIT].begin(),
390 tests_by_status[TestResult::TEST_FAILURE_ON_EXIT].end(),
391 "failed on exit");
392 PrintTests(tests_by_status[TestResult::TEST_EXCESSIVE_OUTPUT].begin(),
393 tests_by_status[TestResult::TEST_EXCESSIVE_OUTPUT].end(),
394 "produced excessive output");
395 PrintTests(tests_by_status[TestResult::TEST_TIMEOUT].begin(),
396 tests_by_status[TestResult::TEST_TIMEOUT].end(),
397 "timed out");
398 PrintTests(tests_by_status[TestResult::TEST_CRASH].begin(),
399 tests_by_status[TestResult::TEST_CRASH].end(),
400 "crashed");
401 PrintTests(tests_by_status[TestResult::TEST_SKIPPED].begin(),
402 tests_by_status[TestResult::TEST_SKIPPED].end(),
403 "skipped");
404 PrintTests(tests_by_status[TestResult::TEST_UNKNOWN].begin(),
405 tests_by_status[TestResult::TEST_UNKNOWN].end(),
406 "had unknown result");
407 PrintTests(tests_by_status[TestResult::TEST_NOT_RUN].begin(),
408 tests_by_status[TestResult::TEST_NOT_RUN].end(), "not run");
409
410 fprintf(stdout, "End of the summary.\n");
411 fflush(stdout);
412 }
413
// Adds |tag| to the set emitted as "global_tags" in the JSON summary.
void TestResultsTracker::AddGlobalTag(const std::string& tag) {
  global_tags_.insert(tag);
}
417
// Serializes the tracker's full state — global tags (plus |additional_tags|),
// the known/disabled test lists, every iteration's results, and test source
// locations — into a single JSON document written to |path|.  Returns false
// if JSON serialization or file I/O fails.
bool TestResultsTracker::SaveSummaryAsJSON(
    const FilePath& path,
    const std::vector<std::string>& additional_tags) const {
  Value::Dict summary_root;

  Value::List global_tags;
  for (const auto& global_tag : global_tags_) {
    global_tags.Append(global_tag);
  }
  for (const auto& tag : additional_tags) {
    global_tags.Append(tag);
  }
  summary_root.Set("global_tags", std::move(global_tags));

  Value::List all_tests;
  for (const auto& test : all_tests_) {
    all_tests.Append(test);
  }
  summary_root.Set("all_tests", std::move(all_tests));

  Value::List disabled_tests;
  for (const auto& disabled_test : disabled_tests_) {
    disabled_tests.Append(disabled_test);
  }
  summary_root.Set("disabled_tests", std::move(disabled_tests));

  Value::List per_iteration_data;

  // Even if we haven't run any tests, we still have the dummy iteration.
  int max_iteration = iteration_ < 0 ? 0 : iteration_;

  for (int i = 0; i <= max_iteration; i++) {
    Value::Dict current_iteration_data;

    // Each test maps to the list of all its recorded results (retries
    // included), in the order they were added.
    for (const auto& j : per_iteration_data_[i].results) {
      Value::List test_results;

      for (size_t k = 0; k < j.second.test_results.size(); k++) {
        const TestResult& test_result = j.second.test_results[k];

        Value::Dict test_result_value;

        test_result_value.Set("status", test_result.StatusAsString());
        test_result_value.Set(
            "elapsed_time_ms",
            static_cast<int>(test_result.elapsed_time.InMilliseconds()));

        // Optional fields are only emitted when present on the result.
        if (test_result.thread_id) {
          test_result_value.Set("thread_id",
                                static_cast<int>(*test_result.thread_id));
        }
        if (test_result.process_num)
          test_result_value.Set("process_num", *test_result.process_num);
        if (test_result.timestamp) {
          // The timestamp is formatted using TimeFormatAsIso8601 instead of
          // FormatTimeAsIso8601 here for a better accuracy, since the former
          // method includes fractions of a second.
          test_result_value.Set(
              "timestamp", TimeFormatAsIso8601(*test_result.timestamp).c_str());
        }

        // JSON strings must be valid UTF-8, so a non-UTF-8 snippet is
        // replaced with a stand-in; the lossless base64 copy follows below.
        bool lossless_snippet = false;
        if (IsStringUTF8(test_result.output_snippet)) {
          test_result_value.Set("output_snippet", test_result.output_snippet);
          lossless_snippet = true;
        } else {
          test_result_value.Set(
              "output_snippet",
              "<non-UTF-8 snippet, see output_snippet_base64>");
        }

        // TODO(phajdan.jr): Fix typo in JSON key (losless -> lossless)
        // making sure not to break any consumers of this data.
        test_result_value.Set("losless_snippet", lossless_snippet);

        // Also include the raw version (base64-encoded so that it can be safely
        // JSON-serialized - there are no guarantees about character encoding
        // of the snippet). This can be very useful piece of information when
        // debugging a test failure related to character encoding.
        std::string base64_output_snippet =
            base::Base64Encode(test_result.output_snippet);
        test_result_value.Set("output_snippet_base64", base64_output_snippet);
        if (!test_result.links.empty()) {
          Value::Dict links;
          for (const auto& link : test_result.links) {
            Value::Dict link_info;
            link_info.Set("content", link.second);
            links.SetByDottedPath(link.first, std::move(link_info));
          }
          test_result_value.Set("links", std::move(links));
        }
        if (!test_result.tags.empty()) {
          Value::Dict tags;
          for (const auto& tag : test_result.tags) {
            Value::List tag_values;
            for (const auto& tag_value : tag.second) {
              tag_values.Append(tag_value);
            }
            Value::Dict tag_info;
            tag_info.Set("values", std::move(tag_values));
            tags.SetByDottedPath(tag.first, std::move(tag_info));
          }
          test_result_value.Set("tags", std::move(tags));
        }
        if (!test_result.properties.empty()) {
          Value::Dict properties;
          for (const auto& property : test_result.properties) {
            Value::Dict property_info;
            property_info.Set("value", property.second);
            properties.SetByDottedPath(property.first,
                                       std::move(property_info));
          }
          test_result_value.Set("properties", std::move(properties));
        }

        // Individual gtest result parts (EXPECT/ASSERT failures etc.), with
        // the same UTF-8/base64 dual representation as the snippet above.
        Value::List test_result_parts;
        for (const TestResultPart& result_part :
             test_result.test_result_parts) {
          Value::Dict result_part_value;

          result_part_value.Set("type", result_part.TypeAsString());
          result_part_value.Set("file", result_part.file_name);
          result_part_value.Set("line", result_part.line_number);

          bool lossless_summary = IsStringUTF8(result_part.summary);
          if (lossless_summary) {
            result_part_value.Set("summary", result_part.summary);
          } else {
            result_part_value.Set("summary",
                                  "<non-UTF-8 snippet, see summary_base64>");
          }
          result_part_value.Set("lossless_summary", lossless_summary);

          std::string encoded_summary = base::Base64Encode(result_part.summary);
          result_part_value.Set("summary_base64", encoded_summary);

          bool lossless_message = IsStringUTF8(result_part.message);
          if (lossless_message) {
            result_part_value.Set("message", result_part.message);
          } else {
            result_part_value.Set("message",
                                  "<non-UTF-8 snippet, see message_base64>");
          }
          result_part_value.Set("lossless_message", lossless_message);

          std::string encoded_message = base::Base64Encode(result_part.message);
          result_part_value.Set("message_base64", encoded_message);

          test_result_parts.Append(std::move(result_part_value));
        }
        test_result_value.Set("result_parts", std::move(test_result_parts));

        test_results.Append(std::move(test_result_value));
      }

      current_iteration_data.Set(j.first, std::move(test_results));
    }
    per_iteration_data.Append(std::move(current_iteration_data));
  }
  summary_root.Set("per_iteration_data", std::move(per_iteration_data));

  Value::Dict test_locations;
  for (const auto& item : test_locations_) {
    std::string test_name = item.first;
    CodeLocation location = item.second;
    Value::Dict location_value;
    location_value.Set("file", location.file);
    location_value.Set("line", location.line);
    test_locations.Set(test_name, std::move(location_value));
  }
  summary_root.Set("test_locations", std::move(test_locations));

  std::string json;
  if (!JSONWriter::Write(summary_root, &json))
    return false;

  File output(path, File::FLAG_CREATE_ALWAYS | File::FLAG_WRITE);
  if (!output.IsValid()) {
    return false;
  }
  if (!output.WriteAtCurrentPosAndCheck(base::as_byte_span(json))) {
    return false;
  }

#if BUILDFLAG(IS_FUCHSIA)
  // File::Flush() will call fsync(). This is important on Fuchsia to ensure
  // that the file is written to the disk - the system running under qemu will
  // shutdown shortly after the test completes. On Fuchsia fsync() times out
  // after 15 seconds. Apparently this may not be enough in some cases,
  // particularly when running net_unittests on buildbots, see
  // https://crbug.com/796318. Try calling fsync() more than once to workaround
  // this issue.
  //
  // TODO(sergeyu): Figure out a better solution.
  int flush_attempts_left = 4;
  while (flush_attempts_left-- > 0) {
    if (output.Flush())
      return true;
    LOG(ERROR) << "fsync() failed when saving test output summary. "
               << ((flush_attempts_left > 0) ? "Retrying." : " Giving up.");
  }

  return false;
#else
  return true;
#endif
}
625
626 TestResultsTracker::TestStatusMap
GetTestStatusMapForCurrentIteration() const627 TestResultsTracker::GetTestStatusMapForCurrentIteration() const {
628 TestStatusMap tests_by_status;
629 GetTestStatusForIteration(iteration_, &tests_by_status);
630 return tests_by_status;
631 }
632
633 TestResultsTracker::TestStatusMap
GetTestStatusMapForAllIterations() const634 TestResultsTracker::GetTestStatusMapForAllIterations() const {
635 TestStatusMap tests_by_status;
636 for (int i = 0; i <= iteration_; i++)
637 GetTestStatusForIteration(i, &tests_by_status);
638 return tests_by_status;
639 }
640
GetTestStatusForIteration(int iteration,TestStatusMap * map) const641 void TestResultsTracker::GetTestStatusForIteration(
642 int iteration, TestStatusMap* map) const {
643 for (const auto& j : per_iteration_data_[iteration].results) {
644 // Use the last test result as the final one.
645 const TestResult& result = j.second.test_results.back();
646 (*map)[result.status].insert(result.full_name);
647 }
648 }
649
650 // Utility function to print a list of test names. Uses iterator to be
651 // compatible with different containers, like vector and set.
652 template<typename InputIterator>
PrintTests(InputIterator first,InputIterator last,const std::string & description) const653 void TestResultsTracker::PrintTests(InputIterator first,
654 InputIterator last,
655 const std::string& description) const {
656 size_t count = std::distance(first, last);
657 if (count == 0)
658 return;
659
660 fprintf(stdout,
661 "%" PRIuS " test%s %s:\n",
662 count,
663 count != 1 ? "s" : "",
664 description.c_str());
665 for (InputIterator it = first; it != last; ++it) {
666 const std::string& test_name = *it;
667 const auto location_it = test_locations_.find(test_name);
668 CHECK(location_it != test_locations_.end()) << test_name;
669 const CodeLocation& location = location_it->second;
670 fprintf(stdout, " %s (%s:%d)\n", test_name.c_str(),
671 location.file.c_str(), location.line);
672 }
673 fflush(stdout);
674 }
675
// Prints one temp-dir leak report: |count| leaked items attributed to the
// colon-joined list of |test_names|.
void TestResultsTracker::PrintLeaks(
    int count,
    const std::vector<std::string>& test_names) const {
  fprintf(stdout,
          "ERROR: %d files and/or directories were left behind in the temporary"
          " directory by one or more of these tests: %s\n",
          count, JoinString(test_names, ":").c_str());
  fflush(stdout);
}
685
// Out-of-line defaulted special member functions for the nested helper
// structs declared in the header.
TestResultsTracker::AggregateTestResult::AggregateTestResult() = default;

TestResultsTracker::AggregateTestResult::AggregateTestResult(
    const AggregateTestResult& other) = default;

TestResultsTracker::AggregateTestResult::~AggregateTestResult() = default;

TestResultsTracker::PerIterationData::PerIterationData() = default;

TestResultsTracker::PerIterationData::PerIterationData(
    const PerIterationData& other) = default;

TestResultsTracker::PerIterationData::~PerIterationData() = default;
699
700 } // namespace base
701