1 // Copyright 2013 The Chromium Authors
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #include "base/test/launcher/test_results_tracker.h"
6
7 #include <stddef.h>
8
9 #include <memory>
10 #include <utility>
11
12 #include "base/base64.h"
13 #include "base/command_line.h"
14 #include "base/files/file.h"
15 #include "base/files/file_path.h"
16 #include "base/files/file_util.h"
17 #include "base/format_macros.h"
18 #include "base/json/json_writer.h"
19 #include "base/json/string_escape.h"
20 #include "base/logging.h"
21 #include "base/strings/strcat.h"
22 #include "base/strings/string_util.h"
23 #include "base/strings/stringprintf.h"
24 #include "base/test/gtest_util.h"
25 #include "base/test/launcher/test_launcher.h"
26 #include "base/test/test_switches.h"
27 #include "base/time/time.h"
28 #include "base/time/time_to_iso8601.h"
29 #include "base/values.h"
30
31 namespace base {
32
33 namespace {
34
35 // The default output file for XML output.
36 const FilePath::CharType kDefaultOutputFile[] = FILE_PATH_LITERAL(
37 "test_detail.xml");
38
39 // Converts the given epoch time in milliseconds to a date string in the ISO
40 // 8601 format, without the timezone information.
41 // TODO(xyzzyz): Find a good place in Chromium to put it and refactor all uses
42 // to point to it.
FormatTimeAsIso8601(Time time)43 std::string FormatTimeAsIso8601(Time time) {
44 Time::Exploded exploded;
45 time.UTCExplode(&exploded);
46 return StringPrintf("%04d-%02d-%02dT%02d:%02d:%02d",
47 exploded.year,
48 exploded.month,
49 exploded.day_of_month,
50 exploded.hour,
51 exploded.minute,
52 exploded.second);
53 }
54
55 struct TestSuiteResultsAggregator {
TestSuiteResultsAggregatorbase::__anon4162857a0111::TestSuiteResultsAggregator56 TestSuiteResultsAggregator()
57 : tests(0), failures(0), disabled(0), errors(0) {}
58
Addbase::__anon4162857a0111::TestSuiteResultsAggregator59 void Add(const TestResult& result) {
60 tests++;
61 elapsed_time += result.elapsed_time;
62
63 switch (result.status) {
64 case TestResult::TEST_SUCCESS:
65 break;
66 case TestResult::TEST_FAILURE:
67 failures++;
68 break;
69 case TestResult::TEST_EXCESSIVE_OUTPUT:
70 case TestResult::TEST_FAILURE_ON_EXIT:
71 case TestResult::TEST_TIMEOUT:
72 case TestResult::TEST_CRASH:
73 case TestResult::TEST_UNKNOWN:
74 case TestResult::TEST_NOT_RUN:
75 errors++;
76 break;
77 case TestResult::TEST_SKIPPED:
78 disabled++;
79 break;
80 }
81 }
82
83 int tests;
84 int failures;
85 int disabled;
86 int errors;
87
88 TimeDelta elapsed_time;
89 };
90
91 } // namespace
92
TestResultsTracker()93 TestResultsTracker::TestResultsTracker() : iteration_(-1), out_(nullptr) {}
94
~TestResultsTracker()95 TestResultsTracker::~TestResultsTracker() {
96 CHECK(thread_checker_.CalledOnValidThread());
97
98 if (!out_)
99 return;
100
101 CHECK_GE(iteration_, 0);
102
103 // Maps test case names to test results.
104 typedef std::map<std::string, std::vector<TestResult> > TestCaseMap;
105 TestCaseMap test_case_map;
106
107 TestSuiteResultsAggregator all_tests_aggregator;
108 for (const PerIterationData::ResultsMap::value_type& i
109 : per_iteration_data_[iteration_].results) {
110 // Use the last test result as the final one.
111 TestResult result = i.second.test_results.back();
112 test_case_map[result.GetTestCaseName()].push_back(result);
113 all_tests_aggregator.Add(result);
114 }
115
116 fprintf(out_.get(), "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n");
117 fprintf(out_.get(),
118 "<testsuites name=\"AllTests\" tests=\"%d\" failures=\"%d\""
119 " disabled=\"%d\" errors=\"%d\" time=\"%.3f\" timestamp=\"%s\">\n",
120 all_tests_aggregator.tests, all_tests_aggregator.failures,
121 all_tests_aggregator.disabled, all_tests_aggregator.errors,
122 all_tests_aggregator.elapsed_time.InSecondsF(),
123 FormatTimeAsIso8601(Time::Now()).c_str());
124
125 for (const TestCaseMap::value_type& i : test_case_map) {
126 const std::string testsuite_name = i.first;
127 const std::vector<TestResult>& results = i.second;
128
129 TestSuiteResultsAggregator aggregator;
130 for (const TestResult& result : results) {
131 aggregator.Add(result);
132 }
133 fprintf(out_.get(),
134 " <testsuite name=\"%s\" tests=\"%d\" "
135 "failures=\"%d\" disabled=\"%d\" errors=\"%d\" time=\"%.3f\" "
136 "timestamp=\"%s\">\n",
137 testsuite_name.c_str(), aggregator.tests, aggregator.failures,
138 aggregator.disabled, aggregator.errors,
139 aggregator.elapsed_time.InSecondsF(),
140 FormatTimeAsIso8601(Time::Now()).c_str());
141
142 for (const TestResult& result : results) {
143 fprintf(out_.get(),
144 " <testcase name=\"%s\" status=\"run\" time=\"%.3f\""
145 "%s classname=\"%s\">\n",
146 result.GetTestName().c_str(), result.elapsed_time.InSecondsF(),
147 (result.timestamp
148 ? StrCat({" timestamp=\"",
149 FormatTimeAsIso8601(*result.timestamp), "\""})
150 .c_str()
151 : ""),
152 result.GetTestCaseName().c_str());
153 if (result.status != TestResult::TEST_SUCCESS) {
154 // The actual failure message is not propagated up to here, as it's too
155 // much work to escape it properly, and in case of failure, almost
156 // always one needs to look into full log anyway.
157 fprintf(out_.get(),
158 " <failure message=\"\" type=\"\"></failure>\n");
159 }
160 fprintf(out_.get(), " </testcase>\n");
161 }
162 fprintf(out_.get(), " </testsuite>\n");
163 }
164
165 fprintf(out_.get(), "</testsuites>\n");
166 fclose(out_);
167 }
168
Init(const CommandLine & command_line)169 bool TestResultsTracker::Init(const CommandLine& command_line) {
170 CHECK(thread_checker_.CalledOnValidThread());
171
172 // Prevent initializing twice.
173 if (out_) {
174 NOTREACHED();
175 return false;
176 }
177
178 print_temp_leaks_ =
179 command_line.HasSwitch(switches::kTestLauncherPrintTempLeaks);
180
181 if (!command_line.HasSwitch(kGTestOutputFlag))
182 return true;
183
184 std::string flag = command_line.GetSwitchValueASCII(kGTestOutputFlag);
185 size_t colon_pos = flag.find(':');
186 FilePath path;
187 if (colon_pos != std::string::npos) {
188 FilePath flag_path =
189 command_line.GetSwitchValuePath(kGTestOutputFlag);
190 FilePath::StringType path_string = flag_path.value();
191 path = FilePath(path_string.substr(colon_pos + 1));
192 // If the given path ends with '/', consider it is a directory.
193 // Note: This does NOT check that a directory (or file) actually exists
194 // (the behavior is same as what gtest does).
195 if (path.EndsWithSeparator()) {
196 FilePath executable = command_line.GetProgram().BaseName();
197 path = path.Append(executable.ReplaceExtension(
198 FilePath::StringType(FILE_PATH_LITERAL("xml"))));
199 }
200 }
201 if (path.value().empty())
202 path = FilePath(kDefaultOutputFile);
203 FilePath dir_name = path.DirName();
204 if (!DirectoryExists(dir_name)) {
205 LOG(WARNING) << "The output directory does not exist. "
206 << "Creating the directory: " << dir_name.value();
207 // Create the directory if necessary (because the gtest does the same).
208 if (!CreateDirectory(dir_name)) {
209 LOG(ERROR) << "Failed to created directory " << dir_name.value();
210 return false;
211 }
212 }
213 out_ = OpenFile(path, "w");
214 if (!out_) {
215 LOG(ERROR) << "Cannot open output file: "
216 << path.value() << ".";
217 return false;
218 }
219
220 return true;
221 }
222
OnTestIterationStarting()223 void TestResultsTracker::OnTestIterationStarting() {
224 CHECK(thread_checker_.CalledOnValidThread());
225
226 // Start with a fresh state for new iteration.
227 iteration_++;
228 per_iteration_data_.push_back(PerIterationData());
229 }
230
AddTest(const std::string & test_name)231 void TestResultsTracker::AddTest(const std::string& test_name) {
232 // Record disabled test names without DISABLED_ prefix so that they are easy
233 // to compare with regular test names, e.g. before or after disabling.
234 all_tests_.insert(TestNameWithoutDisabledPrefix(test_name));
235 }
236
AddDisabledTest(const std::string & test_name)237 void TestResultsTracker::AddDisabledTest(const std::string& test_name) {
238 // Record disabled test names without DISABLED_ prefix so that they are easy
239 // to compare with regular test names, e.g. before or after disabling.
240 disabled_tests_.insert(TestNameWithoutDisabledPrefix(test_name));
241 }
242
AddTestLocation(const std::string & test_name,const std::string & file,int line)243 void TestResultsTracker::AddTestLocation(const std::string& test_name,
244 const std::string& file,
245 int line) {
246 test_locations_.insert(std::make_pair(
247 TestNameWithoutDisabledPrefix(test_name), CodeLocation(file, line)));
248 }
249
AddTestPlaceholder(const std::string & test_name)250 void TestResultsTracker::AddTestPlaceholder(const std::string& test_name) {
251 test_placeholders_.insert(test_name);
252 }
253
AddTestResult(const TestResult & result)254 void TestResultsTracker::AddTestResult(const TestResult& result) {
255 CHECK(thread_checker_.CalledOnValidThread());
256 CHECK_GE(iteration_, 0);
257
258 PerIterationData::ResultsMap& results_map =
259 per_iteration_data_[iteration_].results;
260 std::string test_name_without_disabled_prefix =
261 TestNameWithoutDisabledPrefix(result.full_name);
262 auto it = results_map.find(test_name_without_disabled_prefix);
263
264 // Record disabled test names without DISABLED_ prefix so that they are easy
265 // to compare with regular test names, e.g. before or after disabling.
266 AggregateTestResult& aggregate_test_result = it->second;
267
268 // If the current test_result is a PRE test and it failed, insert its result
269 // in the corresponding non-PRE test's place.
270 std::string test_name_without_pre_prefix(test_name_without_disabled_prefix);
271 ReplaceSubstringsAfterOffset(&test_name_without_pre_prefix, 0, "PRE_", "");
272 if (test_name_without_pre_prefix != test_name_without_disabled_prefix) {
273 if (result.status != TestResult::TEST_SUCCESS) {
274 it = results_map.find(test_name_without_pre_prefix);
275 if (!it->second.test_results.empty() &&
276 it->second.test_results.back().status == TestResult::TEST_NOT_RUN) {
277 // Also need to remove the non-PRE test's placeholder.
278 it->second.test_results.pop_back();
279 }
280 it->second.test_results.push_back(result);
281 }
282 // We quit early here and let the non-PRE test detect this result and
283 // modify its result appropriately.
284 return;
285 }
286
287 // If the last test result is a placeholder, then get rid of it now that we
288 // have real results.
289 if (!aggregate_test_result.test_results.empty() &&
290 aggregate_test_result.test_results.back().status ==
291 TestResult::TEST_NOT_RUN) {
292 aggregate_test_result.test_results.pop_back();
293 }
294
295 TestResult result_to_add = result;
296 result_to_add.full_name = test_name_without_disabled_prefix;
297 if (!aggregate_test_result.test_results.empty()) {
298 TestResult prev_result = aggregate_test_result.test_results.back();
299 if (prev_result.full_name != test_name_without_disabled_prefix) {
300 // Some other test's result is in our place! It must be our failed PRE
301 // test. Modify our own result if it failed and we succeeded so we don't
302 // end up silently swallowing PRE-only failures.
303 std::string prev_result_name(prev_result.full_name);
304 ReplaceSubstringsAfterOffset(&prev_result_name, 0, "PRE_", "");
305 CHECK_EQ(prev_result_name, test_name_without_disabled_prefix);
306
307 if (result.status == TestResult::TEST_SUCCESS) {
308 TestResult modified_result(prev_result);
309 modified_result.full_name = test_name_without_disabled_prefix;
310 result_to_add = modified_result;
311 }
312 aggregate_test_result.test_results.pop_back();
313 }
314 }
315 aggregate_test_result.test_results.push_back(result_to_add);
316 }
317
AddLeakedItems(int count,const std::vector<std::string> & test_names)318 void TestResultsTracker::AddLeakedItems(
319 int count,
320 const std::vector<std::string>& test_names) {
321 DCHECK(count);
322 per_iteration_data_.back().leaked_temp_items.emplace_back(count, test_names);
323 }
324
GeneratePlaceholderIteration()325 void TestResultsTracker::GeneratePlaceholderIteration() {
326 CHECK(thread_checker_.CalledOnValidThread());
327
328 for (auto& full_test_name : test_placeholders_) {
329 std::string test_name = TestNameWithoutDisabledPrefix(full_test_name);
330
331 TestResult test_result;
332 test_result.full_name = test_name;
333 test_result.status = TestResult::TEST_NOT_RUN;
334
335 // There shouldn't be any existing results when we generate placeholder
336 // results.
337 CHECK(
338 per_iteration_data_[iteration_].results[test_name].test_results.empty())
339 << test_name;
340 per_iteration_data_[iteration_].results[test_name].test_results.push_back(
341 test_result);
342 }
343 }
344
PrintSummaryOfCurrentIteration() const345 void TestResultsTracker::PrintSummaryOfCurrentIteration() const {
346 TestStatusMap tests_by_status(GetTestStatusMapForCurrentIteration());
347
348 PrintTests(tests_by_status[TestResult::TEST_FAILURE].begin(),
349 tests_by_status[TestResult::TEST_FAILURE].end(),
350 "failed");
351 PrintTests(tests_by_status[TestResult::TEST_FAILURE_ON_EXIT].begin(),
352 tests_by_status[TestResult::TEST_FAILURE_ON_EXIT].end(),
353 "failed on exit");
354 PrintTests(tests_by_status[TestResult::TEST_EXCESSIVE_OUTPUT].begin(),
355 tests_by_status[TestResult::TEST_EXCESSIVE_OUTPUT].end(),
356 "produced excessive output");
357 PrintTests(tests_by_status[TestResult::TEST_TIMEOUT].begin(),
358 tests_by_status[TestResult::TEST_TIMEOUT].end(),
359 "timed out");
360 PrintTests(tests_by_status[TestResult::TEST_CRASH].begin(),
361 tests_by_status[TestResult::TEST_CRASH].end(),
362 "crashed");
363 PrintTests(tests_by_status[TestResult::TEST_SKIPPED].begin(),
364 tests_by_status[TestResult::TEST_SKIPPED].end(),
365 "skipped");
366 PrintTests(tests_by_status[TestResult::TEST_UNKNOWN].begin(),
367 tests_by_status[TestResult::TEST_UNKNOWN].end(),
368 "had unknown result");
369 PrintTests(tests_by_status[TestResult::TEST_NOT_RUN].begin(),
370 tests_by_status[TestResult::TEST_NOT_RUN].end(), "not run");
371
372 if (print_temp_leaks_) {
373 for (const auto& leaking_tests :
374 per_iteration_data_.back().leaked_temp_items) {
375 PrintLeaks(leaking_tests.first, leaking_tests.second);
376 }
377 }
378 }
379
PrintSummaryOfAllIterations() const380 void TestResultsTracker::PrintSummaryOfAllIterations() const {
381 CHECK(thread_checker_.CalledOnValidThread());
382
383 TestStatusMap tests_by_status(GetTestStatusMapForAllIterations());
384
385 fprintf(stdout, "Summary of all test iterations:\n");
386 fflush(stdout);
387
388 PrintTests(tests_by_status[TestResult::TEST_FAILURE].begin(),
389 tests_by_status[TestResult::TEST_FAILURE].end(),
390 "failed");
391 PrintTests(tests_by_status[TestResult::TEST_FAILURE_ON_EXIT].begin(),
392 tests_by_status[TestResult::TEST_FAILURE_ON_EXIT].end(),
393 "failed on exit");
394 PrintTests(tests_by_status[TestResult::TEST_EXCESSIVE_OUTPUT].begin(),
395 tests_by_status[TestResult::TEST_EXCESSIVE_OUTPUT].end(),
396 "produced excessive output");
397 PrintTests(tests_by_status[TestResult::TEST_TIMEOUT].begin(),
398 tests_by_status[TestResult::TEST_TIMEOUT].end(),
399 "timed out");
400 PrintTests(tests_by_status[TestResult::TEST_CRASH].begin(),
401 tests_by_status[TestResult::TEST_CRASH].end(),
402 "crashed");
403 PrintTests(tests_by_status[TestResult::TEST_SKIPPED].begin(),
404 tests_by_status[TestResult::TEST_SKIPPED].end(),
405 "skipped");
406 PrintTests(tests_by_status[TestResult::TEST_UNKNOWN].begin(),
407 tests_by_status[TestResult::TEST_UNKNOWN].end(),
408 "had unknown result");
409 PrintTests(tests_by_status[TestResult::TEST_NOT_RUN].begin(),
410 tests_by_status[TestResult::TEST_NOT_RUN].end(), "not run");
411
412 fprintf(stdout, "End of the summary.\n");
413 fflush(stdout);
414 }
415
AddGlobalTag(const std::string & tag)416 void TestResultsTracker::AddGlobalTag(const std::string& tag) {
417 global_tags_.insert(tag);
418 }
419
// Serializes the tracker's full state (global tags, test lists, every
// iteration's results, and test source locations) as a JSON document and
// writes it to |path|. Returns false if serialization or any file operation
// fails. The JSON keys here form an external contract with consumers of the
// summary file — do not rename them (note the preserved "losless_snippet"
// typo below).
bool TestResultsTracker::SaveSummaryAsJSON(
    const FilePath& path,
    const std::vector<std::string>& additional_tags) const {
  Value::Dict summary_root;

  // Tags recorded during the run, plus caller-supplied extras.
  Value::List global_tags;
  for (const auto& global_tag : global_tags_) {
    global_tags.Append(global_tag);
  }
  for (const auto& tag : additional_tags) {
    global_tags.Append(tag);
  }
  summary_root.Set("global_tags", std::move(global_tags));

  Value::List all_tests;
  for (const auto& test : all_tests_) {
    all_tests.Append(test);
  }
  summary_root.Set("all_tests", std::move(all_tests));

  Value::List disabled_tests;
  for (const auto& disabled_test : disabled_tests_) {
    disabled_tests.Append(disabled_test);
  }
  summary_root.Set("disabled_tests", std::move(disabled_tests));

  Value::List per_iteration_data;

  // Even if we haven't run any tests, we still have the dummy iteration.
  int max_iteration = iteration_ < 0 ? 0 : iteration_;

  for (int i = 0; i <= max_iteration; i++) {
    Value::Dict current_iteration_data;

    // Each iteration maps test name -> list of its result dicts (a test can
    // have several results, e.g. retries).
    for (const auto& j : per_iteration_data_[i].results) {
      Value::List test_results;

      for (size_t k = 0; k < j.second.test_results.size(); k++) {
        const TestResult& test_result = j.second.test_results[k];

        Value::Dict test_result_value;

        test_result_value.Set("status", test_result.StatusAsString());
        test_result_value.Set(
            "elapsed_time_ms",
            static_cast<int>(test_result.elapsed_time.InMilliseconds()));

        // Optional fields are emitted only when present on the result.
        if (test_result.thread_id) {
          test_result_value.Set("thread_id",
                                static_cast<int>(*test_result.thread_id));
        }
        if (test_result.process_num)
          test_result_value.Set("process_num", *test_result.process_num);
        if (test_result.timestamp) {
          // The timestamp is formatted using TimeToISO8601 instead of
          // FormatTimeAsIso8601 here for a better accuracy that the former
          // method would include a fraction of second (and the Z suffix).
          test_result_value.Set("timestamp",
                                TimeToISO8601(*test_result.timestamp).c_str());
        }

        // Snippets may contain arbitrary bytes; only embed them verbatim when
        // they are valid UTF-8 (JSON cannot carry arbitrary binary).
        bool lossless_snippet = false;
        if (IsStringUTF8(test_result.output_snippet)) {
          test_result_value.Set("output_snippet", test_result.output_snippet);
          lossless_snippet = true;
        } else {
          test_result_value.Set(
              "output_snippet",
              "<non-UTF-8 snippet, see output_snippet_base64>");
        }

        // TODO(phajdan.jr): Fix typo in JSON key (losless -> lossless)
        // making sure not to break any consumers of this data.
        test_result_value.Set("losless_snippet", lossless_snippet);

        // Also include the raw version (base64-encoded so that it can be safely
        // JSON-serialized - there are no guarantees about character encoding
        // of the snippet). This can be very useful piece of information when
        // debugging a test failure related to character encoding.
        std::string base64_output_snippet;
        Base64Encode(test_result.output_snippet, &base64_output_snippet);
        test_result_value.Set("output_snippet_base64", base64_output_snippet);
        if (!test_result.links.empty()) {
          Value::Dict links;
          for (const auto& link : test_result.links) {
            Value::Dict link_info;
            link_info.Set("content", link.second);
            links.SetByDottedPath(link.first, std::move(link_info));
          }
          test_result_value.Set("links", std::move(links));
        }
        if (!test_result.tags.empty()) {
          Value::Dict tags;
          for (const auto& tag : test_result.tags) {
            Value::List tag_values;
            for (const auto& tag_value : tag.second) {
              tag_values.Append(tag_value);
            }
            Value::Dict tag_info;
            tag_info.Set("values", std::move(tag_values));
            tags.SetByDottedPath(tag.first, std::move(tag_info));
          }
          test_result_value.Set("tags", std::move(tags));
        }
        if (!test_result.properties.empty()) {
          Value::Dict properties;
          for (const auto& property : test_result.properties) {
            Value::Dict property_info;
            property_info.Set("value", property.second);
            properties.SetByDottedPath(property.first,
                                       std::move(property_info));
          }
          test_result_value.Set("properties", std::move(properties));
        }

        // Individual gtest assertion failures/successes recorded for this
        // result, with the same UTF-8/base64 handling as the snippet above.
        Value::List test_result_parts;
        for (const TestResultPart& result_part :
             test_result.test_result_parts) {
          Value::Dict result_part_value;

          result_part_value.Set("type", result_part.TypeAsString());
          result_part_value.Set("file", result_part.file_name);
          result_part_value.Set("line", result_part.line_number);

          bool lossless_summary = IsStringUTF8(result_part.summary);
          if (lossless_summary) {
            result_part_value.Set("summary", result_part.summary);
          } else {
            result_part_value.Set("summary",
                                  "<non-UTF-8 snippet, see summary_base64>");
          }
          result_part_value.Set("lossless_summary", lossless_summary);

          std::string encoded_summary;
          Base64Encode(result_part.summary, &encoded_summary);
          result_part_value.Set("summary_base64", encoded_summary);

          bool lossless_message = IsStringUTF8(result_part.message);
          if (lossless_message) {
            result_part_value.Set("message", result_part.message);
          } else {
            result_part_value.Set("message",
                                  "<non-UTF-8 snippet, see message_base64>");
          }
          result_part_value.Set("lossless_message", lossless_message);

          std::string encoded_message;
          Base64Encode(result_part.message, &encoded_message);
          result_part_value.Set("message_base64", encoded_message);

          test_result_parts.Append(std::move(result_part_value));
        }
        test_result_value.Set("result_parts", std::move(test_result_parts));

        test_results.Append(std::move(test_result_value));
      }

      current_iteration_data.Set(j.first, std::move(test_results));
    }
    per_iteration_data.Append(std::move(current_iteration_data));
  }
  summary_root.Set("per_iteration_data", std::move(per_iteration_data));

  Value::Dict test_locations;
  for (const auto& item : test_locations_) {
    std::string test_name = item.first;
    CodeLocation location = item.second;
    Value::Dict location_value;
    location_value.Set("file", location.file);
    location_value.Set("line", location.line);
    test_locations.Set(test_name, std::move(location_value));
  }
  summary_root.Set("test_locations", std::move(test_locations));

  std::string json;
  if (!JSONWriter::Write(summary_root, &json))
    return false;

  File output(path, File::FLAG_CREATE_ALWAYS | File::FLAG_WRITE);
  if (!output.IsValid())
    return false;

  int json_size = static_cast<int>(json.size());
  if (output.WriteAtCurrentPos(json.data(), json_size) != json_size) {
    return false;
  }

  // File::Flush() will call fsync(). This is important on Fuchsia to ensure
  // that the file is written to the disk - the system running under qemu will
  // shutdown shortly after the test completes. On Fuchsia fsync() times out
  // after 15 seconds. Apparently this may not be enough in some cases,
  // particularly when running net_unittests on buildbots, see
  // https://crbug.com/796318. Try calling fsync() more than once to workaround
  // this issue.
  //
  // TODO(sergeyu): Figure out a better solution.
  int flush_attempts_left = 4;
  while (flush_attempts_left-- > 0) {
    if (output.Flush())
      return true;
    LOG(ERROR) << "fsync() failed when saving test output summary. "
               << ((flush_attempts_left > 0) ? "Retrying." : " Giving up.");
  }

  return false;
}
626
627 TestResultsTracker::TestStatusMap
GetTestStatusMapForCurrentIteration() const628 TestResultsTracker::GetTestStatusMapForCurrentIteration() const {
629 TestStatusMap tests_by_status;
630 GetTestStatusForIteration(iteration_, &tests_by_status);
631 return tests_by_status;
632 }
633
634 TestResultsTracker::TestStatusMap
GetTestStatusMapForAllIterations() const635 TestResultsTracker::GetTestStatusMapForAllIterations() const {
636 TestStatusMap tests_by_status;
637 for (int i = 0; i <= iteration_; i++)
638 GetTestStatusForIteration(i, &tests_by_status);
639 return tests_by_status;
640 }
641
GetTestStatusForIteration(int iteration,TestStatusMap * map) const642 void TestResultsTracker::GetTestStatusForIteration(
643 int iteration, TestStatusMap* map) const {
644 for (const auto& j : per_iteration_data_[iteration].results) {
645 // Use the last test result as the final one.
646 const TestResult& result = j.second.test_results.back();
647 (*map)[result.status].insert(result.full_name);
648 }
649 }
650
651 // Utility function to print a list of test names. Uses iterator to be
652 // compatible with different containers, like vector and set.
653 template<typename InputIterator>
PrintTests(InputIterator first,InputIterator last,const std::string & description) const654 void TestResultsTracker::PrintTests(InputIterator first,
655 InputIterator last,
656 const std::string& description) const {
657 size_t count = std::distance(first, last);
658 if (count == 0)
659 return;
660
661 fprintf(stdout,
662 "%" PRIuS " test%s %s:\n",
663 count,
664 count != 1 ? "s" : "",
665 description.c_str());
666 for (InputIterator it = first; it != last; ++it) {
667 const std::string& test_name = *it;
668 const auto location_it = test_locations_.find(test_name);
669 CHECK(location_it != test_locations_.end()) << test_name;
670 const CodeLocation& location = location_it->second;
671 fprintf(stdout, " %s (%s:%d)\n", test_name.c_str(),
672 location.file.c_str(), location.line);
673 }
674 fflush(stdout);
675 }
676
PrintLeaks(int count,const std::vector<std::string> & test_names) const677 void TestResultsTracker::PrintLeaks(
678 int count,
679 const std::vector<std::string>& test_names) const {
680 fprintf(stdout,
681 "ERROR: %d files and/or directories were left behind in the temporary"
682 " directory by one or more of these tests: %s\n",
683 count, JoinString(test_names, ":").c_str());
684 fflush(stdout);
685 }
686
// Out-of-line defaulted special members for the nested helper structs; their
// container members make the generated code non-trivial, so defining them
// here keeps it out of the header.
TestResultsTracker::AggregateTestResult::AggregateTestResult() = default;

TestResultsTracker::AggregateTestResult::AggregateTestResult(
    const AggregateTestResult& other) = default;

TestResultsTracker::AggregateTestResult::~AggregateTestResult() = default;

TestResultsTracker::PerIterationData::PerIterationData() = default;

TestResultsTracker::PerIterationData::PerIterationData(
    const PerIterationData& other) = default;

TestResultsTracker::PerIterationData::~PerIterationData() = default;
700
701 } // namespace base
702