/*
 * Copyright (C) 2017 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <errno.h>
#include <fcntl.h>
#include <inttypes.h>
#include <poll.h>
#include <signal.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/wait.h>
#include <unistd.h>

#include <atomic>
#include <string>
#include <tuple>
#include <vector>

#include <android-base/logging.h>
#include <android-base/strings.h>
#include <android-base/unique_fd.h>
#include <gtest/gtest.h>

#include "Color.h"
#include "Isolate.h"
#include "NanoTime.h"
#include "Test.h"

namespace android {
namespace gtest_extras {

static std::atomic_int g_signal;

static void SignalHandler(int sig) {
  g_signal = sig;
}

static void RegisterSignalHandler() {
  auto ret = signal(SIGINT, SignalHandler);
  if (ret == SIG_ERR) {
    PLOG(FATAL) << "Setting up SIGINT handler failed";
  }
  ret = signal(SIGQUIT, SignalHandler);
  if (ret == SIG_ERR) {
    PLOG(FATAL) << "Setting up SIGQUIT handler failed";
  }
}

static void UnregisterSignalHandler() {
  auto ret = signal(SIGINT, SIG_DFL);
  if (ret == SIG_ERR) {
    PLOG(FATAL) << "Disabling SIGINT handler failed";
  }
  ret = signal(SIGQUIT, SIG_DFL);
  if (ret == SIG_ERR) {
    PLOG(FATAL) << "Disabling SIGQUIT handler failed";
  }
}

static std::string PluralizeString(size_t value, const char* name, bool uppercase = false) {
  std::string string(std::to_string(value) + name);
  if (value != 1) {
    if (uppercase) {
      string += 'S';
    } else {
      string += 's';
    }
  }
  return string;
}

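// Run the test binary with --gtest_list_tests (plus any --gtest_filter) and
// parse its output to build the list of tests to run, honoring disabled
// tests and sharding options.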
void Isolate::EnumerateTests() {
  // Only apply --gtest_filter if present. This is the only option that changes
  // what tests are listed.
  std::string command(child_args_[0]);
  if (!options_.filter().empty()) {
    command += " --gtest_filter=" + options_.filter();
  }
  command += " --gtest_list_tests";
#if defined(__APPLE__)
  FILE* fp = popen(command.c_str(), "r");
#else
  FILE* fp = popen(command.c_str(), "re");
#endif
  if (fp == nullptr) {
    PLOG(FATAL) << "Unexpected failure from popen";
  }

  size_t total_shards = options_.total_shards();
  bool sharded = total_shards > 1;
  size_t test_count = 0;
  if (sharded) {
    test_count = options_.shard_index() + 1;
  }

  bool skip_until_next_suite = false;
  std::string suite_name;
  char* buffer = nullptr;
  size_t buffer_len = 0;
  bool new_suite = false;
  while (getline(&buffer, &buffer_len, fp) > 0) {
    if (buffer[0] != ' ') {
      // This is the case name.
      suite_name = buffer;
      auto space_index = suite_name.find(' ');
      if (space_index != std::string::npos) {
        suite_name.erase(space_index);
      }
      if (suite_name.back() == '\n') {
        suite_name.resize(suite_name.size() - 1);
      }

      if (!options_.allow_disabled_tests() && android::base::StartsWith(suite_name, "DISABLED_")) {
        // This whole set of tests has been disabled; skip them all.
        skip_until_next_suite = true;
      } else {
        new_suite = true;
        skip_until_next_suite = false;
      }
    } else if (buffer[0] == ' ' && buffer[1] == ' ') {
      if (!skip_until_next_suite) {
        std::string test_name = &buffer[2];
        auto space_index = test_name.find(' ');
        if (space_index != std::string::npos) {
          test_name.erase(space_index);
        }
        if (test_name.back() == '\n') {
          test_name.resize(test_name.size() - 1);
        }
        if (options_.allow_disabled_tests() || !android::base::StartsWith(test_name, "DISABLED_")) {
          if (!sharded || --test_count == 0) {
            tests_.push_back(std::make_tuple(suite_name, test_name));
            total_tests_++;
            if (new_suite) {
              // Only increment the number of suites when we find at least one
              // test for the suite.
              total_suites_++;
              new_suite = false;
            }
            if (sharded) {
              test_count = total_shards;
            }
          }
        } else {
          total_disable_tests_++;
        }
      } else {
        total_disable_tests_++;
      }
    } else {
      printf("Unexpected output from test listing.\nCommand:\n%s\nLine:\n%s\n", command.c_str(),
             buffer);
      exit(1);
    }
  }
  free(buffer);
  if (pclose(fp) == -1) {
    PLOG(FATAL) << "Unexpected failure from pclose";
  }
}

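// Runs in the forked child process: pass a --gtest_filter that selects
// exactly one test, then hand control to gtest.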
int Isolate::ChildProcessFn(const std::tuple<std::string, std::string>& test) {
  // Make sure the filter is only coming from our command-line option.
  unsetenv("GTEST_FILTER");

  // Add the filter argument.
  std::vector<const char*> args(child_args_);
  std::string filter("--gtest_filter=" + GetTestName(test));
  args.push_back(filter.c_str());

  int argc = args.size();
  // Add the null terminator.
  args.push_back(nullptr);
  ::testing::InitGoogleTest(&argc, const_cast<char**>(args.data()));
  return RUN_ALL_TESTS();
}

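// Fork one child per free job slot until all slots are busy or every test
// has been started. Each child's stdout/stderr is redirected into a pipe
// that the parent polls for output.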
void Isolate::LaunchTests() {
  while (!running_indices_.empty() && cur_test_index_ < tests_.size()) {
    android::base::unique_fd read_fd, write_fd;
    if (!Pipe(&read_fd, &write_fd)) {
      PLOG(FATAL) << "Unexpected failure from pipe";
    }
    if (fcntl(read_fd.get(), F_SETFL, O_NONBLOCK) == -1) {
      PLOG(FATAL) << "Unexpected failure from fcntl";
    }

    pid_t pid = fork();
    if (pid == -1) {
      PLOG(FATAL) << "Unexpected failure from fork";
    }
    if (pid == 0) {
      read_fd.reset();
      close(STDOUT_FILENO);
      close(STDERR_FILENO);
      if (dup2(write_fd, STDOUT_FILENO) == -1) {
        exit(1);
      }
      if (dup2(write_fd, STDERR_FILENO) == -1) {
        exit(1);
      }
      UnregisterSignalHandler();
      exit(ChildProcessFn(tests_[cur_test_index_]));
    }

    size_t run_index = running_indices_.back();
    running_indices_.pop_back();
    Test* test = new Test(tests_[cur_test_index_], cur_test_index_, run_index, read_fd.release());
    running_by_pid_.emplace(pid, test);
    running_[run_index] = test;
    running_by_test_index_[cur_test_index_] = test;

    pollfd* pollfd = &running_pollfds_[run_index];
    pollfd->fd = test->fd();
    pollfd->events = POLLIN;
    cur_test_index_++;
  }
}

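// Poll the pipes of all running children and drain any output that is ready.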
void Isolate::ReadTestsOutput() {
  int ready = poll(running_pollfds_.data(), running_pollfds_.size(), 0);
  if (ready <= 0) {
    return;
  }

  for (size_t i = 0; i < running_pollfds_.size(); i++) {
    pollfd* pfd = &running_pollfds_[i];
    if (pfd->revents & POLLIN) {
      Test* test = running_[i];
      if (!test->Read()) {
        test->CloseFd();
        pfd->fd = 0;
        pfd->events = 0;
      }
    }
    pfd->revents = 0;
  }
}

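// Reap any children that have exited (waitpid with WNOHANG), classify each
// finished test's result, print it, and release its run slot. Returns the
// number of tests that finished.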
size_t Isolate::CheckTestsFinished() {
  size_t finished_tests = 0;
  int status;
  pid_t pid;
  while ((pid = TEMP_FAILURE_RETRY(waitpid(-1, &status, WNOHANG))) > 0) {
    auto entry = running_by_pid_.find(pid);
    if (entry == running_by_pid_.end()) {
      LOG(FATAL) << "Pid " << pid << " was not spawned by the isolation framework.";
    }

    std::unique_ptr<Test>& test_ptr = entry->second;
    Test* test = test_ptr.get();
    test->Stop();

    // Read any leftover data.
    test->ReadUntilClosed();
    if (test->result() == TEST_NONE) {
      if (WIFSIGNALED(status)) {
        std::string output(test->name() + " terminated by signal: " + strsignal(WTERMSIG(status)) +
                           ".\n");
        test->AppendOutput(output);
        test->set_result(TEST_FAIL);
      } else {
        int exit_code = WEXITSTATUS(status);
        if (exit_code != 0) {
          std::string output(test->name() + " exited with exitcode " + std::to_string(exit_code) +
                             ".\n");
          test->AppendOutput(output);
          test->set_result(TEST_FAIL);
        } else {
          // Set the result based on the output, since skipped tests and
          // passing tests have the same exit status.
          test->SetResultFromOutput();
        }
      }
    } else if (test->result() == TEST_TIMEOUT) {
      uint64_t time_ms = options_.deadline_threshold_ms();
      std::string timeout_str(test->name() + " killed because of timeout at " +
                              std::to_string(time_ms) + " ms.\n");
      test->AppendOutput(timeout_str);
    }

    if (test->ExpectFail()) {
      if (test->result() == TEST_FAIL) {
        // The test was expected to fail and it failed.
        test->set_result(TEST_XFAIL);
      } else if (test->result() == TEST_PASS) {
        // The test was expected to fail but it passed.
        test->set_result(TEST_XPASS);
      }
    }

    test->Print(options_.gtest_format());

    switch (test->result()) {
      case TEST_PASS:
        total_pass_tests_++;
        if (test->slow()) {
          total_slow_tests_++;
        }
        break;
      case TEST_XPASS:
        total_xpass_tests_++;
        break;
      case TEST_FAIL:
        total_fail_tests_++;
        break;
      case TEST_TIMEOUT:
        total_timeout_tests_++;
        break;
      case TEST_XFAIL:
        total_xfail_tests_++;
        break;
      case TEST_SKIPPED:
        total_skipped_tests_++;
        break;
      case TEST_NONE:
        LOG(FATAL) << "Test result is TEST_NONE, this should not be possible.";
    }
    finished_tests++;
    size_t test_index = test->test_index();
    finished_.emplace(test_index, test_ptr.release());
    running_indices_.push_back(test->run_index());

    // Remove the test from all of the running trackers.
    size_t run_index = test->run_index();
    if (running_by_pid_.erase(pid) != 1) {
      printf("Internal error: Erasing pid %d from running_by_pid_ incorrect\n", pid);
    }
    if (running_by_test_index_.erase(test_index) == 0) {
      printf("Internal error: Erasing test_index %zu from running_by_test_index_ incorrect\n",
             test_index);
    }
    running_[run_index] = nullptr;
    running_pollfds_[run_index] = {};
  }

  // The only valid error case is if ECHILD is returned because there are
  // no more processes left running.
  if (pid == -1 && errno != ECHILD) {
    PLOG(FATAL) << "Unexpected failure from waitpid";
  }
  return finished_tests;
}

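// Kill any test that has exceeded the deadline and flag tests that have
// passed the slow threshold.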
void Isolate::CheckTestsTimeout() {
  for (auto& entry : running_by_pid_) {
    Test* test = entry.second.get();
    if (test->result() == TEST_TIMEOUT) {
      continue;
    }

    if (NanoTime() > test->start_ns() + deadline_threshold_ns_) {
      test->set_result(TEST_TIMEOUT);
      // Do not mark this as slow and timed out.
      test->set_slow(false);
      // Test gets cleaned up in CheckTestsFinished.
      kill(entry.first, SIGKILL);
    } else if (!test->slow() && NanoTime() > test->start_ns() + slow_threshold_ns_) {
      // Mark the test as running slow.
      test->set_slow(true);
    }
  }
}

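// Act on signals recorded by SignalHandler: SIGINT kills all children and
// exits, SIGQUIT prints the list of currently running tests.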
void Isolate::HandleSignals() {
  int signal = g_signal.exchange(0);
  if (signal == SIGINT) {
    printf("Terminating due to signal...\n");
    for (auto& entry : running_by_pid_) {
      kill(entry.first, SIGKILL);
    }
    exit(1);
  } else if (signal == SIGQUIT) {
    printf("List of currently running tests:\n");
    for (const auto& entry : running_by_test_index_) {
      const Test* test = entry.second;
      uint64_t run_time_ms = (NanoTime() - test->start_ns()) / kNsPerMs;
      printf(" %s (elapsed time %" PRId64 " ms)\n", test->name().c_str(), run_time_ms);
    }
  }
}

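// Run one full pass over all tests: reset the counters, then loop launching
// tests, reading output, reaping finished children, checking timeouts, and
// handling signals until every test has finished.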
void Isolate::RunAllTests() {
  total_pass_tests_ = 0;
  total_xpass_tests_ = 0;
  total_fail_tests_ = 0;
  total_xfail_tests_ = 0;
  total_timeout_tests_ = 0;
  total_slow_tests_ = 0;
  total_skipped_tests_ = 0;

  running_by_test_index_.clear();

  size_t job_count = options_.job_count();
  running_.clear();
  running_.resize(job_count);
  running_pollfds_.resize(job_count);
  memset(running_pollfds_.data(), 0, running_pollfds_.size() * sizeof(pollfd));
  running_indices_.clear();
  for (size_t i = 0; i < job_count; i++) {
    running_indices_.push_back(i);
  }

  finished_.clear();

  size_t finished = 0;
  cur_test_index_ = 0;
  while (finished < tests_.size()) {
    LaunchTests();

    ReadTestsOutput();

    finished += CheckTestsFinished();

    CheckTestsTimeout();

    HandleSignals();

    usleep(MIN_USECONDS_WAIT);
  }
}

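// Print one category of finished tests (slow, failed, timed out, ...) and
// append its summary line to the footer.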
void Isolate::PrintResults(size_t total, const ResultsType& results, std::string* footer) {
  ColoredPrintf(results.color, results.prefix);
  if (results.list_desc != nullptr) {
    printf(" %s %s, listed below:\n", PluralizeString(total, " test").c_str(), results.list_desc);
  } else {
    printf(" %s, listed below:\n", PluralizeString(total, " test").c_str());
  }
  for (const auto& entry : finished_) {
    const Test* test = entry.second.get();
    if (results.match_func(*test)) {
      ColoredPrintf(results.color, results.prefix);
      printf(" %s", test->name().c_str());
      if (results.print_func != nullptr) {
        results.print_func(options_, *test);
      }
      printf("\n");
    }
  }

  if (results.title == nullptr) {
    return;
  }

  if (total < 10) {
    *footer += ' ';
  }
  *footer +=
      PluralizeString(total, (std::string(" ") + results.title + " TEST").c_str(), true) + '\n';
}

Isolate::ResultsType Isolate::SlowResults = {
    .color = COLOR_YELLOW,
    .prefix = "[ SLOW ]",
    .list_desc = nullptr,
    .title = "SLOW",
    .match_func = [](const Test& test) { return test.slow(); },
    .print_func =
        [](const Options& options, const Test& test) {
          printf(" (%" PRIu64 " ms, exceeded %" PRIu64 " ms)", test.RunTimeNs() / kNsPerMs,
                 options.slow_threshold_ms());
        },
};

Isolate::ResultsType Isolate::XpassFailResults = {
    .color = COLOR_RED,
    .prefix = "[ FAILED ]",
    .list_desc = "should have failed",
    .title = "SHOULD HAVE FAILED",
    .match_func = [](const Test& test) { return test.result() == TEST_XPASS; },
    .print_func = nullptr,
};

Isolate::ResultsType Isolate::FailResults = {
    .color = COLOR_RED,
    .prefix = "[ FAILED ]",
    .list_desc = nullptr,
    .title = "FAILED",
    .match_func = [](const Test& test) { return test.result() == TEST_FAIL; },
    .print_func = nullptr,
};

Isolate::ResultsType Isolate::TimeoutResults = {
    .color = COLOR_RED,
    .prefix = "[ TIMEOUT ]",
    .list_desc = nullptr,
    .title = "TIMEOUT",
    .match_func = [](const Test& test) { return test.result() == TEST_TIMEOUT; },
    .print_func =
        [](const Options&, const Test& test) {
          printf(" (stopped at %" PRIu64 " ms)", test.RunTimeNs() / kNsPerMs);
        },
};

Isolate::ResultsType Isolate::SkippedResults = {
    .color = COLOR_GREEN,
    .prefix = "[ SKIPPED ]",
    .list_desc = nullptr,
    .title = nullptr,
    .match_func = [](const Test& test) { return test.result() == TEST_SKIPPED; },
    .print_func = nullptr,
};

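// Print the gtest-style summary footer: totals, expected failures, and the
// per-category result lists collected above.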
void Isolate::PrintFooter(uint64_t elapsed_time_ns) {
  ColoredPrintf(COLOR_GREEN, "[==========]");
  printf(" %s from %s ran. (%" PRId64 " ms total)\n",
         PluralizeString(total_tests_, " test").c_str(),
         PluralizeString(total_suites_, " test suite").c_str(), elapsed_time_ns / kNsPerMs);

  ColoredPrintf(COLOR_GREEN, "[ PASSED ]");
  printf(" %s.", PluralizeString(total_pass_tests_ + total_xfail_tests_, " test").c_str());
  if (total_xfail_tests_ != 0) {
    printf(" (%s)", PluralizeString(total_xfail_tests_, " expected failure").c_str());
  }
  printf("\n");

  std::string footer;

  // Tests that were skipped.
  if (total_skipped_tests_ != 0) {
    PrintResults(total_skipped_tests_, SkippedResults, &footer);
  }

  // Tests that ran slow.
  if (total_slow_tests_ != 0) {
    PrintResults(total_slow_tests_, SlowResults, &footer);
  }

  // Tests that passed but should have failed.
  if (total_xpass_tests_ != 0) {
    PrintResults(total_xpass_tests_, XpassFailResults, &footer);
  }

  // Tests that timed out.
  if (total_timeout_tests_ != 0) {
    PrintResults(total_timeout_tests_, TimeoutResults, &footer);
  }

  // Tests that failed.
  if (total_fail_tests_ != 0) {
    PrintResults(total_fail_tests_, FailResults, &footer);
  }

  if (!footer.empty()) {
    printf("\n%s", footer.c_str());
  }

  if (total_disable_tests_ != 0) {
    if (footer.empty()) {
      printf("\n");
    }
    ColoredPrintf(COLOR_YELLOW, " YOU HAVE %s\n\n",
                  PluralizeString(total_disable_tests_, " DISABLED TEST", true).c_str());
  }

  fflush(stdout);
}

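// Escape the characters that are special in XML so that raw test output can
// be embedded in the results file.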
std::string XmlEscape(const std::string& xml) {
  std::string escaped;
  escaped.reserve(xml.size());

  for (auto c : xml) {
    switch (c) {
      case '<':
        escaped.append("&lt;");
        break;
      case '>':
        escaped.append("&gt;");
        break;
      case '&':
        escaped.append("&amp;");
        break;
      case '\'':
        escaped.append("&apos;");
        break;
      case '"':
        escaped.append("&quot;");
        break;
      default:
        escaped.append(1, c);
        break;
    }
  }

  return escaped;
}

class TestResultPrinter : public ::testing::EmptyTestEventListener {
 public:
  TestResultPrinter() : pinfo_(nullptr) {}
  virtual void OnTestStart(const ::testing::TestInfo& test_info) {
    pinfo_ = &test_info;  // Record test_info for use in OnTestPartResult.
  }
  virtual void OnTestPartResult(const ::testing::TestPartResult& result);

 private:
  const ::testing::TestInfo* pinfo_;
};

// Called after an assertion failure.
void TestResultPrinter::OnTestPartResult(const ::testing::TestPartResult& result) {
  // If the test part succeeded, we don't need to do anything.
  if (result.type() == ::testing::TestPartResult::kSuccess) {
    return;
  }

  // Print failure message from the assertion (e.g. expected this and got that).
  printf("%s:(%d) Failure in test %s.%s\n%s\n", result.file_name(), result.line_number(),
         pinfo_->test_suite_name(), pinfo_->name(), result.message());
  fflush(stdout);
}

// Write the xml results file when --gtest_output is used. This function exists
// because gtest.cc:XmlUnitTestResultPrinter cannot be reused: it is defined
// entirely inside gtest.cc and is not exposed externally. In addition, gtest is
// not run in the parent process, so the gtest objects that
// XmlUnitTestResultPrinter needs are not available there.
void Isolate::WriteXmlResults(uint64_t elapsed_time_ns, time_t start_time) {
  FILE* fp = fopen(options_.xml_file().c_str(), "w");
  if (fp == nullptr) {
    printf("Cannot open xml file '%s': %s\n", options_.xml_file().c_str(), strerror(errno));
    exit(1);
  }

  const tm* time_struct = localtime(&start_time);
  if (time_struct == nullptr) {
    PLOG(FATAL) << "Unexpected failure from localtime";
  }
  char timestamp[40];
  snprintf(timestamp, sizeof(timestamp), "%4d-%02d-%02dT%02d:%02d:%02d",
           time_struct->tm_year + 1900, time_struct->tm_mon + 1, time_struct->tm_mday,
           time_struct->tm_hour, time_struct->tm_min, time_struct->tm_sec);

  fputs("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n", fp);
  fprintf(fp, "<testsuites tests=\"%zu\" failures=\"%zu\" disabled=\"0\" errors=\"0\"",
          tests_.size(), total_fail_tests_ + total_timeout_tests_ + total_xpass_tests_);
  fprintf(fp, " timestamp=\"%s\" time=\"%.3lf\" name=\"AllTests\">\n", timestamp,
          double(elapsed_time_ns) / kNsPerMs);

  // Construct the suite information.
  struct SuiteInfo {
    std::string suite_name;
    size_t fails = 0;
    double elapsed_ms = 0;
    std::vector<const Test*> tests;
  };
  std::string last_suite_name;
  std::vector<SuiteInfo> suites;
  SuiteInfo* info = nullptr;
  for (const auto& entry : finished_) {
    const Test* test = entry.second.get();
    const std::string& suite_name = test->suite_name();
    if (test->result() == TEST_XFAIL) {
      // Skip XFAIL tests.
      continue;
    }
    if (last_suite_name != suite_name) {
      SuiteInfo suite_info{.suite_name = suite_name.substr(0, suite_name.size() - 1)};
      last_suite_name = suite_name;
      suites.push_back(suite_info);
      info = &suites.back();
    }
    info->tests.push_back(test);
    info->elapsed_ms += double(test->RunTimeNs()) / kNsPerMs;
    if (test->result() != TEST_PASS) {
      info->fails++;
    }
  }

  for (auto& suite_entry : suites) {
    fprintf(fp,
            " <testsuite name=\"%s\" tests=\"%zu\" failures=\"%zu\" disabled=\"0\" errors=\"0\"",
            suite_entry.suite_name.c_str(), suite_entry.tests.size(), suite_entry.fails);
    fprintf(fp, " time=\"%.3lf\">\n", suite_entry.elapsed_ms);

    for (auto test : suite_entry.tests) {
      fprintf(fp, " <testcase name=\"%s\" status=\"run\" time=\"%.3lf\" classname=\"%s\"",
              test->test_name().c_str(), double(test->RunTimeNs()) / kNsPerMs,
              suite_entry.suite_name.c_str());
      if (test->result() == TEST_PASS) {
        fputs(" />\n", fp);
      } else {
        fputs(">\n", fp);
        const std::string escaped_output = XmlEscape(test->output());
        fprintf(fp, " <failure message=\"%s\" type=\"\">\n", escaped_output.c_str());
        fputs(" </failure>\n", fp);
        fputs(" </testcase>\n", fp);
      }
    }
    fputs(" </testsuite>\n", fp);
  }
  fputs("</testsuites>\n", fp);
  fclose(fp);
}

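// Top-level entry point: validate sharding options, enumerate the tests,
// install the signal handlers, and run the requested number of iterations.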
int Isolate::Run() {
  slow_threshold_ns_ = options_.slow_threshold_ms() * kNsPerMs;
  deadline_threshold_ns_ = options_.deadline_threshold_ms() * kNsPerMs;

  bool sharding_enabled = options_.total_shards() > 1;
  if (sharding_enabled &&
      (options_.shard_index() < 0 || options_.shard_index() >= options_.total_shards())) {
    ColoredPrintf(COLOR_RED,
                  "Invalid environment variables: we require 0 <= GTEST_SHARD_INDEX < "
                  "GTEST_TOTAL_SHARDS, but you have GTEST_SHARD_INDEX=%" PRId64
                  ", GTEST_TOTAL_SHARDS=%" PRId64,
                  options_.shard_index(), options_.total_shards());
    printf("\n");
    return 1;
  }

  if (!options_.filter().empty()) {
    ColoredPrintf(COLOR_YELLOW, "Note: Google Test filter = %s", options_.filter().c_str());
    printf("\n");
  }

  if (sharding_enabled) {
    ColoredPrintf(COLOR_YELLOW, "Note: This is test shard %" PRId64 " of %" PRId64,
                  options_.shard_index() + 1, options_.total_shards());
    printf("\n");
  }

  EnumerateTests();

  // Stop default result printer to avoid environment setup/teardown information for each test.
  ::testing::UnitTest::GetInstance()->listeners().Release(
      ::testing::UnitTest::GetInstance()->listeners().default_result_printer());
  ::testing::UnitTest::GetInstance()->listeners().Append(new TestResultPrinter);
  RegisterSignalHandler();

  std::string job_info("Running " + PluralizeString(total_tests_, " test") + " from " +
                       PluralizeString(total_suites_, " test suite") + " (" +
                       PluralizeString(options_.job_count(), " job") + ").");

  int exit_code = 0;
  for (int i = 0; options_.num_iterations() < 0 || i < options_.num_iterations(); i++) {
    if (i > 0) {
      printf("\nRepeating all tests (iteration %d) . . .\n\n", i + 1);
    }
    ColoredPrintf(COLOR_GREEN, "[==========]");
    printf(" %s\n", job_info.c_str());
    fflush(stdout);

    time_t start_time = time(nullptr);
    uint64_t time_ns = NanoTime();
    RunAllTests();
    time_ns = NanoTime() - time_ns;

    PrintFooter(time_ns);

    if (!options_.xml_file().empty()) {
      WriteXmlResults(time_ns, start_time);
    }

    if (total_pass_tests_ + total_skipped_tests_ + total_xfail_tests_ != tests_.size()) {
      exit_code = 1;
    }
  }

  return exit_code;
}

}  // namespace gtest_extras
}  // namespace android