• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Copyright (C) 2014 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #include <gtest/gtest.h>
18 
19 #include <ctype.h>
20 #include <errno.h>
21 #include <fcntl.h>
22 #include <inttypes.h>
23 #include <libgen.h>
24 #include <limits.h>
25 #include <signal.h>
26 #include <stdarg.h>
27 #include <stdio.h>
28 #include <string.h>
29 #include <sys/wait.h>
30 #include <unistd.h>
31 
32 #include <chrono>
33 #include <string>
34 #include <tuple>
35 #include <utility>
36 #include <vector>
37 
#ifndef TEMP_FAILURE_RETRY

/* Used to retry syscalls that can return EINTR.
 * Evaluates `exp` repeatedly until it either succeeds or fails with an
 * errno other than EINTR, and yields the final return value. Defined here
 * only for host toolchains whose libc does not already provide it. */
#define TEMP_FAILURE_RETRY(exp) ({         \
    __typeof__(exp) _rc;                   \
    do {                                   \
        _rc = (exp);                       \
    } while (_rc == -1 && errno == EINTR); \
    _rc; })

#endif
49 
// Saved copies of the process's startup arguments, exposed to tests through
// the get_* accessors below. NOTE(review): they are populated outside this
// chunk (presumably by main()) — confirm against the rest of the file.
static std::string g_executable_path;
static int g_argc;
static char** g_argv;
static char** g_envp;
54 
// Returns the stored executable path (see g_executable_path above).
const std::string& get_executable_path() {
  return g_executable_path;
}
58 
// Returns the stored argument count (see g_argc above).
int get_argc() {
  return g_argc;
}
62 
// Returns the stored argument vector (see g_argv above).
char** get_argv() {
  return g_argv;
}
66 
// Returns the stored environment pointer (see g_envp above).
char** get_envp() {
  return g_envp;
}
70 
namespace testing {
namespace internal {

// Reuse of testing::internal::ColoredPrintf in gtest.
// These declarations mirror gtest's internal (unexported) API so this file
// can call the implementation that lives inside the gtest library; they must
// stay in sync with gtest's own definitions.
enum GTestColor {
  COLOR_DEFAULT,
  COLOR_RED,
  COLOR_GREEN,
  COLOR_YELLOW
};

void ColoredPrintf(GTestColor color, const char* fmt, ...);

}  // namespace internal
}  // namespace testing
86 
87 using testing::internal::GTestColor;
88 using testing::internal::COLOR_DEFAULT;
89 using testing::internal::COLOR_RED;
90 using testing::internal::COLOR_GREEN;
91 using testing::internal::COLOR_YELLOW;
92 using testing::internal::ColoredPrintf;
93 
// Default per-test deadline (90 s) and slow-test warning threshold (2 s).
constexpr int DEFAULT_GLOBAL_TEST_RUN_DEADLINE_MS = 90000;
constexpr int DEFAULT_GLOBAL_TEST_RUN_SLOW_THRESHOLD_MS = 2000;

// The time each test can run before being killed for the reason of timeout.
// It takes effect only with --isolate option.
static int global_test_run_deadline_ms = DEFAULT_GLOBAL_TEST_RUN_DEADLINE_MS;

// The time each test can run before being warned for too much running time.
// It takes effect only with --isolate option.
static int global_test_run_slow_threshold_ms = DEFAULT_GLOBAL_TEST_RUN_SLOW_THRESHOLD_MS;
104 
105 // Return timeout duration for a test, in ms.
static int GetTimeoutMs(const std::string& /*test_name*/) {
  // Currently a single global deadline; the test name is accepted so a
  // per-test policy could be added later without changing callers.
  return global_test_run_deadline_ms;
}
109 
110 // Return threshold for calling a test slow, in ms.
static int GetSlowThresholdMs(const std::string& /*test_name*/) {
  // Like GetTimeoutMs(): one global threshold today, per-test hook tomorrow.
  return global_test_run_slow_threshold_ms;
}
114 
// Prints the usage text for this runner's own flags; any flag not listed here
// is passed through to gtest unchanged.
static void PrintHelpInfo() {
  printf("Bionic Unit Test Options:\n"
         "  -j [JOB_COUNT] or -j[JOB_COUNT]\n"
         "      Run up to JOB_COUNT tests in parallel.\n"
         "      Use isolation mode, Run each test in a separate process.\n"
         "      If JOB_COUNT is not given, it is set to the count of available processors.\n"
         "  --no-isolate\n"
         "      Don't use isolation mode, run all tests in a single process.\n"
         "  --deadline=[TIME_IN_MS]\n"
         "      Run each test in no longer than [TIME_IN_MS] time.\n"
         "      Only valid in isolation mode. Default deadline is 90000 ms.\n"
         "  --slow-threshold=[TIME_IN_MS]\n"
         "      Test running longer than [TIME_IN_MS] will be called slow.\n"
         "      Only valid in isolation mode. Default slow threshold is 2000 ms.\n"
         "  --gtest-filter=POSITIVE_PATTERNS[-NEGATIVE_PATTERNS]\n"
         "      Used as a synonym for --gtest_filter option in gtest.\n"
         "Default bionic unit test option is -j.\n"
         "In isolation mode, you can send SIGQUIT to the parent process to show current\n"
         "running tests, or send SIGINT to the parent process to stop testing and\n"
         "clean up current running tests.\n"
         "\n");
}
137 
// Outcome of a single test run. TEST_TIMEOUT means the child process was
// killed after exceeding its deadline.
enum TestResult {
  TEST_SUCCESS = 0,
  TEST_FAILED,
  TEST_TIMEOUT
};

// State of a single test: name, (expected) result, elapsed time and the
// output captured from the child process that ran it.
class Test {
 public:
  Test() {} // For std::vector<Test>.
  explicit Test(const char* name) : name_(name) {}

  const std::string& GetName() const { return name_; }

  void SetResult(TestResult result) { result_ = result; }

  TestResult GetResult() const { return result_; }

  // Tests whose name starts with "xfail" are expected to fail.
  TestResult GetExpectedResult() const {
    return GetName().find("xfail") == 0 ? TEST_FAILED : TEST_SUCCESS;
  }

  void SetTestTime(int64_t elapsed_time_ns) { elapsed_time_ns_ = elapsed_time_ns; }

  int64_t GetTestTime() const { return elapsed_time_ns_; }

  void AppendTestOutput(const std::string& s) { output_ += s; }

  const std::string& GetTestOutput() const { return output_; }

 private:
  const std::string name_;
  // Both fields were previously left uninitialized by the default
  // constructor (UB if read before SetResult/SetTestTime). A test whose
  // result was never recorded now conservatively reads as failed.
  TestResult result_ = TEST_FAILED;
  int64_t elapsed_time_ns_ = 0;
  std::string output_;
};
172 
173 class TestCase {
174  public:
TestCase()175   TestCase() {} // For std::vector<TestCase>.
TestCase(const char * name)176   explicit TestCase(const char* name) : name_(name) {}
177 
GetName() const178   const std::string& GetName() const { return name_; }
179 
AppendTest(const char * test_name)180   void AppendTest(const char* test_name) {
181     test_list_.push_back(Test(test_name));
182   }
183 
TestCount() const184   size_t TestCount() const { return test_list_.size(); }
185 
GetTestName(size_t test_id) const186   std::string GetTestName(size_t test_id) const {
187     VerifyTestId(test_id);
188     return name_ + "." + test_list_[test_id].GetName();
189   }
190 
GetTest(size_t test_id)191   Test& GetTest(size_t test_id) {
192     VerifyTestId(test_id);
193     return test_list_[test_id];
194   }
195 
GetTest(size_t test_id) const196   const Test& GetTest(size_t test_id) const {
197     VerifyTestId(test_id);
198     return test_list_[test_id];
199   }
200 
SetTestResult(size_t test_id,TestResult result)201   void SetTestResult(size_t test_id, TestResult result) {
202     VerifyTestId(test_id);
203     test_list_[test_id].SetResult(result);
204   }
205 
GetTestResult(size_t test_id) const206   TestResult GetTestResult(size_t test_id) const {
207     VerifyTestId(test_id);
208     return test_list_[test_id].GetResult();
209   }
210 
GetExpectedTestResult(size_t test_id) const211   TestResult GetExpectedTestResult(size_t test_id) const {
212     VerifyTestId(test_id);
213     return test_list_[test_id].GetExpectedResult();
214   }
215 
GetTestSuccess(size_t test_id) const216   bool GetTestSuccess(size_t test_id) const {
217     return GetTestResult(test_id) == GetExpectedTestResult(test_id);
218   }
219 
SetTestTime(size_t test_id,int64_t elapsed_time_ns)220   void SetTestTime(size_t test_id, int64_t elapsed_time_ns) {
221     VerifyTestId(test_id);
222     test_list_[test_id].SetTestTime(elapsed_time_ns);
223   }
224 
GetTestTime(size_t test_id) const225   int64_t GetTestTime(size_t test_id) const {
226     VerifyTestId(test_id);
227     return test_list_[test_id].GetTestTime();
228   }
229 
230  private:
VerifyTestId(size_t test_id) const231   void VerifyTestId(size_t test_id) const {
232     if(test_id >= test_list_.size()) {
233       fprintf(stderr, "test_id %zu out of range [0, %zu)\n", test_id, test_list_.size());
234       exit(1);
235     }
236   }
237 
238  private:
239   const std::string name_;
240   std::vector<Test> test_list_;
241 };
242 
243 class TestResultPrinter : public testing::EmptyTestEventListener {
244  public:
TestResultPrinter()245   TestResultPrinter() : pinfo_(NULL) {}
OnTestStart(const testing::TestInfo & test_info)246   virtual void OnTestStart(const testing::TestInfo& test_info) {
247     pinfo_ = &test_info; // Record test_info for use in OnTestPartResult.
248   }
249   virtual void OnTestPartResult(const testing::TestPartResult& result);
250 
251  private:
252   const testing::TestInfo* pinfo_;
253 };
254 
255 // Called after an assertion failure.
void TestResultPrinter::OnTestPartResult(const testing::TestPartResult& result) {
  // If the test part succeeded, we don't need to do anything.
  if (result.type() == testing::TestPartResult::kSuccess)
    return;

  // Print failure message from the assertion (e.g. expected this and got that).
  // pinfo_ was recorded by OnTestStart(), so it names the test being run.
  printf("%s:(%d) Failure in test %s.%s\n%s\n", result.file_name(), result.line_number(),
         pinfo_->test_case_name(), pinfo_->name(), result.message());
  fflush(stdout);
}
266 
// Returns the current monotonic-clock time in nanoseconds.
static int64_t NanoTime() {
  auto since_epoch = std::chrono::steady_clock::now().time_since_epoch();
  return std::chrono::duration_cast<std::chrono::nanoseconds>(since_epoch).count();
}
271 
EnumerateTests(int argc,char ** argv,std::vector<TestCase> & testcase_list)272 static bool EnumerateTests(int argc, char** argv, std::vector<TestCase>& testcase_list) {
273   std::string command;
274   for (int i = 0; i < argc; ++i) {
275     command += argv[i];
276     command += " ";
277   }
278   command += "--gtest_list_tests";
279   FILE* fp = popen(command.c_str(), "r");
280   if (fp == NULL) {
281     perror("popen");
282     return false;
283   }
284 
285   char buf[200];
286   while (fgets(buf, sizeof(buf), fp) != NULL) {
287     char* p = buf;
288 
289     while (*p != '\0' && isspace(*p)) {
290       ++p;
291     }
292     if (*p == '\0') continue;
293     char* start = p;
294     while (*p != '\0' && !isspace(*p)) {
295       ++p;
296     }
297     char* end = p;
298     while (*p != '\0' && isspace(*p)) {
299       ++p;
300     }
301     if (*p != '\0' && *p != '#') {
302       // This is not we want, gtest must meet with some error when parsing the arguments.
303       fprintf(stderr, "argument error, check with --help\n");
304       return false;
305     }
306     *end = '\0';
307     if (*(end - 1) == '.') {
308       *(end - 1) = '\0';
309       testcase_list.push_back(TestCase(start));
310     } else {
311       testcase_list.back().AppendTest(start);
312     }
313   }
314   int result = pclose(fp);
315   return (result != -1 && WEXITSTATUS(result) == 0);
316 }
317 
318 // Part of the following *Print functions are copied from external/gtest/src/gtest.cc:
319 // PrettyUnitTestResultPrinter. The reason for copy is that PrettyUnitTestResultPrinter
320 // is defined and used in gtest.cc, which is hard to reuse.
OnTestIterationStartPrint(const std::vector<TestCase> & testcase_list,size_t iteration,int iteration_count,size_t job_count)321 static void OnTestIterationStartPrint(const std::vector<TestCase>& testcase_list, size_t iteration,
322                                       int iteration_count, size_t job_count) {
323   if (iteration_count != 1) {
324     printf("\nRepeating all tests (iteration %zu) . . .\n\n", iteration);
325   }
326   ColoredPrintf(COLOR_GREEN,  "[==========] ");
327 
328   size_t testcase_count = testcase_list.size();
329   size_t test_count = 0;
330   for (const auto& testcase : testcase_list) {
331     test_count += testcase.TestCount();
332   }
333 
334   printf("Running %zu %s from %zu %s (%zu %s).\n",
335          test_count, (test_count == 1) ? "test" : "tests",
336          testcase_count, (testcase_count == 1) ? "test case" : "test cases",
337          job_count, (job_count == 1) ? "job" : "jobs");
338   fflush(stdout);
339 }
340 
341 // bionic cts test needs gtest output format.
342 #if defined(USING_GTEST_OUTPUT_FORMAT)
343 
// Gtest-format per-test report: "[ RUN ]" line, then the child's captured
// output, then an OK/FAILED verdict — matching gtest's own output so CTS
// tooling can parse it.
static void OnTestEndPrint(const TestCase& testcase, size_t test_id) {
  ColoredPrintf(COLOR_GREEN, "[ RUN      ] ");
  printf("%s\n", testcase.GetTestName(test_id).c_str());

  // Replay everything the child wrote to stdout/stderr.
  const std::string& test_output = testcase.GetTest(test_id).GetTestOutput();
  printf("%s", test_output.c_str());

  TestResult result = testcase.GetTestResult(test_id);
  // Matching the expected result (including an expected failure) counts as OK.
  if (result == testcase.GetExpectedTestResult(test_id)) {
    ColoredPrintf(COLOR_GREEN, "[       OK ] ");
  } else {
    ColoredPrintf(COLOR_RED, "[  FAILED  ] ");
  }
  printf("%s", testcase.GetTestName(test_id).c_str());
  if (testing::GTEST_FLAG(print_time)) {
    printf(" (%" PRId64 " ms)", testcase.GetTestTime(test_id) / 1000000);
  }
  printf("\n");
  fflush(stdout);
}
364 
365 #else  // !defined(USING_GTEST_OUTPUT_FORMAT)
366 
OnTestEndPrint(const TestCase & testcase,size_t test_id)367 static void OnTestEndPrint(const TestCase& testcase, size_t test_id) {
368   TestResult result = testcase.GetTestResult(test_id);
369   TestResult expected = testcase.GetExpectedTestResult(test_id);
370   if (result == TEST_SUCCESS) {
371     if (expected == TEST_SUCCESS) {
372       ColoredPrintf(COLOR_GREEN, "[    OK    ] ");
373     } else if (expected == TEST_FAILED) {
374       ColoredPrintf(COLOR_RED, "[  XPASS   ] ");
375     }
376   } else if (result == TEST_FAILED) {
377     if (expected == TEST_SUCCESS) {
378       ColoredPrintf(COLOR_RED, "[  FAILED  ] ");
379     } else if (expected == TEST_FAILED) {
380       ColoredPrintf(COLOR_YELLOW, "[  XFAIL   ] ");
381     }
382   } else if (result == TEST_TIMEOUT) {
383     ColoredPrintf(COLOR_RED, "[ TIMEOUT  ] ");
384   }
385 
386   printf("%s", testcase.GetTestName(test_id).c_str());
387   if (testing::GTEST_FLAG(print_time)) {
388     printf(" (%" PRId64 " ms)", testcase.GetTestTime(test_id) / 1000000);
389   }
390   printf("\n");
391 
392   const std::string& test_output = testcase.GetTest(test_id).GetTestOutput();
393   printf("%s", test_output.c_str());
394   fflush(stdout);
395 }
396 
397 #endif  // !defined(USING_GTEST_OUTPUT_FORMAT)
398 
// Prints the end-of-iteration summary: overall counts, then lists of
// timed-out, slow, failed and unexpectedly-passing tests, and finally the
// short per-category tallies.
static void OnTestIterationEndPrint(const std::vector<TestCase>& testcase_list, size_t /*iteration*/,
                                    int64_t elapsed_time_ns) {

  std::vector<std::string> fail_test_name_list;
  std::vector<std::string> xpass_test_name_list;
  std::vector<std::pair<std::string, int64_t>> timeout_test_list;

  // For tests that were slow but didn't time out.
  std::vector<std::tuple<std::string, int64_t, int>> slow_test_list;
  size_t testcase_count = testcase_list.size();
  size_t test_count = 0;
  size_t success_test_count = 0;
  size_t expected_failure_count = 0;

  // Classify every test: timeout / matched-expectation (pass or xfail) /
  // mismatched (fail or xpass). Slowness is tracked independently.
  for (const auto& testcase : testcase_list) {
    test_count += testcase.TestCount();
    for (size_t i = 0; i < testcase.TestCount(); ++i) {
      TestResult result = testcase.GetTestResult(i);
      TestResult expected = testcase.GetExpectedTestResult(i);
      if (result == TEST_TIMEOUT) {
        timeout_test_list.push_back(
            std::make_pair(testcase.GetTestName(i), testcase.GetTestTime(i)));
      } else if (result == expected) {
        if (result == TEST_SUCCESS) {
          ++success_test_count;
        } else {
          ++expected_failure_count;
        }
      } else {
        if (result == TEST_FAILED) {
          fail_test_name_list.push_back(testcase.GetTestName(i));
        } else {
          xpass_test_name_list.push_back(testcase.GetTestName(i));
        }
      }
      // A timed-out test is already reported; anything else over its slow
      // threshold (ns -> ms) is recorded as slow.
      if (result != TEST_TIMEOUT &&
          testcase.GetTestTime(i) / 1000000 >= GetSlowThresholdMs(testcase.GetTestName(i))) {
        slow_test_list.push_back(std::make_tuple(testcase.GetTestName(i),
                                                 testcase.GetTestTime(i),
                                                 GetSlowThresholdMs(testcase.GetTestName(i))));
      }
    }
  }

  // Overall "N tests from M test cases ran" banner.
  ColoredPrintf(COLOR_GREEN,  "[==========] ");
  printf("%zu %s from %zu %s ran.", test_count, (test_count == 1) ? "test" : "tests",
                                    testcase_count, (testcase_count == 1) ? "test case" : "test cases");
  if (testing::GTEST_FLAG(print_time)) {
    printf(" (%" PRId64 " ms total)", elapsed_time_ns / 1000000);
  }
  printf("\n");
  ColoredPrintf(COLOR_GREEN,  "[   PASS   ] ");
  printf("%zu %s.", success_test_count, (success_test_count == 1) ? "test" : "tests");
  if (expected_failure_count > 0) {
    printf(" (%zu expected failure%s)", expected_failure_count,
           (expected_failure_count == 1) ? "" : "s");
  }
  printf("\n");

  // Print tests that timed out.
  size_t timeout_test_count = timeout_test_list.size();
  if (timeout_test_count > 0) {
    ColoredPrintf(COLOR_RED, "[ TIMEOUT  ] ");
    printf("%zu %s, listed below:\n", timeout_test_count, (timeout_test_count == 1) ? "test" : "tests");
    for (const auto& timeout_pair : timeout_test_list) {
      ColoredPrintf(COLOR_RED, "[ TIMEOUT  ] ");
      printf("%s (stopped at %" PRId64 " ms)\n", timeout_pair.first.c_str(),
                                                 timeout_pair.second / 1000000);
    }
  }

  // Print tests that were slow.
  size_t slow_test_count = slow_test_list.size();
  if (slow_test_count > 0) {
    ColoredPrintf(COLOR_YELLOW, "[   SLOW   ] ");
    printf("%zu %s, listed below:\n", slow_test_count, (slow_test_count == 1) ? "test" : "tests");
    for (const auto& slow_tuple : slow_test_list) {
      ColoredPrintf(COLOR_YELLOW, "[   SLOW   ] ");
      printf("%s (%" PRId64 " ms, exceeded %d ms)\n", std::get<0>(slow_tuple).c_str(),
             std::get<1>(slow_tuple) / 1000000, std::get<2>(slow_tuple));
    }
  }

  // Print tests that failed.
  size_t fail_test_count = fail_test_name_list.size();
  if (fail_test_count > 0) {
    ColoredPrintf(COLOR_RED,  "[   FAIL   ] ");
    printf("%zu %s, listed below:\n", fail_test_count, (fail_test_count == 1) ? "test" : "tests");
    for (const auto& name : fail_test_name_list) {
      ColoredPrintf(COLOR_RED, "[   FAIL   ] ");
      printf("%s\n", name.c_str());
    }
  }

  // Print tests that should have failed.
  size_t xpass_test_count = xpass_test_name_list.size();
  if (xpass_test_count > 0) {
    ColoredPrintf(COLOR_RED,  "[  XPASS   ] ");
    printf("%zu %s, listed below:\n", xpass_test_count, (xpass_test_count == 1) ? "test" : "tests");
    for (const auto& name : xpass_test_name_list) {
      ColoredPrintf(COLOR_RED, "[  XPASS   ] ");
      printf("%s\n", name.c_str());
    }
  }

  // Final short tallies, separated from the lists by a blank line.
  if (timeout_test_count > 0 || slow_test_count > 0 || fail_test_count > 0 || xpass_test_count > 0) {
    printf("\n");
  }

  if (timeout_test_count > 0) {
    printf("%2zu TIMEOUT %s\n", timeout_test_count, (timeout_test_count == 1) ? "TEST" : "TESTS");
  }
  if (slow_test_count > 0) {
    printf("%2zu SLOW %s\n", slow_test_count, (slow_test_count == 1) ? "TEST" : "TESTS");
  }
  if (fail_test_count > 0) {
    printf("%2zu FAILED %s\n", fail_test_count, (fail_test_count == 1) ? "TEST" : "TESTS");
  }
  if (xpass_test_count > 0) {
    printf("%2zu SHOULD HAVE FAILED %s\n", xpass_test_count, (xpass_test_count == 1) ? "TEST" : "TESTS");
  }

  fflush(stdout);
}
523 
// Returns `xml` with the five XML special characters replaced by their
// predefined entities; all other characters are copied through unchanged.
std::string XmlEscape(const std::string& xml) {
  std::string result;
  result.reserve(xml.size());

  for (char ch : xml) {
    switch (ch) {
      case '<':  result += "&lt;";   break;
      case '>':  result += "&gt;";   break;
      case '&':  result += "&amp;";  break;
      case '\'': result += "&apos;"; break;
      case '"':  result += "&quot;"; break;
      default:   result += ch;       break;
    }
  }

  return result;
}
553 
554 // Output xml file when --gtest_output is used, write this function as we can't reuse
555 // gtest.cc:XmlUnitTestResultPrinter. The reason is XmlUnitTestResultPrinter is totally
556 // defined in gtest.cc and not expose to outside. What's more, as we don't run gtest in
557 // the parent process, we don't have gtest classes which are needed by XmlUnitTestResultPrinter.
OnTestIterationEndXmlPrint(const std::string & xml_output_filename,const std::vector<TestCase> & testcase_list,time_t epoch_iteration_start_time,int64_t elapsed_time_ns)558 void OnTestIterationEndXmlPrint(const std::string& xml_output_filename,
559                                 const std::vector<TestCase>& testcase_list,
560                                 time_t epoch_iteration_start_time,
561                                 int64_t elapsed_time_ns) {
562   FILE* fp = fopen(xml_output_filename.c_str(), "w");
563   if (fp == NULL) {
564     fprintf(stderr, "failed to open '%s': %s\n", xml_output_filename.c_str(), strerror(errno));
565     exit(1);
566   }
567 
568   size_t total_test_count = 0;
569   size_t total_failed_count = 0;
570   std::vector<size_t> failed_count_list(testcase_list.size(), 0);
571   std::vector<int64_t> elapsed_time_list(testcase_list.size(), 0);
572   for (size_t i = 0; i < testcase_list.size(); ++i) {
573     auto& testcase = testcase_list[i];
574     total_test_count += testcase.TestCount();
575     for (size_t j = 0; j < testcase.TestCount(); ++j) {
576       if (!testcase.GetTestSuccess(j)) {
577         ++failed_count_list[i];
578       }
579       elapsed_time_list[i] += testcase.GetTestTime(j);
580     }
581     total_failed_count += failed_count_list[i];
582   }
583 
584   const tm* time_struct = localtime(&epoch_iteration_start_time);
585   char timestamp[40];
586   snprintf(timestamp, sizeof(timestamp), "%4d-%02d-%02dT%02d:%02d:%02d",
587            time_struct->tm_year + 1900, time_struct->tm_mon + 1, time_struct->tm_mday,
588            time_struct->tm_hour, time_struct->tm_min, time_struct->tm_sec);
589 
590   fputs("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n", fp);
591   fprintf(fp, "<testsuites tests=\"%zu\" failures=\"%zu\" disabled=\"0\" errors=\"0\"",
592           total_test_count, total_failed_count);
593   fprintf(fp, " timestamp=\"%s\" time=\"%.3lf\" name=\"AllTests\">\n", timestamp, elapsed_time_ns / 1e9);
594   for (size_t i = 0; i < testcase_list.size(); ++i) {
595     auto& testcase = testcase_list[i];
596     fprintf(fp, "  <testsuite name=\"%s\" tests=\"%zu\" failures=\"%zu\" disabled=\"0\" errors=\"0\"",
597             testcase.GetName().c_str(), testcase.TestCount(), failed_count_list[i]);
598     fprintf(fp, " time=\"%.3lf\">\n", elapsed_time_list[i] / 1e9);
599 
600     for (size_t j = 0; j < testcase.TestCount(); ++j) {
601       fprintf(fp, "    <testcase name=\"%s\" status=\"run\" time=\"%.3lf\" classname=\"%s\"",
602               testcase.GetTest(j).GetName().c_str(), testcase.GetTestTime(j) / 1e9,
603               testcase.GetName().c_str());
604       if (!testcase.GetTestSuccess(j)) {
605         fputs(" />\n", fp);
606       } else {
607         fputs(">\n", fp);
608         const std::string& test_output = testcase.GetTest(j).GetTestOutput();
609         const std::string escaped_test_output = XmlEscape(test_output);
610         fprintf(fp, "      <failure message=\"%s\" type=\"\">\n", escaped_test_output.c_str());
611         fputs("      </failure>\n", fp);
612         fputs("    </testcase>\n", fp);
613       }
614     }
615 
616     fputs("  </testsuite>\n", fp);
617   }
618   fputs("</testsuites>\n", fp);
619   fclose(fp);
620 }
621 
// Set asynchronously by signal_handler() and polled by the main loop.
// volatile sig_atomic_t (not plain bool) is the only object type the C/C++
// standards guarantee can be safely written from a signal handler.
static volatile sig_atomic_t sigint_flag;
static volatile sig_atomic_t sigquit_flag;

// Async-signal-safe: only sets a flag; the real work happens later in
// HandleSignals() on the normal control path.
static void signal_handler(int sig) {
  if (sig == SIGINT) {
    sigint_flag = true;
  } else if (sig == SIGQUIT) {
    sigquit_flag = true;
  }
}
632 
RegisterSignalHandler()633 static bool RegisterSignalHandler() {
634   sigint_flag = false;
635   sigquit_flag = false;
636   sig_t ret = signal(SIGINT, signal_handler);
637   if (ret != SIG_ERR) {
638     ret = signal(SIGQUIT, signal_handler);
639   }
640   if (ret == SIG_ERR) {
641     perror("RegisterSignalHandler");
642     return false;
643   }
644   return true;
645 }
646 
// Restores the default disposition for SIGINT and SIGQUIT (used in the
// forked child so tests see normal signal behavior). Returns false (after
// perror) if either restoration fails.
static bool UnregisterSignalHandler() {
  if (signal(SIGINT, SIG_DFL) == SIG_ERR ||
      signal(SIGQUIT, SIG_DFL) == SIG_ERR) {
    perror("UnregisterSignalHandler");
    return false;
  }
  return true;
}
658 
// Book-keeping for one forked test process. Default member initializers
// were added so a freshly constructed record never holds garbage (several
// fields — timed_out, exit_status, end_time_ns — are only assigned later by
// CheckChildProcExit()/CheckChildProcTimeout()).
struct ChildProcInfo {
  pid_t pid = 0;
  int64_t start_time_ns = 0;
  int64_t end_time_ns = 0;
  int64_t deadline_end_time_ns = 0; // The time when the test is thought of as timeout.
  size_t testcase_id = 0;           // Index of the testcase this child runs.
  size_t test_id = 0;               // Index of the test within that testcase.
  bool finished = false;
  bool timed_out = false;
  int exit_status = 0;
  int child_read_fd = -1; // File descriptor to read child test failure info.
};
670 
671 // Forked Child process, run the single test.
ChildProcessFn(int argc,char ** argv,const std::string & test_name)672 static void ChildProcessFn(int argc, char** argv, const std::string& test_name) {
673   char** new_argv = new char*[argc + 2];
674   memcpy(new_argv, argv, sizeof(char*) * argc);
675 
676   char* filter_arg = new char [test_name.size() + 20];
677   strcpy(filter_arg, "--gtest_filter=");
678   strcat(filter_arg, test_name.c_str());
679   new_argv[argc] = filter_arg;
680   new_argv[argc + 1] = NULL;
681 
682   int new_argc = argc + 1;
683   testing::InitGoogleTest(&new_argc, new_argv);
684   int result = RUN_ALL_TESTS();
685   exit(result);
686 }
687 
// Forks a child to run `test_name` in isolation. The child's stdout/stderr
// are redirected into a pipe whose (non-blocking) read end the parent keeps,
// and the returned record carries the pid, start time and timeout deadline.
// Any setup failure aborts the whole runner.
static ChildProcInfo RunChildProcess(const std::string& test_name, int testcase_id, int test_id,
                                     int argc, char** argv) {
  // Pipe carrying the child's output back to the parent.
  int pipefd[2];
  if (pipe(pipefd) == -1) {
    perror("pipe in RunTestInSeparateProc");
    exit(1);
  }
  // The parent polls many children, so its read end must never block.
  if (fcntl(pipefd[0], F_SETFL, O_NONBLOCK) == -1) {
    perror("fcntl in RunTestInSeparateProc");
    exit(1);
  }
  pid_t pid = fork();
  if (pid == -1) {
    perror("fork in RunTestInSeparateProc");
    exit(1);
  } else if (pid == 0) {
    // In child process, run a single test.
    // Route both stdout and stderr into the write end of the pipe.
    close(pipefd[0]);
    close(STDOUT_FILENO);
    close(STDERR_FILENO);
    dup2(pipefd[1], STDOUT_FILENO);
    dup2(pipefd[1], STDERR_FILENO);

    // Restore default signal dispositions so the test runs unaffected by
    // the parent's SIGINT/SIGQUIT handlers.
    if (!UnregisterSignalHandler()) {
      exit(1);
    }
    ChildProcessFn(argc, argv, test_name);
    // Unreachable.
  }
  // In parent process, initialize child process info.
  close(pipefd[1]);
  ChildProcInfo child_proc;
  child_proc.child_read_fd = pipefd[0];
  child_proc.pid = pid;
  child_proc.start_time_ns = NanoTime();
  // Convert the per-test timeout (ms) into an absolute deadline (ns).
  child_proc.deadline_end_time_ns = child_proc.start_time_ns + GetTimeoutMs(test_name) * 1000000LL;
  child_proc.testcase_id = testcase_id;
  child_proc.test_id = test_id;
  child_proc.finished = false;
  return child_proc;
}
729 
HandleSignals(std::vector<TestCase> & testcase_list,std::vector<ChildProcInfo> & child_proc_list)730 static void HandleSignals(std::vector<TestCase>& testcase_list,
731                             std::vector<ChildProcInfo>& child_proc_list) {
732   if (sigquit_flag) {
733     sigquit_flag = false;
734     // Print current running tests.
735     printf("List of current running tests:\n");
736     for (const auto& child_proc : child_proc_list) {
737       if (child_proc.pid != 0) {
738         std::string test_name = testcase_list[child_proc.testcase_id].GetTestName(child_proc.test_id);
739         int64_t current_time_ns = NanoTime();
740         int64_t run_time_ms = (current_time_ns - child_proc.start_time_ns) / 1000000;
741         printf("  %s (%" PRId64 " ms)\n", test_name.c_str(), run_time_ms);
742       }
743     }
744   } else if (sigint_flag) {
745     sigint_flag = false;
746     // Kill current running tests.
747     for (const auto& child_proc : child_proc_list) {
748       if (child_proc.pid != 0) {
749         // Send SIGKILL to ensure the child process can be killed unconditionally.
750         kill(child_proc.pid, SIGKILL);
751       }
752     }
753     // SIGINT kills the parent process as well.
754     exit(1);
755   }
756 }
757 
CheckChildProcExit(pid_t exit_pid,int exit_status,std::vector<ChildProcInfo> & child_proc_list)758 static bool CheckChildProcExit(pid_t exit_pid, int exit_status,
759                                std::vector<ChildProcInfo>& child_proc_list) {
760   for (size_t i = 0; i < child_proc_list.size(); ++i) {
761     if (child_proc_list[i].pid == exit_pid) {
762       child_proc_list[i].finished = true;
763       child_proc_list[i].timed_out = false;
764       child_proc_list[i].exit_status = exit_status;
765       child_proc_list[i].end_time_ns = NanoTime();
766       return true;
767     }
768   }
769   return false;
770 }
771 
CheckChildProcTimeout(std::vector<ChildProcInfo> & child_proc_list)772 static size_t CheckChildProcTimeout(std::vector<ChildProcInfo>& child_proc_list) {
773   int64_t current_time_ns = NanoTime();
774   size_t timeout_child_count = 0;
775   for (size_t i = 0; i < child_proc_list.size(); ++i) {
776     if (child_proc_list[i].deadline_end_time_ns <= current_time_ns) {
777       child_proc_list[i].finished = true;
778       child_proc_list[i].timed_out = true;
779       child_proc_list[i].end_time_ns = current_time_ns;
780       ++timeout_child_count;
781     }
782   }
783   return timeout_child_count;
784 }
785 
// Drains each child's (non-blocking) pipe, appending whatever is available
// to that child's test output. Stops per-child on EOF or EAGAIN; any other
// read error aborts the runner.
static void ReadChildProcOutput(std::vector<TestCase>& testcase_list,
                                std::vector<ChildProcInfo>& child_proc_list) {
  for (const auto& child_proc : child_proc_list) {
    TestCase& testcase = testcase_list[child_proc.testcase_id];
    int test_id = child_proc.test_id;
    while (true) {
      // Leave room for the terminating NUL appended below.
      char buf[1024];
      ssize_t bytes_read = TEMP_FAILURE_RETRY(read(child_proc.child_read_fd, buf, sizeof(buf) - 1));
      if (bytes_read > 0) {
        buf[bytes_read] = '\0';
        testcase.GetTest(test_id).AppendTestOutput(buf);
      } else if (bytes_read == 0) {
        break; // Read end.
      } else {
        // EAGAIN just means no data right now (the fd is O_NONBLOCK).
        if (errno == EAGAIN) {
          break;
        }
        perror("failed to read child_read_fd");
        exit(1);
      }
    }
  }
}
809 
// Blocks until at least one tracked child finishes, either by exiting or by
// passing its deadline. Also drains child output and services pending
// signals on every poll; polls roughly once per millisecond.
static void WaitChildProcs(std::vector<TestCase>& testcase_list,
                           std::vector<ChildProcInfo>& child_proc_list) {
  size_t finished_child_count = 0;
  while (true) {
    int status;
    pid_t result;
    // Reap every child that has already exited, without blocking.
    while ((result = TEMP_FAILURE_RETRY(waitpid(-1, &status, WNOHANG))) > 0) {
      if (CheckChildProcExit(result, status, child_proc_list)) {
        ++finished_child_count;
      }
    }

    if (result == -1) {
      if (errno == ECHILD) {
        // This happens when we have no running child processes.
        return;
      } else {
        perror("waitpid");
        exit(1);
      }
    } else if (result == 0) {
      // No child exited; see whether any has blown its deadline instead.
      finished_child_count += CheckChildProcTimeout(child_proc_list);
    }

    // Capture output before returning so finished tests have complete logs.
    ReadChildProcOutput(testcase_list, child_proc_list);
    if (finished_child_count > 0) {
      return;
    }

    HandleSignals(testcase_list, child_proc_list);

    // sleep 1 ms to avoid busy looping.
    timespec sleep_time;
    sleep_time.tv_sec = 0;
    sleep_time.tv_nsec = 1000000;
    nanosleep(&sleep_time, NULL);
  }
}
848 
WaitForOneChild(pid_t pid)849 static TestResult WaitForOneChild(pid_t pid) {
850   int exit_status;
851   pid_t result = TEMP_FAILURE_RETRY(waitpid(pid, &exit_status, 0));
852 
853   TestResult test_result = TEST_SUCCESS;
854   if (result != pid || WEXITSTATUS(exit_status) != 0) {
855     test_result = TEST_FAILED;
856   }
857   return test_result;
858 }
859 
// Records a finished child's outcome into its testcase: runtime, then one of
// TEST_TIMEOUT / TEST_FAILED (signal or non-zero exit) / TEST_SUCCESS, with a
// human-readable note appended to the test output for abnormal outcomes.
static void CollectChildTestResult(const ChildProcInfo& child_proc, TestCase& testcase) {
  int test_id = child_proc.test_id;
  testcase.SetTestTime(test_id, child_proc.end_time_ns - child_proc.start_time_ns);
  if (child_proc.timed_out) {
    // The child process marked as timed_out has not exited, and we should kill it manually.
    kill(child_proc.pid, SIGKILL);
    WaitForOneChild(child_proc.pid);
  }
  // Close our read end of the child's output pipe in every case.
  close(child_proc.child_read_fd);

  if (child_proc.timed_out) {
    testcase.SetTestResult(test_id, TEST_TIMEOUT);
    char buf[1024];
    snprintf(buf, sizeof(buf), "%s killed because of timeout at %" PRId64 " ms.\n",
             testcase.GetTestName(test_id).c_str(), testcase.GetTestTime(test_id) / 1000000);
    testcase.GetTest(test_id).AppendTestOutput(buf);

  } else if (WIFSIGNALED(child_proc.exit_status)) {
    // Record signal terminated test as failed.
    testcase.SetTestResult(test_id, TEST_FAILED);
    char buf[1024];
    snprintf(buf, sizeof(buf), "%s terminated by signal: %s.\n",
             testcase.GetTestName(test_id).c_str(), strsignal(WTERMSIG(child_proc.exit_status)));
    testcase.GetTest(test_id).AppendTestOutput(buf);

  } else {
    int exitcode = WEXITSTATUS(child_proc.exit_status);
    testcase.SetTestResult(test_id, exitcode == 0 ? TEST_SUCCESS : TEST_FAILED);
    if (exitcode != 0) {
      char buf[1024];
      snprintf(buf, sizeof(buf), "%s exited with exitcode %d.\n",
               testcase.GetTestName(test_id).c_str(), exitcode);
      testcase.GetTest(test_id).AppendTestOutput(buf);
    }
  }
}
896 
897 // We choose to use multi-fork and multi-wait here instead of multi-thread, because it always
898 // makes deadlock to use fork in multi-thread.
899 // Returns true if all tests run successfully, otherwise return false.
// Runs every test in testcase_list in its own forked child process, up to
// job_count children at a time, repeating for iteration_count iterations
// (negative means forever). Writes an xml report per iteration if
// xml_output_filename is non-empty. Returns true iff every test in every
// iteration passed.
static bool RunTestInSeparateProc(int argc, char** argv, std::vector<TestCase>& testcase_list,
                                  int iteration_count, size_t job_count,
                                  const std::string& xml_output_filename) {
  // Stop default result printer to avoid environment setup/teardown information for each test.
  testing::UnitTest::GetInstance()->listeners().Release(
                        testing::UnitTest::GetInstance()->listeners().default_result_printer());
  testing::UnitTest::GetInstance()->listeners().Append(new TestResultPrinter);

  if (!RegisterSignalHandler()) {
    exit(1);
  }

  bool all_tests_passed = true;

  // iteration_count < 0 keeps the loop condition permanently true (repeat forever).
  for (size_t iteration = 1;
       iteration_count < 0 || iteration <= static_cast<size_t>(iteration_count);
       ++iteration) {
    OnTestIterationStartPrint(testcase_list, iteration, iteration_count, job_count);
    int64_t iteration_start_time_ns = NanoTime();
    time_t epoch_iteration_start_time = time(NULL);

    // Run up to job_count tests in parallel, each test in a child process.
    std::vector<ChildProcInfo> child_proc_list;

    // Next test to run is [next_testcase_id:next_test_id].
    size_t next_testcase_id = 0;
    size_t next_test_id = 0;

    // Record how many tests are finished.
    std::vector<size_t> finished_test_count_list(testcase_list.size(), 0);
    size_t finished_testcase_count = 0;

    while (finished_testcase_count < testcase_list.size()) {
      // run up to job_count child processes.
      while (child_proc_list.size() < job_count && next_testcase_id < testcase_list.size()) {
        std::string test_name = testcase_list[next_testcase_id].GetTestName(next_test_id);
        ChildProcInfo child_proc = RunChildProcess(test_name, next_testcase_id, next_test_id,
                                                   argc, argv);
        child_proc_list.push_back(child_proc);
        // Advance the cursor, rolling over to the next testcase when this
        // one's tests are exhausted.
        if (++next_test_id == testcase_list[next_testcase_id].TestCount()) {
          next_test_id = 0;
          ++next_testcase_id;
        }
      }

      // Wait for any child proc finish or timeout.
      WaitChildProcs(testcase_list, child_proc_list);

      // Collect result.
      auto it = child_proc_list.begin();
      while (it != child_proc_list.end()) {
        auto& child_proc = *it;
        if (child_proc.finished == true) {
          size_t testcase_id = child_proc.testcase_id;
          size_t test_id = child_proc.test_id;
          TestCase& testcase = testcase_list[testcase_id];

          CollectChildTestResult(child_proc, testcase);
          OnTestEndPrint(testcase, test_id);

          if (++finished_test_count_list[testcase_id] == testcase.TestCount()) {
            ++finished_testcase_count;
          }
          if (!testcase.GetTestSuccess(test_id)) {
            all_tests_passed = false;
          }

          // erase() returns the iterator to the next element.
          it = child_proc_list.erase(it);
        } else {
          ++it;
        }
      }
    }

    int64_t elapsed_time_ns = NanoTime() - iteration_start_time_ns;
    OnTestIterationEndPrint(testcase_list, iteration, elapsed_time_ns);
    if (!xml_output_filename.empty()) {
      OnTestIterationEndXmlPrint(xml_output_filename, testcase_list, epoch_iteration_start_time,
                                 elapsed_time_ns);
    }
  }

  if (!UnregisterSignalHandler()) {
    exit(1);
  }

  return all_tests_passed;
}
988 
// Default parallelism: one job per currently-online CPU.
static size_t GetDefaultJobCount() {
  const long online_cpus = sysconf(_SC_NPROCESSORS_ONLN);
  return static_cast<size_t>(online_cpus);
}
992 
AddPathSeparatorInTestProgramPath(std::vector<char * > & args)993 static void AddPathSeparatorInTestProgramPath(std::vector<char*>& args) {
994   // To run DeathTest in threadsafe mode, gtest requires that the user must invoke the
995   // test program via a valid path that contains at least one path separator.
996   // The reason is that gtest uses clone() + execve() to run DeathTest in threadsafe mode,
997   // and execve() doesn't read environment variable PATH, so execve() will not success
998   // until we specify the absolute path or relative path of the test program directly.
999   if (strchr(args[0], '/') == nullptr) {
1000     args[0] = strdup(g_executable_path.c_str());
1001   }
1002 }
1003 
static void AddGtestFilterSynonym(std::vector<char*>& args) {
  // Accept --gtest-filter as a synonym for --gtest_filter by rewriting the
  // dash at index 7 ("--gtest-") to an underscore, in place.
  static const char kSynonym[] = "--gtest-filter";
  const size_t synonym_len = strlen(kSynonym);
  for (size_t i = 1; i < args.size(); ++i) {
    char* arg = args[i];
    if (strncmp(arg, kSynonym, synonym_len) == 0) {
      arg[7] = '_';
    }
  }
}
1012 
// Options controlling isolation mode, filled in by PickOptions() from the
// command line (defaults come from gtest flags and the DEFAULT_* constants).
struct IsolationTestOptions {
  bool isolate;                // Run each test in its own child process.
  size_t job_count;            // Max number of child processes run in parallel.
  int test_deadline_ms;        // Per-test deadline before the child is killed.
  int test_slow_threshold_ms;  // Threshold for flagging a test as slow.
  std::string gtest_color;     // Mirrors --gtest_color.
  bool gtest_print_time;       // Mirrors --gtest_print_time.
  int gtest_repeat;            // Mirrors --gtest_repeat; < 0 means repeat forever.
  std::string gtest_output;    // Resolved xml output file path, if requested.
};
1023 
1024 // Pick options not for gtest: There are two parts in args, one part is used in isolation test mode
1025 // as described in PrintHelpInfo(), the other part is handled by testing::InitGoogleTest() in
1026 // gtest. PickOptions() picks the first part into IsolationTestOptions structure, leaving the second
1027 // part in args.
1028 // Arguments:
1029 //   args is used to pass in all command arguments, and pass out only the part of options for gtest.
1030 //   options is used to pass out test options in isolation mode.
1031 // Return false if there is error in arguments.
static bool PickOptions(std::vector<char*>& args, IsolationTestOptions& options) {
  // --help / -h prints help and disables isolation mode entirely.
  for (size_t i = 1; i < args.size(); ++i) {
    if (strcmp(args[i], "--help") == 0 || strcmp(args[i], "-h") == 0) {
      PrintHelpInfo();
      options.isolate = false;
      return true;
    }
  }

  AddPathSeparatorInTestProgramPath(args);
  AddGtestFilterSynonym(args);

  // if --bionic-selftest argument is used, only enable self tests, otherwise remove self tests.
  bool enable_selftest = false;
  for (size_t i = 1; i < args.size(); ++i) {
    if (strcmp(args[i], "--bionic-selftest") == 0) {
      // This argument is to enable "bionic_selftest*" for self test, and is not shown in help info.
      // Don't remove this option from arguments.
      enable_selftest = true;
    }
  }
  std::string gtest_filter_str;
  // Scan backwards so the last --gtest_filter wins; it is removed from args
  // and re-appended below with the selftest pattern folded in.
  for (size_t i = args.size() - 1; i >= 1; --i) {
    if (strncmp(args[i], "--gtest_filter=", strlen("--gtest_filter=")) == 0) {
      gtest_filter_str = args[i] + strlen("--gtest_filter=");
      args.erase(args.begin() + i);
      break;
    }
  }
  if (enable_selftest == true) {
    gtest_filter_str = "bionic_selftest*";
  } else {
    if (gtest_filter_str.empty()) {
      gtest_filter_str = "-bionic_selftest*";
    } else {
      // Find if '-' for NEGATIVE_PATTERNS exists.
      if (gtest_filter_str.find("-") != std::string::npos) {
        gtest_filter_str += ":bionic_selftest*";
      } else {
        gtest_filter_str += ":-bionic_selftest*";
      }
    }
  }
  gtest_filter_str = "--gtest_filter=" + gtest_filter_str;
  args.push_back(strdup(gtest_filter_str.c_str()));

  options.isolate = true;
  // Parse arguments that are incompatible with running in isolation mode.
  for (size_t i = 1; i < args.size(); ++i) {
    if (strcmp(args[i], "--no-isolate") == 0) {
      options.isolate = false;
    } else if (strcmp(args[i], "--gtest_list_tests") == 0) {
      options.isolate = false;
    }
  }

  // Stop parsing if we will not run in isolation mode.
  if (options.isolate == false) {
    return true;
  }

  // Init default isolation test options.
  options.job_count = GetDefaultJobCount();
  options.test_deadline_ms = DEFAULT_GLOBAL_TEST_RUN_DEADLINE_MS;
  options.test_slow_threshold_ms = DEFAULT_GLOBAL_TEST_RUN_SLOW_THRESHOLD_MS;
  options.gtest_color = testing::GTEST_FLAG(color);
  options.gtest_print_time = testing::GTEST_FLAG(print_time);
  options.gtest_repeat = testing::GTEST_FLAG(repeat);
  options.gtest_output = testing::GTEST_FLAG(output);

  // Parse arguments specified for isolation mode.
  for (size_t i = 1; i < args.size(); ++i) {
    if (strncmp(args[i], "-j", strlen("-j")) == 0) {
      char* p = args[i] + strlen("-j");
      int count = 0;
      if (*p != '\0') {
        // Argument like -j5.
        count = atoi(p);
      } else if (args.size() > i + 1) {
        // Arguments like -j 5.
        count = atoi(args[i + 1]);
        ++i;
      }
      if (count <= 0) {
        fprintf(stderr, "invalid job count: %d\n", count);
        return false;
      }
      options.job_count = static_cast<size_t>(count);
    } else if (strncmp(args[i], "--deadline=", strlen("--deadline=")) == 0) {
      int time_ms = atoi(args[i] + strlen("--deadline="));
      if (time_ms <= 0) {
        fprintf(stderr, "invalid deadline: %d\n", time_ms);
        return false;
      }
      options.test_deadline_ms = time_ms;
    } else if (strncmp(args[i], "--slow-threshold=", strlen("--slow-threshold=")) == 0) {
      int time_ms = atoi(args[i] + strlen("--slow-threshold="));
      if (time_ms <= 0) {
        fprintf(stderr, "invalid slow test threshold: %d\n", time_ms);
        return false;
      }
      options.test_slow_threshold_ms = time_ms;
    } else if (strncmp(args[i], "--gtest_color=", strlen("--gtest_color=")) == 0) {
      options.gtest_color = args[i] + strlen("--gtest_color=");
    } else if (strcmp(args[i], "--gtest_print_time=0") == 0) {
      options.gtest_print_time = false;
    } else if (strncmp(args[i], "--gtest_repeat=", strlen("--gtest_repeat=")) == 0) {
      // If the value of gtest_repeat is < 0, then it indicates the tests
      // should be repeated forever.
      options.gtest_repeat = atoi(args[i] + strlen("--gtest_repeat="));
      // Remove --gtest_repeat=xx from arguments, so child process only run one iteration for a single test.
      args.erase(args.begin() + i);
      --i;
    } else if (strncmp(args[i], "--gtest_output=", strlen("--gtest_output=")) == 0) {
      std::string output = args[i] + strlen("--gtest_output=");
      // generate output xml file path according to the strategy in gtest.
      bool success = true;
      if (strncmp(output.c_str(), "xml:", strlen("xml:")) == 0) {
        output = output.substr(strlen("xml:"));
        if (output.size() == 0) {
          success = false;
        }
        // Make absolute path.
        if (success && output[0] != '/') {
          char* cwd = getcwd(NULL, 0);
          if (cwd != NULL) {
            output = std::string(cwd) + "/" + output;
            free(cwd);
          } else {
            success = false;
          }
        }
        // Add file name if output is a directory.
        if (success && output.back() == '/') {
          output += "test_details.xml";
        }
      }
      if (success) {
        options.gtest_output = output;
      } else {
        fprintf(stderr, "invalid gtest_output file: %s\n", args[i]);
        return false;
      }

      // Remove --gtest_output=xxx from arguments, so child process will not write xml file.
      args.erase(args.begin() + i);
      --i;
    }
  }

  // Add --no-isolate in args to prevent child process from running in isolation mode again.
  // As DeathTest will try to call execve(), this argument should always be added.
  args.insert(args.begin() + 1, strdup("--no-isolate"));
  return true;
}
1187 
// Resolves /proc/self/exe to get the absolute path of this executable.
// Exits the process on failure or if the result would not fit in the buffer.
static std::string get_proc_self_exe() {
  char buf[PATH_MAX];
  ssize_t len = readlink("/proc/self/exe", buf, sizeof(buf));
  // readlink() does not NUL-terminate; a length equal to the buffer size
  // means the link may have been truncated, so treat that as an error too.
  if (len <= 0 || len >= static_cast<ssize_t>(sizeof(buf))) {
    perror("readlink");
    exit(1);
  }

  return std::string(buf, len);
}
1198 
int main(int argc, char** argv, char** envp) {
  // Stash the invocation state for the get_executable_path()/get_argc()/
  // get_argv()/get_envp() accessors defined at the top of this file.
  g_executable_path = get_proc_self_exe();
  g_argc = argc;
  g_argv = argv;
  g_envp = envp;
  std::vector<char*> arg_list;
  for (int i = 0; i < argc; ++i) {
    arg_list.push_back(argv[i]);
  }

  IsolationTestOptions options;
  if (PickOptions(arg_list, options) == false) {
    return 1;
  }

  if (options.isolate == true) {
    // Set global variables.
    global_test_run_deadline_ms = options.test_deadline_ms;
    global_test_run_slow_threshold_ms = options.test_slow_threshold_ms;
    testing::GTEST_FLAG(color) = options.gtest_color.c_str();
    testing::GTEST_FLAG(print_time) = options.gtest_print_time;
    std::vector<TestCase> testcase_list;

    // execve()-style argv must be NULL-terminated.
    argc = static_cast<int>(arg_list.size());
    arg_list.push_back(NULL);
    if (EnumerateTests(argc, arg_list.data(), testcase_list) == false) {
      return 1;
    }
    bool all_test_passed =  RunTestInSeparateProc(argc, arg_list.data(), testcase_list,
                              options.gtest_repeat, options.job_count, options.gtest_output);
    return all_test_passed ? 0 : 1;
  } else {
    // Non-isolation mode: hand the (possibly rewritten) arguments straight to gtest.
    argc = static_cast<int>(arg_list.size());
    arg_list.push_back(NULL);
    testing::InitGoogleTest(&argc, arg_list.data());
    return RUN_ALL_TESTS();
  }
}
1237 
1238 //################################################################################
1239 // Bionic Gtest self test, run this by --bionic-selftest option.
1240 
// Always passes: exercises the runner's success-reporting path.
TEST(bionic_selftest, test_success) {
  ASSERT_EQ(1, 1);
}
1244 
// Always fails (0 != 1): exercises the runner's failure-reporting path.
TEST(bionic_selftest, test_fail) {
  ASSERT_EQ(0, 1);
}
1248 
// Sleeps 4 seconds — intended to exceed the slow-test threshold (see
// --slow-threshold) and trigger the slow-test warning.
TEST(bionic_selftest, test_time_warn) {
  sleep(4);
}
1252 
// Spins forever so the runner's deadline/SIGKILL path is exercised.
TEST(bionic_selftest, test_timeout) {
  while (1) {}
}
1256 
// Dereferences a null pointer (built via atoi("0") so the compiler can't
// see the constant) to make the child die with SIGSEGV, exercising the
// signal-terminated reporting path.
TEST(bionic_selftest, test_signal_SEGV_terminated) {
  char* p = reinterpret_cast<char*>(static_cast<intptr_t>(atoi("0")));
  *p = 3;
}
1261 
// Fixture forcing gtest death tests into threadsafe mode (gtest then
// re-executes the binary for each death test rather than just forking).
class bionic_selftest_DeathTest : public ::testing::Test {
 protected:
  virtual void SetUp() {
    ::testing::FLAGS_gtest_death_test_style = "threadsafe";
  }
};
1268 
// Death-test statement that exits cleanly with code 0.
static void deathtest_helper_success() {
  ASSERT_EQ(1, 1);
  exit(0);
}
1273 
// Passes: the helper exits with code 0, matching ExitedWithCode(0).
TEST_F(bionic_selftest_DeathTest, success) {
  ASSERT_EXIT(deathtest_helper_success(), ::testing::ExitedWithCode(0), "");
}
1277 
// Death-test statement whose assertion fails, so it never calls exit(0).
static void deathtest_helper_fail() {
  ASSERT_EQ(1, 0);
}
1281 
// Fails by design: the helper's assertion failure means the child does not
// exit with code 0, so ExitedWithCode(0) is not satisfied.
TEST_F(bionic_selftest_DeathTest, fail) {
  ASSERT_EXIT(deathtest_helper_fail(), ::testing::ExitedWithCode(0), "");
}
1285