1 /*
2  * Copyright (C) 2014 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 #include <gtest/gtest.h>
18 
19 #include <ctype.h>
20 #include <errno.h>
21 #include <fcntl.h>
22 #include <inttypes.h>
23 #include <libgen.h>
24 #include <limits.h>
25 #include <signal.h>
26 #include <stdarg.h>
27 #include <stdio.h>
28 #include <string.h>
29 #include <sys/wait.h>
30 #include <unistd.h>
31 
32 #include <chrono>
33 #include <string>
34 #include <tuple>
35 #include <utility>
36 #include <vector>
37 
38 #ifndef TEMP_FAILURE_RETRY
39 
40 /* Used to retry syscalls that can return EINTR. */
41 #define TEMP_FAILURE_RETRY(exp) ({         \
42     __typeof__(exp) _rc;                   \
43     do {                                   \
44         _rc = (exp);                       \
45     } while (_rc == -1 && errno == EINTR); \
46     _rc; })
47 
48 #endif
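// Typical use (a minimal sketch of the pattern this file relies on): wrap any
// syscall that may fail with EINTR, e.g.
//   ssize_t n = TEMP_FAILURE_RETRY(read(fd, buf, sizeof(buf)));
// The call is re-issued until it either succeeds or fails with an errno other
// than EINTR.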
49 
50 static std::string g_executable_path;
51 static int g_argc;
52 static char** g_argv;
53 static char** g_envp;
54 
55 const std::string& get_executable_path() {
56   return g_executable_path;
57 }
58 
59 int get_argc() {
60   return g_argc;
61 }
62 
63 char** get_argv() {
64   return g_argv;
65 }
66 
67 char** get_envp() {
68   return g_envp;
69 }
70 
71 namespace testing {
72 namespace internal {
73 
74 // Forward declaration so we can reuse testing::internal::ColoredPrintf from gtest.
75 enum GTestColor {
76   COLOR_DEFAULT,
77   COLOR_RED,
78   COLOR_GREEN,
79   COLOR_YELLOW
80 };
81 
82 void ColoredPrintf(GTestColor color, const char* fmt, ...);
83 
84 }  // namespace internal
85 }  // namespace testing
86 
87 using testing::internal::GTestColor;
88 using testing::internal::COLOR_DEFAULT;
89 using testing::internal::COLOR_RED;
90 using testing::internal::COLOR_GREEN;
91 using testing::internal::COLOR_YELLOW;
92 using testing::internal::ColoredPrintf;
93 
94 constexpr int DEFAULT_GLOBAL_TEST_RUN_DEADLINE_MS = 90000;
95 constexpr int DEFAULT_GLOBAL_TEST_RUN_SLOW_THRESHOLD_MS = 2000;
96 
97 // The time each test may run before it is killed for timing out.
98 // It takes effect only in isolation mode.
99 static int global_test_run_deadline_ms = DEFAULT_GLOBAL_TEST_RUN_DEADLINE_MS;
100 
101 // The time each test may run before it is reported as slow.
102 // It takes effect only in isolation mode.
103 static int global_test_run_slow_threshold_ms = DEFAULT_GLOBAL_TEST_RUN_SLOW_THRESHOLD_MS;
104 
105 // Return timeout duration for a test, in ms.
106 static int GetTimeoutMs(const std::string& /*test_name*/) {
107   return global_test_run_deadline_ms;
108 }
109 
110 // Return threshold for calling a test slow, in ms.
111 static int GetSlowThresholdMs(const std::string& /*test_name*/) {
112   return global_test_run_slow_threshold_ms;
113 }
114 
115 static void PrintHelpInfo() {
116   printf("Bionic Unit Test Options:\n"
117          "  -j [JOB_COUNT] or -j[JOB_COUNT]\n"
118          "      Run up to JOB_COUNT tests in parallel.\n"
119          "      Use isolation mode: run each test in a separate process.\n"
120          "      If JOB_COUNT is not given, it is set to the count of available processors.\n"
121          "  --no-isolate\n"
122          "      Don't use isolation mode, run all tests in a single process.\n"
123          "  --deadline=[TIME_IN_MS]\n"
124          "      Run each test in no longer than [TIME_IN_MS] time.\n"
125          "      Only valid in isolation mode. Default deadline is 90000 ms.\n"
126          "  --slow-threshold=[TIME_IN_MS]\n"
127          "      Tests running longer than [TIME_IN_MS] will be reported as slow.\n"
128          "      Only valid in isolation mode. Default slow threshold is 2000 ms.\n"
129          "  --gtest-filter=POSITIVE_PATTERNS[-NEGATIVE_PATTERNS]\n"
130          "      Synonym for the --gtest_filter option in gtest.\n"
131          "The default bionic unit test option is -j.\n"
132          "In isolation mode, you can send SIGQUIT to the parent process to show the\n"
133          "currently running tests, or send SIGINT to the parent process to stop testing\n"
134          "and clean up the currently running tests.\n"
135          "\n");
136 }
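// A typical invocation in isolation mode (the binary name and filter pattern
// here are illustrative) would look like:
//   ./bionic-unit-tests -j4 --deadline=60000 --gtest-filter=stdio*
// i.e. run the matching tests four at a time, each in its own process, killing
// any test that exceeds a 60 second deadline.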
137 
138 enum TestResult {
139   TEST_SUCCESS = 0,
140   TEST_FAILED,
141   TEST_TIMEOUT
142 };
143 
144 class Test {
145  public:
146   Test() {} // For std::vector<Test>.
147   explicit Test(const char* name) : name_(name) {}
148 
149   const std::string& GetName() const { return name_; }
150 
151   void SetResult(TestResult result) {
152     // Native xfails are inherently likely to actually be relying on undefined
153     // behavior/uninitialized memory, and thus likely to pass from time to time
154     // on CTS. Avoid that unpleasantness by just rewriting all xfail failures
155     // as successes. You'll still see the actual failure details.
156     if (GetName().find("xfail") == 0) result = TEST_SUCCESS;
157     result_ = result;
158   }
159 
160   TestResult GetResult() const { return result_; }
161 
162   void SetTestTime(int64_t elapsed_time_ns) { elapsed_time_ns_ = elapsed_time_ns; }
163 
164   int64_t GetTestTime() const { return elapsed_time_ns_; }
165 
166   void AppendTestOutput(const std::string& s) { output_ += s; }
167 
168   const std::string& GetTestOutput() const { return output_; }
169 
170  private:
171   const std::string name_;
172   TestResult result_;
173   int64_t elapsed_time_ns_;
174   std::string output_;
175 };
176 
177 class TestCase {
178  public:
179   TestCase() {} // For std::vector<TestCase>.
180   explicit TestCase(const char* name) : name_(name) {}
181 
182   const std::string& GetName() const { return name_; }
183 
184   void AppendTest(const char* test_name) {
185     test_list_.push_back(Test(test_name));
186   }
187 
188   size_t TestCount() const { return test_list_.size(); }
189 
190   std::string GetTestName(size_t test_id) const {
191     VerifyTestId(test_id);
192     return name_ + "." + test_list_[test_id].GetName();
193   }
194 
195   Test& GetTest(size_t test_id) {
196     VerifyTestId(test_id);
197     return test_list_[test_id];
198   }
199 
200   const Test& GetTest(size_t test_id) const {
201     VerifyTestId(test_id);
202     return test_list_[test_id];
203   }
204 
205   void SetTestResult(size_t test_id, TestResult result) {
206     VerifyTestId(test_id);
207     test_list_[test_id].SetResult(result);
208   }
209 
210   TestResult GetTestResult(size_t test_id) const {
211     VerifyTestId(test_id);
212     return test_list_[test_id].GetResult();
213   }
214 
215   bool GetTestSuccess(size_t test_id) const {
216     return GetTestResult(test_id) == TEST_SUCCESS;
217   }
218 
219   void SetTestTime(size_t test_id, int64_t elapsed_time_ns) {
220     VerifyTestId(test_id);
221     test_list_[test_id].SetTestTime(elapsed_time_ns);
222   }
223 
224   int64_t GetTestTime(size_t test_id) const {
225     VerifyTestId(test_id);
226     return test_list_[test_id].GetTestTime();
227   }
228 
229  private:
230   void VerifyTestId(size_t test_id) const {
231     if(test_id >= test_list_.size()) {
232       fprintf(stderr, "test_id %zu out of range [0, %zu)\n", test_id, test_list_.size());
233       exit(1);
234     }
235   }
236 
237  private:
238   const std::string name_;
239   std::vector<Test> test_list_;
240 };
241 
242 class TestResultPrinter : public testing::EmptyTestEventListener {
243  public:
244   TestResultPrinter() : pinfo_(NULL) {}
245   virtual void OnTestStart(const testing::TestInfo& test_info) {
246     pinfo_ = &test_info; // Record test_info for use in OnTestPartResult.
247   }
248   virtual void OnTestPartResult(const testing::TestPartResult& result);
249 
250  private:
251   const testing::TestInfo* pinfo_;
252 };
253 
254 // Called after an assertion failure.
255 void TestResultPrinter::OnTestPartResult(const testing::TestPartResult& result) {
256   // If the test part succeeded, we don't need to do anything.
257   if (result.type() == testing::TestPartResult::kSuccess)
258     return;
259 
260   // Print failure message from the assertion (e.g. expected this and got that).
261   printf("%s:(%d) Failure in test %s.%s\n%s\n", result.file_name(), result.line_number(),
262          pinfo_->test_case_name(), pinfo_->name(), result.message());
263   fflush(stdout);
264 }
265 
266 static int64_t NanoTime() {
267   std::chrono::nanoseconds duration(std::chrono::steady_clock::now().time_since_epoch());
268   return static_cast<int64_t>(duration.count());
269 }
270 
271 static bool EnumerateTests(int argc, char** argv, std::vector<TestCase>& testcase_list) {
272   std::string command;
273   for (int i = 0; i < argc; ++i) {
274     command += argv[i];
275     command += " ";
276   }
277   command += "--gtest_list_tests";
278   FILE* fp = popen(command.c_str(), "r");
279   if (fp == NULL) {
280     perror("popen");
281     return false;
282   }
283 
284   char buf[200];
285   while (fgets(buf, sizeof(buf), fp) != NULL) {
286     char* p = buf;
287 
288     while (*p != '\0' && isspace(*p)) {
289       ++p;
290     }
291     if (*p == '\0') continue;
292     char* start = p;
293     while (*p != '\0' && !isspace(*p)) {
294       ++p;
295     }
296     char* end = p;
297     while (*p != '\0' && isspace(*p)) {
298       ++p;
299     }
300     if (*p != '\0' && *p != '#') {
301       // This is not what we expect; gtest must have hit an error while parsing the arguments.
302       fprintf(stderr, "argument error, check with --help\n");
303       return false;
304     }
305     *end = '\0';
306     if (*(end - 1) == '.') {
307       *(end - 1) = '\0';
308       testcase_list.push_back(TestCase(start));
309     } else {
310       testcase_list.back().AppendTest(start);
311     }
312   }
313   int result = pclose(fp);
314   return (result != -1 && WEXITSTATUS(result) == 0);
315 }
316 
317 // Parts of the following *Print functions are copied from external/gtest/src/gtest.cc:
318 // PrettyUnitTestResultPrinter. They are copied because PrettyUnitTestResultPrinter is
319 // defined and used only inside gtest.cc, which makes it hard to reuse.
320 static void OnTestIterationStartPrint(const std::vector<TestCase>& testcase_list, size_t iteration,
321                                       int iteration_count, size_t job_count) {
322   if (iteration_count != 1) {
323     printf("\nRepeating all tests (iteration %zu) . . .\n\n", iteration);
324   }
325   ColoredPrintf(COLOR_GREEN,  "[==========] ");
326 
327   size_t testcase_count = testcase_list.size();
328   size_t test_count = 0;
329   for (const auto& testcase : testcase_list) {
330     test_count += testcase.TestCount();
331   }
332 
333   printf("Running %zu %s from %zu %s (%zu %s).\n",
334          test_count, (test_count == 1) ? "test" : "tests",
335          testcase_count, (testcase_count == 1) ? "test case" : "test cases",
336          job_count, (job_count == 1) ? "job" : "jobs");
337   fflush(stdout);
338 }
339 
340 // The bionic CTS test needs the gtest output format.
341 #if defined(USING_GTEST_OUTPUT_FORMAT)
342 
343 static void OnTestEndPrint(const TestCase& testcase, size_t test_id) {
344   ColoredPrintf(COLOR_GREEN, "[ RUN      ] ");
345   printf("%s\n", testcase.GetTestName(test_id).c_str());
346 
347   const std::string& test_output = testcase.GetTest(test_id).GetTestOutput();
348   printf("%s", test_output.c_str());
349 
350   TestResult result = testcase.GetTestResult(test_id);
351   if (result == TEST_SUCCESS) {
352     ColoredPrintf(COLOR_GREEN, "[       OK ] ");
353   } else {
354     ColoredPrintf(COLOR_RED, "[  FAILED  ] ");
355   }
356   printf("%s", testcase.GetTestName(test_id).c_str());
357   if (testing::GTEST_FLAG(print_time)) {
358     printf(" (%" PRId64 " ms)", testcase.GetTestTime(test_id) / 1000000);
359   }
360   printf("\n");
361   fflush(stdout);
362 }
363 
364 #else  // !defined(USING_GTEST_OUTPUT_FORMAT)
365 
366 static void OnTestEndPrint(const TestCase& testcase, size_t test_id) {
367   TestResult result = testcase.GetTestResult(test_id);
368   if (result == TEST_SUCCESS) {
369     ColoredPrintf(COLOR_GREEN, "[    OK    ] ");
370   } else if (result == TEST_FAILED) {
371     ColoredPrintf(COLOR_RED, "[  FAILED  ] ");
372   } else if (result == TEST_TIMEOUT) {
373     ColoredPrintf(COLOR_RED, "[ TIMEOUT  ] ");
374   }
375 
376   printf("%s", testcase.GetTestName(test_id).c_str());
377   if (testing::GTEST_FLAG(print_time)) {
378     printf(" (%" PRId64 " ms)", testcase.GetTestTime(test_id) / 1000000);
379   }
380   printf("\n");
381 
382   const std::string& test_output = testcase.GetTest(test_id).GetTestOutput();
383   printf("%s", test_output.c_str());
384   fflush(stdout);
385 }
386 
387 #endif  // !defined(USING_GTEST_OUTPUT_FORMAT)
388 
389 static void OnTestIterationEndPrint(const std::vector<TestCase>& testcase_list, size_t /*iteration*/,
390                                     int64_t elapsed_time_ns) {
391 
392   std::vector<std::string> fail_test_name_list;
393   std::vector<std::pair<std::string, int64_t>> timeout_test_list;
394 
395   // For tests that were slow but didn't time out.
396   std::vector<std::tuple<std::string, int64_t, int>> slow_test_list;
397   size_t testcase_count = testcase_list.size();
398   size_t test_count = 0;
399   size_t success_test_count = 0;
400   size_t expected_failure_count = 0;
401 
402   for (const auto& testcase : testcase_list) {
403     test_count += testcase.TestCount();
404     for (size_t i = 0; i < testcase.TestCount(); ++i) {
405       TestResult result = testcase.GetTestResult(i);
406       if (result == TEST_TIMEOUT) {
407         timeout_test_list.push_back(
408             std::make_pair(testcase.GetTestName(i), testcase.GetTestTime(i)));
409       } else if (result == TEST_SUCCESS) {
410         ++success_test_count;
411         if (testcase.GetTestName(i).find(".xfail_") != std::string::npos) ++expected_failure_count;
412       } else if (result == TEST_FAILED) {
413           fail_test_name_list.push_back(testcase.GetTestName(i));
414       }
415       if (result != TEST_TIMEOUT &&
416           testcase.GetTestTime(i) / 1000000 >= GetSlowThresholdMs(testcase.GetTestName(i))) {
417         slow_test_list.push_back(std::make_tuple(testcase.GetTestName(i),
418                                                  testcase.GetTestTime(i),
419                                                  GetSlowThresholdMs(testcase.GetTestName(i))));
420       }
421     }
422   }
423 
424   ColoredPrintf(COLOR_GREEN,  "[==========] ");
425   printf("%zu %s from %zu %s ran.", test_count, (test_count == 1) ? "test" : "tests",
426                                     testcase_count, (testcase_count == 1) ? "test case" : "test cases");
427   if (testing::GTEST_FLAG(print_time)) {
428     printf(" (%" PRId64 " ms total)", elapsed_time_ns / 1000000);
429   }
430   printf("\n");
431   ColoredPrintf(COLOR_GREEN,  "[   PASS   ] ");
432   printf("%zu %s.", success_test_count, (success_test_count == 1) ? "test" : "tests");
433   if (expected_failure_count > 0) {
434     printf(" (%zu expected failure%s.)", expected_failure_count,
435            (expected_failure_count == 1) ? "" : "s");
436   }
437   printf("\n");
438 
439   // Print tests that timed out.
440   size_t timeout_test_count = timeout_test_list.size();
441   if (timeout_test_count > 0) {
442     ColoredPrintf(COLOR_RED, "[ TIMEOUT  ] ");
443     printf("%zu %s, listed below:\n", timeout_test_count, (timeout_test_count == 1) ? "test" : "tests");
444     for (const auto& timeout_pair : timeout_test_list) {
445       ColoredPrintf(COLOR_RED, "[ TIMEOUT  ] ");
446       printf("%s (stopped at %" PRId64 " ms)\n", timeout_pair.first.c_str(),
447                                                  timeout_pair.second / 1000000);
448     }
449   }
450 
451   // Print tests that were slow.
452   size_t slow_test_count = slow_test_list.size();
453   if (slow_test_count > 0) {
454     ColoredPrintf(COLOR_YELLOW, "[   SLOW   ] ");
455     printf("%zu %s, listed below:\n", slow_test_count, (slow_test_count == 1) ? "test" : "tests");
456     for (const auto& slow_tuple : slow_test_list) {
457       ColoredPrintf(COLOR_YELLOW, "[   SLOW   ] ");
458       printf("%s (%" PRId64 " ms, exceeded %d ms)\n", std::get<0>(slow_tuple).c_str(),
459              std::get<1>(slow_tuple) / 1000000, std::get<2>(slow_tuple));
460     }
461   }
462 
463   // Print tests that failed.
464   size_t fail_test_count = fail_test_name_list.size();
465   if (fail_test_count > 0) {
466     ColoredPrintf(COLOR_RED,  "[   FAIL   ] ");
467     printf("%zu %s, listed below:\n", fail_test_count, (fail_test_count == 1) ? "test" : "tests");
468     for (const auto& name : fail_test_name_list) {
469       ColoredPrintf(COLOR_RED, "[   FAIL   ] ");
470       printf("%s\n", name.c_str());
471     }
472   }
473 
474   if (timeout_test_count > 0 || slow_test_count > 0 || fail_test_count > 0) {
475     printf("\n");
476   }
477 
478   if (timeout_test_count > 0) {
479     printf("%2zu TIMEOUT %s\n", timeout_test_count, (timeout_test_count == 1) ? "TEST" : "TESTS");
480   }
481   if (slow_test_count > 0) {
482     printf("%2zu SLOW %s\n", slow_test_count, (slow_test_count == 1) ? "TEST" : "TESTS");
483   }
484   if (fail_test_count > 0) {
485     printf("%2zu FAILED %s\n", fail_test_count, (fail_test_count == 1) ? "TEST" : "TESTS");
486   }
487 
488   fflush(stdout);
489 }
490 
491 std::string XmlEscape(const std::string& xml) {
492   std::string escaped;
493   escaped.reserve(xml.size());
494 
495   for (auto c : xml) {
496     switch (c) {
497     case '<':
498       escaped.append("&lt;");
499       break;
500     case '>':
501       escaped.append("&gt;");
502       break;
503     case '&':
504       escaped.append("&amp;");
505       break;
506     case '\'':
507       escaped.append("&apos;");
508       break;
509     case '"':
510       escaped.append("&quot;");
511       break;
512     default:
513       escaped.append(1, c);
514       break;
515     }
516   }
517 
518   return escaped;
519 }
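// For example, XmlEscape("a<b & \"c\"") returns "a&lt;b &amp; &quot;c&quot;",
// which keeps the failure messages below safe to embed in XML attributes.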
520 
521 // Output the xml file when --gtest_output is used. We write this function ourselves because
522 // we can't reuse gtest.cc:XmlUnitTestResultPrinter: it is defined entirely inside gtest.cc and
523 // is not exposed to the outside. Moreover, since we don't run gtest in the parent process, we
524 // don't have the gtest classes that XmlUnitTestResultPrinter needs.
525 void OnTestIterationEndXmlPrint(const std::string& xml_output_filename,
526                                 const std::vector<TestCase>& testcase_list,
527                                 time_t epoch_iteration_start_time,
528                                 int64_t elapsed_time_ns) {
529   FILE* fp = fopen(xml_output_filename.c_str(), "w");
530   if (fp == NULL) {
531     fprintf(stderr, "failed to open '%s': %s\n", xml_output_filename.c_str(), strerror(errno));
532     exit(1);
533   }
534 
535   size_t total_test_count = 0;
536   size_t total_failed_count = 0;
537   std::vector<size_t> failed_count_list(testcase_list.size(), 0);
538   std::vector<int64_t> elapsed_time_list(testcase_list.size(), 0);
539   for (size_t i = 0; i < testcase_list.size(); ++i) {
540     auto& testcase = testcase_list[i];
541     total_test_count += testcase.TestCount();
542     for (size_t j = 0; j < testcase.TestCount(); ++j) {
543       if (!testcase.GetTestSuccess(j)) {
544         ++failed_count_list[i];
545       }
546       elapsed_time_list[i] += testcase.GetTestTime(j);
547     }
548     total_failed_count += failed_count_list[i];
549   }
550 
551   const tm* time_struct = localtime(&epoch_iteration_start_time);
552   char timestamp[40];
553   snprintf(timestamp, sizeof(timestamp), "%4d-%02d-%02dT%02d:%02d:%02d",
554            time_struct->tm_year + 1900, time_struct->tm_mon + 1, time_struct->tm_mday,
555            time_struct->tm_hour, time_struct->tm_min, time_struct->tm_sec);
556 
557   fputs("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n", fp);
558   fprintf(fp, "<testsuites tests=\"%zu\" failures=\"%zu\" disabled=\"0\" errors=\"0\"",
559           total_test_count, total_failed_count);
560   fprintf(fp, " timestamp=\"%s\" time=\"%.3lf\" name=\"AllTests\">\n", timestamp, elapsed_time_ns / 1e9);
561   for (size_t i = 0; i < testcase_list.size(); ++i) {
562     auto& testcase = testcase_list[i];
563     fprintf(fp, "  <testsuite name=\"%s\" tests=\"%zu\" failures=\"%zu\" disabled=\"0\" errors=\"0\"",
564             testcase.GetName().c_str(), testcase.TestCount(), failed_count_list[i]);
565     fprintf(fp, " time=\"%.3lf\">\n", elapsed_time_list[i] / 1e9);
566 
567     for (size_t j = 0; j < testcase.TestCount(); ++j) {
568       fprintf(fp, "    <testcase name=\"%s\" status=\"run\" time=\"%.3lf\" classname=\"%s\"",
569               testcase.GetTest(j).GetName().c_str(), testcase.GetTestTime(j) / 1e9,
570               testcase.GetName().c_str());
571       if (testcase.GetTestSuccess(j)) {
572         fputs(" />\n", fp);
573       } else {
574         fputs(">\n", fp);
575         const std::string& test_output = testcase.GetTest(j).GetTestOutput();
576         const std::string escaped_test_output = XmlEscape(test_output);
577         fprintf(fp, "      <failure message=\"%s\" type=\"\">\n", escaped_test_output.c_str());
578         fputs("      </failure>\n", fp);
579         fputs("    </testcase>\n", fp);
580       }
581     }
582 
583     fputs("  </testsuite>\n", fp);
584   }
585   fputs("</testsuites>\n", fp);
586   fclose(fp);
587 }
588 
589 static bool sigint_flag;
590 static bool sigquit_flag;
591 
592 static void signal_handler(int sig) {
593   if (sig == SIGINT) {
594     sigint_flag = true;
595   } else if (sig == SIGQUIT) {
596     sigquit_flag = true;
597   }
598 }
599 
600 static bool RegisterSignalHandler() {
601   sigint_flag = false;
602   sigquit_flag = false;
603   sig_t ret = signal(SIGINT, signal_handler);
604   if (ret != SIG_ERR) {
605     ret = signal(SIGQUIT, signal_handler);
606   }
607   if (ret == SIG_ERR) {
608     perror("RegisterSignalHandler");
609     return false;
610   }
611   return true;
612 }
613 
614 static bool UnregisterSignalHandler() {
615   sig_t ret = signal(SIGINT, SIG_DFL);
616   if (ret != SIG_ERR) {
617     ret = signal(SIGQUIT, SIG_DFL);
618   }
619   if (ret == SIG_ERR) {
620     perror("UnregisterSignalHandler");
621     return false;
622   }
623   return true;
624 }
625 
626 struct ChildProcInfo {
627   pid_t pid;
628   int64_t start_time_ns;
629   int64_t end_time_ns;
630   int64_t deadline_end_time_ns; // The time after which the test is considered to have timed out.
631   size_t testcase_id, test_id;
632   bool finished;
633   bool timed_out;
634   int exit_status;
635   int child_read_fd; // File descriptor for reading the child test's output.
636 };
637 
638 // Forked child process: runs a single test.
639 static void ChildProcessFn(int argc, char** argv, const std::string& test_name) {
640   char** new_argv = new char*[argc + 2];
641   memcpy(new_argv, argv, sizeof(char*) * argc);
642 
643   char* filter_arg = new char [test_name.size() + 20];
644   strcpy(filter_arg, "--gtest_filter=");
645   strcat(filter_arg, test_name.c_str());
646   new_argv[argc] = filter_arg;
647   new_argv[argc + 1] = NULL;
648 
649   int new_argc = argc + 1;
650   testing::InitGoogleTest(&new_argc, new_argv);
651   int result = RUN_ALL_TESTS();
652   exit(result);
653 }
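// Because the child appends --gtest_filter=<test_name> before calling
// InitGoogleTest(), RUN_ALL_TESTS() in the child executes exactly one test, and
// the child's exit status reflects that single test's result.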
654 
655 static ChildProcInfo RunChildProcess(const std::string& test_name, int testcase_id, int test_id,
656                                      int argc, char** argv) {
657   int pipefd[2];
658   if (pipe(pipefd) == -1) {
659     perror("pipe in RunTestInSeparateProc");
660     exit(1);
661   }
662   if (fcntl(pipefd[0], F_SETFL, O_NONBLOCK) == -1) {
663     perror("fcntl in RunTestInSeparateProc");
664     exit(1);
665   }
666   pid_t pid = fork();
667   if (pid == -1) {
668     perror("fork in RunTestInSeparateProc");
669     exit(1);
670   } else if (pid == 0) {
671     // In child process, run a single test.
672     close(pipefd[0]);
673     close(STDOUT_FILENO);
674     close(STDERR_FILENO);
675     dup2(pipefd[1], STDOUT_FILENO);
676     dup2(pipefd[1], STDERR_FILENO);
677 
678     if (!UnregisterSignalHandler()) {
679       exit(1);
680     }
681     ChildProcessFn(argc, argv, test_name);
682     // Unreachable.
683   }
684   // In parent process, initialize child process info.
685   close(pipefd[1]);
686   ChildProcInfo child_proc;
687   child_proc.child_read_fd = pipefd[0];
688   child_proc.pid = pid;
689   child_proc.start_time_ns = NanoTime();
690   child_proc.deadline_end_time_ns = child_proc.start_time_ns + GetTimeoutMs(test_name) * 1000000LL;
691   child_proc.testcase_id = testcase_id;
692   child_proc.test_id = test_id;
693   child_proc.finished = false;
694   return child_proc;
695 }
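// Only the read end of the pipe survives in the parent; everything the child
// writes to stdout/stderr is redirected into the pipe and is later pulled out
// through child_read_fd and attached to the test as its output.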
696 
697 static void HandleSignals(std::vector<TestCase>& testcase_list,
698                             std::vector<ChildProcInfo>& child_proc_list) {
699   if (sigquit_flag) {
700     sigquit_flag = false;
701     // Print the currently running tests.
702     printf("List of currently running tests:\n");
703     for (const auto& child_proc : child_proc_list) {
704       if (child_proc.pid != 0) {
705         std::string test_name = testcase_list[child_proc.testcase_id].GetTestName(child_proc.test_id);
706         int64_t current_time_ns = NanoTime();
707         int64_t run_time_ms = (current_time_ns - child_proc.start_time_ns) / 1000000;
708         printf("  %s (%" PRId64 " ms)\n", test_name.c_str(), run_time_ms);
709       }
710     }
711   } else if (sigint_flag) {
712     sigint_flag = false;
713     // Kill the currently running tests.
714     for (const auto& child_proc : child_proc_list) {
715       if (child_proc.pid != 0) {
716         // Send SIGKILL to ensure the child process can be killed unconditionally.
717         kill(child_proc.pid, SIGKILL);
718       }
719     }
720     // SIGINT kills the parent process as well.
721     exit(1);
722   }
723 }
724 
725 static bool CheckChildProcExit(pid_t exit_pid, int exit_status,
726                                std::vector<ChildProcInfo>& child_proc_list) {
727   for (size_t i = 0; i < child_proc_list.size(); ++i) {
728     if (child_proc_list[i].pid == exit_pid) {
729       child_proc_list[i].finished = true;
730       child_proc_list[i].timed_out = false;
731       child_proc_list[i].exit_status = exit_status;
732       child_proc_list[i].end_time_ns = NanoTime();
733       return true;
734     }
735   }
736   return false;
737 }
738 
739 static size_t CheckChildProcTimeout(std::vector<ChildProcInfo>& child_proc_list) {
740   int64_t current_time_ns = NanoTime();
741   size_t timeout_child_count = 0;
742   for (size_t i = 0; i < child_proc_list.size(); ++i) {
743     if (child_proc_list[i].deadline_end_time_ns <= current_time_ns) {
744       child_proc_list[i].finished = true;
745       child_proc_list[i].timed_out = true;
746       child_proc_list[i].end_time_ns = current_time_ns;
747       ++timeout_child_count;
748     }
749   }
750   return timeout_child_count;
751 }
752 
753 static void ReadChildProcOutput(std::vector<TestCase>& testcase_list,
754                                 std::vector<ChildProcInfo>& child_proc_list) {
755   for (const auto& child_proc : child_proc_list) {
756     TestCase& testcase = testcase_list[child_proc.testcase_id];
757     int test_id = child_proc.test_id;
758     while (true) {
759       char buf[1024];
760       ssize_t bytes_read = TEMP_FAILURE_RETRY(read(child_proc.child_read_fd, buf, sizeof(buf) - 1));
761       if (bytes_read > 0) {
762         buf[bytes_read] = '\0';
763         testcase.GetTest(test_id).AppendTestOutput(buf);
764       } else if (bytes_read == 0) {
765         break; // EOF: the child has closed its end of the pipe.
766       } else {
767         if (errno == EAGAIN) {
768           break;
769         }
770         perror("failed to read child_read_fd");
771         exit(1);
772       }
773     }
774   }
775 }
776 
777 static void WaitChildProcs(std::vector<TestCase>& testcase_list,
778                            std::vector<ChildProcInfo>& child_proc_list) {
779   size_t finished_child_count = 0;
780   while (true) {
781     int status;
782     pid_t result;
783     while ((result = TEMP_FAILURE_RETRY(waitpid(-1, &status, WNOHANG))) > 0) {
784       if (CheckChildProcExit(result, status, child_proc_list)) {
785         ++finished_child_count;
786       }
787     }
788 
789     if (result == -1) {
790       if (errno == ECHILD) {
791         // This happens when we have no running child processes.
792         return;
793       } else {
794         perror("waitpid");
795         exit(1);
796       }
797     } else if (result == 0) {
798       finished_child_count += CheckChildProcTimeout(child_proc_list);
799     }
800 
801     ReadChildProcOutput(testcase_list, child_proc_list);
802     if (finished_child_count > 0) {
803       return;
804     }
805 
806     HandleSignals(testcase_list, child_proc_list);
807 
808     // sleep 1 ms to avoid busy looping.
809     timespec sleep_time;
810     sleep_time.tv_sec = 0;
811     sleep_time.tv_nsec = 1000000;
812     nanosleep(&sleep_time, NULL);
813   }
814 }
815 
816 static TestResult WaitForOneChild(pid_t pid) {
817   int exit_status;
818   pid_t result = TEMP_FAILURE_RETRY(waitpid(pid, &exit_status, 0));
819 
820   TestResult test_result = TEST_SUCCESS;
821   if (result != pid || WEXITSTATUS(exit_status) != 0) {
822     test_result = TEST_FAILED;
823   }
824   return test_result;
825 }
826 
827 static void CollectChildTestResult(const ChildProcInfo& child_proc, TestCase& testcase) {
828   int test_id = child_proc.test_id;
829   testcase.SetTestTime(test_id, child_proc.end_time_ns - child_proc.start_time_ns);
830   if (child_proc.timed_out) {
831     // The child process marked as timed_out has not exited, and we should kill it manually.
832     kill(child_proc.pid, SIGKILL);
833     WaitForOneChild(child_proc.pid);
834   }
835   close(child_proc.child_read_fd);
836 
837   if (child_proc.timed_out) {
838     testcase.SetTestResult(test_id, TEST_TIMEOUT);
839     char buf[1024];
840     snprintf(buf, sizeof(buf), "%s killed because of timeout at %" PRId64 " ms.\n",
841              testcase.GetTestName(test_id).c_str(), testcase.GetTestTime(test_id) / 1000000);
842     testcase.GetTest(test_id).AppendTestOutput(buf);
843 
844   } else if (WIFSIGNALED(child_proc.exit_status)) {
845     // Record signal terminated test as failed.
846     testcase.SetTestResult(test_id, TEST_FAILED);
847     char buf[1024];
848     snprintf(buf, sizeof(buf), "%s terminated by signal: %s.\n",
849              testcase.GetTestName(test_id).c_str(), strsignal(WTERMSIG(child_proc.exit_status)));
850     testcase.GetTest(test_id).AppendTestOutput(buf);
851 
852   } else {
853     int exitcode = WEXITSTATUS(child_proc.exit_status);
854     testcase.SetTestResult(test_id, exitcode == 0 ? TEST_SUCCESS : TEST_FAILED);
855     if (exitcode != 0) {
856       char buf[1024];
857       snprintf(buf, sizeof(buf), "%s exited with exitcode %d.\n",
858                testcase.GetTestName(test_id).c_str(), exitcode);
859       testcase.GetTest(test_id).AppendTestOutput(buf);
860     }
861   }
862 }
863 
864 // We use multiple forks and waits here instead of multiple threads, because calling fork()
865 // in a multi-threaded process can easily deadlock.
866 // Returns true if all tests run successfully, otherwise returns false.
867 static bool RunTestInSeparateProc(int argc, char** argv, std::vector<TestCase>& testcase_list,
868                                   int iteration_count, size_t job_count,
869                                   const std::string& xml_output_filename) {
870   // Remove the default result printer to avoid printing environment setup/teardown information for each test.
871   testing::UnitTest::GetInstance()->listeners().Release(
872                         testing::UnitTest::GetInstance()->listeners().default_result_printer());
873   testing::UnitTest::GetInstance()->listeners().Append(new TestResultPrinter);
874 
875   if (!RegisterSignalHandler()) {
876     exit(1);
877   }
878 
879   bool all_tests_passed = true;
880 
881   for (size_t iteration = 1;
882        iteration_count < 0 || iteration <= static_cast<size_t>(iteration_count);
883        ++iteration) {
884     OnTestIterationStartPrint(testcase_list, iteration, iteration_count, job_count);
885     int64_t iteration_start_time_ns = NanoTime();
886     time_t epoch_iteration_start_time = time(NULL);
887 
888     // Run up to job_count tests in parallel, each test in a child process.
889     std::vector<ChildProcInfo> child_proc_list;
890 
891     // Next test to run is [next_testcase_id:next_test_id].
892     size_t next_testcase_id = 0;
893     size_t next_test_id = 0;
894 
895     // Record how many tests are finished.
896     std::vector<size_t> finished_test_count_list(testcase_list.size(), 0);
897     size_t finished_testcase_count = 0;
898 
899     while (finished_testcase_count < testcase_list.size()) {
900       // run up to job_count child processes.
901       while (child_proc_list.size() < job_count && next_testcase_id < testcase_list.size()) {
902         std::string test_name = testcase_list[next_testcase_id].GetTestName(next_test_id);
903         ChildProcInfo child_proc = RunChildProcess(test_name, next_testcase_id, next_test_id,
904                                                    argc, argv);
905         child_proc_list.push_back(child_proc);
906         if (++next_test_id == testcase_list[next_testcase_id].TestCount()) {
907           next_test_id = 0;
908           ++next_testcase_id;
909         }
910       }
911 
912       // Wait for any child process to finish or time out.
913       WaitChildProcs(testcase_list, child_proc_list);
914 
915       // Collect result.
916       auto it = child_proc_list.begin();
917       while (it != child_proc_list.end()) {
918         auto& child_proc = *it;
919         if (child_proc.finished == true) {
920           size_t testcase_id = child_proc.testcase_id;
921           size_t test_id = child_proc.test_id;
922           TestCase& testcase = testcase_list[testcase_id];
923 
924           CollectChildTestResult(child_proc, testcase);
925           OnTestEndPrint(testcase, test_id);
926 
927           if (++finished_test_count_list[testcase_id] == testcase.TestCount()) {
928             ++finished_testcase_count;
929           }
930           if (!testcase.GetTestSuccess(test_id)) {
931             all_tests_passed = false;
932           }
933 
934           it = child_proc_list.erase(it);
935         } else {
936           ++it;
937         }
938       }
939     }
940 
941     int64_t elapsed_time_ns = NanoTime() - iteration_start_time_ns;
942     OnTestIterationEndPrint(testcase_list, iteration, elapsed_time_ns);
943     if (!xml_output_filename.empty()) {
944       OnTestIterationEndXmlPrint(xml_output_filename, testcase_list, epoch_iteration_start_time,
945                                  elapsed_time_ns);
946     }
947   }
948 
949   if (!UnregisterSignalHandler()) {
950     exit(1);
951   }
952 
953   return all_tests_passed;
954 }
955 
956 static size_t GetDefaultJobCount() {
957   return static_cast<size_t>(sysconf(_SC_NPROCESSORS_ONLN));
958 }
959 
960 static void AddPathSeparatorInTestProgramPath(std::vector<char*>& args) {
961   // To run a DeathTest in threadsafe mode, gtest requires the test program to be invoked
962   // via a valid path that contains at least one path separator.
963   // The reason is that gtest uses clone() + execve() to run a DeathTest in threadsafe mode,
964   // and execve() doesn't consult the PATH environment variable, so it will not succeed
965   // unless we pass the absolute or relative path of the test program directly.
966   if (strchr(args[0], '/') == nullptr) {
967     args[0] = strdup(g_executable_path.c_str());
968   }
969 }
970 
971 static void AddGtestFilterSynonym(std::vector<char*>& args) {
972   // Support --gtest-filter as a synonym for --gtest_filter.
973   for (size_t i = 1; i < args.size(); ++i) {
974     if (strncmp(args[i], "--gtest-filter", strlen("--gtest-filter")) == 0) {
975       args[i][7] = '_';
976     }
977   }
978 }
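// For example, "--gtest-filter=unistd*" is rewritten in place to
// "--gtest_filter=unistd*"; args[i][7] is the '-' between "gtest" and "filter".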
979 
980 struct IsolationTestOptions {
981   bool isolate;
982   size_t job_count;
983   int test_deadline_ms;
984   int test_slow_threshold_ms;
985   std::string gtest_color;
986   bool gtest_print_time;
987   int gtest_repeat;
988   std::string gtest_output;
989 };
990 
991 // Pick out the options that are not meant for gtest: args has two parts, one used in isolation
992 // test mode as described in PrintHelpInfo(), and the other handled by testing::InitGoogleTest()
993 // in gtest. PickOptions() moves the first part into the IsolationTestOptions structure, leaving
994 // the second part in args.
995 // Arguments:
996 //   args passes in all command arguments, and passes out only the options meant for gtest.
997 //   options passes out the test options used in isolation mode.
998 // Returns false if there is an error in the arguments.
999 static bool PickOptions(std::vector<char*>& args, IsolationTestOptions& options) {
1000   for (size_t i = 1; i < args.size(); ++i) {
1001     if (strcmp(args[i], "--help") == 0 || strcmp(args[i], "-h") == 0) {
1002       PrintHelpInfo();
1003       options.isolate = false;
1004       return true;
1005     }
1006   }
1007 
1008   AddPathSeparatorInTestProgramPath(args);
1009   AddGtestFilterSynonym(args);
1010 
1011   // If the --bionic-selftest argument is used, only enable the self tests; otherwise exclude them.
1012   bool enable_selftest = false;
1013   for (size_t i = 1; i < args.size(); ++i) {
1014     if (strcmp(args[i], "--bionic-selftest") == 0) {
1015       // This argument enables "bionic_selftest*" for self testing, and is not shown in the help info.
1016       // Don't remove this option from the arguments.
1017       enable_selftest = true;
1018     }
1019   }
1020   std::string gtest_filter_str;
1021   for (size_t i = args.size() - 1; i >= 1; --i) {
1022     if (strncmp(args[i], "--gtest_filter=", strlen("--gtest_filter=")) == 0) {
1023       gtest_filter_str = args[i] + strlen("--gtest_filter=");
1024       args.erase(args.begin() + i);
1025       break;
1026     }
1027   }
1028   if (enable_selftest == true) {
1029     gtest_filter_str = "bionic_selftest*";
1030   } else {
1031     if (gtest_filter_str.empty()) {
1032       gtest_filter_str = "-bionic_selftest*";
1033     } else {
1034       // Check whether a '-' introducing NEGATIVE_PATTERNS already exists.
1035       if (gtest_filter_str.find('-') != std::string::npos) {
1036         gtest_filter_str += ":bionic_selftest*";
1037       } else {
1038         gtest_filter_str += ":-bionic_selftest*";
1039       }
1040     }
1041   }
1042   gtest_filter_str = "--gtest_filter=" + gtest_filter_str;
1043   args.push_back(strdup(gtest_filter_str.c_str()));
1044 
1045   options.isolate = true;
1046   // Parse arguments that prevent us from running in isolation mode.
1047   for (size_t i = 1; i < args.size(); ++i) {
1048     if (strcmp(args[i], "--no-isolate") == 0) {
1049       options.isolate = false;
1050     } else if (strcmp(args[i], "--gtest_list_tests") == 0) {
1051       options.isolate = false;
1052     }
1053   }
1054 
1055   // Stop parsing if we will not run in isolation mode.
1056   if (options.isolate == false) {
1057     return true;
1058   }
1059 
1060   // Init default isolation test options.
1061   options.job_count = GetDefaultJobCount();
1062   options.test_deadline_ms = DEFAULT_GLOBAL_TEST_RUN_DEADLINE_MS;
1063   options.test_slow_threshold_ms = DEFAULT_GLOBAL_TEST_RUN_SLOW_THRESHOLD_MS;
1064   options.gtest_color = testing::GTEST_FLAG(color);
1065   options.gtest_print_time = testing::GTEST_FLAG(print_time);
1066   options.gtest_repeat = testing::GTEST_FLAG(repeat);
1067   options.gtest_output = testing::GTEST_FLAG(output);
1068 
1069   // Parse arguments specified for isolation mode.
1070   for (size_t i = 1; i < args.size(); ++i) {
1071     if (strncmp(args[i], "-j", strlen("-j")) == 0) {
1072       char* p = args[i] + strlen("-j");
1073       int count = 0;
1074       if (*p != '\0') {
1075         // Argument like -j5.
1076         count = atoi(p);
1077       } else if (args.size() > i + 1) {
1078         // Arguments like -j 5.
1079         count = atoi(args[i + 1]);
1080         ++i;
1081       }
1082       if (count <= 0) {
1083         fprintf(stderr, "invalid job count: %d\n", count);
1084         return false;
1085       }
1086       options.job_count = static_cast<size_t>(count);
1087     } else if (strncmp(args[i], "--deadline=", strlen("--deadline=")) == 0) {
1088       int time_ms = atoi(args[i] + strlen("--deadline="));
1089       if (time_ms <= 0) {
1090         fprintf(stderr, "invalid deadline: %d\n", time_ms);
1091         return false;
1092       }
1093       options.test_deadline_ms = time_ms;
1094     } else if (strncmp(args[i], "--slow-threshold=", strlen("--slow-threshold=")) == 0) {
1095       int time_ms = atoi(args[i] + strlen("--slow-threshold="));
1096       if (time_ms <= 0) {
1097         fprintf(stderr, "invalid slow test threshold: %d\n", time_ms);
1098         return false;
1099       }
1100       options.test_slow_threshold_ms = time_ms;
1101     } else if (strncmp(args[i], "--gtest_color=", strlen("--gtest_color=")) == 0) {
1102       options.gtest_color = args[i] + strlen("--gtest_color=");
1103     } else if (strcmp(args[i], "--gtest_print_time=0") == 0) {
1104       options.gtest_print_time = false;
1105     } else if (strncmp(args[i], "--gtest_repeat=", strlen("--gtest_repeat=")) == 0) {
1106       // If the value of gtest_repeat is < 0, then it indicates the tests
1107       // should be repeated forever.
1108       options.gtest_repeat = atoi(args[i] + strlen("--gtest_repeat="));
1109       // Remove --gtest_repeat=xx from the arguments, so each child process runs only one iteration of its single test.
1110       args.erase(args.begin() + i);
1111       --i;
1112     } else if (strncmp(args[i], "--gtest_output=", strlen("--gtest_output=")) == 0) {
1113       std::string output = args[i] + strlen("--gtest_output=");
1114       // Generate the output xml file path following the same strategy as gtest.
1115       bool success = true;
1116       if (strncmp(output.c_str(), "xml:", strlen("xml:")) == 0) {
1117         output = output.substr(strlen("xml:"));
1118         if (output.size() == 0) {
1119           success = false;
1120         }
1121         // Make absolute path.
1122         if (success && output[0] != '/') {
1123           char* cwd = getcwd(NULL, 0);
1124           if (cwd != NULL) {
1125             output = std::string(cwd) + "/" + output;
1126             free(cwd);
1127           } else {
1128             success = false;
1129           }
1130         }
1131         // Add file name if output is a directory.
1132         if (success && output.back() == '/') {
1133           output += "test_details.xml";
1134         }
1135       }
1136       if (success) {
1137         options.gtest_output = output;
1138       } else {
1139         fprintf(stderr, "invalid gtest_output file: %s\n", args[i]);
1140         return false;
1141       }
1142 
1143       // Remove --gtest_output=xxx from the arguments, so child processes will not write xml files.
1144       args.erase(args.begin() + i);
1145       --i;
1146     }
1147   }
1148 
1149   // Add --no-isolate in args to prevent child process from running in isolation mode again.
1150   // As DeathTest will try to call execve(), this argument should always be added.
1151   args.insert(args.begin() + 1, strdup("--no-isolate"));
1152   return true;
1153 }
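// A sketch of the resulting split, assuming a hypothetical invocation
//   ./bionic-unit-tests -j2 --gtest_filter=Foo.* --gtest_repeat=3
// PickOptions() sets options.job_count = 2 and options.gtest_repeat = 3, and
// rewrites args to
//   ./bionic-unit-tests --no-isolate -j2 --gtest_filter=Foo.*:-bionic_selftest*
// so that each child process runs its single test exactly once.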
1154 
1155 static std::string get_proc_self_exe() {
1156   char path[PATH_MAX];
1157   ssize_t path_len = readlink("/proc/self/exe", path, sizeof(path));
1158   if (path_len <= 0 || path_len >= static_cast<ssize_t>(sizeof(path))) {
1159     perror("readlink");
1160     exit(1);
1161   }
1162 
1163   return std::string(path, path_len);
1164 }
1165 
1166 int main(int argc, char** argv, char** envp) {
1167   g_executable_path = get_proc_self_exe();
1168   g_argc = argc;
1169   g_argv = argv;
1170   g_envp = envp;
1171   std::vector<char*> arg_list;
1172   for (int i = 0; i < argc; ++i) {
1173     arg_list.push_back(argv[i]);
1174   }
1175 
1176   IsolationTestOptions options;
1177   if (PickOptions(arg_list, options) == false) {
1178     return 1;
1179   }
1180 
1181   if (options.isolate == true) {
1182     // Set global variables.
1183     global_test_run_deadline_ms = options.test_deadline_ms;
1184     global_test_run_slow_threshold_ms = options.test_slow_threshold_ms;
1185     testing::GTEST_FLAG(color) = options.gtest_color.c_str();
1186     testing::GTEST_FLAG(print_time) = options.gtest_print_time;
1187     std::vector<TestCase> testcase_list;
1188 
1189     argc = static_cast<int>(arg_list.size());
1190     arg_list.push_back(NULL);
1191     if (EnumerateTests(argc, arg_list.data(), testcase_list) == false) {
1192       return 1;
1193     }
1194     bool all_test_passed =  RunTestInSeparateProc(argc, arg_list.data(), testcase_list,
1195                               options.gtest_repeat, options.job_count, options.gtest_output);
1196     return all_test_passed ? 0 : 1;
1197   } else {
1198     argc = static_cast<int>(arg_list.size());
1199     arg_list.push_back(NULL);
1200     testing::InitGoogleTest(&argc, arg_list.data());
1201     return RUN_ALL_TESTS();
1202   }
1203 }
1204 
1205 //################################################################################
1206 // Bionic gtest self tests; run them with the --bionic-selftest option.
1207 
1208 TEST(bionic_selftest, test_success) {
1209   ASSERT_EQ(1, 1);
1210 }
1211 
1212 TEST(bionic_selftest, test_fail) {
1213   ASSERT_EQ(0, 1);
1214 }
1215 
1216 TEST(bionic_selftest, test_time_warn) {
1217   sleep(4);
1218 }
1219 
1220 TEST(bionic_selftest, test_timeout) {
1221   while (1) {}
1222 }
1223 
1224 TEST(bionic_selftest, test_signal_SEGV_terminated) {
1225   char* p = reinterpret_cast<char*>(static_cast<intptr_t>(atoi("0")));
1226   *p = 3;
1227 }
1228 
1229 class bionic_selftest_DeathTest : public ::testing::Test {
1230  protected:
1231   virtual void SetUp() {
1232     ::testing::FLAGS_gtest_death_test_style = "threadsafe";
1233   }
1234 };
1235 
1236 static void deathtest_helper_success() {
1237   ASSERT_EQ(1, 1);
1238   exit(0);
1239 }
1240 
1241 TEST_F(bionic_selftest_DeathTest, success) {
1242   ASSERT_EXIT(deathtest_helper_success(), ::testing::ExitedWithCode(0), "");
1243 }
1244 
1245 static void deathtest_helper_fail() {
1246   ASSERT_EQ(1, 0);
1247 }
1248 
1249 TEST_F(bionic_selftest_DeathTest, fail) {
1250   ASSERT_EXIT(deathtest_helper_fail(), ::testing::ExitedWithCode(0), "");
1251 }
1252