// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "base/test/launcher/unit_test_launcher.h"

#include <map>
#include <memory>
#include <utility>

#include "base/base_switches.h"
#include "base/bind.h"
#include "base/callback_helpers.h"
#include "base/command_line.h"
#include "base/compiler_specific.h"
#include "base/debug/debugger.h"
#include "base/files/file_util.h"
#include "base/files/scoped_temp_dir.h"
#include "base/format_macros.h"
#include "base/location.h"
#include "base/macros.h"
#include "base/message_loop/message_loop.h"
#include "base/sequence_checker.h"
#include "base/single_thread_task_runner.h"
#include "base/stl_util.h"
#include "base/strings/string_number_conversions.h"
#include "base/strings/string_util.h"
#include "base/sys_info.h"
#include "base/test/gtest_xml_util.h"
#include "base/test/launcher/test_launcher.h"
#include "base/test/test_switches.h"
#include "base/test/test_timeouts.h"
#include "base/third_party/dynamic_annotations/dynamic_annotations.h"
#include "base/threading/thread_checker.h"
#include "base/threading/thread_task_runner_handle.h"
#include "build/build_config.h"
#include "testing/gtest/include/gtest/gtest.h"

#if defined(OS_POSIX)
#include "base/files/file_descriptor_watcher_posix.h"
#endif

namespace base {

namespace {

// This constant controls how many tests are run in a single batch by default.
const size_t kDefaultTestBatchLimit = 10;

const char kHelpFlag[] = "help";

// Flag to run all tests in a single process.
const char kSingleProcessTestsFlag[] = "single-process-tests";

void PrintUsage() {
  fprintf(stdout,
          "Runs tests using the gtest framework, each batch of tests being\n"
          "run in its own process. Supported command-line flags:\n"
          "\n"
          " Common flags:\n"
          "  --gtest_filter=...\n"
          "    Runs a subset of tests (see --gtest_help for more info).\n"
          "\n"
          "  --help\n"
          "    Shows this message.\n"
          "\n"
          "  --gtest_help\n"
          "    Shows the gtest help message.\n"
          "\n"
          "  --test-launcher-jobs=N\n"
          "    Sets the number of parallel test jobs to N.\n"
          "\n"
          "  --single-process-tests\n"
          "    Runs the tests and the launcher in the same process. Useful\n"
          "    for debugging a specific test in a debugger.\n"
          "\n"
          " Other flags:\n"
          "  --test-launcher-filter-file=PATH\n"
          "    Like --gtest_filter, but reads the test filter from PATH.\n"
          "    One pattern per line; lines starting with '-' are exclusions.\n"
          "    See also //testing/buildbot/filters/README.md.\n"
          "\n"
          "  --test-launcher-batch-limit=N\n"
          "    Sets the maximum number of tests run in a single process\n"
          "    to N.\n"
          "\n"
          "  --test-launcher-debug-launcher\n"
          "    Disables autodetection of debuggers and similar tools,\n"
          "    making it possible to use them to debug the launcher itself.\n"
          "\n"
          "  --test-launcher-retry-limit=N\n"
          "    Sets the limit of test retries on failures to N.\n"
          "\n"
          "  --test-launcher-summary-output=PATH\n"
          "    Saves a JSON machine-readable summary of the run.\n"
          "\n"
          "  --test-launcher-print-test-stdio=auto|always|never\n"
          "    Controls when full test output is printed.\n"
          "    auto means to print it when the test failed.\n"
          "\n"
          "  --test-launcher-test-part-results-limit=N\n"
          "    Sets the limit of failed EXPECT/ASSERT entries in the XML and\n"
          "    JSON outputs per test to N (default N=10). A negative value\n"
          "    disables the limit.\n"
          "\n"
          "  --test-launcher-total-shards=N\n"
          "    Sets the total number of shards to N.\n"
          "\n"
          "  --test-launcher-shard-index=N\n"
          "    Sets the shard index to run to N (from 0 to TOTAL - 1).\n");
  fflush(stdout);
}

class DefaultUnitTestPlatformDelegate : public UnitTestPlatformDelegate {
 public:
  DefaultUnitTestPlatformDelegate() = default;

 private:
  // UnitTestPlatformDelegate:
  bool GetTests(std::vector<TestIdentifier>* output) override {
    *output = GetCompiledInTests();
    return true;
  }

  bool CreateResultsFile(base::FilePath* path) override {
    if (!CreateNewTempDirectory(FilePath::StringType(), path))
      return false;
    *path = path->AppendASCII("test_results.xml");
    return true;
  }

  bool CreateTemporaryFile(base::FilePath* path) override {
    if (!temp_dir_.IsValid() && !temp_dir_.CreateUniqueTempDir())
      return false;
    return CreateTemporaryFileInDir(temp_dir_.GetPath(), path);
  }

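  // The combined gtest filter for a batch can exceed OS command-line length
  // limits when many tests run in one process, so the filter is written to a
  // temporary "flagfile" and passed via --gtest_flagfile rather than directly
  // on the command line.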
  CommandLine GetCommandLineForChildGTestProcess(
      const std::vector<std::string>& test_names,
      const base::FilePath& output_file,
      const base::FilePath& flag_file) override {
    CommandLine new_cmd_line(*CommandLine::ForCurrentProcess());

    CHECK(base::PathExists(flag_file));

    std::string long_flags(std::string("--") + kGTestFilterFlag + "=" +
                           JoinString(test_names, ":"));
    CHECK_EQ(static_cast<int>(long_flags.size()),
             WriteFile(flag_file, long_flags.data(),
                       static_cast<int>(long_flags.size())));

    new_cmd_line.AppendSwitchPath(switches::kTestLauncherOutput, output_file);
    new_cmd_line.AppendSwitchPath(kGTestFlagfileFlag, flag_file);
    new_cmd_line.AppendSwitch(kSingleProcessTestsFlag);

    return new_cmd_line;
  }

  std::string GetWrapperForChildGTestProcess() override {
    return std::string();
  }

  void RelaunchTests(TestLauncher* test_launcher,
                     const std::vector<std::string>& test_names,
                     int launch_flags) override {
    // Relaunch requested tests in parallel, but use only a single test per
    // batch for more precise results (crashes, etc.).
    for (const std::string& test_name : test_names) {
      std::vector<std::string> batch;
      batch.push_back(test_name);
      RunUnitTestsBatch(test_launcher, this, batch, launch_flags);
    }
  }

  ScopedTempDir temp_dir_;

  DISALLOW_COPY_AND_ASSIGN(DefaultUnitTestPlatformDelegate);
};

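// Returns true and leaves |result| unchanged if |switch_name| is absent from
// the command line; returns false (and logs an error) if the value is
// malformed or negative.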
bool GetSwitchValueAsInt(const std::string& switch_name, int* result) {
  if (!CommandLine::ForCurrentProcess()->HasSwitch(switch_name))
    return true;

  std::string switch_value =
      CommandLine::ForCurrentProcess()->GetSwitchValueASCII(switch_name);
  if (!StringToInt(switch_value, result) || *result < 0) {
    LOG(ERROR) << "Invalid value for " << switch_name << ": " << switch_value;
    return false;
  }

  return true;
}

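// Shared implementation behind the LaunchUnitTests* entry points. Runs the
// suite in-process when a debugger or one of the single-process flags is
// detected; otherwise initializes gtest and test timeouts, then drives
// TestLauncher. Returns the process exit code.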
int LaunchUnitTestsInternal(RunTestSuiteCallback run_test_suite,
                            size_t parallel_jobs,
                            int default_batch_limit,
                            bool use_job_objects,
                            OnceClosure gtest_init) {
#if defined(OS_ANDROID)
  // We can't easily fork on Android; just run the test suite directly.
  return std::move(run_test_suite).Run();
#else
  bool force_single_process = false;
  if (CommandLine::ForCurrentProcess()->HasSwitch(
          switches::kTestLauncherDebugLauncher)) {
    fprintf(stdout, "Forcing test launcher debugging mode.\n");
    fflush(stdout);
  } else {
    if (base::debug::BeingDebugged()) {
      fprintf(stdout,
              "Debugger detected, switching to single process mode.\n"
              "Pass --test-launcher-debug-launcher to debug the launcher "
              "itself.\n");
      fflush(stdout);
      force_single_process = true;
    }
  }

  if (CommandLine::ForCurrentProcess()->HasSwitch(kGTestHelpFlag) ||
      CommandLine::ForCurrentProcess()->HasSwitch(kGTestListTestsFlag) ||
      CommandLine::ForCurrentProcess()->HasSwitch(kSingleProcessTestsFlag) ||
      CommandLine::ForCurrentProcess()->HasSwitch(
          switches::kTestChildProcess) ||
      force_single_process) {
    return std::move(run_test_suite).Run();
  }
#endif

  if (CommandLine::ForCurrentProcess()->HasSwitch(kHelpFlag)) {
    PrintUsage();
    return 0;
  }

  TimeTicks start_time(TimeTicks::Now());

  std::move(gtest_init).Run();
  TestTimeouts::Initialize();

  int batch_limit = default_batch_limit;
  if (!GetSwitchValueAsInt(switches::kTestLauncherBatchLimit, &batch_limit))
    return 1;

  fprintf(stdout,
          "IMPORTANT DEBUGGING NOTE: batches of tests are run inside their\n"
          "own process. For debugging a test inside a debugger, use the\n"
          "--gtest_filter=<your_test_name> flag along with\n"
          "--single-process-tests.\n");
  fflush(stdout);

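  // TestLauncher needs an IO-capable main message loop (plus, on POSIX, a
  // FileDescriptorWatcher) to monitor child test processes and their output.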
  MessageLoopForIO message_loop;
#if defined(OS_POSIX)
  FileDescriptorWatcher file_descriptor_watcher(&message_loop);
#endif

  DefaultUnitTestPlatformDelegate platform_delegate;
  UnitTestLauncherDelegate delegate(&platform_delegate, batch_limit,
                                    use_job_objects);
  TestLauncher launcher(&delegate, parallel_jobs);
  bool success = launcher.Run();

  fprintf(stdout, "Tests took %" PRId64 " seconds.\n",
          (TimeTicks::Now() - start_time).InSeconds());
  fflush(stdout);

  return (success ? 0 : 1);
}

void InitGoogleTestChar(int* argc, char** argv) {
  testing::InitGoogleTest(argc, argv);
}

#if defined(OS_WIN)
void InitGoogleTestWChar(int* argc, wchar_t** argv) {
  testing::InitGoogleTest(argc, argv);
}
#endif  // defined(OS_WIN)

// Interprets test results and reports to the test launcher. Returns true
// on success.
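// Tests that produced no result because an earlier test in the same batch
// crashed (or because the XML could not be parsed for a multi-test batch)
// are appended to |tests_to_relaunch| instead of being reported.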
bool ProcessTestResults(TestLauncher* test_launcher,
                        const std::vector<std::string>& test_names,
                        const base::FilePath& output_file,
                        const std::string& output,
                        int exit_code,
                        bool was_timeout,
                        std::vector<std::string>* tests_to_relaunch) {
  std::vector<TestResult> test_results;
  bool crashed = false;
  bool have_test_results =
      ProcessGTestOutput(output_file, &test_results, &crashed);

  bool called_any_callback = false;

  if (have_test_results) {
    // TODO(phajdan.jr): Check for duplicates and mismatches between
    // the results we got from the XML file and the tests we intended to run.
    std::map<std::string, TestResult> results_map;
    for (size_t i = 0; i < test_results.size(); i++)
      results_map[test_results[i].full_name] = test_results[i];

    bool had_interrupted_test = false;

    // Results to be reported back to the test launcher.
    std::vector<TestResult> final_results;

    for (size_t i = 0; i < test_names.size(); i++) {
      if (ContainsKey(results_map, test_names[i])) {
        TestResult test_result = results_map[test_names[i]];
        if (test_result.status == TestResult::TEST_CRASH) {
          had_interrupted_test = true;

          if (was_timeout) {
            // Fix up the test status: we forcibly kill the child process
            // after the timeout, so from the XML results it looks just like
            // a crash.
            test_result.status = TestResult::TEST_TIMEOUT;
          }
        } else if (test_result.status == TestResult::TEST_SUCCESS ||
                   test_result.status == TestResult::TEST_FAILURE) {
          // We run multiple tests in a batch with a timeout applied
          // to the entire batch. It is possible that with other tests
          // running quickly some tests take longer than the per-test timeout.
          // For consistent handling of tests independent of order and other
          // factors, mark them as timing out.
          if (test_result.elapsed_time >
              TestTimeouts::test_launcher_timeout()) {
            test_result.status = TestResult::TEST_TIMEOUT;
          }
        }
        test_result.output_snippet = GetTestOutputSnippet(test_result, output);
        final_results.push_back(test_result);
      } else if (had_interrupted_test) {
        tests_to_relaunch->push_back(test_names[i]);
      } else {
        // TODO(phajdan.jr): Explicitly pass the info that the test didn't
        // run for a mysterious reason.
        LOG(ERROR) << "no test result for " << test_names[i];
        TestResult test_result;
        test_result.full_name = test_names[i];
        test_result.status = TestResult::TEST_UNKNOWN;
        test_result.output_snippet = GetTestOutputSnippet(test_result, output);
        final_results.push_back(test_result);
      }
    }

    // TODO(phajdan.jr): Handle the case where processing XML output
    // indicates a crash but none of the test results is marked as crashing.

    if (final_results.empty())
      return false;

    bool has_non_success_test = false;
    for (size_t i = 0; i < final_results.size(); i++) {
      if (final_results[i].status != TestResult::TEST_SUCCESS) {
        has_non_success_test = true;
        break;
      }
    }

    if (!has_non_success_test && exit_code != 0) {
      // This is a somewhat surprising case: all tests are marked as
      // successful, but the exit code was not zero. This can happen e.g.
      // under memory tools that report leaks this way. Mark all tests as a
      // failure on exit; for more precise info they'd need to be retried
      // serially.
      for (size_t i = 0; i < final_results.size(); i++)
        final_results[i].status = TestResult::TEST_FAILURE_ON_EXIT;
    }

    for (size_t i = 0; i < final_results.size(); i++) {
      // Fix the output snippet after possible changes to the test result.
      final_results[i].output_snippet =
          GetTestOutputSnippet(final_results[i], output);
      test_launcher->OnTestFinished(final_results[i]);
      called_any_callback = true;
    }
  } else {
    fprintf(stdout,
            "Failed to get out-of-band test success data, "
            "dumping full stdio below:\n%s\n",
            output.c_str());
    fflush(stdout);

    // We do not have reliable details about test results (parsing test
    // stdout is known to be unreliable).
    if (test_names.size() == 1) {
      // There is only one test. Try to determine its status by exit code.
      const std::string& test_name = test_names.front();
      TestResult test_result;
      test_result.full_name = test_name;

      if (was_timeout) {
        test_result.status = TestResult::TEST_TIMEOUT;
      } else if (exit_code != 0) {
        test_result.status = TestResult::TEST_FAILURE;
      } else {
        // This is a strange case: the test executed successfully, but we
        // failed to read the machine-readable report for it.
        test_result.status = TestResult::TEST_UNKNOWN;
      }

      test_launcher->OnTestFinished(test_result);
      called_any_callback = true;
    } else {
      // There is more than one test. Retry them individually.
      for (const std::string& test_name : test_names)
        tests_to_relaunch->push_back(test_name);
    }
  }

  return called_any_callback;
}

class UnitTestProcessLifetimeObserver : public ProcessLifetimeObserver {
 public:
  ~UnitTestProcessLifetimeObserver() override {
    DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
  }

  TestLauncher* test_launcher() { return test_launcher_; }
  UnitTestPlatformDelegate* platform_delegate() { return platform_delegate_; }
  const std::vector<std::string>& test_names() { return test_names_; }
  int launch_flags() { return launch_flags_; }
  const FilePath& output_file() { return output_file_; }
  const FilePath& flag_file() { return flag_file_; }

 protected:
  UnitTestProcessLifetimeObserver(TestLauncher* test_launcher,
                                  UnitTestPlatformDelegate* platform_delegate,
                                  const std::vector<std::string>& test_names,
                                  int launch_flags,
                                  const FilePath& output_file,
                                  const FilePath& flag_file)
      : ProcessLifetimeObserver(),
        test_launcher_(test_launcher),
        platform_delegate_(platform_delegate),
        test_names_(test_names),
        launch_flags_(launch_flags),
        output_file_(output_file),
        flag_file_(flag_file) {}

  SEQUENCE_CHECKER(sequence_checker_);

 private:
  TestLauncher* const test_launcher_;
  UnitTestPlatformDelegate* const platform_delegate_;
  const std::vector<std::string> test_names_;
  const int launch_flags_;
  const FilePath output_file_;
  const FilePath flag_file_;

  DISALLOW_COPY_AND_ASSIGN(UnitTestProcessLifetimeObserver);
};

class ParallelUnitTestProcessLifetimeObserver
    : public UnitTestProcessLifetimeObserver {
 public:
  ParallelUnitTestProcessLifetimeObserver(
      TestLauncher* test_launcher,
      UnitTestPlatformDelegate* platform_delegate,
      const std::vector<std::string>& test_names,
      int launch_flags,
      const FilePath& output_file,
      const FilePath& flag_file)
      : UnitTestProcessLifetimeObserver(test_launcher,
                                        platform_delegate,
                                        test_names,
                                        launch_flags,
                                        output_file,
                                        flag_file) {}
  ~ParallelUnitTestProcessLifetimeObserver() override = default;

 private:
  // ProcessLifetimeObserver:
  void OnCompleted(int exit_code,
                   TimeDelta elapsed_time,
                   bool was_timeout,
                   const std::string& output) override;

  DISALLOW_COPY_AND_ASSIGN(ParallelUnitTestProcessLifetimeObserver);
};

void ParallelUnitTestProcessLifetimeObserver::OnCompleted(
    int exit_code,
    TimeDelta elapsed_time,
    bool was_timeout,
    const std::string& output) {
  DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
  std::vector<std::string> tests_to_relaunch;
  ProcessTestResults(test_launcher(), test_names(), output_file(), output,
                     exit_code, was_timeout, &tests_to_relaunch);

  if (!tests_to_relaunch.empty()) {
    platform_delegate()->RelaunchTests(test_launcher(), tests_to_relaunch,
                                       launch_flags());
  }

  // The temporary file's directory is also temporary.
  DeleteFile(output_file().DirName(), true);
  if (!flag_file().empty())
    DeleteFile(flag_file(), false);
}

class SerialUnitTestProcessLifetimeObserver
    : public UnitTestProcessLifetimeObserver {
 public:
  SerialUnitTestProcessLifetimeObserver(
      TestLauncher* test_launcher,
      UnitTestPlatformDelegate* platform_delegate,
      const std::vector<std::string>& test_names,
      int launch_flags,
      const FilePath& output_file,
      const FilePath& flag_file,
      std::vector<std::string>&& next_test_names)
      : UnitTestProcessLifetimeObserver(test_launcher,
                                        platform_delegate,
                                        test_names,
                                        launch_flags,
                                        output_file,
                                        flag_file),
        next_test_names_(std::move(next_test_names)) {}
  ~SerialUnitTestProcessLifetimeObserver() override = default;

 private:
  // ProcessLifetimeObserver:
  void OnCompleted(int exit_code,
                   TimeDelta elapsed_time,
                   bool was_timeout,
                   const std::string& output) override;

  std::vector<std::string> next_test_names_;

  DISALLOW_COPY_AND_ASSIGN(SerialUnitTestProcessLifetimeObserver);
};

void SerialUnitTestProcessLifetimeObserver::OnCompleted(
    int exit_code,
    TimeDelta elapsed_time,
    bool was_timeout,
    const std::string& output) {
  DCHECK_CALLED_ON_VALID_SEQUENCE(sequence_checker_);
  std::vector<std::string> tests_to_relaunch;
  bool called_any_callbacks =
      ProcessTestResults(test_launcher(), test_names(), output_file(), output,
                         exit_code, was_timeout, &tests_to_relaunch);

  // There is only one test, so there cannot be other tests to relaunch
  // due to a crash.
  DCHECK(tests_to_relaunch.empty());

  // There is only one test, so we should have called back with its result.
  DCHECK(called_any_callbacks);

  // The temporary file's directory is also temporary.
  DeleteFile(output_file().DirName(), true);

  if (!flag_file().empty())
    DeleteFile(flag_file(), false);

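  // Post rather than run the next test directly, so each test starts from a
  // fresh stack frame on the same sequence instead of recursing.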
  ThreadTaskRunnerHandle::Get()->PostTask(
      FROM_HERE,
      BindOnce(&RunUnitTestsSerially, test_launcher(), platform_delegate(),
               std::move(next_test_names_), launch_flags()));
}

}  // namespace

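// A typical embedder's main() looks roughly like this (a sketch, assuming a
// base::TestSuite as in base/test/run_all_unittests.cc):
//
//   int main(int argc, char** argv) {
//     base::TestSuite test_suite(argc, argv);
//     return base::LaunchUnitTests(
//         argc, argv,
//         base::BindOnce(&base::TestSuite::Run,
//                        base::Unretained(&test_suite)));
//   }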
int LaunchUnitTests(int argc,
                    char** argv,
                    RunTestSuiteCallback run_test_suite) {
  CommandLine::Init(argc, argv);
  size_t parallel_jobs = NumParallelJobs();
  if (parallel_jobs == 0U) {
    return 1;
  }
  return LaunchUnitTestsInternal(std::move(run_test_suite), parallel_jobs,
                                 kDefaultTestBatchLimit, true,
                                 BindOnce(&InitGoogleTestChar, &argc, argv));
}

int LaunchUnitTestsSerially(int argc,
                            char** argv,
                            RunTestSuiteCallback run_test_suite) {
  CommandLine::Init(argc, argv);
  return LaunchUnitTestsInternal(std::move(run_test_suite), 1U,
                                 kDefaultTestBatchLimit, true,
                                 BindOnce(&InitGoogleTestChar, &argc, argv));
}

int LaunchUnitTestsWithOptions(int argc,
                               char** argv,
                               size_t parallel_jobs,
                               int default_batch_limit,
                               bool use_job_objects,
                               RunTestSuiteCallback run_test_suite) {
  CommandLine::Init(argc, argv);
  return LaunchUnitTestsInternal(std::move(run_test_suite), parallel_jobs,
                                 default_batch_limit, use_job_objects,
                                 BindOnce(&InitGoogleTestChar, &argc, argv));
}

#if defined(OS_WIN)
int LaunchUnitTests(int argc,
                    wchar_t** argv,
                    bool use_job_objects,
                    RunTestSuiteCallback run_test_suite) {
  // Windows CommandLine::Init ignores argv anyway.
  CommandLine::Init(argc, NULL);
  size_t parallel_jobs = NumParallelJobs();
  if (parallel_jobs == 0U) {
    return 1;
  }
  return LaunchUnitTestsInternal(std::move(run_test_suite), parallel_jobs,
                                 kDefaultTestBatchLimit, use_job_objects,
                                 BindOnce(&InitGoogleTestWChar, &argc, argv));
}
#endif  // defined(OS_WIN)

void RunUnitTestsSerially(TestLauncher* test_launcher,
                          UnitTestPlatformDelegate* platform_delegate,
                          const std::vector<std::string>& test_names,
                          int launch_flags) {
  if (test_names.empty())
    return;

  // Create a dedicated temporary directory to store the XML result data
  // per run to ensure clean state and make it possible to launch multiple
  // processes in parallel.
  FilePath output_file;
  CHECK(platform_delegate->CreateResultsFile(&output_file));
  FilePath flag_file;
  platform_delegate->CreateTemporaryFile(&flag_file);

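  // Run only the last test in |test_names| now. The remaining names travel
  // with the observer, whose OnCompleted() posts another RunUnitTestsSerially
  // for them, so the tests execute one process at a time.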
  auto observer = std::make_unique<SerialUnitTestProcessLifetimeObserver>(
      test_launcher, platform_delegate,
      std::vector<std::string>(1, test_names.back()), launch_flags,
      output_file, flag_file,
      std::vector<std::string>(test_names.begin(), test_names.end() - 1));

  CommandLine cmd_line(platform_delegate->GetCommandLineForChildGTestProcess(
      observer->test_names(), output_file, flag_file));

  TestLauncher::LaunchOptions launch_options;
  launch_options.flags = launch_flags;
  test_launcher->LaunchChildGTestProcess(
      cmd_line, platform_delegate->GetWrapperForChildGTestProcess(),
      TestTimeouts::test_launcher_timeout(), launch_options,
      std::move(observer));
}

void RunUnitTestsBatch(TestLauncher* test_launcher,
                       UnitTestPlatformDelegate* platform_delegate,
                       const std::vector<std::string>& test_names,
                       int launch_flags) {
  if (test_names.empty())
    return;

  // Create a dedicated temporary directory to store the XML result data
  // per run to ensure clean state and make it possible to launch multiple
  // processes in parallel.
  FilePath output_file;
  CHECK(platform_delegate->CreateResultsFile(&output_file));
  FilePath flag_file;
  platform_delegate->CreateTemporaryFile(&flag_file);

  auto observer = std::make_unique<ParallelUnitTestProcessLifetimeObserver>(
      test_launcher, platform_delegate, test_names, launch_flags, output_file,
      flag_file);

  CommandLine cmd_line(platform_delegate->GetCommandLineForChildGTestProcess(
      test_names, output_file, flag_file));

  // Adjust the timeout depending on how many tests we're running
  // (note that e.g. the last batch of tests will be smaller).
  // TODO(phajdan.jr): Consider an adaptive timeout, which can change
  // depending on how many tests ran and how many remain.
  // Note: do NOT parse the child's stdout to do that; it's known to be
  // unreliable (e.g. buffering issues can mix up the output).
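  // e.g. a batch of 10 tests gets 10 * test_launcher_timeout() for the
  // whole child process.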
  TimeDelta timeout =
      test_names.size() * TestTimeouts::test_launcher_timeout();

  TestLauncher::LaunchOptions options;
  options.flags = launch_flags;
  test_launcher->LaunchChildGTestProcess(
      cmd_line, platform_delegate->GetWrapperForChildGTestProcess(), timeout,
      options, std::move(observer));
}

UnitTestLauncherDelegate::UnitTestLauncherDelegate(
    UnitTestPlatformDelegate* platform_delegate,
    size_t batch_limit,
    bool use_job_objects)
    : platform_delegate_(platform_delegate),
      batch_limit_(batch_limit),
      use_job_objects_(use_job_objects) {}

UnitTestLauncherDelegate::~UnitTestLauncherDelegate() {
  DCHECK(thread_checker_.CalledOnValidThread());
}

bool UnitTestLauncherDelegate::GetTests(std::vector<TestIdentifier>* output) {
  DCHECK(thread_checker_.CalledOnValidThread());
  return platform_delegate_->GetTests(output);
}

bool UnitTestLauncherDelegate::ShouldRunTest(const std::string& test_case_name,
                                             const std::string& test_name) {
  DCHECK(thread_checker_.CalledOnValidThread());

  // There is no additional logic to disable specific tests.
  return true;
}

size_t UnitTestLauncherDelegate::RunTests(
    TestLauncher* test_launcher,
    const std::vector<std::string>& test_names) {
  DCHECK(thread_checker_.CalledOnValidThread());

  int launch_flags = use_job_objects_ ? TestLauncher::USE_JOB_OBJECTS : 0;

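  // Split the tests into chunks of at most |batch_limit_| (0 means
  // unlimited); e.g. with a limit of 10, 25 tests run as batches of 10, 10,
  // and 5. The trailing call below flushes the final partial batch
  // (RunUnitTestsBatch ignores an empty one).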
  std::vector<std::string> batch;
  for (size_t i = 0; i < test_names.size(); i++) {
    batch.push_back(test_names[i]);

    // Use 0 to indicate unlimited batch size.
    if (batch.size() >= batch_limit_ && batch_limit_ != 0) {
      RunUnitTestsBatch(test_launcher, platform_delegate_, batch,
                        launch_flags);
      batch.clear();
    }
  }

  RunUnitTestsBatch(test_launcher, platform_delegate_, batch, launch_flags);

  return test_names.size();
}

size_t UnitTestLauncherDelegate::RetryTests(
    TestLauncher* test_launcher,
    const std::vector<std::string>& test_names) {
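  // Retries run one test per process (see RunUnitTestsSerially) so that
  // crashes and timeouts can be attributed to a specific test.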
  ThreadTaskRunnerHandle::Get()->PostTask(
      FROM_HERE,
      BindOnce(&RunUnitTestsSerially, test_launcher, platform_delegate_,
               test_names,
               use_job_objects_ ? TestLauncher::USE_JOB_OBJECTS : 0));
  return test_names.size();
}

}  // namespace base