// Copyright 2019 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "base/test/launcher/test_launcher.h"

#include <stddef.h>

#include "base/base64.h"
#include "base/command_line.h"
#include "base/files/file_util.h"
#include "base/files/scoped_temp_dir.h"
#include "base/functional/bind.h"
#include "base/functional/callback_helpers.h"
#include "base/i18n/time_formatting.h"
#include "base/logging.h"
#include "base/no_destructor.h"
#include "base/process/launch.h"
#include "base/strings/strcat.h"
#include "base/strings/string_split.h"
#include "base/strings/string_util.h"
#include "base/test/gtest_xml_util.h"
#include "base/test/launcher/test_launcher_test_utils.h"
#include "base/test/launcher/unit_test_launcher.h"
#include "base/test/multiprocess_test.h"
#include "base/test/scoped_logging_settings.h"
#include "base/test/task_environment.h"
#include "base/test/test_timeouts.h"
#include "build/build_config.h"
#include "build/chromeos_buildflags.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "testing/multiprocess_func_list.h"
#include "third_party/abseil-cpp/absl/types/optional.h"

namespace base {
namespace {

using ::testing::_;
using ::testing::DoAll;
using ::testing::Invoke;
using ::testing::InvokeWithoutArgs;
using ::testing::Return;
using ::testing::ReturnPointee;

TestResult GenerateTestResult(const std::string& test_name,
                              TestResult::Status status,
                              TimeDelta elapsed_td = Milliseconds(30),
                              const std::string& output_snippet = "output") {
  TestResult result;
  result.full_name = test_name;
  result.status = status;
  result.elapsed_time = elapsed_td;
  result.output_snippet = output_snippet;
  return result;
}

TestResultPart GenerateTestResultPart(TestResultPart::Type type,
                                      const std::string& file_name,
                                      int line_number,
                                      const std::string& summary,
                                      const std::string& message) {
  TestResultPart test_result_part;
  test_result_part.type = type;
  test_result_part.file_name = file_name;
  test_result_part.line_number = line_number;
  test_result_part.summary = summary;
  test_result_part.message = message;
  return test_result_part;
}

// Mock TestLauncher that stubs out CreateAndStartThreadPool; the unit tests
// provide a TaskEnvironment instead.
class MockTestLauncher : public TestLauncher {
 public:
  MockTestLauncher(TestLauncherDelegate* launcher_delegate,
                   size_t parallel_jobs)
      : TestLauncher(launcher_delegate, parallel_jobs) {}

  void CreateAndStartThreadPool(size_t parallel_jobs) override {}

  MOCK_METHOD4(LaunchChildGTestProcess,
               void(scoped_refptr<TaskRunner> task_runner,
                    const std::vector<std::string>& test_names,
                    const FilePath& task_temp_dir,
                    const FilePath& child_temp_dir));
};

// Simple TestLauncherDelegate mock to test TestLauncher flow.
class MockTestLauncherDelegate : public TestLauncherDelegate {
 public:
  MOCK_METHOD1(GetTests, bool(std::vector<TestIdentifier>* output));
  MOCK_METHOD2(WillRunTest,
               bool(const std::string& test_case_name,
                    const std::string& test_name));
  MOCK_METHOD2(ProcessTestResults,
               void(std::vector<TestResult>& test_names,
                    TimeDelta elapsed_time));
  MOCK_METHOD3(GetCommandLine,
               CommandLine(const std::vector<std::string>& test_names,
                           const FilePath& temp_dir_,
                           FilePath* output_file_));
  MOCK_METHOD1(IsPreTask, bool(const std::vector<std::string>& test_names));
  MOCK_METHOD0(GetWrapper, std::string());
  MOCK_METHOD0(GetLaunchOptions, int());
  MOCK_METHOD0(GetTimeout, TimeDelta());
  MOCK_METHOD0(GetBatchSize, size_t());
};

class MockResultWatcher : public ResultWatcher {
 public:
  MockResultWatcher(FilePath result_file, size_t num_tests)
      : ResultWatcher(result_file, num_tests) {}

  MOCK_METHOD(bool, WaitWithTimeout, (TimeDelta), (override));
};
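
// WaitWithTimeout is the only seam mocked out: the ResultWatcher tests below
// replace the real wait with TaskEnvironment::AdvanceClock so that polling
// runs deterministically under MOCK_TIME.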

// Using MockTestLauncher to test TestLauncher.
// Tests TestLauncher filtering and command line switch setup.
class TestLauncherTest : public testing::Test {
 protected:
  TestLauncherTest()
      : command_line(new CommandLine(CommandLine::NO_PROGRAM)),
        test_launcher(&delegate, 10) {}

  // Adds tests to be returned by the delegate.
  void AddMockedTests(std::string test_case_name,
                      const std::vector<std::string>& test_names) {
    for (const std::string& test_name : test_names) {
      TestIdentifier test_data;
      test_data.test_case_name = test_case_name;
      test_data.test_name = test_name;
      test_data.file = "File";
      test_data.line = 100;
      tests_.push_back(test_data);
    }
  }

  // Sets up the expected delegate calls and which tests the delegate will
  // return.
  void SetUpExpectCalls(size_t batch_size = 10) {
    EXPECT_CALL(delegate, GetTests(_))
        .WillOnce(::testing::DoAll(testing::SetArgPointee<0>(tests_),
                                   testing::Return(true)));
    EXPECT_CALL(delegate, WillRunTest(_, _))
        .WillRepeatedly(testing::Return(true));
    EXPECT_CALL(delegate, ProcessTestResults(_, _)).Times(0);
    EXPECT_CALL(delegate, GetCommandLine(_, _, _))
        .WillRepeatedly(testing::Return(CommandLine(CommandLine::NO_PROGRAM)));
    EXPECT_CALL(delegate, GetWrapper())
        .WillRepeatedly(testing::Return(std::string()));
    EXPECT_CALL(delegate, IsPreTask(_)).WillRepeatedly(testing::Return(true));
    EXPECT_CALL(delegate, GetLaunchOptions())
        .WillRepeatedly(testing::Return(true));
    EXPECT_CALL(delegate, GetTimeout())
        .WillRepeatedly(testing::Return(TimeDelta()));
    EXPECT_CALL(delegate, GetBatchSize())
        .WillRepeatedly(testing::Return(batch_size));
  }
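
  // The tests below follow a common pattern: AddMockedTests() registers the
  // tests the mocked delegate reports, SetUpExpectCalls() wires the delegate
  // defaults, an EXPECT_CALL on LaunchChildGTestProcess feeds results back
  // through the OnTestResult action, and Run() drives the launcher.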

  std::unique_ptr<CommandLine> command_line;
  MockTestLauncher test_launcher;
  MockTestLauncherDelegate delegate;
  base::test::TaskEnvironment task_environment{
      base::test::TaskEnvironment::MainThreadType::IO};
  ScopedTempDir dir;

  FilePath CreateFilterFile() {
    FilePath result_file = dir.GetPath().AppendASCII("test.filter");
    WriteFile(result_file, "-Test.firstTest");
    return result_file;
  }

 private:
  std::vector<TestIdentifier> tests_;
};

class ResultWatcherTest : public testing::Test {
 protected:
  ResultWatcherTest() = default;

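  // Writes the opening of a gtest XML results file. The closing tags are
  // intentionally omitted: individual tests append <testcase> entries (and
  // eventually the closing tags) to simulate a test run in progress.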
  FilePath CreateResultFile() {
    FilePath result_file = dir.GetPath().AppendASCII("test_results.xml");
    WriteFile(result_file,
              "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
              "<testsuites>\n"
              "  <testsuite>\n");
    return result_file;
  }

  base::test::TaskEnvironment task_environment{
      base::test::TaskEnvironment::TimeSource::MOCK_TIME};
  ScopedTempDir dir;
};

// Action to mock the delegate invoking OnTestFinished on the test launcher.
ACTION_P3(OnTestResult, launcher, full_name, status) {
  TestResult result = GenerateTestResult(full_name, status);
  arg0->PostTask(FROM_HERE, BindOnce(&TestLauncher::OnTestFinished,
                                     Unretained(launcher), result));
}

// Same action, taking a fully populated TestResult.
ACTION_P2(OnTestResult, launcher, result) {
  arg0->PostTask(FROM_HERE, BindOnce(&TestLauncher::OnTestFinished,
                                     Unretained(launcher), result));
}
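
// Example of the pattern used throughout this file: report a successful run
// of Test.firstTest whenever a child process would be launched.
//   EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _, _))
//       .WillOnce(OnTestResult(&test_launcher, "Test.firstTest",
//                              TestResult::TEST_SUCCESS));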

// A test and a disabled test cannot share a name.
TEST_F(TestLauncherTest, TestNameSharedWithDisabledTest) {
  AddMockedTests("Test", {"firstTest", "DISABLED_firstTest"});
  SetUpExpectCalls();
  EXPECT_FALSE(test_launcher.Run(command_line.get()));
}

// A test case and a disabled test case cannot share a name.
TEST_F(TestLauncherTest, TestNameSharedWithDisabledTestCase) {
  AddMockedTests("DISABLED_Test", {"firstTest"});
  AddMockedTests("Test", {"firstTest"});
  SetUpExpectCalls();
  EXPECT_FALSE(test_launcher.Run(command_line.get()));
}

// Compiled tests should not contain an orphaned PRE_ test.
TEST_F(TestLauncherTest, OrphanePreTest) {
  AddMockedTests("Test", {"firstTest", "PRE_firstTestOrphane"});
  SetUpExpectCalls();
  EXPECT_FALSE(test_launcher.Run(command_line.get()));
}

// When there are no tests, the delegate should not be called.
TEST_F(TestLauncherTest, EmptyTestSetPasses) {
  SetUpExpectCalls();
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _, _)).Times(0);
  EXPECT_TRUE(test_launcher.Run(command_line.get()));
}

// Test TestLauncher filters DISABLED tests by default.
TEST_F(TestLauncherTest, FilterDisabledTestByDefault) {
  AddMockedTests("DISABLED_TestDisabled", {"firstTest"});
  AddMockedTests("Test",
                 {"firstTest", "secondTest", "DISABLED_firstTestDisabled"});
  SetUpExpectCalls();
  std::vector<std::string> tests_names = {"Test.firstTest", "Test.secondTest"};
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
                                 _,
                                 testing::ElementsAreArray(tests_names.cbegin(),
                                                           tests_names.cend()),
                                 _, _))
      .WillOnce(::testing::DoAll(OnTestResult(&test_launcher, "Test.firstTest",
                                              TestResult::TEST_SUCCESS),
                                 OnTestResult(&test_launcher, "Test.secondTest",
                                              TestResult::TEST_SUCCESS)));
  EXPECT_TRUE(test_launcher.Run(command_line.get()));
}

// TestLauncher should reorder PRE_ tests ahead of their main test before
// invoking the delegate.
TEST_F(TestLauncherTest, ReorderPreTests) {
  AddMockedTests("Test", {"firstTest", "PRE_PRE_firstTest", "PRE_firstTest"});
  SetUpExpectCalls();
  std::vector<std::string> tests_names = {
      "Test.PRE_PRE_firstTest", "Test.PRE_firstTest", "Test.firstTest"};
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
                                 _,
                                 testing::ElementsAreArray(tests_names.cbegin(),
                                                           tests_names.cend()),
                                 _, _))
      .Times(1);
  EXPECT_TRUE(test_launcher.Run(command_line.get()));
}

// Test TestLauncher "gtest_filter" switch.
TEST_F(TestLauncherTest, UsingCommandLineFilter) {
  AddMockedTests("Test",
                 {"firstTest", "secondTest", "DISABLED_firstTestDisabled"});
  SetUpExpectCalls();
  command_line->AppendSwitchASCII("gtest_filter", "Test*.first*");
  std::vector<std::string> tests_names = {"Test.firstTest"};
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
                                 _,
                                 testing::ElementsAreArray(tests_names.cbegin(),
                                                           tests_names.cend()),
                                 _, _))
      .WillOnce(OnTestResult(&test_launcher, "Test.firstTest",
                             TestResult::TEST_SUCCESS));
  EXPECT_TRUE(test_launcher.Run(command_line.get()));
}

// The TestLauncher gtest filter should include PRE_ tests.
TEST_F(TestLauncherTest, FilterIncludePreTest) {
  AddMockedTests("Test", {"firstTest", "secondTest", "PRE_firstTest"});
  SetUpExpectCalls();
  command_line->AppendSwitchASCII("gtest_filter", "Test.firstTest");
  std::vector<std::string> tests_names = {"Test.PRE_firstTest",
                                          "Test.firstTest"};
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
                                 _,
                                 testing::ElementsAreArray(tests_names.cbegin(),
                                                           tests_names.cend()),
                                 _, _))
      .Times(1);
  EXPECT_TRUE(test_launcher.Run(command_line.get()));
}

// Test that the TestLauncher gtest filter works when both include and exclude
// filters are defined.
TEST_F(TestLauncherTest, FilterIncludeExclude) {
  AddMockedTests("Test", {"firstTest", "PRE_firstTest", "secondTest",
                          "PRE_secondTest", "thirdTest", "DISABLED_Disable1"});
  SetUpExpectCalls();
  command_line->AppendSwitchASCII("gtest_filter",
                                  "Test.*Test:-Test.secondTest");
  std::vector<std::string> tests_names = {
      "Test.PRE_firstTest",
      "Test.firstTest",
      "Test.thirdTest",
  };
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
                                 _,
                                 testing::ElementsAreArray(tests_names.cbegin(),
                                                           tests_names.cend()),
                                 _, _))
      .Times(1);
  EXPECT_TRUE(test_launcher.Run(command_line.get()));
}

// Test TestLauncher "gtest_repeat" switch.
TEST_F(TestLauncherTest, RepeatTest) {
  AddMockedTests("Test", {"firstTest"});
  SetUpExpectCalls();
  // Repeat the test twice; without --gtest_break_on_failure, both iterations
  // run to completion.
  command_line->AppendSwitchASCII("gtest_repeat", "2");
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _, _))
      .Times(2)
      .WillRepeatedly(::testing::DoAll(OnTestResult(
          &test_launcher, "Test.firstTest", TestResult::TEST_SUCCESS)));
  EXPECT_TRUE(test_launcher.Run(command_line.get()));
}

// Test TestLauncher --gtest_repeat and --gtest_break_on_failure.
TEST_F(TestLauncherTest, RunningMultipleIterationsUntilFailure) {
  AddMockedTests("Test", {"firstTest"});
  SetUpExpectCalls();
  // With --gtest_break_on_failure, iteration stops at the first failure, so
  // only three of the four requested repeats run.
  command_line->AppendSwitchASCII("gtest_repeat", "4");
  command_line->AppendSwitch("gtest_break_on_failure");
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _, _))
      .WillOnce(::testing::DoAll(OnTestResult(&test_launcher, "Test.firstTest",
                                              TestResult::TEST_SUCCESS)))
      .WillOnce(::testing::DoAll(OnTestResult(&test_launcher, "Test.firstTest",
                                              TestResult::TEST_SUCCESS)))
      .WillOnce(::testing::DoAll(OnTestResult(&test_launcher, "Test.firstTest",
                                              TestResult::TEST_FAILURE)));
  EXPECT_FALSE(test_launcher.Run(command_line.get()));
}

// Test that TestLauncher retries a failed test and stops on success.
TEST_F(TestLauncherTest, SuccessOnRetryTests) {
  AddMockedTests("Test", {"firstTest"});
  SetUpExpectCalls();
  command_line->AppendSwitchASCII("test-launcher-retry-limit", "2");
  std::vector<std::string> tests_names = {"Test.firstTest"};
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
                                 _,
                                 testing::ElementsAreArray(tests_names.cbegin(),
                                                           tests_names.cend()),
                                 _, _))
      .WillOnce(OnTestResult(&test_launcher, "Test.firstTest",
                             TestResult::TEST_FAILURE))
      .WillOnce(OnTestResult(&test_launcher, "Test.firstTest",
                             TestResult::TEST_SUCCESS));
  EXPECT_TRUE(test_launcher.Run(command_line.get()));
}

// Test that TestLauncher keeps retrying a continuously failing test up to the
// retry limit before eventually failing and returning false.
TEST_F(TestLauncherTest, FailOnRetryTests) {
  AddMockedTests("Test", {"firstTest"});
  SetUpExpectCalls();
  command_line->AppendSwitchASCII("test-launcher-retry-limit", "2");
  std::vector<std::string> tests_names = {"Test.firstTest"};
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
                                 _,
                                 testing::ElementsAreArray(tests_names.cbegin(),
                                                           tests_names.cend()),
                                 _, _))
      .Times(3)
      .WillRepeatedly(OnTestResult(&test_launcher, "Test.firstTest",
                                   TestResult::TEST_FAILURE));
  EXPECT_FALSE(test_launcher.Run(command_line.get()));
}

// TestLauncher should retry all tests in a PRE_ chain.
TEST_F(TestLauncherTest, RetryPreTests) {
  AddMockedTests("Test", {"firstTest", "PRE_PRE_firstTest", "PRE_firstTest"});
  SetUpExpectCalls();
  command_line->AppendSwitchASCII("test-launcher-retry-limit", "2");
  std::vector<TestResult> results = {
      GenerateTestResult("Test.PRE_PRE_firstTest", TestResult::TEST_SUCCESS),
      GenerateTestResult("Test.PRE_firstTest", TestResult::TEST_FAILURE),
      GenerateTestResult("Test.firstTest", TestResult::TEST_SUCCESS)};
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _, _))
      .WillOnce(::testing::DoAll(
          OnTestResult(&test_launcher, "Test.PRE_PRE_firstTest",
                       TestResult::TEST_SUCCESS),
          OnTestResult(&test_launcher, "Test.PRE_firstTest",
                       TestResult::TEST_FAILURE),
          OnTestResult(&test_launcher, "Test.firstTest",
                       TestResult::TEST_SUCCESS)));
  std::vector<std::string> tests_names = {"Test.PRE_PRE_firstTest"};
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
                                 _,
                                 testing::ElementsAreArray(tests_names.cbegin(),
                                                           tests_names.cend()),
                                 _, _))
      .WillOnce(OnTestResult(&test_launcher, "Test.PRE_PRE_firstTest",
                             TestResult::TEST_SUCCESS));
  tests_names = {"Test.PRE_firstTest"};
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
                                 _,
                                 testing::ElementsAreArray(tests_names.cbegin(),
                                                           tests_names.cend()),
                                 _, _))
      .WillOnce(OnTestResult(&test_launcher, "Test.PRE_firstTest",
                             TestResult::TEST_SUCCESS));
  tests_names = {"Test.firstTest"};
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
                                 _,
                                 testing::ElementsAreArray(tests_names.cbegin(),
                                                           tests_names.cend()),
                                 _, _))
      .WillOnce(OnTestResult(&test_launcher, "Test.firstTest",
                             TestResult::TEST_SUCCESS));
  EXPECT_TRUE(test_launcher.Run(command_line.get()));
}

// TestLauncher should fail when a PRE_ test fails even if its non-PRE test
// passes.
TEST_F(TestLauncherTest, PreTestFailure) {
  AddMockedTests("Test", {"FirstTest", "PRE_FirstTest"});
  SetUpExpectCalls();
  std::vector<TestResult> results = {
      GenerateTestResult("Test.PRE_FirstTest", TestResult::TEST_FAILURE),
      GenerateTestResult("Test.FirstTest", TestResult::TEST_SUCCESS)};
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _, _))
      .WillOnce(
          ::testing::DoAll(OnTestResult(&test_launcher, "Test.PRE_FirstTest",
                                        TestResult::TEST_FAILURE),
                           OnTestResult(&test_launcher, "Test.FirstTest",
                                        TestResult::TEST_SUCCESS)));
  EXPECT_CALL(test_launcher,
              LaunchChildGTestProcess(
                  _, testing::ElementsAre("Test.PRE_FirstTest"), _, _))
      .WillOnce(OnTestResult(&test_launcher, "Test.PRE_FirstTest",
                             TestResult::TEST_FAILURE));
  EXPECT_CALL(
      test_launcher,
      LaunchChildGTestProcess(_, testing::ElementsAre("Test.FirstTest"), _, _))
      .WillOnce(OnTestResult(&test_launcher, "Test.FirstTest",
                             TestResult::TEST_SUCCESS));
  EXPECT_FALSE(test_launcher.Run(command_line.get()));
}

// Test the TestLauncher switch to run disabled unit tests.
TEST_F(TestLauncherTest, RunDisabledTests) {
  AddMockedTests("DISABLED_TestDisabled", {"firstTest"});
  AddMockedTests("Test",
                 {"firstTest", "secondTest", "DISABLED_firstTestDisabled"});
  SetUpExpectCalls();
  command_line->AppendSwitch("gtest_also_run_disabled_tests");
  command_line->AppendSwitchASCII("gtest_filter", "Test*.first*");
  std::vector<std::string> tests_names = {"DISABLED_TestDisabled.firstTest",
                                          "Test.firstTest",
                                          "Test.DISABLED_firstTestDisabled"};
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
                                 _,
                                 testing::ElementsAreArray(tests_names.cbegin(),
                                                           tests_names.cend()),
                                 _, _))
      .WillOnce(::testing::DoAll(
          OnTestResult(&test_launcher, "Test.firstTest",
                       TestResult::TEST_SUCCESS),
          OnTestResult(&test_launcher, "DISABLED_TestDisabled.firstTest",
                       TestResult::TEST_SUCCESS),
          OnTestResult(&test_launcher, "Test.DISABLED_firstTestDisabled",
                       TestResult::TEST_SUCCESS)));
  EXPECT_TRUE(test_launcher.Run(command_line.get()));
}

// Test that TestLauncher does not run tests excluded by a negative filter
// under testing/buildbot/filters.
TEST_F(TestLauncherTest, DoesRunFilteredTests) {
  AddMockedTests("Test", {"firstTest", "secondTest"});
  SetUpExpectCalls();
  ASSERT_TRUE(dir.CreateUniqueTempDir());
  // The filter file content is "-Test.firstTest".
  FilePath path = CreateFilterFile();
  command_line->AppendSwitchPath("test-launcher-filter-file", path);
  std::vector<std::string> tests_names = {"Test.secondTest"};
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
                                 _,
                                 testing::ElementsAreArray(tests_names.cbegin(),
                                                           tests_names.cend()),
                                 _, _))
      .WillOnce(::testing::DoAll(OnTestResult(&test_launcher, "Test.secondTest",
                                              TestResult::TEST_SUCCESS)));
  EXPECT_TRUE(test_launcher.Run(command_line.get()));
}

// Test that TestLauncher runs disabled tests and tests excluded under
// testing/buildbot/filters when gtest_also_run_disabled_tests is set.
TEST_F(TestLauncherTest, RunDisabledTestsWithFilteredTests) {
  AddMockedTests("DISABLED_TestDisabled", {"firstTest"});
  AddMockedTests("Test", {"firstTest", "DISABLED_firstTestDisabled"});
  SetUpExpectCalls();
  ASSERT_TRUE(dir.CreateUniqueTempDir());
  // The filter file content is "-Test.firstTest", but Test.firstTest will
  // still run because gtest_also_run_disabled_tests is set.
  FilePath path = CreateFilterFile();
  command_line->AppendSwitchPath("test-launcher-filter-file", path);
  command_line->AppendSwitch("gtest_also_run_disabled_tests");
  std::vector<std::string> tests_names = {"DISABLED_TestDisabled.firstTest",
                                          "Test.firstTest",
                                          "Test.DISABLED_firstTestDisabled"};
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
                                 _,
                                 testing::ElementsAreArray(tests_names.cbegin(),
                                                           tests_names.cend()),
                                 _, _))
      .WillOnce(::testing::DoAll(
          OnTestResult(&test_launcher, "Test.firstTest",
                       TestResult::TEST_SUCCESS),
          OnTestResult(&test_launcher, "DISABLED_TestDisabled.firstTest",
                       TestResult::TEST_SUCCESS),
          OnTestResult(&test_launcher, "Test.DISABLED_firstTestDisabled",
                       TestResult::TEST_SUCCESS)));
  EXPECT_TRUE(test_launcher.Run(command_line.get()));
}

// A disabled test should disable all of its PRE_ tests.
TEST_F(TestLauncherTest, DisablePreTests) {
  AddMockedTests("Test", {"DISABLED_firstTest", "PRE_PRE_firstTest",
                          "PRE_firstTest", "secondTest"});
  SetUpExpectCalls();
  std::vector<std::string> tests_names = {"Test.secondTest"};
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
                                 _,
                                 testing::ElementsAreArray(tests_names.cbegin(),
                                                           tests_names.cend()),
                                 _, _))
      .Times(1);
  EXPECT_TRUE(test_launcher.Run(command_line.get()));
}

// Test that TestLauncher enforces running only the tests listed in the exact
// positive filter.
TEST_F(TestLauncherTest, EnforceRunTestsInExactPositiveFilter) {
  AddMockedTests("Test", {"firstTest", "secondTest", "thirdTest"});
  SetUpExpectCalls();

  ASSERT_TRUE(dir.CreateUniqueTempDir());
  FilePath path = dir.GetPath().AppendASCII("test.filter");
  WriteFile(path, "Test.firstTest\nTest.thirdTest");
  command_line->AppendSwitchPath("test-launcher-filter-file", path);
  command_line->AppendSwitch("enforce-exact-positive-filter");
  command_line->AppendSwitchASCII("test-launcher-total-shards", "2");
  command_line->AppendSwitchASCII("test-launcher-shard-index", "0");

  // Test.firstTest is in the exact positive filter, so it is expected to run.
  // Test.thirdTest is launched in another shard.
  std::vector<std::string> tests_names = {"Test.firstTest"};
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
                                 _,
                                 testing::ElementsAreArray(tests_names.cbegin(),
                                                           tests_names.cend()),
                                 _, _))
      .WillOnce(::testing::DoAll(OnTestResult(&test_launcher, "Test.firstTest",
                                              TestResult::TEST_SUCCESS)));
  EXPECT_TRUE(test_launcher.Run(command_line.get()));
}

// TestLauncher should fail if enforce-exact-positive-filter and gtest_filter
// are both present.
TEST_F(TestLauncherTest,
       EnforceRunTestsInExactPositiveFailWithGtestFilterFlag) {
  command_line->AppendSwitch("enforce-exact-positive-filter");
  command_line->AppendSwitchASCII("gtest_filter", "Test.firstTest;-Test.*");
  EXPECT_FALSE(test_launcher.Run(command_line.get()));
}

// TestLauncher should fail if enforce-exact-positive-filter is set with
// negative test filters.
TEST_F(TestLauncherTest, EnforceRunTestsInExactPositiveFailWithNegativeFilter) {
  command_line->AppendSwitch("enforce-exact-positive-filter");
  ASSERT_TRUE(dir.CreateUniqueTempDir());
  FilePath path = CreateFilterFile();
  command_line->AppendSwitchPath("test-launcher-filter-file", path);
  EXPECT_FALSE(test_launcher.Run(command_line.get()));
}

// TestLauncher should fail if enforce-exact-positive-filter is set with
// wildcard positive filters.
TEST_F(TestLauncherTest,
       EnforceRunTestsInExactPositiveFailWithWildcardPositiveFilter) {
  command_line->AppendSwitch("enforce-exact-positive-filter");
  ASSERT_TRUE(dir.CreateUniqueTempDir());
  FilePath path = dir.GetPath().AppendASCII("test.filter");
  WriteFile(path, "Test.*");
  command_line->AppendSwitchPath("test-launcher-filter-file", path);
  EXPECT_FALSE(test_launcher.Run(command_line.get()));
}

// Tests fail if they produce too much output.
TEST_F(TestLauncherTest, ExcessiveOutput) {
  AddMockedTests("Test", {"firstTest"});
  SetUpExpectCalls();
  command_line->AppendSwitchASCII("test-launcher-retry-limit", "0");
  command_line->AppendSwitchASCII("test-launcher-print-test-stdio", "never");
  TestResult test_result =
      GenerateTestResult("Test.firstTest", TestResult::TEST_SUCCESS,
                         Milliseconds(30), std::string(500000, 'a'));
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _, _))
      .WillOnce(OnTestResult(&test_launcher, test_result));
  EXPECT_FALSE(test_launcher.Run(command_line.get()));
}

// Use command-line switch to allow more output.
TEST_F(TestLauncherTest, OutputLimitSwitch) {
  AddMockedTests("Test", {"firstTest"});
  SetUpExpectCalls();
  command_line->AppendSwitchASCII("test-launcher-print-test-stdio", "never");
  command_line->AppendSwitchASCII("test-launcher-output-bytes-limit", "800000");
  TestResult test_result =
      GenerateTestResult("Test.firstTest", TestResult::TEST_SUCCESS,
                         Milliseconds(30), std::string(500000, 'a'));
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _, _))
      .WillOnce(OnTestResult(&test_launcher, test_result));
  EXPECT_TRUE(test_launcher.Run(command_line.get()));
}

// The shard index must be less than the total number of shards.
TEST_F(TestLauncherTest, FaultyShardSetup) {
  command_line->AppendSwitchASCII("test-launcher-total-shards", "2");
  command_line->AppendSwitchASCII("test-launcher-shard-index", "2");
  EXPECT_FALSE(test_launcher.Run(command_line.get()));
}

// Test the test-launcher-print-test-stdio switch.
TEST_F(TestLauncherTest, RedirectStdio) {
  AddMockedTests("Test", {"firstTest"});
  SetUpExpectCalls();
  command_line->AppendSwitchASCII("test-launcher-print-test-stdio", "always");
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _, _))
      .WillOnce(OnTestResult(&test_launcher, "Test.firstTest",
                             TestResult::TEST_SUCCESS));
  EXPECT_TRUE(test_launcher.Run(command_line.get()));
}

// Sharding should be stable, always selecting the same tests.
TEST_F(TestLauncherTest, StableSharding) {
  AddMockedTests("Test", {"firstTest", "secondTest", "thirdTest"});
  SetUpExpectCalls();
  command_line->AppendSwitchASCII("test-launcher-total-shards", "2");
  command_line->AppendSwitchASCII("test-launcher-shard-index", "0");
  command_line->AppendSwitch("test-launcher-stable-sharding");
  std::vector<std::string> tests_names = {"Test.firstTest", "Test.secondTest"};
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
                                 _,
                                 testing::ElementsAreArray(tests_names.cbegin(),
                                                           tests_names.cend()),
                                 _, _))
      .WillOnce(::testing::DoAll(OnTestResult(&test_launcher, "Test.firstTest",
                                              TestResult::TEST_SUCCESS),
                                 OnTestResult(&test_launcher, "Test.secondTest",
                                              TestResult::TEST_SUCCESS)));
  EXPECT_TRUE(test_launcher.Run(command_line.get()));
}

// Validate that |iteration_data| contains one test result matching
// |test_result|.
bool ValidateTestResultObject(const Value::Dict& iteration_data,
                              TestResult& test_result) {
  const Value::List* results = iteration_data.FindList(test_result.full_name);
  if (!results) {
    ADD_FAILURE() << "Results not found";
    return false;
  }
  if (1u != results->size()) {
    ADD_FAILURE() << "Expected one result, actual: " << results->size();
    return false;
  }
  const Value::Dict* dict = (*results)[0].GetIfDict();
  if (!dict) {
    ADD_FAILURE() << "Unexpected type";
    return false;
  }

  using test_launcher_utils::ValidateKeyValue;
  bool result = ValidateKeyValue(*dict, "elapsed_time_ms",
                                 test_result.elapsed_time.InMilliseconds());

  if (!dict->FindBool("losless_snippet").value_or(false)) {
    ADD_FAILURE() << "losless_snippet expected to be true";
    result = false;
  }

  result &=
      ValidateKeyValue(*dict, "output_snippet", test_result.output_snippet);

  std::string base64_output_snippet;
  Base64Encode(test_result.output_snippet, &base64_output_snippet);
  result &=
      ValidateKeyValue(*dict, "output_snippet_base64", base64_output_snippet);

  result &= ValidateKeyValue(*dict, "status", test_result.StatusAsString());

  const Value::List* list = dict->FindList("result_parts");
  if (test_result.test_result_parts.size() != list->size()) {
    ADD_FAILURE() << "test_result_parts count is not valid";
    return false;
  }

  for (unsigned i = 0; i < test_result.test_result_parts.size(); i++) {
    TestResultPart result_part = test_result.test_result_parts.at(i);
    const Value::Dict& part_dict = (*list)[i].GetDict();

    result &= ValidateKeyValue(part_dict, "type", result_part.TypeAsString());
    result &= ValidateKeyValue(part_dict, "file", result_part.file_name);
    result &= ValidateKeyValue(part_dict, "line", result_part.line_number);
    result &= ValidateKeyValue(part_dict, "summary", result_part.summary);
    result &= ValidateKeyValue(part_dict, "message", result_part.message);
  }
  return result;
}
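
// For reference, each per-test entry checked above looks roughly like this in
// the summary JSON (a sketch; "b3V0cHV0" is base64 for "output", and
// "losless_snippet" (sic) is the key the launcher actually emits):
//   "Test.firstTest": [{
//     "elapsed_time_ms": 30,
//     "losless_snippet": true,
//     "output_snippet": "output",
//     "output_snippet_base64": "b3V0cHV0",
//     "status": "SUCCESS",
//     "result_parts": [...]
//   }]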

// Validate that the |root| dictionary contains a list of |values| at |key|.
bool ValidateStringList(const absl::optional<Value::Dict>& root,
                        const std::string& key,
                        std::vector<const char*> values) {
  const Value::List* list = root->FindList(key);
  if (!list) {
    ADD_FAILURE() << "|root| has no list_value in key: " << key;
    return false;
  }

  if (values.size() != list->size()) {
    ADD_FAILURE() << "expected size: " << values.size()
                  << ", actual size:" << list->size();
    return false;
  }

  for (unsigned i = 0; i < values.size(); i++) {
    if (!(*list)[i].is_string() ||
        (*list)[i].GetString().compare(values.at(i)) != 0) {
      ADD_FAILURE() << "Expected list values do not match actual list";
      return false;
    }
  }
  return true;
}

// Unit test to validate that TestLauncher outputs the correct JSON file.
TEST_F(TestLauncherTest, JsonSummary) {
  AddMockedTests("DISABLED_TestDisabled", {"firstTest"});
  AddMockedTests("Test",
                 {"firstTest", "secondTest", "DISABLED_firstTestDisabled"});
  SetUpExpectCalls();

  ASSERT_TRUE(dir.CreateUniqueTempDir());
  FilePath path = dir.GetPath().AppendASCII("SaveSummaryResult.json");
  command_line->AppendSwitchPath("test-launcher-summary-output", path);
  command_line->AppendSwitchASCII("gtest_repeat", "2");
  // Force the repeats to run sequentially.
  command_line->AppendSwitch("gtest_break_on_failure");

  // Set up results to be returned by the test launcher delegate.
  TestResult first_result =
      GenerateTestResult("Test.firstTest", TestResult::TEST_SUCCESS,
                         Milliseconds(30), "output_first");
  first_result.test_result_parts.push_back(GenerateTestResultPart(
      TestResultPart::kSuccess, "TestFile", 110, "summary", "message"));
  TestResult second_result =
      GenerateTestResult("Test.secondTest", TestResult::TEST_SUCCESS,
                         Milliseconds(50), "output_second");

  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _, _))
      .Times(2)
      .WillRepeatedly(
          ::testing::DoAll(OnTestResult(&test_launcher, first_result),
                           OnTestResult(&test_launcher, second_result)));
  EXPECT_TRUE(test_launcher.Run(command_line.get()));

  // Validate that the resulting JSON file is the expected output.
  absl::optional<Value::Dict> root = test_launcher_utils::ReadSummary(path);
  ASSERT_TRUE(root);
  EXPECT_TRUE(
      ValidateStringList(root, "all_tests",
                         {"Test.firstTest", "Test.firstTestDisabled",
                          "Test.secondTest", "TestDisabled.firstTest"}));
  EXPECT_TRUE(
      ValidateStringList(root, "disabled_tests",
                         {"Test.firstTestDisabled", "TestDisabled.firstTest"}));

  const Value::Dict* dict = root->FindDict("test_locations");
  ASSERT_TRUE(dict);
  EXPECT_EQ(2u, dict->size());
  ASSERT_TRUE(test_launcher_utils::ValidateTestLocation(*dict, "Test.firstTest",
                                                        "File", 100));
  ASSERT_TRUE(test_launcher_utils::ValidateTestLocation(
      *dict, "Test.secondTest", "File", 100));

  const Value::List* list = root->FindList("per_iteration_data");
  ASSERT_TRUE(list);
  ASSERT_EQ(2u, list->size());
  for (const auto& iteration_val : *list) {
    ASSERT_TRUE(iteration_val.is_dict());
    const base::Value::Dict& iteration_dict = iteration_val.GetDict();
    EXPECT_EQ(2u, iteration_dict.size());
    EXPECT_TRUE(ValidateTestResultObject(iteration_dict, first_result));
    EXPECT_TRUE(ValidateTestResultObject(iteration_dict, second_result));
  }
}

// Validate that TestLauncher outputs the correct JSON file when running
// disabled tests.
TEST_F(TestLauncherTest, JsonSummaryWithDisabledTests) {
  AddMockedTests("Test", {"DISABLED_Test"});
  SetUpExpectCalls();

  ASSERT_TRUE(dir.CreateUniqueTempDir());
  FilePath path = dir.GetPath().AppendASCII("SaveSummaryResult.json");
  command_line->AppendSwitchPath("test-launcher-summary-output", path);
  command_line->AppendSwitch("gtest_also_run_disabled_tests");

  // Set up results to be returned by the test launcher delegate.
  TestResult test_result =
      GenerateTestResult("Test.DISABLED_Test", TestResult::TEST_SUCCESS,
                         Milliseconds(50), "output_second");

  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _, _))
      .WillOnce(OnTestResult(&test_launcher, test_result));
  EXPECT_TRUE(test_launcher.Run(command_line.get()));

  // Validate that the resulting JSON file is the expected output.
  absl::optional<Value::Dict> root = test_launcher_utils::ReadSummary(path);
  ASSERT_TRUE(root);
  Value::Dict* dict = root->FindDict("test_locations");
  ASSERT_TRUE(dict);
  EXPECT_EQ(1u, dict->size());
  EXPECT_TRUE(test_launcher_utils::ValidateTestLocation(
      *dict, "Test.DISABLED_Test", "File", 100));

  Value::List* list = root->FindList("per_iteration_data");
  ASSERT_TRUE(list);
  ASSERT_EQ(1u, list->size());

  Value::Dict* iteration_dict = (*list)[0].GetIfDict();
  ASSERT_TRUE(iteration_dict);
  EXPECT_EQ(1u, iteration_dict->size());
  // We expect the result to be stripped of the DISABLED_ prefix.
  test_result.full_name = "Test.Test";
  EXPECT_TRUE(ValidateTestResultObject(*iteration_dict, test_result));
}

// Matches a std::tuple<const FilePath&, const FilePath&> where the first
// item is a parent of the second.
MATCHER(DirectoryIsParentOf, "") {
  return std::get<0>(arg).IsParent(std::get<1>(arg));
}
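
// Below, ::testing::Args<2, 3>(DirectoryIsParentOf()) hands the matcher the
// (task_temp_dir, child_temp_dir) argument pair as a tuple, checking that the
// child temp dir is created inside the task temp dir.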

// Test that the launcher creates a dedicated temp dir for a child proc and
// cleans it up.
TEST_F(TestLauncherTest, TestChildTempDir) {
  AddMockedTests("Test", {"firstTest"});
  SetUpExpectCalls();
  ON_CALL(test_launcher, LaunchChildGTestProcess(_, _, _, _))
      .WillByDefault(OnTestResult(&test_launcher, "Test.firstTest",
                                  TestResult::TEST_SUCCESS));

  FilePath task_temp;
  if (TestLauncher::SupportsPerChildTempDirs()) {
    // Platforms that support child proc temp dirs must get a |child_temp_dir|
    // arg that exists and is within |task_temp_dir|.
    EXPECT_CALL(
        test_launcher,
        LaunchChildGTestProcess(
            _, _, _, ::testing::ResultOf(DirectoryExists, ::testing::IsTrue())))
        .With(::testing::Args<2, 3>(DirectoryIsParentOf()))
        .WillOnce(::testing::SaveArg<2>(&task_temp));
  } else {
    // Platforms that don't support child proc temp dirs must get an empty
    // |child_temp_dir| arg.
    EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _, FilePath()))
        .WillOnce(::testing::SaveArg<2>(&task_temp));
  }

  EXPECT_TRUE(test_launcher.Run(command_line.get()));

  // The task's temporary directory should have been deleted.
  EXPECT_FALSE(DirectoryExists(task_temp));
}

#if BUILDFLAG(IS_FUCHSIA)
// Verifies that test processes have /data, /cache and /tmp available.
TEST_F(TestLauncherTest, ProvidesDataCacheAndTmpDirs) {
  EXPECT_TRUE(base::DirectoryExists(base::FilePath("/data")));
  EXPECT_TRUE(base::DirectoryExists(base::FilePath("/cache")));
  EXPECT_TRUE(base::DirectoryExists(base::FilePath("/tmp")));
}
#endif  // BUILDFLAG(IS_FUCHSIA)

// Unit tests to validate the UnitTestLauncherDelegate implementation.
class UnitTestLauncherDelegateTester : public testing::Test {
 protected:
  DefaultUnitTestPlatformDelegate defaultPlatform;
  ScopedTempDir dir;

 private:
  base::test::TaskEnvironment task_environment;
};

// Validate that the delegate produces the correct command line.
TEST_F(UnitTestLauncherDelegateTester, GetCommandLine) {
  UnitTestLauncherDelegate launcher_delegate(&defaultPlatform, 10u, true,
                                             DoNothing());
  TestLauncherDelegate* delegate_ptr = &launcher_delegate;

  std::vector<std::string> test_names(5, "Tests");
  base::FilePath temp_dir;
  base::FilePath result_file;
  CreateNewTempDirectory(FilePath::StringType(), &temp_dir);

  CommandLine cmd_line =
      delegate_ptr->GetCommandLine(test_names, temp_dir, &result_file);
  EXPECT_TRUE(cmd_line.HasSwitch("single-process-tests"));
  EXPECT_EQ(cmd_line.GetSwitchValuePath("test-launcher-output"), result_file);

  const int size = 2048;
  std::string content;
  ASSERT_TRUE(ReadFileToStringWithMaxSize(
      cmd_line.GetSwitchValuePath("gtest_flagfile"), &content, size));
  EXPECT_EQ(content.find("--gtest_filter="), 0u);
  base::ReplaceSubstringsAfterOffset(&content, 0, "--gtest_filter=", "");
  std::vector<std::string> gtest_filter_tests =
      SplitString(content, ":", TRIM_WHITESPACE, SPLIT_WANT_ALL);
  ASSERT_EQ(gtest_filter_tests.size(), test_names.size());
  for (unsigned i = 0; i < test_names.size(); i++) {
    EXPECT_EQ(gtest_filter_tests.at(i), test_names.at(i));
  }
}
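
// For the five identical "Tests" names above, the flagfile read back by the
// assertions is expected to contain a single colon-separated line, roughly:
//   --gtest_filter=Tests:Tests:Tests:Tests:Tests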

// Verify that a result watcher can stop polling early when all tests complete.
TEST_F(ResultWatcherTest, PollCompletesQuickly) {
  ASSERT_TRUE(dir.CreateUniqueTempDir());
  FilePath result_file = CreateResultFile();
  ASSERT_TRUE(AppendToFile(
      result_file,
      StrCat({"    <x-teststart name=\"B\" classname=\"A\" timestamp=\"",
              TimeFormatAsIso8601(Time::Now()).c_str(), "\" />\n",
              "    <testcase name=\"B\" status=\"run\" time=\"0.500\" "
              "classname=\"A\" timestamp=\"",
              TimeFormatAsIso8601(Time::Now()).c_str(), "\">\n",
              "    </testcase>\n",
              "    <x-teststart name=\"C\" classname=\"A\" timestamp=\"",
              TimeFormatAsIso8601(Time::Now() + Milliseconds(500)).c_str(),
              "\" />\n",
              "    <testcase name=\"C\" status=\"run\" time=\"0.500\" "
              "classname=\"A\" timestamp=\"",
              TimeFormatAsIso8601(Time::Now() + Milliseconds(500)).c_str(),
              "\">\n", "    </testcase>\n", "  </testsuite>\n",
              "</testsuites>\n"})));

  MockResultWatcher result_watcher(result_file, 2);
  EXPECT_CALL(result_watcher, WaitWithTimeout(_))
      .WillOnce(DoAll(InvokeWithoutArgs([&]() {
                        task_environment.AdvanceClock(Milliseconds(1500));
                      }),
                      Return(true)));

  Time start = Time::Now();
  ASSERT_TRUE(result_watcher.PollUntilDone(Seconds(45)));
  ASSERT_EQ(Time::Now() - start, Milliseconds(1500));
}

// Verify that a result watcher repeatedly checks the file for a batch of slow
// tests. Each test completes in 40s, which is just under the timeout of 45s.
TEST_F(ResultWatcherTest, PollCompletesSlowly) {
  SCOPED_TRACE(::testing::Message() << "Start ticks: " << TimeTicks::Now());

  ASSERT_TRUE(dir.CreateUniqueTempDir());
  FilePath result_file = CreateResultFile();
  const Time start = Time::Now();
  ASSERT_TRUE(AppendToFile(
      result_file,
      StrCat({"    <x-teststart name=\"B\" classname=\"A\" timestamp=\"",
              TimeFormatAsIso8601(start).c_str(), "\" />\n"})));

  MockResultWatcher result_watcher(result_file, 10);
  size_t checks = 0;
  bool done = false;
  EXPECT_CALL(result_watcher, WaitWithTimeout(_))
      .Times(10)
      .WillRepeatedly(
          DoAll(Invoke([&](TimeDelta timeout) {
                  task_environment.AdvanceClock(timeout);
                  // Append a result with "time" (duration) as 40.000s and
                  // "timestamp" (test start) as `Now()` - 45s.
                  AppendToFile(
                      result_file,
                      StrCat({"    <testcase name=\"B\" status=\"run\" "
                              "time=\"40.000\" classname=\"A\" timestamp=\"",
                              TimeFormatAsIso8601(Time::Now() - Seconds(45))
                                  .c_str(),
                              "\">\n", "    </testcase>\n"}));
                  checks++;
                  if (checks == 10) {
                    AppendToFile(result_file,
                                 "  </testsuite>\n"
                                 "</testsuites>\n");
                    done = true;
                  } else {
                    // Append a preliminary result for the next test that
                    // started when the last test completed (i.e., `Now()` - 45s
                    // + 40s).
                    AppendToFile(
                        result_file,
                        StrCat({"    <x-teststart name=\"B\" classname=\"A\" "
                                "timestamp=\"",
                                TimeFormatAsIso8601(Time::Now() - Seconds(5))
                                    .c_str(),
                                "\" />\n"}));
                  }
                }),
                ReturnPointee(&done)));

  ASSERT_TRUE(result_watcher.PollUntilDone(Seconds(45)));
  // The first check occurs 45s after the batch starts, so the sequence of
  // events looks like:
  //   00:00 - Test 1 starts
  //   00:40 - Test 1 completes, test 2 starts
  //   00:45 - Check 1 occurs
  //   01:20 - Test 2 completes, test 3 starts
  //   01:25 - Check 2 occurs
  //   02:00 - Test 3 completes, test 4 starts
  //   02:05 - Check 3 occurs
  //   ...
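  // In closed form, check i fires at 45 + 40 * (i - 1) seconds, so the tenth
  // and final check lands at 45 + 360 = 405 seconds after the batch start,
  // matching the assertion below.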
  ASSERT_EQ(Time::Now() - start, Seconds(45 + 40 * 9));
}

// Verify that the result watcher identifies when a test times out.
TEST_F(ResultWatcherTest, PollTimeout) {
  ASSERT_TRUE(dir.CreateUniqueTempDir());
  FilePath result_file = CreateResultFile();
  ASSERT_TRUE(AppendToFile(
      result_file,
      StrCat({"    <x-teststart name=\"B\" classname=\"A\" timestamp=\"",
              TimeFormatAsIso8601(Time::Now()).c_str(), "\" />\n"})));

  MockResultWatcher result_watcher(result_file, 10);
  EXPECT_CALL(result_watcher, WaitWithTimeout(_))
      .Times(2)
      .WillRepeatedly(
          DoAll(Invoke(&task_environment, &test::TaskEnvironment::AdvanceClock),
                Return(false)));

  Time start = Time::Now();
  ASSERT_FALSE(result_watcher.PollUntilDone(Seconds(45)));
  // Include a small grace period.
  ASSERT_EQ(Time::Now() - start, Seconds(45) + TestTimeouts::tiny_timeout());
}

// Verify that the result watcher retries incomplete reads.
TEST_F(ResultWatcherTest, RetryIncompleteResultRead) {
  ASSERT_TRUE(dir.CreateUniqueTempDir());
  FilePath result_file = CreateResultFile();
  // The opening "<summary>" tag is deliberately left unclosed.
  ASSERT_TRUE(AppendToFile(
      result_file,
      StrCat({"    <x-teststart name=\"B\" classname=\"A\" timestamp=\"",
              TimeFormatAsIso8601(Time::Now()).c_str(), "\" />\n",
              "    <testcase name=\"B\" status=\"run\" time=\"40.000\" "
              "classname=\"A\" timestamp=\"",
              TimeFormatAsIso8601(Time::Now()).c_str(), "\">\n",
              "      <summary>"})));

  MockResultWatcher result_watcher(result_file, 2);
  size_t attempts = 0;
  bool done = false;
  EXPECT_CALL(result_watcher, WaitWithTimeout(_))
      .Times(5)
      .WillRepeatedly(DoAll(Invoke([&](TimeDelta timeout) {
                              task_environment.AdvanceClock(timeout);
                              // Don't bother writing the rest of the file when
                              // this test completes.
                              done = ++attempts >= 5;
                            }),
                            ReturnPointee(&done)));

  Time start = Time::Now();
  ASSERT_TRUE(result_watcher.PollUntilDone(Seconds(45)));
  ASSERT_EQ(Time::Now() - start,
            Seconds(45) + 4 * TestTimeouts::tiny_timeout());
}

// Verify that the result watcher continues polling with the base timeout when
// the clock jumps backward.
TEST_F(ResultWatcherTest, PollWithClockJumpBackward) {
  ASSERT_TRUE(dir.CreateUniqueTempDir());
  FilePath result_file = CreateResultFile();
  // Cannot move the mock time source backward, so write future timestamps into
  // the result file instead.
  Time time_before_change = Time::Now() + Hours(1);
  ASSERT_TRUE(AppendToFile(
      result_file,
      StrCat(
          {"    <x-teststart name=\"B\" classname=\"A\" timestamp=\"",
           TimeFormatAsIso8601(time_before_change).c_str(), "\" />\n",
           "    <testcase name=\"B\" status=\"run\" time=\"0.500\" "
           "classname=\"A\" timestamp=\"",
           TimeFormatAsIso8601(time_before_change).c_str(), "\">\n",
           "    </testcase>\n",
           "    <x-teststart name=\"C\" classname=\"A\" timestamp=\"",
           TimeFormatAsIso8601(time_before_change + Milliseconds(500)).c_str(),
           "\" />\n"})));

  MockResultWatcher result_watcher(result_file, 2);
  EXPECT_CALL(result_watcher, WaitWithTimeout(_))
      .WillOnce(
          DoAll(Invoke(&task_environment, &test::TaskEnvironment::AdvanceClock),
                Return(false)))
      .WillOnce(
          DoAll(Invoke(&task_environment, &test::TaskEnvironment::AdvanceClock),
                Return(true)));

  Time start = Time::Now();
  ASSERT_TRUE(result_watcher.PollUntilDone(Seconds(45)));
  ASSERT_EQ(Time::Now() - start, Seconds(90));
}
1136 
1137 // Verify that the result watcher continues polling with the base timeout when
1138 // the clock jumps forward.
TEST_F(ResultWatcherTest,PollWithClockJumpForward)1139 TEST_F(ResultWatcherTest, PollWithClockJumpForward) {
1140   ASSERT_TRUE(dir.CreateUniqueTempDir());
1141   FilePath result_file = CreateResultFile();
1142   ASSERT_TRUE(AppendToFile(
1143       result_file,
1144       StrCat({"    <x-teststart name=\"B\" classname=\"A\" timestamp=\"",
1145               TimeFormatAsIso8601(Time::Now()).c_str(), "\" />\n",
1146               "    <testcase name=\"B\" status=\"run\" time=\"0.500\" "
1147               "classname=\"A\" timestamp=\"",
1148               TimeFormatAsIso8601(Time::Now()).c_str(), "\">\n",
1149               "    </testcase>\n",
1150               "    <x-teststart name=\"C\" classname=\"A\" timestamp=\"",
1151               TimeFormatAsIso8601(Time::Now() + Milliseconds(500)).c_str(),
1152               "\" />\n"})));
1153   task_environment.AdvanceClock(Hours(1));
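  // After the jump, the timestamps already written to the file look stale and
  // cannot be used to shorten the polling timeout.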

  MockResultWatcher result_watcher(result_file, 2);
  EXPECT_CALL(result_watcher, WaitWithTimeout(_))
      .WillOnce(
          DoAll(Invoke(&task_environment, &test::TaskEnvironment::AdvanceClock),
                Return(false)))
      .WillOnce(
          DoAll(Invoke(&task_environment, &test::TaskEnvironment::AdvanceClock),
                Return(true)));

  Time start = Time::Now();
  ASSERT_TRUE(result_watcher.PollUntilDone(Seconds(45)));
  ASSERT_EQ(Time::Now() - start, Seconds(90));
}

// Validate that the delegate sets the batch size correctly.
TEST_F(UnitTestLauncherDelegateTester, BatchSize) {
  UnitTestLauncherDelegate launcher_delegate(&defaultPlatform, 15u, true,
                                             DoNothing());
  TestLauncherDelegate* delegate_ptr = &launcher_delegate;
  EXPECT_EQ(delegate_ptr->GetBatchSize(), 15u);
}

// The following 4 tests are disabled, as they are meant to run only from
// |RunMockTests| to validate the test launcher's output for known results.
// The tests are expected to run in order within the same batch.

// Basic test to pass
TEST(MockUnitTests, DISABLED_PassTest) {
  ASSERT_TRUE(true);
}
// Basic test to fail
TEST(MockUnitTests, DISABLED_FailTest) {
  ASSERT_TRUE(false);
}
// Basic test to crash
TEST(MockUnitTests, DISABLED_CrashTest) {
  ImmediateCrash();
}
// Basic test that will not be reached, due to the preceding crash in the
// same batch.
TEST(MockUnitTests, DISABLED_NoRunTest) {
  ASSERT_TRUE(true);
}

// Use TestLauncher to launch the mock unit tests above
// and validate the resulting JSON file.
TEST_F(UnitTestLauncherDelegateTester, RunMockTests) {
  CommandLine command_line(CommandLine::ForCurrentProcess()->GetProgram());
  command_line.AppendSwitchASCII("gtest_filter", "MockUnitTests.DISABLED_*");

  ASSERT_TRUE(dir.CreateUniqueTempDir());
  FilePath path = dir.GetPath().AppendASCII("SaveSummaryResult.json");
  command_line.AppendSwitchPath("test-launcher-summary-output", path);
  command_line.AppendSwitch("gtest_also_run_disabled_tests");
  command_line.AppendSwitchASCII("test-launcher-retry-limit", "0");
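  // Disable retries so each failing test is reported once instead of being
  // re-run.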

  std::string output;
  GetAppOutputAndError(command_line, &output);

  // Validate that the resulting JSON file contains the expected output.
  absl::optional<Value::Dict> root = test_launcher_utils::ReadSummary(path);
  ASSERT_TRUE(root);

  const Value::Dict* dict = root->FindDict("test_locations");
  ASSERT_TRUE(dict);
  EXPECT_EQ(4u, dict->size());

  EXPECT_TRUE(
      test_launcher_utils::ValidateTestLocations(*dict, "MockUnitTests"));

  const Value::List* list = root->FindList("per_iteration_data");
  ASSERT_TRUE(list);
  ASSERT_EQ(1u, list->size());

  const Value::Dict* iteration_dict = (*list)[0].GetIfDict();
  ASSERT_TRUE(iteration_dict);
  EXPECT_EQ(4u, iteration_dict->size());
  // We expect the results to be stripped of the "DISABLED_" prefix.
  EXPECT_TRUE(test_launcher_utils::ValidateTestResult(
      *iteration_dict, "MockUnitTests.PassTest", "SUCCESS", 0u));
  EXPECT_TRUE(test_launcher_utils::ValidateTestResult(
      *iteration_dict, "MockUnitTests.FailTest", "FAILURE", 1u));
  EXPECT_TRUE(test_launcher_utils::ValidateTestResult(
      *iteration_dict, "MockUnitTests.CrashTest", "CRASH", 0u));
  EXPECT_TRUE(test_launcher_utils::ValidateTestResult(
      *iteration_dict, "MockUnitTests.NoRunTest", "NOTRUN", 0u,
      /*have_running_info=*/false));
}

TEST(ProcessGTestOutputTest, RunMockTests) {
  ScopedTempDir dir;
  CommandLine command_line(CommandLine::ForCurrentProcess()->GetProgram());
  command_line.AppendSwitchASCII("gtest_filter", "MockUnitTests.DISABLED_*");

  ASSERT_TRUE(dir.CreateUniqueTempDir());
  FilePath path = dir.GetPath().AppendASCII("SaveSummaryResult.xml");
  command_line.AppendSwitchPath("test-launcher-output", path);
  command_line.AppendSwitch("gtest_also_run_disabled_tests");
  command_line.AppendSwitch("single-process-tests");

  std::string output;
  GetAppOutputAndError(command_line, &output);

  std::vector<TestResult> test_results;
  bool crashed = false;
  bool have_test_results = ProcessGTestOutput(path, &test_results, &crashed);

  EXPECT_TRUE(have_test_results);
  EXPECT_TRUE(crashed);
  ASSERT_EQ(test_results.size(), 3u);

  EXPECT_EQ(test_results[0].full_name, "MockUnitTests.DISABLED_PassTest");
  EXPECT_EQ(test_results[0].status, TestResult::TEST_SUCCESS);
  EXPECT_EQ(test_results[0].test_result_parts.size(), 0u);
  ASSERT_TRUE(test_results[0].timestamp.has_value());
  EXPECT_GT(*test_results[0].timestamp, Time());
  EXPECT_FALSE(test_results[0].thread_id);
  EXPECT_FALSE(test_results[0].process_num);

  EXPECT_EQ(test_results[1].full_name, "MockUnitTests.DISABLED_FailTest");
  EXPECT_EQ(test_results[1].status, TestResult::TEST_FAILURE);
  EXPECT_EQ(test_results[1].test_result_parts.size(), 1u);
  ASSERT_TRUE(test_results[1].timestamp.has_value());
  EXPECT_GT(*test_results[1].timestamp, Time());

  EXPECT_EQ(test_results[2].full_name, "MockUnitTests.DISABLED_CrashTest");
  EXPECT_EQ(test_results[2].status, TestResult::TEST_CRASH);
  EXPECT_EQ(test_results[2].test_result_parts.size(), 0u);
  ASSERT_TRUE(test_results[2].timestamp.has_value());
  EXPECT_GT(*test_results[2].timestamp, Time());
}

// TODO(crbug.com/1498237): Enable the test once GetAppOutputAndError
// can collect stdout and stderr on Fuchsia.
#if !BUILDFLAG(IS_FUCHSIA)
TEST(ProcessGTestOutputTest, FoundTestCaseNotEnforced) {
  ScopedTempDir dir;
  ASSERT_TRUE(dir.CreateUniqueTempDir());
  FilePath path = dir.GetPath().AppendASCII("test.filter");
  WriteFile(path, "Test.firstTest\nTest.secondTest");
  CommandLine command_line(CommandLine::ForCurrentProcess()->GetProgram());
  command_line.AppendSwitchPath("test-launcher-filter-file", path);
  command_line.AppendSwitch("enforce-exact-positive-filter");
  std::string output;
  // The test cases in the filter do not exist, hence the test launcher should
  // fail and print their names.
  EXPECT_FALSE(GetAppOutputAndError(command_line, &output));
  // The banner should appear in the output.
  const char kBanner[] = "Found exact positive filter not enforced:";
  EXPECT_TRUE(Contains(output, kBanner));
  std::vector<std::string> lines = base::SplitString(
      output, "\n", base::KEEP_WHITESPACE, base::SPLIT_WANT_ALL);
  std::unordered_set<std::string> tests_not_enforced;
  bool banner_has_printed = false;
  for (size_t i = 0; i < lines.size(); i++) {
    if (Contains(lines[i], kBanner)) {
      // The following two lines should contain the unenforced test cases, and
      // the third line the check-failure message.
      EXPECT_LT(i + 3, lines.size());
      // The banner should appear only once.
      EXPECT_FALSE(banner_has_printed);
      banner_has_printed = true;
      continue;
    }
    if (banner_has_printed && tests_not_enforced.size() < 2) {
      // Note that the launcher prints the error with datetime and file/line
      // info ahead of the test names, e.g.:
      // [1030/220237.425678:ERROR:test_launcher.cc(2123)] Test.secondTest
      // [1030/220237.425682:ERROR:test_launcher.cc(2123)] Test.firstTest
      std::vector<std::string> line_vec = base::SplitString(
          lines[i], "]", base::TRIM_WHITESPACE, base::SPLIT_WANT_ALL);
      ASSERT_EQ(line_vec.size(), 2u);
      tests_not_enforced.insert(line_vec[1]);
      continue;
    }
    if (banner_has_printed && tests_not_enforced.size() == 2) {
// Official builds discard logs from CHECK failures, hence the test cannot
// catch the "Check failed" line.
#if !defined(OFFICIAL_BUILD) || DCHECK_IS_ON()
      EXPECT_TRUE(Contains(lines[i],
                           "Check failed: "
                           "!found_exact_positive_filter_not_enforced."));
#endif  // !defined(OFFICIAL_BUILD) || DCHECK_IS_ON()
      break;
    }
  }
  // The printed test cases are not ordered, hence UnorderedElementsAre is
  // needed for the comparison.
  EXPECT_THAT(tests_not_enforced, testing::UnorderedElementsAre(
                                      "Test.firstTest", "Test.secondTest"));
}
#endif  // !BUILDFLAG(IS_FUCHSIA)

// TODO(crbug.com/1094369): Enable leaked-child checks on other platforms.
#if BUILDFLAG(IS_FUCHSIA)

// Test that leaves a child process running. The test is DISABLED_, so it can
// only be launched explicitly by |LeakedChildProcess| below.

MULTIPROCESS_TEST_MAIN(LeakChildProcess) {
  while (true)
    PlatformThread::Sleep(base::Seconds(1));
}

TEST(LeakedChildProcessTest, DISABLED_LeakChildProcess) {
  Process child_process = SpawnMultiProcessTestChild(
      "LeakChildProcess", GetMultiProcessTestChildBaseCommandLine(),
      LaunchOptions());
  ASSERT_TRUE(child_process.IsValid());
  // Don't wait for the child process to exit.
}

// Validate that a test that leaks a process causes the batch to have an
// error exit code.
TEST_F(UnitTestLauncherDelegateTester, LeakedChildProcess) {
  CommandLine command_line(CommandLine::ForCurrentProcess()->GetProgram());
  command_line.AppendSwitchASCII(
      "gtest_filter", "LeakedChildProcessTest.DISABLED_LeakChildProcess");

  ASSERT_TRUE(dir.CreateUniqueTempDir());
  FilePath path = dir.GetPath().AppendASCII("SaveSummaryResult.json");
  command_line.AppendSwitchPath("test-launcher-summary-output", path);
  command_line.AppendSwitch("gtest_also_run_disabled_tests");
  command_line.AppendSwitchASCII("test-launcher-retry-limit", "0");

  std::string output;
  int exit_code = 0;
  GetAppOutputWithExitCode(command_line, &output, &exit_code);

  // Validate that we actually ran a test.
  absl::optional<Value::Dict> root = test_launcher_utils::ReadSummary(path);
  ASSERT_TRUE(root);

  Value::Dict* dict = root->FindDict("test_locations");
  ASSERT_TRUE(dict);
  EXPECT_EQ(1u, dict->size());

  EXPECT_TRUE(test_launcher_utils::ValidateTestLocations(
      *dict, "LeakedChildProcessTest"));

  // Validate that the leaked child caused the batch to error out.
  EXPECT_EQ(exit_code, 1);
}
#endif  // BUILDFLAG(IS_FUCHSIA)

// Validate that GetTestOutputSnippet assigns the correct output snippet.
TEST(TestLauncherTools, GetTestOutputSnippetTest) {
  const std::string output =
      "[ RUN      ] TestCase.FirstTest\n"
      "[       OK ] TestCase.FirstTest (0 ms)\n"
      "Post first test output\n"
      "[ RUN      ] TestCase.SecondTest\n"
      "[  FAILED  ] TestCase.SecondTest (0 ms)\n"
      "[ RUN      ] TestCase.ThirdTest\n"
      "[  SKIPPED ] TestCase.ThirdTest (0 ms)\n"
      "Post second test output";
  TestResult result;
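  // GetTestOutputSnippet should extract, for a given test, the lines between
  // its "[ RUN      ]" banner and its status line in the combined output.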

  // Test the snippet of a successful test.
  result.full_name = "TestCase.FirstTest";
  result.status = TestResult::TEST_SUCCESS;
  EXPECT_EQ(GetTestOutputSnippet(result, output),
            "[ RUN      ] TestCase.FirstTest\n"
            "[       OK ] TestCase.FirstTest (0 ms)\n");

  // The snippet of a failure-on-exit test should include output emitted after
  // the test concluded, but not the output of subsequent tests.
  result.status = TestResult::TEST_FAILURE_ON_EXIT;
  EXPECT_EQ(GetTestOutputSnippet(result, output),
            "[ RUN      ] TestCase.FirstTest\n"
            "[       OK ] TestCase.FirstTest (0 ms)\n"
            "Post first test output\n");

  // Test the snippet of a failed test.
  result.full_name = "TestCase.SecondTest";
  result.status = TestResult::TEST_FAILURE;
  EXPECT_EQ(GetTestOutputSnippet(result, output),
            "[ RUN      ] TestCase.SecondTest\n"
            "[  FAILED  ] TestCase.SecondTest (0 ms)\n");

  // Test the snippet of a skipped test. Note that the status is SUCCESS
  // because the gtest XML format does not distinguish SUCCESS from SKIPPED.
  result.full_name = "TestCase.ThirdTest";
  result.status = TestResult::TEST_SUCCESS;
  EXPECT_EQ(GetTestOutputSnippet(result, output),
            "[ RUN      ] TestCase.ThirdTest\n"
            "[  SKIPPED ] TestCase.ThirdTest (0 ms)\n");
}

MATCHER(CheckTruncationPreservesMessage, "") {
  // Ensure the inserted message matches the expected pattern.
  constexpr char kExpected[] = R"(FATAL.*message\n)";
  EXPECT_THAT(arg, ::testing::ContainsRegex(kExpected));

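  // Bury the fatal message inside heavy padding so truncation must actively
  // choose to keep it.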
  const std::string snippet =
      base::StrCat({"[ RUN      ] SampleTestSuite.SampleTestName\n"
                    "Padding log message added for testing purposes\n"
                    "Padding log message added for testing purposes\n"
                    "Padding log message added for testing purposes\n"
                    "Padding log message added for testing purposes\n"
                    "Padding log message added for testing purposes\n"
                    "Padding log message added for testing purposes\n",
                    arg,
                    "Padding log message added for testing purposes\n"
                    "Padding log message added for testing purposes\n"
                    "Padding log message added for testing purposes\n"
                    "Padding log message added for testing purposes\n"
                    "Padding log message added for testing purposes\n"
                    "Padding log message added for testing purposes\n"});

  // Strip the stack trace off the end of the message.
  size_t line_end_pos = arg.find("\n");
  std::string first_line = arg.substr(0, line_end_pos + 1);

  const std::string result = TruncateSnippetFocused(snippet, 300);
  // The first line of the fatal message must survive truncation. Note that
  // `find() > 0` would also be satisfied by npos (not found), so check for
  // npos explicitly.
  EXPECT_NE(result.find(first_line), std::string::npos);
  EXPECT_EQ(result.length(), 300UL);
  return true;
}

void MatchesFatalMessagesTest() {
  // Different Chrome test suites have different settings for their logs.
  // E.g. unit tests may not show the process ID (as they are single process),
  // whereas browser tests usually do (as they are multi-process). This
  // affects how log messages are formatted and hence how the log criticality,
  // i.e. "FATAL", appears in the log message. We test the two extremes --
  // all items (process IDs, timestamps, etc.) present, and all absent. We
  // also test the presence/absence of an extra logging prefix.
  {
    // Process ID, Thread ID, Timestamp and Tickcount.
    logging::SetLogItems(true, true, true, true);
    logging::SetLogPrefix(nullptr);
    EXPECT_DEATH_IF_SUPPORTED(LOG(FATAL) << "message",
                              CheckTruncationPreservesMessage());
  }
  {
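    // No process ID, thread ID, timestamp, or tickcount.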
    logging::SetLogItems(false, false, false, false);
    logging::SetLogPrefix(nullptr);
    EXPECT_DEATH_IF_SUPPORTED(LOG(FATAL) << "message",
                              CheckTruncationPreservesMessage());
  }
  {
    // Process ID, Thread ID, Timestamp and Tickcount.
    logging::SetLogItems(true, true, true, true);
    logging::SetLogPrefix("mylogprefix");
    EXPECT_DEATH_IF_SUPPORTED(LOG(FATAL) << "message",
                              CheckTruncationPreservesMessage());
  }
  {
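    // No process ID, thread ID, timestamp, or tickcount.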
    logging::SetLogItems(false, false, false, false);
    logging::SetLogPrefix("mylogprefix");
    EXPECT_DEATH_IF_SUPPORTED(LOG(FATAL) << "message",
                              CheckTruncationPreservesMessage());
  }
}

// Validates that TruncateSnippetFocused correctly identifies fatal messages
// to retain during truncation.
TEST(TestLauncherTools, TruncateSnippetFocusedMatchesFatalMessagesTest) {
  logging::ScopedLoggingSettings scoped_logging_settings;
#if BUILDFLAG(IS_CHROMEOS_ASH)
  scoped_logging_settings.SetLogFormat(logging::LogFormat::LOG_FORMAT_SYSLOG);
#endif
  MatchesFatalMessagesTest();
}

#if BUILDFLAG(IS_CHROMEOS_ASH)
// Validates that TruncateSnippetFocused correctly identifies fatal messages
// to retain during truncation, for ChromeOS Ash.
TEST(TestLauncherTools, TruncateSnippetFocusedMatchesFatalMessagesCrosAshTest) {
  logging::ScopedLoggingSettings scoped_logging_settings;
  scoped_logging_settings.SetLogFormat(logging::LogFormat::LOG_FORMAT_CHROME);
  MatchesFatalMessagesTest();
}
#endif

// Validate that TruncateSnippetFocused truncates snippets correctly,
// regardless of whether fatal messages appear at the start, middle, or end
// of the snippet.
TEST(TestLauncherTools, TruncateSnippetFocusedTest) {
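  // Each case below truncates to a 300-byte budget; elided regions are
  // replaced with a "<truncated (N bytes)>" marker.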
  // Test where the FATAL message appears at the start of the log.
  const std::string snippet =
      "[ RUN      ] "
      "EndToEndTests/"
      "EndToEndTest.WebTransportSessionUnidirectionalStreamSentEarly/"
      "draft29_QBIC\n"
      "[26219:26368:FATAL:tls_handshaker.cc(293)] 1-RTT secret(s) not set "
      "yet.\n"
      "#0 0x55619ad1fcdb in backtrace "
      "/b/s/w/ir/cache/builder/src/third_party/llvm/compiler-rt/lib/asan/../"
      "sanitizer_common/sanitizer_common_interceptors.inc:4205:13\n"
      "#1 0x5561a6bdf519 in base::debug::CollectStackTrace(void**, unsigned "
      "long) ./../../base/debug/stack_trace_posix.cc:845:39\n"
      "#2 0x5561a69a1293 in StackTrace "
      "./../../base/debug/stack_trace.cc:200:12\n"
      "...\n";
  const std::string result = TruncateSnippetFocused(snippet, 300);
  EXPECT_EQ(
      result,
      "[ RUN      ] EndToEndTests/EndToEndTest.WebTransportSessionUnidirection"
      "alStreamSentEarly/draft29_QBIC\n"
      "[26219:26368:FATAL:tls_handshaker.cc(293)] 1-RTT secret(s) not set "
      "yet.\n"
      "#0 0x55619ad1fcdb in backtrace /b/s/w/ir/cache/bui\n"
      "<truncated (358 bytes)>\n"
      "Trace ./../../base/debug/stack_trace.cc:200:12\n"
      "...\n");
  EXPECT_EQ(result.length(), 300UL);

  // Test where the FATAL message appears in the middle of the log.
  const std::string snippet_two =
      "[ RUN      ] NetworkingPrivateApiTest.CreateSharedNetwork\n"
      "Padding log information added for testing purposes\n"
      "Padding log information added for testing purposes\n"
      "Padding log information added for testing purposes\n"
      "FATAL extensions_unittests[12666:12666]: [managed_network_configuration"
      "_handler_impl.cc(525)] Check failed: !guid_str && !guid_str->empty().\n"
      "#0 0x562f31dba779 base::debug::CollectStackTrace()\n"
      "#1 0x562f31cdf2a3 base::debug::StackTrace::StackTrace()\n"
      "#2 0x562f31cf4380 logging::LogMessage::~LogMessage()\n"
      "#3 0x562f31cf4d3e logging::LogMessage::~LogMessage()\n";
  const std::string result_two = TruncateSnippetFocused(snippet_two, 300);
  EXPECT_EQ(
      result_two,
      "[ RUN      ] NetworkingPriv\n"
      "<truncated (210 bytes)>\n"
      " added for testing purposes\n"
      "FATAL extensions_unittests[12666:12666]: [managed_network_configuration"
      "_handler_impl.cc(525)] Check failed: !guid_str && !guid_str->empty().\n"
      "#0 0x562f31dba779 base::deb\n"
      "<truncated (213 bytes)>\n"
      ":LogMessage::~LogMessage()\n");
  EXPECT_EQ(result_two.length(), 300UL);

  // Test where the FATAL message appears at the end of the log.
  const std::string snippet_three =
      "[ RUN      ] All/PDFExtensionAccessibilityTreeDumpTest.Highlights/"
      "linux\n"
      "[6741:6741:0716/171816.818448:ERROR:power_monitor_device_source_stub.cc"
      "(11)] Not implemented reached in virtual bool base::PowerMonitorDevice"
      "Source::IsOnBatteryPower()\n"
      "[6741:6741:0716/171816.818912:INFO:content_main_runner_impl.cc(1082)]"
      " Chrome is running in full browser mode.\n"
      "libva error: va_getDriverName() failed with unknown libva error,driver"
      "_name=(null)\n"
      "[6741:6741:0716/171817.688633:FATAL:agent_scheduling_group_host.cc(290)"
      "] Check failed: message->routing_id() != MSG_ROUTING_CONTROL "
      "(2147483647 vs. 2147483647)\n";
  const std::string result_three = TruncateSnippetFocused(snippet_three, 300);
  EXPECT_EQ(
      result_three,
      "[ RUN      ] All/PDFExtensionAccessibilityTreeDumpTest.Hi\n"
      "<truncated (432 bytes)>\n"
      "Name() failed with unknown libva error,driver_name=(null)\n"
      "[6741:6741:0716/171817.688633:FATAL:agent_scheduling_group_host.cc(290)"
      "] Check failed: message->routing_id() != MSG_ROUTING_CONTROL "
      "(2147483647 vs. 2147483647)\n");
  EXPECT_EQ(result_three.length(), 300UL);

  // Test where no FATAL message appears.
  const std::string snippet_four =
      "[ RUN      ] All/PassingTest/linux\n"
      "Padding log line 1 added for testing purposes\n"
      "Padding log line 2 added for testing purposes\n"
      "Padding log line 3 added for testing purposes\n"
      "Padding log line 4 added for testing purposes\n"
      "Padding log line 5 added for testing purposes\n"
      "Padding log line 6 added for testing purposes\n";
  const std::string result_four = TruncateSnippetFocused(snippet_four, 300);
  EXPECT_EQ(result_four,
            "[ RUN      ] All/PassingTest/linux\n"
            "Padding log line 1 added for testing purposes\n"
            "Padding log line 2 added for testing purposes\n"
            "Padding lo\n<truncated (311 bytes)>\n"
            "Padding log line 4 added for testing purposes\n"
            "Padding log line 5 added for testing purposes\n"
            "Padding log line 6 added for testing purposes\n");
  EXPECT_EQ(result_four.length(), 300UL);
}

}  // namespace

}  // namespace base