// Copyright 2019 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "base/test/launcher/test_launcher.h"

#include <stddef.h>

#include "base/base64.h"
#include "base/command_line.h"
#include "base/files/file_util.h"
#include "base/files/scoped_temp_dir.h"
#include "base/functional/bind.h"
#include "base/functional/callback_helpers.h"
#include "base/logging.h"
#include "base/no_destructor.h"
#include "base/process/launch.h"
#include "base/strings/strcat.h"
#include "base/strings/string_split.h"
#include "base/strings/string_util.h"
#include "base/test/gtest_xml_util.h"
#include "base/test/launcher/test_launcher_test_utils.h"
#include "base/test/launcher/unit_test_launcher.h"
#include "base/test/multiprocess_test.h"
#include "base/test/scoped_logging_settings.h"
#include "base/test/task_environment.h"
#include "base/test/test_timeouts.h"
#include "base/time/time_to_iso8601.h"
#include "build/build_config.h"
#include "build/chromeos_buildflags.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "testing/multiprocess_func_list.h"
#include "third_party/abseil-cpp/absl/types/optional.h"

namespace base {
namespace {

using ::testing::_;
using ::testing::DoAll;
using ::testing::Invoke;
using ::testing::InvokeWithoutArgs;
using ::testing::Return;
using ::testing::ReturnPointee;

TestResult GenerateTestResult(const std::string& test_name,
                              TestResult::Status status,
                              TimeDelta elapsed_td = Milliseconds(30),
                              const std::string& output_snippet = "output") {
  TestResult result;
  result.full_name = test_name;
  result.status = status;
  result.elapsed_time = elapsed_td;
  result.output_snippet = output_snippet;
  return result;
}
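
// Example (illustrative, not part of the original suite): the helper fills
// sensible defaults, so callers typically override only the fields their
// assertions care about.
//   TestResult ok = GenerateTestResult("Test.firstTest",
//                                      TestResult::TEST_SUCCESS);
//   // ok.elapsed_time == Milliseconds(30), ok.output_snippet == "output".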

TestResultPart GenerateTestResultPart(TestResultPart::Type type,
                                      const std::string& file_name,
                                      int line_number,
                                      const std::string& summary,
                                      const std::string& message) {
  TestResultPart test_result_part;
  test_result_part.type = type;
  test_result_part.file_name = file_name;
  test_result_part.line_number = line_number;
  test_result_part.summary = summary;
  test_result_part.message = message;
  return test_result_part;
}

// Mock TestLauncher that stubs out CreateAndStartThreadPool; the unit test
// provides its own TaskEnvironment instead.
class MockTestLauncher : public TestLauncher {
 public:
  MockTestLauncher(TestLauncherDelegate* launcher_delegate,
                   size_t parallel_jobs)
      : TestLauncher(launcher_delegate, parallel_jobs) {}

  void CreateAndStartThreadPool(size_t parallel_jobs) override {}

  MOCK_METHOD4(LaunchChildGTestProcess,
               void(scoped_refptr<TaskRunner> task_runner,
                    const std::vector<std::string>& test_names,
                    const FilePath& task_temp_dir,
                    const FilePath& child_temp_dir));
};

// Simple TestLauncherDelegate mock to test TestLauncher flow.
class MockTestLauncherDelegate : public TestLauncherDelegate {
 public:
  MOCK_METHOD1(GetTests, bool(std::vector<TestIdentifier>* output));
  MOCK_METHOD2(WillRunTest,
               bool(const std::string& test_case_name,
                    const std::string& test_name));
  MOCK_METHOD2(ProcessTestResults,
               void(std::vector<TestResult>& test_names,
                    TimeDelta elapsed_time));
  MOCK_METHOD3(GetCommandLine,
               CommandLine(const std::vector<std::string>& test_names,
                           const FilePath& temp_dir_,
                           FilePath* output_file_));
  MOCK_METHOD1(IsPreTask, bool(const std::vector<std::string>& test_names));
  MOCK_METHOD0(GetWrapper, std::string());
  MOCK_METHOD0(GetLaunchOptions, int());
  MOCK_METHOD0(GetTimeout, TimeDelta());
  MOCK_METHOD0(GetBatchSize, size_t());
};

class MockResultWatcher : public ResultWatcher {
 public:
  MockResultWatcher(FilePath result_file, size_t num_tests)
      : ResultWatcher(result_file, num_tests) {}

  MOCK_METHOD(bool, WaitWithTimeout, (TimeDelta), (override));
};
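
// A minimal sketch of how the ResultWatcher tests below drive this mock
// under MOCK_TIME: WaitWithTimeout is stubbed to advance the mock clock
// instead of sleeping.
//   EXPECT_CALL(result_watcher, WaitWithTimeout(_))
//       .WillOnce(DoAll(InvokeWithoutArgs([&]() {
//                         task_environment.AdvanceClock(Milliseconds(1500));
//                       }),
//                       Return(true)));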

// Fixture that uses MockTestLauncher to exercise the TestLauncher flow:
// test filtering and command-line switch setup.
class TestLauncherTest : public testing::Test {
 protected:
  TestLauncherTest()
      : command_line(new CommandLine(CommandLine::NO_PROGRAM)),
        test_launcher(&delegate, 10) {}

  // Adds tests to be returned by the delegate.
  void AddMockedTests(std::string test_case_name,
                      const std::vector<std::string>& test_names) {
    for (const std::string& test_name : test_names) {
      TestIdentifier test_data;
      test_data.test_case_name = test_case_name;
      test_data.test_name = test_name;
      test_data.file = "File";
      test_data.line = 100;
      tests_.push_back(test_data);
    }
  }

  // Set up expected delegate calls and which tests the delegate will return.
  void SetUpExpectCalls(size_t batch_size = 10) {
    EXPECT_CALL(delegate, GetTests(_))
        .WillOnce(::testing::DoAll(testing::SetArgPointee<0>(tests_),
                                   testing::Return(true)));
    EXPECT_CALL(delegate, WillRunTest(_, _))
        .WillRepeatedly(testing::Return(true));
    EXPECT_CALL(delegate, ProcessTestResults(_, _)).Times(0);
    EXPECT_CALL(delegate, GetCommandLine(_, _, _))
        .WillRepeatedly(testing::Return(CommandLine(CommandLine::NO_PROGRAM)));
    EXPECT_CALL(delegate, GetWrapper())
        .WillRepeatedly(testing::Return(std::string()));
    EXPECT_CALL(delegate, IsPreTask(_)).WillRepeatedly(testing::Return(true));
    EXPECT_CALL(delegate, GetLaunchOptions())
        .WillRepeatedly(testing::Return(true));
    EXPECT_CALL(delegate, GetTimeout())
        .WillRepeatedly(testing::Return(TimeDelta()));
    EXPECT_CALL(delegate, GetBatchSize())
        .WillRepeatedly(testing::Return(batch_size));
  }

  std::unique_ptr<CommandLine> command_line;
  MockTestLauncher test_launcher;
  MockTestLauncherDelegate delegate;
  base::test::TaskEnvironment task_environment{
      base::test::TaskEnvironment::MainThreadType::IO};
  ScopedTempDir dir;

  FilePath CreateFilterFile() {
    FilePath result_file = dir.GetPath().AppendASCII("test.filter");
    WriteFile(result_file, "-Test.firstTest");
    return result_file;
  }

 private:
  std::vector<TestIdentifier> tests_;
};

class ResultWatcherTest : public testing::Test {
 protected:
  ResultWatcherTest() = default;

  FilePath CreateResultFile() {
    FilePath result_file = dir.GetPath().AppendASCII("test_results.xml");
    WriteFile(result_file,
              "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
              "<testsuites>\n"
              "  <testsuite>\n");
    return result_file;
  }

  base::test::TaskEnvironment task_environment{
      base::test::TaskEnvironment::TimeSource::MOCK_TIME};
  ScopedTempDir dir;
};
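
// Note: under TimeSource::MOCK_TIME, Time::Now() only advances when a test
// advances it explicitly (e.g. task_environment.AdvanceClock(...)), which
// the polling tests below rely on to simulate slow test batches.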

// Action that mocks the delegate invoking OnTestFinished on the test
// launcher.
ACTION_P3(OnTestResult, launcher, full_name, status) {
  TestResult result = GenerateTestResult(full_name, status);
  arg0->PostTask(FROM_HERE, BindOnce(&TestLauncher::OnTestFinished,
                                     Unretained(launcher), result));
}

// Same action, taking a fully populated TestResult.
ACTION_P2(OnTestResult, launcher, result) {
  arg0->PostTask(FROM_HERE, BindOnce(&TestLauncher::OnTestFinished,
                                     Unretained(launcher), result));
}
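
// Typical wiring (mirrors the tests below): a launched test is completed by
// posting its result back to the launcher.
//   EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _, _))
//       .WillOnce(OnTestResult(&test_launcher, "Test.firstTest",
//                              TestResult::TEST_SUCCESS));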

// A test and a disabled test cannot share a name.
TEST_F(TestLauncherTest, TestNameSharedWithDisabledTest) {
  AddMockedTests("Test", {"firstTest", "DISABLED_firstTest"});
  SetUpExpectCalls();
  EXPECT_FALSE(test_launcher.Run(command_line.get()));
}

// A test case and a disabled test case cannot share a name.
TEST_F(TestLauncherTest, TestNameSharedWithDisabledTestCase) {
  AddMockedTests("DISABLED_Test", {"firstTest"});
  AddMockedTests("Test", {"firstTest"});
  SetUpExpectCalls();
  EXPECT_FALSE(test_launcher.Run(command_line.get()));
}

// Compiled tests should not contain an orphaned pre test.
TEST_F(TestLauncherTest, OrphanPreTest) {
  AddMockedTests("Test", {"firstTest", "PRE_firstTestOrphan"});
  SetUpExpectCalls();
  EXPECT_FALSE(test_launcher.Run(command_line.get()));
}

// When there are no tests, the delegate should not be called.
TEST_F(TestLauncherTest, EmptyTestSetPasses) {
  SetUpExpectCalls();
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _, _)).Times(0);
  EXPECT_TRUE(test_launcher.Run(command_line.get()));
}

// Test TestLauncher filters DISABLED tests by default.
TEST_F(TestLauncherTest, FilterDisabledTestByDefault) {
  AddMockedTests("DISABLED_TestDisabled", {"firstTest"});
  AddMockedTests("Test",
                 {"firstTest", "secondTest", "DISABLED_firstTestDisabled"});
  SetUpExpectCalls();
  std::vector<std::string> tests_names = {"Test.firstTest", "Test.secondTest"};
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
                                 _,
                                 testing::ElementsAreArray(tests_names.cbegin(),
                                                           tests_names.cend()),
                                 _, _))
      .WillOnce(::testing::DoAll(OnTestResult(&test_launcher, "Test.firstTest",
                                              TestResult::TEST_SUCCESS),
                                 OnTestResult(&test_launcher, "Test.secondTest",
                                              TestResult::TEST_SUCCESS)));
  EXPECT_TRUE(test_launcher.Run(command_line.get()));
}

// Test TestLauncher reorders PRE_ tests to run before the test they precede.
TEST_F(TestLauncherTest, ReorderPreTests) {
  AddMockedTests("Test", {"firstTest", "PRE_PRE_firstTest", "PRE_firstTest"});
  SetUpExpectCalls();
  std::vector<std::string> tests_names = {
      "Test.PRE_PRE_firstTest", "Test.PRE_firstTest", "Test.firstTest"};
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
                                 _,
                                 testing::ElementsAreArray(tests_names.cbegin(),
                                                           tests_names.cend()),
                                 _, _))
      .Times(1);
  EXPECT_TRUE(test_launcher.Run(command_line.get()));
}

// Test TestLauncher "gtest_filter" switch.
TEST_F(TestLauncherTest, UsingCommandLineFilter) {
  AddMockedTests("Test",
                 {"firstTest", "secondTest", "DISABLED_firstTestDisabled"});
  SetUpExpectCalls();
  command_line->AppendSwitchASCII("gtest_filter", "Test*.first*");
  std::vector<std::string> tests_names = {"Test.firstTest"};
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
                                 _,
                                 testing::ElementsAreArray(tests_names.cbegin(),
                                                           tests_names.cend()),
                                 _, _))
      .WillOnce(OnTestResult(&test_launcher, "Test.firstTest",
                             TestResult::TEST_SUCCESS));
  EXPECT_TRUE(test_launcher.Run(command_line.get()));
}

// Test that the TestLauncher gtest filter also includes PRE_ tests.
TEST_F(TestLauncherTest, FilterIncludePreTest) {
  AddMockedTests("Test", {"firstTest", "secondTest", "PRE_firstTest"});
  SetUpExpectCalls();
  command_line->AppendSwitchASCII("gtest_filter", "Test.firstTest");
  std::vector<std::string> tests_names = {"Test.PRE_firstTest",
                                          "Test.firstTest"};
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
                                 _,
                                 testing::ElementsAreArray(tests_names.cbegin(),
                                                           tests_names.cend()),
                                 _, _))
      .Times(1);
  EXPECT_TRUE(test_launcher.Run(command_line.get()));
}

// Test the TestLauncher gtest filter works when both include and exclude
// filters are defined.
TEST_F(TestLauncherTest, FilterIncludeExclude) {
  AddMockedTests("Test", {"firstTest", "PRE_firstTest", "secondTest",
                          "PRE_secondTest", "thirdTest", "DISABLED_Disable1"});
  SetUpExpectCalls();
  command_line->AppendSwitchASCII("gtest_filter",
                                  "Test.*Test:-Test.secondTest");
  std::vector<std::string> tests_names = {
      "Test.PRE_firstTest",
      "Test.firstTest",
      "Test.thirdTest",
  };
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
                                 _,
                                 testing::ElementsAreArray(tests_names.cbegin(),
                                                           tests_names.cend()),
                                 _, _))
      .Times(1);
  EXPECT_TRUE(test_launcher.Run(command_line.get()));
}

// Test TestLauncher "gtest_repeat" switch.
TEST_F(TestLauncherTest, RepeatTest) {
  AddMockedTests("Test", {"firstTest"});
  SetUpExpectCalls();
  // Without --gtest_break_on_failure, every requested iteration runs.
  command_line->AppendSwitchASCII("gtest_repeat", "2");
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _, _))
      .Times(2)
      .WillRepeatedly(::testing::DoAll(OnTestResult(
          &test_launcher, "Test.firstTest", TestResult::TEST_SUCCESS)));
  EXPECT_TRUE(test_launcher.Run(command_line.get()));
}

// Test TestLauncher --gtest_repeat and --gtest_break_on_failure.
TEST_F(TestLauncherTest, RunningMultipleIterationsUntilFailure) {
  AddMockedTests("Test", {"firstTest"});
  SetUpExpectCalls();
  // With --gtest_break_on_failure, iterations stop at the first failure, so
  // only three of the four requested repeats run.
  command_line->AppendSwitchASCII("gtest_repeat", "4");
  command_line->AppendSwitch("gtest_break_on_failure");
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _, _))
      .WillOnce(::testing::DoAll(OnTestResult(&test_launcher, "Test.firstTest",
                                              TestResult::TEST_SUCCESS)))
      .WillOnce(::testing::DoAll(OnTestResult(&test_launcher, "Test.firstTest",
                                              TestResult::TEST_SUCCESS)))
      .WillOnce(::testing::DoAll(OnTestResult(&test_launcher, "Test.firstTest",
                                              TestResult::TEST_FAILURE)));
  EXPECT_FALSE(test_launcher.Run(command_line.get()));
}

// Test that TestLauncher retries a failed test and stops on success.
TEST_F(TestLauncherTest, SuccessOnRetryTests) {
  AddMockedTests("Test", {"firstTest"});
  SetUpExpectCalls();
  command_line->AppendSwitchASCII("test-launcher-retry-limit", "2");
  std::vector<std::string> tests_names = {"Test.firstTest"};
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
                                 _,
                                 testing::ElementsAreArray(tests_names.cbegin(),
                                                           tests_names.cend()),
                                 _, _))
      .WillOnce(OnTestResult(&test_launcher, "Test.firstTest",
                             TestResult::TEST_FAILURE))
      .WillOnce(OnTestResult(&test_launcher, "Test.firstTest",
                             TestResult::TEST_SUCCESS));
  EXPECT_TRUE(test_launcher.Run(command_line.get()));
}

// Test that TestLauncher retries a continuously failing test up to the retry
// limit before eventually failing and returning false.
TEST_F(TestLauncherTest, FailOnRetryTests) {
  AddMockedTests("Test", {"firstTest"});
  SetUpExpectCalls();
  command_line->AppendSwitchASCII("test-launcher-retry-limit", "2");
  std::vector<std::string> tests_names = {"Test.firstTest"};
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
                                 _,
                                 testing::ElementsAreArray(tests_names.cbegin(),
                                                           tests_names.cend()),
                                 _, _))
      .Times(3)
      .WillRepeatedly(OnTestResult(&test_launcher, "Test.firstTest",
                                   TestResult::TEST_FAILURE));
  EXPECT_FALSE(test_launcher.Run(command_line.get()));
}

// Test TestLauncher should retry all PRE_ chained tests.
TEST_F(TestLauncherTest, RetryPreTests) {
  AddMockedTests("Test", {"firstTest", "PRE_PRE_firstTest", "PRE_firstTest"});
  SetUpExpectCalls();
  command_line->AppendSwitchASCII("test-launcher-retry-limit", "2");
  std::vector<TestResult> results = {
      GenerateTestResult("Test.PRE_PRE_firstTest", TestResult::TEST_SUCCESS),
      GenerateTestResult("Test.PRE_firstTest", TestResult::TEST_FAILURE),
      GenerateTestResult("Test.firstTest", TestResult::TEST_SUCCESS)};
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _, _))
      .WillOnce(::testing::DoAll(
          OnTestResult(&test_launcher, "Test.PRE_PRE_firstTest",
                       TestResult::TEST_SUCCESS),
          OnTestResult(&test_launcher, "Test.PRE_firstTest",
                       TestResult::TEST_FAILURE),
          OnTestResult(&test_launcher, "Test.firstTest",
                       TestResult::TEST_SUCCESS)));
  std::vector<std::string> tests_names = {"Test.PRE_PRE_firstTest"};
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
                                 _,
                                 testing::ElementsAreArray(tests_names.cbegin(),
                                                           tests_names.cend()),
                                 _, _))
      .WillOnce(OnTestResult(&test_launcher, "Test.PRE_PRE_firstTest",
                             TestResult::TEST_SUCCESS));
  tests_names = {"Test.PRE_firstTest"};
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
                                 _,
                                 testing::ElementsAreArray(tests_names.cbegin(),
                                                           tests_names.cend()),
                                 _, _))
      .WillOnce(OnTestResult(&test_launcher, "Test.PRE_firstTest",
                             TestResult::TEST_SUCCESS));
  tests_names = {"Test.firstTest"};
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
                                 _,
                                 testing::ElementsAreArray(tests_names.cbegin(),
                                                           tests_names.cend()),
                                 _, _))
      .WillOnce(OnTestResult(&test_launcher, "Test.firstTest",
                             TestResult::TEST_SUCCESS));
  EXPECT_TRUE(test_launcher.Run(command_line.get()));
}

// Test TestLauncher should fail if a PRE_ test fails but its non-PRE test
// passes.
TEST_F(TestLauncherTest, PreTestFailure) {
  AddMockedTests("Test", {"FirstTest", "PRE_FirstTest"});
  SetUpExpectCalls();
  std::vector<TestResult> results = {
      GenerateTestResult("Test.PRE_FirstTest", TestResult::TEST_FAILURE),
      GenerateTestResult("Test.FirstTest", TestResult::TEST_SUCCESS)};
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _, _))
      .WillOnce(
          ::testing::DoAll(OnTestResult(&test_launcher, "Test.PRE_FirstTest",
                                        TestResult::TEST_FAILURE),
                           OnTestResult(&test_launcher, "Test.FirstTest",
                                        TestResult::TEST_SUCCESS)));
  EXPECT_CALL(test_launcher,
              LaunchChildGTestProcess(
                  _, testing::ElementsAre("Test.PRE_FirstTest"), _, _))
      .WillOnce(OnTestResult(&test_launcher, "Test.PRE_FirstTest",
                             TestResult::TEST_FAILURE));
  EXPECT_CALL(
      test_launcher,
      LaunchChildGTestProcess(_, testing::ElementsAre("Test.FirstTest"), _, _))
      .WillOnce(OnTestResult(&test_launcher, "Test.FirstTest",
                             TestResult::TEST_SUCCESS));
  EXPECT_FALSE(test_launcher.Run(command_line.get()));
}

// Test the TestLauncher switch to also run disabled tests.
TEST_F(TestLauncherTest, RunDisabledTests) {
  AddMockedTests("DISABLED_TestDisabled", {"firstTest"});
  AddMockedTests("Test",
                 {"firstTest", "secondTest", "DISABLED_firstTestDisabled"});
  SetUpExpectCalls();
  command_line->AppendSwitch("gtest_also_run_disabled_tests");
  command_line->AppendSwitchASCII("gtest_filter", "Test*.first*");
  std::vector<std::string> tests_names = {"DISABLED_TestDisabled.firstTest",
                                          "Test.firstTest",
                                          "Test.DISABLED_firstTestDisabled"};
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
                                 _,
                                 testing::ElementsAreArray(tests_names.cbegin(),
                                                           tests_names.cend()),
                                 _, _))
      .WillOnce(::testing::DoAll(
          OnTestResult(&test_launcher, "Test.firstTest",
                       TestResult::TEST_SUCCESS),
          OnTestResult(&test_launcher, "DISABLED_TestDisabled.firstTest",
                       TestResult::TEST_SUCCESS),
          OnTestResult(&test_launcher, "Test.DISABLED_firstTestDisabled",
                       TestResult::TEST_SUCCESS)));
  EXPECT_TRUE(test_launcher.Run(command_line.get()));
}

// Test TestLauncher does not run tests excluded by a negative filter under
// testing/buildbot/filters.
TEST_F(TestLauncherTest, DoesRunFilteredTests) {
  AddMockedTests("Test", {"firstTest", "secondTest"});
  SetUpExpectCalls();
  ASSERT_TRUE(dir.CreateUniqueTempDir());
  // The filter file content is "-Test.firstTest".
  FilePath path = CreateFilterFile();
  command_line->AppendSwitchPath("test-launcher-filter-file", path);
  std::vector<std::string> tests_names = {"Test.secondTest"};
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
                                 _,
                                 testing::ElementsAreArray(tests_names.cbegin(),
                                                           tests_names.cend()),
                                 _, _))
      .WillOnce(::testing::DoAll(OnTestResult(&test_launcher, "Test.secondTest",
                                              TestResult::TEST_SUCCESS)));
  EXPECT_TRUE(test_launcher.Run(command_line.get()));
}

// Test TestLauncher runs disabled tests and tests excluded by a negative
// filter under testing/buildbot/filters when gtest_also_run_disabled_tests
// is set.
TEST_F(TestLauncherTest, RunDisabledTestsWithFilteredTests) {
  AddMockedTests("DISABLED_TestDisabled", {"firstTest"});
  AddMockedTests("Test", {"firstTest", "DISABLED_firstTestDisabled"});
  SetUpExpectCalls();
  ASSERT_TRUE(dir.CreateUniqueTempDir());
  // The filter file content is "-Test.firstTest", but Test.firstTest will
  // still run because gtest_also_run_disabled_tests is set.
  FilePath path = CreateFilterFile();
  command_line->AppendSwitchPath("test-launcher-filter-file", path);
  command_line->AppendSwitch("gtest_also_run_disabled_tests");
  std::vector<std::string> tests_names = {"DISABLED_TestDisabled.firstTest",
                                          "Test.firstTest",
                                          "Test.DISABLED_firstTestDisabled"};
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
                                 _,
                                 testing::ElementsAreArray(tests_names.cbegin(),
                                                           tests_names.cend()),
                                 _, _))
      .WillOnce(::testing::DoAll(
          OnTestResult(&test_launcher, "Test.firstTest",
                       TestResult::TEST_SUCCESS),
          OnTestResult(&test_launcher, "DISABLED_TestDisabled.firstTest",
                       TestResult::TEST_SUCCESS),
          OnTestResult(&test_launcher, "Test.DISABLED_firstTestDisabled",
                       TestResult::TEST_SUCCESS)));
  EXPECT_TRUE(test_launcher.Run(command_line.get()));
}

// A disabled test should also disable all of its PRE_ tests.
TEST_F(TestLauncherTest, DisablePreTests) {
  AddMockedTests("Test", {"DISABLED_firstTest", "PRE_PRE_firstTest",
                          "PRE_firstTest", "secondTest"});
  SetUpExpectCalls();
  std::vector<std::string> tests_names = {"Test.secondTest"};
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
                                 _,
                                 testing::ElementsAreArray(tests_names.cbegin(),
                                                           tests_names.cend()),
                                 _, _))
      .Times(1);
  EXPECT_TRUE(test_launcher.Run(command_line.get()));
}

// Tests fail if they produce too much output.
TEST_F(TestLauncherTest, ExcessiveOutput) {
  AddMockedTests("Test", {"firstTest"});
  SetUpExpectCalls();
  command_line->AppendSwitchASCII("test-launcher-retry-limit", "0");
  command_line->AppendSwitchASCII("test-launcher-print-test-stdio", "never");
  TestResult test_result =
      GenerateTestResult("Test.firstTest", TestResult::TEST_SUCCESS,
                         Milliseconds(30), std::string(500000, 'a'));
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _, _))
      .WillOnce(OnTestResult(&test_launcher, test_result));
  EXPECT_FALSE(test_launcher.Run(command_line.get()));
}

// Use a command-line switch to allow more output.
TEST_F(TestLauncherTest, OutputLimitSwitch) {
  AddMockedTests("Test", {"firstTest"});
  SetUpExpectCalls();
  command_line->AppendSwitchASCII("test-launcher-print-test-stdio", "never");
  command_line->AppendSwitchASCII("test-launcher-output-bytes-limit", "800000");
  TestResult test_result =
      GenerateTestResult("Test.firstTest", TestResult::TEST_SUCCESS,
                         Milliseconds(30), std::string(500000, 'a'));
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _, _))
      .WillOnce(OnTestResult(&test_launcher, test_result));
  EXPECT_TRUE(test_launcher.Run(command_line.get()));
}

// The shard index must be less than the total number of shards.
TEST_F(TestLauncherTest, FaultyShardSetup) {
  command_line->AppendSwitchASCII("test-launcher-total-shards", "2");
  command_line->AppendSwitchASCII("test-launcher-shard-index", "2");
  EXPECT_FALSE(test_launcher.Run(command_line.get()));
}

// Test redirecting child process output with
// --test-launcher-print-test-stdio=always.
TEST_F(TestLauncherTest, RedirectStdio) {
  AddMockedTests("Test", {"firstTest"});
  SetUpExpectCalls();
  command_line->AppendSwitchASCII("test-launcher-print-test-stdio", "always");
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _, _))
      .WillOnce(OnTestResult(&test_launcher, "Test.firstTest",
                             TestResult::TEST_SUCCESS));
  EXPECT_TRUE(test_launcher.Run(command_line.get()));
}

// Sharding should be stable and always select the same tests.
TEST_F(TestLauncherTest, StableSharding) {
  AddMockedTests("Test", {"firstTest", "secondTest", "thirdTest"});
  SetUpExpectCalls();
  command_line->AppendSwitchASCII("test-launcher-total-shards", "2");
  command_line->AppendSwitchASCII("test-launcher-shard-index", "0");
  command_line->AppendSwitch("test-launcher-stable-sharding");
  std::vector<std::string> tests_names = {"Test.firstTest", "Test.secondTest"};
  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(
                                 _,
                                 testing::ElementsAreArray(tests_names.cbegin(),
                                                           tests_names.cend()),
                                 _, _))
      .WillOnce(::testing::DoAll(OnTestResult(&test_launcher, "Test.firstTest",
                                              TestResult::TEST_SUCCESS),
                                 OnTestResult(&test_launcher, "Test.secondTest",
                                              TestResult::TEST_SUCCESS)));
  EXPECT_TRUE(test_launcher.Run(command_line.get()));
}

// Validate |iteration_data| contains one test result matching |test_result|.
bool ValidateTestResultObject(const Value::Dict& iteration_data,
                              TestResult& test_result) {
  const Value::List* results = iteration_data.FindList(test_result.full_name);
  if (!results) {
    ADD_FAILURE() << "Results not found";
    return false;
  }
  if (1u != results->size()) {
    ADD_FAILURE() << "Expected one result, actual: " << results->size();
    return false;
  }
  const Value::Dict* dict = (*results)[0].GetIfDict();
  if (!dict) {
    ADD_FAILURE() << "Unexpected type";
    return false;
  }

  using test_launcher_utils::ValidateKeyValue;
  bool result = ValidateKeyValue(*dict, "elapsed_time_ms",
                                 test_result.elapsed_time.InMilliseconds());

  // Note: "losless_snippet" (sic) is the actual key emitted in the summary.
  if (!dict->FindBool("losless_snippet").value_or(false)) {
    ADD_FAILURE() << "losless_snippet expected to be true";
    result = false;
  }

  result &=
      ValidateKeyValue(*dict, "output_snippet", test_result.output_snippet);

  std::string base64_output_snippet;
  Base64Encode(test_result.output_snippet, &base64_output_snippet);
  result &=
      ValidateKeyValue(*dict, "output_snippet_base64", base64_output_snippet);

  result &= ValidateKeyValue(*dict, "status", test_result.StatusAsString());

  const Value::List* list = dict->FindList("result_parts");
  if (!list || test_result.test_result_parts.size() != list->size()) {
    ADD_FAILURE() << "test_result_parts count is not valid";
    return false;
  }

  for (unsigned i = 0; i < test_result.test_result_parts.size(); i++) {
    TestResultPart result_part = test_result.test_result_parts.at(i);
    const Value::Dict& part_dict = (*list)[i].GetDict();

    result &= ValidateKeyValue(part_dict, "type", result_part.TypeAsString());
    result &= ValidateKeyValue(part_dict, "file", result_part.file_name);
    result &= ValidateKeyValue(part_dict, "line", result_part.line_number);
    result &= ValidateKeyValue(part_dict, "summary", result_part.summary);
    result &= ValidateKeyValue(part_dict, "message", result_part.message);
  }
  return result;
}
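
// For reference, a single result entry validated above has this rough shape
// (illustrative and abbreviated, not a verbatim dump):
//   "Test.firstTest": [ {
//     "elapsed_time_ms": 30,
//     "losless_snippet": true,
//     "output_snippet": "output",
//     "output_snippet_base64": "b3V0cHV0",
//     "status": "SUCCESS",
//     "result_parts": [ ... ]
//   } ]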

// Validate that the |root| dictionary contains a list with |values| at |key|.
bool ValidateStringList(const absl::optional<Value::Dict>& root,
                        const std::string& key,
                        std::vector<const char*> values) {
  const Value::List* list = root->FindList(key);
  if (!list) {
    ADD_FAILURE() << "|root| has no list_value in key: " << key;
    return false;
  }

  if (values.size() != list->size()) {
    ADD_FAILURE() << "expected size: " << values.size()
                  << ", actual size:" << list->size();
    return false;
  }

  for (unsigned i = 0; i < values.size(); i++) {
    // Fail if the entry is not a string or does not match the expectation.
    if (!(*list)[i].is_string() || (*list)[i].GetString() != values.at(i)) {
      ADD_FAILURE() << "Expected list values do not match actual list";
      return false;
    }
  }
  return true;
}

// Validate that TestLauncher outputs the correct JSON summary file.
TEST_F(TestLauncherTest, JsonSummary) {
  AddMockedTests("DISABLED_TestDisabled", {"firstTest"});
  AddMockedTests("Test",
                 {"firstTest", "secondTest", "DISABLED_firstTestDisabled"});
  SetUpExpectCalls();

  ASSERT_TRUE(dir.CreateUniqueTempDir());
  FilePath path = dir.GetPath().AppendASCII("SaveSummaryResult.json");
  command_line->AppendSwitchPath("test-launcher-summary-output", path);
  command_line->AppendSwitchASCII("gtest_repeat", "2");
  // Force the repeats to run sequentially.
  command_line->AppendSwitch("gtest_break_on_failure");

  // Set up results to be returned by the test launcher delegate.
  TestResult first_result =
      GenerateTestResult("Test.firstTest", TestResult::TEST_SUCCESS,
                         Milliseconds(30), "output_first");
  first_result.test_result_parts.push_back(GenerateTestResultPart(
      TestResultPart::kSuccess, "TestFile", 110, "summary", "message"));
  TestResult second_result =
      GenerateTestResult("Test.secondTest", TestResult::TEST_SUCCESS,
                         Milliseconds(50), "output_second");

  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _, _))
      .Times(2)
      .WillRepeatedly(
          ::testing::DoAll(OnTestResult(&test_launcher, first_result),
                           OnTestResult(&test_launcher, second_result)));
  EXPECT_TRUE(test_launcher.Run(command_line.get()));

  // Validate the resulting JSON file is the expected output.
  absl::optional<Value::Dict> root = test_launcher_utils::ReadSummary(path);
  ASSERT_TRUE(root);
  EXPECT_TRUE(
      ValidateStringList(root, "all_tests",
                         {"Test.firstTest", "Test.firstTestDisabled",
                          "Test.secondTest", "TestDisabled.firstTest"}));
  EXPECT_TRUE(
      ValidateStringList(root, "disabled_tests",
                         {"Test.firstTestDisabled", "TestDisabled.firstTest"}));

  const Value::Dict* dict = root->FindDict("test_locations");
  ASSERT_TRUE(dict);
  EXPECT_EQ(2u, dict->size());
  ASSERT_TRUE(test_launcher_utils::ValidateTestLocation(*dict, "Test.firstTest",
                                                        "File", 100));
  ASSERT_TRUE(test_launcher_utils::ValidateTestLocation(
      *dict, "Test.secondTest", "File", 100));

  const Value::List* list = root->FindList("per_iteration_data");
  ASSERT_TRUE(list);
  ASSERT_EQ(2u, list->size());
  for (const auto& iteration_val : *list) {
    ASSERT_TRUE(iteration_val.is_dict());
    const base::Value::Dict& iteration_dict = iteration_val.GetDict();
    EXPECT_EQ(2u, iteration_dict.size());
    EXPECT_TRUE(ValidateTestResultObject(iteration_dict, first_result));
    EXPECT_TRUE(ValidateTestResultObject(iteration_dict, second_result));
  }
}

// Validate that TestLauncher outputs the correct JSON summary file when
// running disabled tests.
TEST_F(TestLauncherTest, JsonSummaryWithDisabledTests) {
  AddMockedTests("Test", {"DISABLED_Test"});
  SetUpExpectCalls();

  ASSERT_TRUE(dir.CreateUniqueTempDir());
  FilePath path = dir.GetPath().AppendASCII("SaveSummaryResult.json");
  command_line->AppendSwitchPath("test-launcher-summary-output", path);
  command_line->AppendSwitch("gtest_also_run_disabled_tests");

  // Set up results to be returned by the test launcher delegate.
  TestResult test_result =
      GenerateTestResult("Test.DISABLED_Test", TestResult::TEST_SUCCESS,
                         Milliseconds(50), "output_second");

  EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _, _))
      .WillOnce(OnTestResult(&test_launcher, test_result));
  EXPECT_TRUE(test_launcher.Run(command_line.get()));

  // Validate the resulting JSON file is the expected output.
  absl::optional<Value::Dict> root = test_launcher_utils::ReadSummary(path);
  ASSERT_TRUE(root);
  Value::Dict* dict = root->FindDict("test_locations");
  ASSERT_TRUE(dict);
  EXPECT_EQ(1u, dict->size());
  EXPECT_TRUE(test_launcher_utils::ValidateTestLocation(
      *dict, "Test.DISABLED_Test", "File", 100));

  Value::List* list = root->FindList("per_iteration_data");
  ASSERT_TRUE(list);
  ASSERT_EQ(1u, list->size());

  Value::Dict* iteration_dict = (*list)[0].GetIfDict();
  ASSERT_TRUE(iteration_dict);
  EXPECT_EQ(1u, iteration_dict->size());
  // We expect the result to be stripped of the DISABLED_ prefix.
  test_result.full_name = "Test.Test";
  EXPECT_TRUE(ValidateTestResultObject(*iteration_dict, test_result));
}

// Matches a std::tuple<const FilePath&, const FilePath&> where the first
// item is a parent of the second.
MATCHER(DirectoryIsParentOf, "") {
  return std::get<0>(arg).IsParent(std::get<1>(arg));
}
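
// Usage sketch (mirrors the test below): the matcher is applied to two
// FilePath arguments of a call, selected via ::testing::Args.
//   EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _, _))
//       .With(::testing::Args<2, 3>(DirectoryIsParentOf()));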

// Test that the launcher creates a dedicated temp dir for a child proc and
// cleans it up.
TEST_F(TestLauncherTest, TestChildTempDir) {
  AddMockedTests("Test", {"firstTest"});
  SetUpExpectCalls();
  ON_CALL(test_launcher, LaunchChildGTestProcess(_, _, _, _))
      .WillByDefault(OnTestResult(&test_launcher, "Test.firstTest",
                                  TestResult::TEST_SUCCESS));

  FilePath task_temp;
  if (TestLauncher::SupportsPerChildTempDirs()) {
    // Platforms that support child proc temp dirs must get a |child_temp_dir|
    // arg that exists and is within |task_temp_dir|.
    EXPECT_CALL(
        test_launcher,
        LaunchChildGTestProcess(
            _, _, _, ::testing::ResultOf(DirectoryExists, ::testing::IsTrue())))
        .With(::testing::Args<2, 3>(DirectoryIsParentOf()))
        .WillOnce(::testing::SaveArg<2>(&task_temp));
  } else {
    // Platforms that don't support child proc temp dirs must get an empty
    // |child_temp_dir| arg.
    EXPECT_CALL(test_launcher, LaunchChildGTestProcess(_, _, _, FilePath()))
        .WillOnce(::testing::SaveArg<2>(&task_temp));
  }

  EXPECT_TRUE(test_launcher.Run(command_line.get()));

  // The task's temporary directory should have been deleted.
  EXPECT_FALSE(DirectoryExists(task_temp));
}

#if BUILDFLAG(IS_FUCHSIA)
// Verifies that test processes have /data, /cache and /tmp available.
TEST_F(TestLauncherTest, ProvidesDataCacheAndTmpDirs) {
  EXPECT_TRUE(base::DirectoryExists(base::FilePath("/data")));
  EXPECT_TRUE(base::DirectoryExists(base::FilePath("/cache")));
  EXPECT_TRUE(base::DirectoryExists(base::FilePath("/tmp")));
}
#endif  // BUILDFLAG(IS_FUCHSIA)

// Unit tests to validate the UnitTestLauncherDelegate implementation.
class UnitTestLauncherDelegateTester : public testing::Test {
 protected:
  DefaultUnitTestPlatformDelegate defaultPlatform;
  ScopedTempDir dir;

 private:
  base::test::TaskEnvironment task_environment;
};

// Validate that the delegate produces the correct command line.
TEST_F(UnitTestLauncherDelegateTester, GetCommandLine) {
  UnitTestLauncherDelegate launcher_delegate(&defaultPlatform, 10u, true,
                                             DoNothing());
  TestLauncherDelegate* delegate_ptr = &launcher_delegate;

  std::vector<std::string> test_names(5, "Tests");
  base::FilePath temp_dir;
  base::FilePath result_file;
  CreateNewTempDirectory(FilePath::StringType(), &temp_dir);

  CommandLine cmd_line =
      delegate_ptr->GetCommandLine(test_names, temp_dir, &result_file);
  EXPECT_TRUE(cmd_line.HasSwitch("single-process-tests"));
  EXPECT_EQ(cmd_line.GetSwitchValuePath("test-launcher-output"), result_file);

  const int size = 2048;
  std::string content;
  ASSERT_TRUE(ReadFileToStringWithMaxSize(
      cmd_line.GetSwitchValuePath("gtest_flagfile"), &content, size));
  EXPECT_EQ(content.find("--gtest_filter="), 0u);
  base::ReplaceSubstringsAfterOffset(&content, 0, "--gtest_filter=", "");
  std::vector<std::string> gtest_filter_tests =
      SplitString(content, ":", TRIM_WHITESPACE, SPLIT_WANT_ALL);
  ASSERT_EQ(gtest_filter_tests.size(), test_names.size());
  for (unsigned i = 0; i < test_names.size(); i++) {
    EXPECT_EQ(gtest_filter_tests.at(i), test_names.at(i));
  }
}

// Verify that a result watcher can stop polling early when all tests
// complete.
TEST_F(ResultWatcherTest, PollCompletesQuickly) {
  ASSERT_TRUE(dir.CreateUniqueTempDir());
  FilePath result_file = CreateResultFile();
  ASSERT_TRUE(AppendToFile(
      result_file,
      StrCat({"    <x-teststart name=\"B\" classname=\"A\" timestamp=\"",
              TimeToISO8601(Time::Now()).c_str(), "\" />\n",
              "    <testcase name=\"B\" status=\"run\" time=\"0.500\" "
              "classname=\"A\" timestamp=\"",
              TimeToISO8601(Time::Now()).c_str(), "\">\n", "    </testcase>\n",
              "    <x-teststart name=\"C\" classname=\"A\" timestamp=\"",
              TimeToISO8601(Time::Now() + Milliseconds(500)).c_str(), "\" />\n",
              "    <testcase name=\"C\" status=\"run\" time=\"0.500\" "
              "classname=\"A\" timestamp=\"",
              TimeToISO8601(Time::Now() + Milliseconds(500)).c_str(), "\">\n",
              "    </testcase>\n", "  </testsuite>\n", "</testsuites>\n"})));

  MockResultWatcher result_watcher(result_file, 2);
  EXPECT_CALL(result_watcher, WaitWithTimeout(_))
      .WillOnce(DoAll(InvokeWithoutArgs([&]() {
                        task_environment.AdvanceClock(Milliseconds(1500));
                      }),
                      Return(true)));

  Time start = Time::Now();
  ASSERT_TRUE(result_watcher.PollUntilDone(Seconds(45)));
  ASSERT_EQ(Time::Now() - start, Milliseconds(1500));
}

// Verify that a result watcher repeatedly checks the file for a batch of slow
// tests. Each test completes in 40s, which is just under the timeout of 45s.
TEST_F(ResultWatcherTest, PollCompletesSlowly) {
  ASSERT_TRUE(dir.CreateUniqueTempDir());
  FilePath result_file = CreateResultFile();
  ASSERT_TRUE(AppendToFile(
      result_file,
      StrCat({"    <x-teststart name=\"B\" classname=\"A\" timestamp=\"",
              TimeToISO8601(Time::Now()).c_str(), "\" />\n"})));

  MockResultWatcher result_watcher(result_file, 10);
  size_t checks = 0;
  bool done = false;
  EXPECT_CALL(result_watcher, WaitWithTimeout(_))
      .Times(10)
      .WillRepeatedly(
          DoAll(Invoke([&](TimeDelta timeout) {
                  task_environment.AdvanceClock(timeout);
                  // Append a result with "time" (duration) as 40.000s and
                  // "timestamp" (test start) as `Now()` - 45s.
                  AppendToFile(
                      result_file,
                      StrCat({"    <testcase name=\"B\" status=\"run\" "
                              "time=\"40.000\" classname=\"A\" timestamp=\"",
                              TimeToISO8601(Time::Now() - Seconds(45)).c_str(),
                              "\">\n", "    </testcase>\n"}));
                  checks++;
                  if (checks == 10) {
                    AppendToFile(result_file,
                                 "  </testsuite>\n"
                                 "</testsuites>\n");
                    done = true;
                  } else {
                    // Append a preliminary result for the next test that
                    // started when the last test completed (i.e., `Now()` - 45s
                    // + 40s).
                    AppendToFile(
                        result_file,
                        StrCat({"    <x-teststart name=\"B\" classname=\"A\" "
                                "timestamp=\"",
                                TimeToISO8601(Time::Now() - Seconds(5)).c_str(),
                                "\" />\n"}));
                  }
                }),
                ReturnPointee(&done)));

  Time start = Time::Now();
  ASSERT_TRUE(result_watcher.PollUntilDone(Seconds(45)));
  // The first check occurs 45s after the batch starts, so the sequence of
  // events looks like:
  //   00:00 - Test 1 starts
  //   00:40 - Test 1 completes, test 2 starts
  //   00:45 - Check 1 occurs
  //   01:20 - Test 2 completes, test 3 starts
  //   01:25 - Check 2 occurs
  //   02:00 - Test 3 completes, test 4 starts
  //   02:05 - Check 3 occurs
  //   ...
  ASSERT_EQ(Time::Now() - start, Seconds(45 + 40 * 9));
}

// Verify that the result watcher identifies when a test times out.
TEST_F(ResultWatcherTest, PollTimeout) {
  ASSERT_TRUE(dir.CreateUniqueTempDir());
  FilePath result_file = CreateResultFile();
  ASSERT_TRUE(AppendToFile(
      result_file,
      StrCat({"    <x-teststart name=\"B\" classname=\"A\" timestamp=\"",
              TimeToISO8601(Time::Now()).c_str(), "\" />\n"})));

  MockResultWatcher result_watcher(result_file, 10);
  EXPECT_CALL(result_watcher, WaitWithTimeout(_))
      .Times(2)
      .WillRepeatedly(
          DoAll(Invoke(&task_environment, &test::TaskEnvironment::AdvanceClock),
                Return(false)));

  Time start = Time::Now();
  ASSERT_FALSE(result_watcher.PollUntilDone(Seconds(45)));
  // Include a small grace period.
  ASSERT_EQ(Time::Now() - start, Seconds(45) + TestTimeouts::tiny_timeout());
}

// Verify that the result watcher retries incomplete reads.
TEST_F(ResultWatcherTest, RetryIncompleteResultRead) {
  ASSERT_TRUE(dir.CreateUniqueTempDir());
  FilePath result_file = CreateResultFile();
  // The opening "<summary>" tag is deliberately left unclosed.
  ASSERT_TRUE(AppendToFile(
      result_file,
      StrCat({"    <x-teststart name=\"B\" classname=\"A\" timestamp=\"",
              TimeToISO8601(Time::Now()).c_str(), "\" />\n",
              "    <testcase name=\"B\" status=\"run\" time=\"40.000\" "
              "classname=\"A\" timestamp=\"",
              TimeToISO8601(Time::Now()).c_str(), "\">\n",
              "      <summary>"})));

  MockResultWatcher result_watcher(result_file, 2);
  size_t attempts = 0;
  bool done = false;
  EXPECT_CALL(result_watcher, WaitWithTimeout(_))
      .Times(5)
      .WillRepeatedly(DoAll(Invoke([&](TimeDelta timeout) {
                              task_environment.AdvanceClock(timeout);
                              // Don't bother writing the rest of the file when
                              // this test completes.
                              done = ++attempts >= 5;
                            }),
                            ReturnPointee(&done)));

  Time start = Time::Now();
  ASSERT_TRUE(result_watcher.PollUntilDone(Seconds(45)));
  ASSERT_EQ(Time::Now() - start,
            Seconds(45) + 4 * TestTimeouts::tiny_timeout());
}

// Verify that the result watcher continues polling with the base timeout when
// the clock jumps backward.
TEST_F(ResultWatcherTest, PollWithClockJumpBackward) {
  ASSERT_TRUE(dir.CreateUniqueTempDir());
  FilePath result_file = CreateResultFile();
  // Cannot move the mock time source backward, so write future timestamps into
  // the result file instead.
  Time time_before_change = Time::Now() + Hours(1);
  ASSERT_TRUE(AppendToFile(
      result_file,
      StrCat({"    <x-teststart name=\"B\" classname=\"A\" timestamp=\"",
              TimeToISO8601(time_before_change).c_str(), "\" />\n",
              "    <testcase name=\"B\" status=\"run\" time=\"0.500\" "
              "classname=\"A\" timestamp=\"",
              TimeToISO8601(time_before_change).c_str(), "\">\n",
              "    </testcase>\n",
              "    <x-teststart name=\"C\" classname=\"A\" timestamp=\"",
              TimeToISO8601(time_before_change + Milliseconds(500)).c_str(),
              "\" />\n"})));

  MockResultWatcher result_watcher(result_file, 2);
  EXPECT_CALL(result_watcher, WaitWithTimeout(_))
      .WillOnce(
          DoAll(Invoke(&task_environment, &test::TaskEnvironment::AdvanceClock),
                Return(false)))
      .WillOnce(
          DoAll(Invoke(&task_environment, &test::TaskEnvironment::AdvanceClock),
                Return(true)));

  Time start = Time::Now();
  ASSERT_TRUE(result_watcher.PollUntilDone(Seconds(45)));
  ASSERT_EQ(Time::Now() - start, Seconds(90));
}

// Verify that the result watcher continues polling with the base timeout when
// the clock jumps forward.
TEST_F(ResultWatcherTest, PollWithClockJumpForward) {
  ASSERT_TRUE(dir.CreateUniqueTempDir());
  FilePath result_file = CreateResultFile();
  ASSERT_TRUE(AppendToFile(
      result_file,
      StrCat({"    <x-teststart name=\"B\" classname=\"A\" timestamp=\"",
              TimeToISO8601(Time::Now()).c_str(), "\" />\n",
              "    <testcase name=\"B\" status=\"run\" time=\"0.500\" "
              "classname=\"A\" timestamp=\"",
              TimeToISO8601(Time::Now()).c_str(), "\">\n", "    </testcase>\n",
              "    <x-teststart name=\"C\" classname=\"A\" timestamp=\"",
              TimeToISO8601(Time::Now() + Milliseconds(500)).c_str(),
              "\" />\n"})));
  task_environment.AdvanceClock(Hours(1));

  MockResultWatcher result_watcher(result_file, 2);
  EXPECT_CALL(result_watcher, WaitWithTimeout(_))
      .WillOnce(
          DoAll(Invoke(&task_environment, &test::TaskEnvironment::AdvanceClock),
                Return(false)))
      .WillOnce(
          DoAll(Invoke(&task_environment, &test::TaskEnvironment::AdvanceClock),
                Return(true)));

  Time start = Time::Now();
  ASSERT_TRUE(result_watcher.PollUntilDone(Seconds(45)));
  ASSERT_EQ(Time::Now() - start, Seconds(90));
}

// Validate that the delegate sets the batch size correctly.
TEST_F(UnitTestLauncherDelegateTester, BatchSize) {
  UnitTestLauncherDelegate launcher_delegate(&defaultPlatform, 15u, true,
                                             DoNothing());
  TestLauncherDelegate* delegate_ptr = &launcher_delegate;
  EXPECT_EQ(delegate_ptr->GetBatchSize(), 15u);
}

// The following four tests are disabled as they are meant to run only from
// |RunMockTests| to validate the test launcher output for known results. The
// tests are expected to run in order within the same batch.

// Basic test that passes.
TEST(MockUnitTests, DISABLED_PassTest) {
  ASSERT_TRUE(true);
}
// Basic test that fails.
TEST(MockUnitTests, DISABLED_FailTest) {
  ASSERT_TRUE(false);
}
// Basic test that crashes.
TEST(MockUnitTests, DISABLED_CrashTest) {
  ImmediateCrash();
}
// Basic test that is never reached, due to the preceding crash in the same
// batch.
TEST(MockUnitTests, DISABLED_NoRunTest) {
  ASSERT_TRUE(true);
}

// Use TestLauncher to launch the basic mock unit tests above and validate
// the resulting JSON file.
TEST_F(UnitTestLauncherDelegateTester, RunMockTests) {
  CommandLine command_line(CommandLine::ForCurrentProcess()->GetProgram());
  command_line.AppendSwitchASCII("gtest_filter", "MockUnitTests.DISABLED_*");

  ASSERT_TRUE(dir.CreateUniqueTempDir());
  FilePath path = dir.GetPath().AppendASCII("SaveSummaryResult.json");
  command_line.AppendSwitchPath("test-launcher-summary-output", path);
  command_line.AppendSwitch("gtest_also_run_disabled_tests");
  command_line.AppendSwitchASCII("test-launcher-retry-limit", "0");

  std::string output;
  GetAppOutputAndError(command_line, &output);

  // Validate the resulting JSON file is the expected output.
  absl::optional<Value::Dict> root = test_launcher_utils::ReadSummary(path);
  ASSERT_TRUE(root);

  const Value::Dict* dict = root->FindDict("test_locations");
  ASSERT_TRUE(dict);
  EXPECT_EQ(4u, dict->size());

  EXPECT_TRUE(
      test_launcher_utils::ValidateTestLocations(*dict, "MockUnitTests"));

  const Value::List* list = root->FindList("per_iteration_data");
  ASSERT_TRUE(list);
  ASSERT_EQ(1u, list->size());

  const Value::Dict* iteration_dict = (*list)[0].GetIfDict();
  ASSERT_TRUE(iteration_dict);
  EXPECT_EQ(4u, iteration_dict->size());
  // We expect the results to be stripped of the DISABLED_ prefix.
  EXPECT_TRUE(test_launcher_utils::ValidateTestResult(
      *iteration_dict, "MockUnitTests.PassTest", "SUCCESS", 0u));
  EXPECT_TRUE(test_launcher_utils::ValidateTestResult(
      *iteration_dict, "MockUnitTests.FailTest", "FAILURE", 1u));
  EXPECT_TRUE(test_launcher_utils::ValidateTestResult(
      *iteration_dict, "MockUnitTests.CrashTest", "CRASH", 0u));
  EXPECT_TRUE(test_launcher_utils::ValidateTestResult(
      *iteration_dict, "MockUnitTests.NoRunTest", "NOTRUN", 0u,
      /*have_running_info=*/false));
}
1176 
TEST(ProcessGTestOutputTest, RunMockTests) {
  ScopedTempDir dir;
  CommandLine command_line(CommandLine::ForCurrentProcess()->GetProgram());
  command_line.AppendSwitchASCII("gtest_filter", "MockUnitTests.DISABLED_*");

  ASSERT_TRUE(dir.CreateUniqueTempDir());
  FilePath path = dir.GetPath().AppendASCII("SaveSummaryResult.xml");
  command_line.AppendSwitchPath("test-launcher-output", path);
  command_line.AppendSwitch("gtest_also_run_disabled_tests");
  command_line.AppendSwitch("single-process-tests");
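  // With |single-process-tests|, all four mock tests run in one process, so
  // the crash in DISABLED_CrashTest kills the process mid-run.
  // ProcessGTestOutput is expected to report the run as crashed, and
  // DISABLED_NoRunTest should not appear in the parsed results at all.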

  std::string output;
  GetAppOutputAndError(command_line, &output);

  std::vector<TestResult> test_results;
  bool crashed = false;
  bool have_test_results = ProcessGTestOutput(path, &test_results, &crashed);

  EXPECT_TRUE(have_test_results);
  EXPECT_TRUE(crashed);
  ASSERT_EQ(test_results.size(), 3u);

  EXPECT_EQ(test_results[0].full_name, "MockUnitTests.DISABLED_PassTest");
  EXPECT_EQ(test_results[0].status, TestResult::TEST_SUCCESS);
  EXPECT_EQ(test_results[0].test_result_parts.size(), 0u);
  ASSERT_TRUE(test_results[0].timestamp.has_value());
  EXPECT_GT(*test_results[0].timestamp, Time());
  EXPECT_FALSE(test_results[0].thread_id);
  EXPECT_FALSE(test_results[0].process_num);

  EXPECT_EQ(test_results[1].full_name, "MockUnitTests.DISABLED_FailTest");
  EXPECT_EQ(test_results[1].status, TestResult::TEST_FAILURE);
  EXPECT_EQ(test_results[1].test_result_parts.size(), 1u);
  ASSERT_TRUE(test_results[1].timestamp.has_value());
  EXPECT_GT(*test_results[1].timestamp, Time());

  EXPECT_EQ(test_results[2].full_name, "MockUnitTests.DISABLED_CrashTest");
  EXPECT_EQ(test_results[2].status, TestResult::TEST_CRASH);
  EXPECT_EQ(test_results[2].test_result_parts.size(), 0u);
  ASSERT_TRUE(test_results[2].timestamp.has_value());
  EXPECT_GT(*test_results[2].timestamp, Time());
}

// TODO(crbug.com/1094369): Enable leaked-child checks on other platforms.
#if BUILDFLAG(IS_FUCHSIA)

// Test that leaves a child process running. The test is DISABLED_, so it can
// be launched explicitly by the |LeakedChildProcess| test below.

MULTIPROCESS_TEST_MAIN(LeakChildProcess) {
  while (true)
    PlatformThread::Sleep(base::Seconds(1));
}

TEST(LeakedChildProcessTest, DISABLED_LeakChildProcess) {
  Process child_process = SpawnMultiProcessTestChild(
      "LeakChildProcess", GetMultiProcessTestChildBaseCommandLine(),
      LaunchOptions());
  ASSERT_TRUE(child_process.IsValid());
  // Don't wait for the child process to exit.
}

// Validate that a test that leaks a process causes the batch to exit with an
// error code.
TEST_F(UnitTestLauncherDelegateTester, LeakedChildProcess) {
  CommandLine command_line(CommandLine::ForCurrentProcess()->GetProgram());
  command_line.AppendSwitchASCII(
      "gtest_filter", "LeakedChildProcessTest.DISABLED_LeakChildProcess");

  ASSERT_TRUE(dir.CreateUniqueTempDir());
  FilePath path = dir.GetPath().AppendASCII("SaveSummaryResult.json");
  command_line.AppendSwitchPath("test-launcher-summary-output", path);
  command_line.AppendSwitch("gtest_also_run_disabled_tests");
  command_line.AppendSwitchASCII("test-launcher-retry-limit", "0");

  std::string output;
  int exit_code = 0;
  GetAppOutputWithExitCode(command_line, &output, &exit_code);

  // Validate that we actually ran a test.
  absl::optional<Value::Dict> root = test_launcher_utils::ReadSummary(path);
  ASSERT_TRUE(root);

  Value::Dict* dict = root->FindDict("test_locations");
  ASSERT_TRUE(dict);
  EXPECT_EQ(1u, dict->size());

  EXPECT_TRUE(test_launcher_utils::ValidateTestLocations(
      *dict, "LeakedChildProcessTest"));

  // Validate that the leaked child caused the batch to error out.
  EXPECT_EQ(exit_code, 1);
}
#endif  // BUILDFLAG(IS_FUCHSIA)

// Validate that GetTestOutputSnippet assigns the correct output snippet.
TEST(TestLauncherTools, GetTestOutputSnippetTest) {
  const std::string output =
      "[ RUN      ] TestCase.FirstTest\n"
      "[       OK ] TestCase.FirstTest (0 ms)\n"
      "Post first test output\n"
      "[ RUN      ] TestCase.SecondTest\n"
      "[  FAILED  ] TestCase.SecondTest (0 ms)\n"
      "[ RUN      ] TestCase.ThirdTest\n"
      "[  SKIPPED ] TestCase.ThirdTest (0 ms)\n"
      "Post second test output";
  TestResult result;
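  // GetTestOutputSnippet extracts the portion of |output| belonging to the
  // named test: from its "[ RUN      ]" line through its result line, or, for
  // statuses where output may follow the result line (e.g. failure on exit),
  // up to the next test's "[ RUN      ]" line.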

  // Test the snippet of a successful test.
  result.full_name = "TestCase.FirstTest";
  result.status = TestResult::TEST_SUCCESS;
  EXPECT_EQ(GetTestOutputSnippet(result, output),
            "[ RUN      ] TestCase.FirstTest\n"
            "[       OK ] TestCase.FirstTest (0 ms)\n");

  // The snippet of a failure-on-exit test should include output emitted after
  // the test concluded, but not the output of subsequent tests.
  result.status = TestResult::TEST_FAILURE_ON_EXIT;
  EXPECT_EQ(GetTestOutputSnippet(result, output),
            "[ RUN      ] TestCase.FirstTest\n"
            "[       OK ] TestCase.FirstTest (0 ms)\n"
            "Post first test output\n");

  // Test the snippet of a failed test.
  result.full_name = "TestCase.SecondTest";
  result.status = TestResult::TEST_FAILURE;
  EXPECT_EQ(GetTestOutputSnippet(result, output),
            "[ RUN      ] TestCase.SecondTest\n"
            "[  FAILED  ] TestCase.SecondTest (0 ms)\n");

  // Test the snippet of a skipped test. Note that the status is SUCCESS
  // because the gtest XML format does not distinguish between SUCCESS and
  // SKIPPED.
  result.full_name = "TestCase.ThirdTest";
  result.status = TestResult::TEST_SUCCESS;
  EXPECT_EQ(GetTestOutputSnippet(result, output),
            "[ RUN      ] TestCase.ThirdTest\n"
            "[  SKIPPED ] TestCase.ThirdTest (0 ms)\n");
}

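// Matcher for the death tests below: embeds the captured fatal message in a
// padded snippet and verifies that TruncateSnippetFocused() preserves the
// message's first line while truncating the snippet to the requested length.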
MATCHER(CheckTruncationPreservesMessage, "") {
  // Ensure the inserted message matches the expected pattern.
  constexpr char kExpected[] = R"(FATAL.*message\n)";
  EXPECT_THAT(arg, ::testing::ContainsRegex(kExpected));

  const std::string snippet =
      base::StrCat({"[ RUN      ] SampleTestSuite.SampleTestName\n"
                    "Padding log message added for testing purposes\n"
                    "Padding log message added for testing purposes\n"
                    "Padding log message added for testing purposes\n"
                    "Padding log message added for testing purposes\n"
                    "Padding log message added for testing purposes\n"
                    "Padding log message added for testing purposes\n",
                    arg,
                    "Padding log message added for testing purposes\n"
                    "Padding log message added for testing purposes\n"
                    "Padding log message added for testing purposes\n"
                    "Padding log message added for testing purposes\n"
                    "Padding log message added for testing purposes\n"
                    "Padding log message added for testing purposes\n"});

  // Strip the stack trace off the end of the message by keeping only its
  // first line.
  size_t line_end_pos = arg.find("\n");
  std::string first_line = arg.substr(0, line_end_pos + 1);

  const std::string result = TruncateSnippetFocused(snippet, 300);
  // The fatal message's first line must survive truncation. Note that a plain
  // `> 0` check would also be satisfied by std::string::npos, so test for
  // presence explicitly.
  EXPECT_NE(result.find(first_line), std::string::npos);
  EXPECT_EQ(result.length(), 300UL);
  return true;
}

void MatchesFatalMessagesTest() {
  // Different Chrome test suites have different settings for their logs.
  // E.g. unit tests may not show the process ID (as they are single process),
  // whereas browser tests usually do (as they are multi-process). This
  // affects how log messages are formatted, and hence how the log
  // criticality, i.e. "FATAL", appears in the log message. We test the two
  // extremes: all process IDs and timestamps present, and all absent. We also
  // test the presence/absence of an extra logging prefix.
  {
    // Process ID, Thread ID, Timestamp and Tickcount.
    logging::SetLogItems(true, true, true, true);
    logging::SetLogPrefix(nullptr);
    EXPECT_DEATH_IF_SUPPORTED(LOG(FATAL) << "message",
                              CheckTruncationPreservesMessage());
  }
  {
    // No Process ID, Thread ID, Timestamp or Tickcount.
    logging::SetLogItems(false, false, false, false);
    logging::SetLogPrefix(nullptr);
    EXPECT_DEATH_IF_SUPPORTED(LOG(FATAL) << "message",
                              CheckTruncationPreservesMessage());
  }
  {
    // Process ID, Thread ID, Timestamp and Tickcount, plus a logging prefix.
    logging::SetLogItems(true, true, true, true);
    logging::SetLogPrefix("mylogprefix");
    EXPECT_DEATH_IF_SUPPORTED(LOG(FATAL) << "message",
                              CheckTruncationPreservesMessage());
  }
  {
    // No log items, but with a logging prefix.
    logging::SetLogItems(false, false, false, false);
    logging::SetLogPrefix("mylogprefix");
    EXPECT_DEATH_IF_SUPPORTED(LOG(FATAL) << "message",
                              CheckTruncationPreservesMessage());
  }
}

// Validates that TruncateSnippetFocused correctly identifies fatal messages
// to retain during truncation.
TEST(TestLauncherTools, TruncateSnippetFocusedMatchesFatalMessagesTest) {
  logging::ScopedLoggingSettings scoped_logging_settings;
#if BUILDFLAG(IS_CHROMEOS_ASH)
  scoped_logging_settings.SetLogFormat(logging::LogFormat::LOG_FORMAT_SYSLOG);
#endif
  MatchesFatalMessagesTest();
}

#if BUILDFLAG(IS_CHROMEOS_ASH)
// Validates that TruncateSnippetFocused correctly identifies fatal messages
// to retain during truncation, for ChromeOS Ash.
TEST(TestLauncherTools, TruncateSnippetFocusedMatchesFatalMessagesCrosAshTest) {
  logging::ScopedLoggingSettings scoped_logging_settings;
  scoped_logging_settings.SetLogFormat(logging::LogFormat::LOG_FORMAT_CHROME);
  MatchesFatalMessagesTest();
}
#endif

// Validate that TruncateSnippetFocused truncates snippets correctly,
// regardless of whether fatal messages appear at the start, middle or end of
// the snippet.
TEST(TestLauncherTools, TruncateSnippetFocusedTest) {
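  // In each case below, elided regions are replaced by a
  // "<truncated (N bytes)>" marker, and the truncated snippet is exactly the
  // requested 300 bytes long.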
  // Test where the FATAL message appears at the start of the log.
  const std::string snippet =
      "[ RUN      ] "
      "EndToEndTests/"
      "EndToEndTest.WebTransportSessionUnidirectionalStreamSentEarly/"
      "draft29_QBIC\n"
      "[26219:26368:FATAL:tls_handshaker.cc(293)] 1-RTT secret(s) not set "
      "yet.\n"
      "#0 0x55619ad1fcdb in backtrace "
      "/b/s/w/ir/cache/builder/src/third_party/llvm/compiler-rt/lib/asan/../"
      "sanitizer_common/sanitizer_common_interceptors.inc:4205:13\n"
      "#1 0x5561a6bdf519 in base::debug::CollectStackTrace(void**, unsigned "
      "long) ./../../base/debug/stack_trace_posix.cc:845:39\n"
      "#2 0x5561a69a1293 in StackTrace "
      "./../../base/debug/stack_trace.cc:200:12\n"
      "...\n";
  const std::string result = TruncateSnippetFocused(snippet, 300);
  EXPECT_EQ(
      result,
      "[ RUN      ] EndToEndTests/EndToEndTest.WebTransportSessionUnidirection"
      "alStreamSentEarly/draft29_QBIC\n"
      "[26219:26368:FATAL:tls_handshaker.cc(293)] 1-RTT secret(s) not set "
      "yet.\n"
      "#0 0x55619ad1fcdb in backtrace /b/s/w/ir/cache/bui\n"
      "<truncated (358 bytes)>\n"
      "Trace ./../../base/debug/stack_trace.cc:200:12\n"
      "...\n");
  EXPECT_EQ(result.length(), 300UL);

  // Test where the FATAL message appears in the middle of the log.
  const std::string snippet_two =
      "[ RUN      ] NetworkingPrivateApiTest.CreateSharedNetwork\n"
      "Padding log information added for testing purposes\n"
      "Padding log information added for testing purposes\n"
      "Padding log information added for testing purposes\n"
      "FATAL extensions_unittests[12666:12666]: [managed_network_configuration"
      "_handler_impl.cc(525)] Check failed: !guid_str && !guid_str->empty().\n"
      "#0 0x562f31dba779 base::debug::CollectStackTrace()\n"
      "#1 0x562f31cdf2a3 base::debug::StackTrace::StackTrace()\n"
      "#2 0x562f31cf4380 logging::LogMessage::~LogMessage()\n"
      "#3 0x562f31cf4d3e logging::LogMessage::~LogMessage()\n";
  const std::string result_two = TruncateSnippetFocused(snippet_two, 300);
  EXPECT_EQ(
      result_two,
      "[ RUN      ] NetworkingPriv\n"
      "<truncated (210 bytes)>\n"
      " added for testing purposes\n"
      "FATAL extensions_unittests[12666:12666]: [managed_network_configuration"
      "_handler_impl.cc(525)] Check failed: !guid_str && !guid_str->empty().\n"
      "#0 0x562f31dba779 base::deb\n"
      "<truncated (213 bytes)>\n"
      ":LogMessage::~LogMessage()\n");
  EXPECT_EQ(result_two.length(), 300UL);

  // Test where the FATAL message appears at the end of the log.
  const std::string snippet_three =
      "[ RUN      ] All/PDFExtensionAccessibilityTreeDumpTest.Highlights/"
      "linux\n"
      "[6741:6741:0716/171816.818448:ERROR:power_monitor_device_source_stub.cc"
      "(11)] Not implemented reached in virtual bool base::PowerMonitorDevice"
      "Source::IsOnBatteryPower()\n"
      "[6741:6741:0716/171816.818912:INFO:content_main_runner_impl.cc(1082)]"
      " Chrome is running in full browser mode.\n"
      "libva error: va_getDriverName() failed with unknown libva error,driver"
      "_name=(null)\n"
      "[6741:6741:0716/171817.688633:FATAL:agent_scheduling_group_host.cc(290)"
      "] Check failed: message->routing_id() != MSG_ROUTING_CONTROL "
      "(2147483647 vs. 2147483647)\n";
  const std::string result_three = TruncateSnippetFocused(snippet_three, 300);
  EXPECT_EQ(
      result_three,
      "[ RUN      ] All/PDFExtensionAccessibilityTreeDumpTest.Hi\n"
      "<truncated (432 bytes)>\n"
      "Name() failed with unknown libva error,driver_name=(null)\n"
      "[6741:6741:0716/171817.688633:FATAL:agent_scheduling_group_host.cc(290)"
      "] Check failed: message->routing_id() != MSG_ROUTING_CONTROL "
      "(2147483647 vs. 2147483647)\n");
  EXPECT_EQ(result_three.length(), 300UL);

  // Test where the FATAL message does not appear.
  const std::string snippet_four =
      "[ RUN      ] All/PassingTest/linux\n"
      "Padding log line 1 added for testing purposes\n"
      "Padding log line 2 added for testing purposes\n"
      "Padding log line 3 added for testing purposes\n"
      "Padding log line 4 added for testing purposes\n"
      "Padding log line 5 added for testing purposes\n"
      "Padding log line 6 added for testing purposes\n";
  const std::string result_four = TruncateSnippetFocused(snippet_four, 300);
  EXPECT_EQ(result_four,
            "[ RUN      ] All/PassingTest/linux\n"
            "Padding log line 1 added for testing purposes\n"
            "Padding log line 2 added for testing purposes\n"
            "Padding lo\n<truncated (311 bytes)>\n"
            "Padding log line 4 added for testing purposes\n"
            "Padding log line 5 added for testing purposes\n"
            "Padding log line 6 added for testing purposes\n");
  EXPECT_EQ(result_four.length(), 300UL);
}


}  // namespace

}  // namespace base