//
// Copyright 2019 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// TestSuite:
//   Basic implementation of a test harness in ANGLE.

#include "TestSuite.h"

#include "common/debug.h"
#include "common/platform.h"
#include "common/string_utils.h"
#include "common/system_utils.h"
#include "util/Timer.h"

#include <limits.h>  // LONG_MAX / LONG_MIN, used by ParseIntArg.
#include <stdlib.h>
#include <string.h>  // strstr, strlen, strcmp, strrchr.
#include <time.h>

// Some of these may also arrive transitively via TestSuite.h; they are listed here
// because this file uses them directly.
#include <fstream>
#include <functional>
#include <iomanip>
#include <iostream>
#include <map>
#include <mutex>
#include <sstream>
#include <unordered_map>

#include <gtest/gtest.h>
#include <rapidjson/document.h>
#include <rapidjson/filewritestream.h>
#include <rapidjson/istreamwrapper.h>
#include <rapidjson/prettywriter.h>

// We directly call into a function to register the parameterized tests. This saves spinning up
// a subprocess with a new gtest filter.
#include <gtest/../../src/gtest-internal-inl.h>

namespace js = rapidjson;

namespace angle
{
namespace
{
constexpr char kBatchId[]              = "--batch-id=";
constexpr char kFilterFileArg[]        = "--filter-file=";
constexpr char kFlakyRetries[]         = "--flaky-retries=";
constexpr char kGTestListTests[]       = "--gtest_list_tests";
constexpr char kHistogramJsonFileArg[] = "--histogram-json-file=";
constexpr char kListTests[]            = "--list-tests";
constexpr char kPrintTestStdout[]      = "--print-test-stdout";
constexpr char kResultFileArg[]        = "--results-file=";
constexpr char kTestTimeoutArg[]       = "--test-timeout=";
constexpr char kDisableCrashHandler[]  = "--disable-crash-handler";
constexpr char kIsolatedOutDir[]       = "--isolated-outdir=";
constexpr char kMaxFailures[]          = "--max-failures=";

constexpr char kStartedTestString[] = "[ RUN      ] ";
constexpr char kPassedTestString[]  = "[       OK ] ";
constexpr char kFailedTestString[]  = "[  FAILED  ] ";
constexpr char kSkippedTestString[] = "[  SKIPPED ] ";

constexpr char kArtifactsFakeTestName[] = "TestArtifactsFakeTest";

constexpr char kTSanOptionsEnvVar[]  = "TSAN_OPTIONS";
constexpr char kUBSanOptionsEnvVar[] = "UBSAN_OPTIONS";

// Note: we use a fairly high test timeout to allow for the first test in a batch to be slow.
// Ideally we could use a separate timeout for the slow first test.
// Allow sanitized tests to run more slowly.
#if defined(NDEBUG) && !defined(ANGLE_WITH_SANITIZER)
constexpr int kDefaultTestTimeout = 60;
#else
constexpr int kDefaultTestTimeout = 120;
#endif
constexpr int kSlowTestTimeoutScale = 3;
#if defined(NDEBUG)
constexpr int kDefaultBatchTimeout = 300;
#else
constexpr int kDefaultBatchTimeout = 600;
#endif
constexpr int kDefaultBatchSize      = 256;
constexpr double kIdleMessageTimeout = 15.0;
constexpr int kDefaultMaxProcesses   = 16;
constexpr int kDefaultMaxFailures    = 100;

const char *ParseFlagValue(const char *flag, const char *argument)
{
    if (strstr(argument, flag) == argument)
    {
        return argument + strlen(flag);
    }

    return nullptr;
}

bool ParseIntArg(const char *flag, const char *argument, int *valueOut)
{
    const char *value = ParseFlagValue(flag, argument);
    if (!value)
    {
        return false;
    }

    char *end            = nullptr;
    const long longValue = strtol(value, &end, 10);

    if (*end != '\0')
    {
        printf("Error parsing integer flag value.\n");
        exit(EXIT_FAILURE);
    }

    if (longValue == LONG_MAX || longValue == LONG_MIN || static_cast<int>(longValue) != longValue)
    {
        printf("Overflow when parsing integer flag value.\n");
        exit(EXIT_FAILURE);
    }

    *valueOut = static_cast<int>(longValue);
    return true;
}

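// Parses the value like ParseIntArg but always reports "no match", so the caller's
// argument loop leaves the flag in argv for other consumers (see the kBatchId handling
// in TestSuite::parseSingleArg below).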
bool ParseIntArgNoDelete(const char *flag, const char *argument, int *valueOut)
{
    ParseIntArg(flag, argument, valueOut);
    return false;
}

bool ParseFlag(const char *expected, const char *actual, bool *flagOut)
{
    if (strcmp(expected, actual) == 0)
    {
        *flagOut = true;
        return true;
    }
    return false;
}

bool ParseStringArg(const char *flag, const char *argument, std::string *valueOut)
{
    const char *value = ParseFlagValue(flag, argument);
    if (!value)
    {
        return false;
    }

    *valueOut = value;
    return true;
}

void DeleteArg(int *argc, char **argv, int argIndex)
{
    // Shift the remainder of the argv list left by one.  Note that argv has (*argc + 1) elements,
    // the last one always being NULL.  The following loop moves the trailing NULL element as well.
    for (int index = argIndex; index < *argc; ++index)
    {
        argv[index] = argv[index + 1];
    }
    (*argc)--;
}

void AddArg(int *argc, char **argv, const char *arg)
{
    // This unsafe const_cast is necessary to work around gtest limitations.
    argv[*argc]     = const_cast<char *>(arg);
    argv[*argc + 1] = nullptr;
    (*argc)++;
}

const char *ResultTypeToString(TestResultType type)
{
    switch (type)
    {
        case TestResultType::Crash:
            return "CRASH";
        case TestResultType::Fail:
            return "FAIL";
        case TestResultType::NoResult:
            return "NOTRUN";
        case TestResultType::Pass:
            return "PASS";
        case TestResultType::Skip:
            return "SKIP";
        case TestResultType::Timeout:
            return "TIMEOUT";
        case TestResultType::Unknown:
        default:
            return "UNKNOWN";
    }
}

TestResultType GetResultTypeFromString(const std::string &str)
{
    if (str == "CRASH")
        return TestResultType::Crash;
    if (str == "FAIL")
        return TestResultType::Fail;
    if (str == "PASS")
        return TestResultType::Pass;
    if (str == "NOTRUN")
        return TestResultType::NoResult;
    if (str == "SKIP")
        return TestResultType::Skip;
    if (str == "TIMEOUT")
        return TestResultType::Timeout;
    return TestResultType::Unknown;
}

bool IsFailedResult(TestResultType resultType)
{
    return resultType != TestResultType::Pass && resultType != TestResultType::Skip;
}

js::Value ResultTypeToJSString(TestResultType type, js::Document::AllocatorType *allocator)
{
    js::Value jsName;
    jsName.SetString(ResultTypeToString(type), *allocator);
    return jsName;
}

bool WriteJsonFile(const std::string &outputFile, js::Document *doc)
{
    FILE *fp = fopen(outputFile.c_str(), "w");
    if (!fp)
    {
        return false;
    }

    constexpr size_t kBufferSize = 0xFFFF;
    std::vector<char> writeBuffer(kBufferSize);
    js::FileWriteStream os(fp, writeBuffer.data(), kBufferSize);
    js::PrettyWriter<js::FileWriteStream> writer(os);
    if (!doc->Accept(writer))
    {
        fclose(fp);
        return false;
    }
    fclose(fp);
    return true;
}

// Writes out a TestResults to the Chromium JSON Test Results format.
// https://chromium.googlesource.com/chromium/src.git/+/main/docs/testing/json_test_results_format.md
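// Sketch of the document this function builds (values illustrative):
//   {
//     "interrupted": false,
//     "path_delimiter": ".",
//     "version": 3,
//     "seconds_since_epoch": 1234567890,
//     "num_failures_by_type": {"PASS": 10, "FAIL": 1},
//     "tests": {"Suite.Test": {"actual": "PASS", "expected": "PASS", "times": [0.1]}}
//   }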
void WriteResultsFile(bool interrupted,
                      const TestResults &testResults,
                      const std::string &outputFile,
                      const char *testSuiteName)
{
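    // time() already returns seconds since the epoch; the gmtime()/mktime() round trip
    // re-normalizes the value through a UTC broken-down time before it is reported.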
    time_t ltime;
    time(&ltime);
    struct tm *timeinfo = gmtime(&ltime);
    ltime               = mktime(timeinfo);

    uint64_t secondsSinceEpoch = static_cast<uint64_t>(ltime);

    js::Document doc;
    doc.SetObject();

    js::Document::AllocatorType &allocator = doc.GetAllocator();

    doc.AddMember("interrupted", interrupted, allocator);
    doc.AddMember("path_delimiter", ".", allocator);
    doc.AddMember("version", 3, allocator);
    doc.AddMember("seconds_since_epoch", secondsSinceEpoch, allocator);

    js::Value tests;
    tests.SetObject();

    // If we have any test artifacts, make a fake test to house them.
    if (!testResults.testArtifactPaths.empty())
    {
        js::Value artifactsTest;
        artifactsTest.SetObject();

        artifactsTest.AddMember("actual", "PASS", allocator);
        artifactsTest.AddMember("expected", "PASS", allocator);

        js::Value artifacts;
        artifacts.SetObject();

        for (const std::string &testArtifactPath : testResults.testArtifactPaths)
        {
            std::vector<std::string> pieces =
                SplitString(testArtifactPath, "/\\", WhitespaceHandling::TRIM_WHITESPACE,
                            SplitResult::SPLIT_WANT_NONEMPTY);
            ASSERT(!pieces.empty());

            js::Value basename;
            basename.SetString(pieces.back(), allocator);

            js::Value artifactPath;
            artifactPath.SetString(testArtifactPath, allocator);

            js::Value artifactArray;
            artifactArray.SetArray();
            artifactArray.PushBack(artifactPath, allocator);

            artifacts.AddMember(basename, artifactArray, allocator);
        }

        artifactsTest.AddMember("artifacts", artifacts, allocator);

        js::Value fakeTestName;
        fakeTestName.SetString(testResults.testArtifactsFakeTestName, allocator);
        tests.AddMember(fakeTestName, artifactsTest, allocator);
    }

    std::map<TestResultType, uint32_t> counts;

    for (const auto &resultIter : testResults.results)
    {
        const TestIdentifier &id = resultIter.first;
        const TestResult &result = resultIter.second;

        js::Value jsResult;
        jsResult.SetObject();

        counts[result.type]++;

        std::string actualResult;
        for (uint32_t fail = 0; fail < result.flakyFailures; ++fail)
        {
            actualResult += "FAIL ";
        }

        actualResult += ResultTypeToString(result.type);

        std::string expectedResult = "PASS";
        if (result.type == TestResultType::Skip)
        {
            expectedResult = "SKIP";
        }

        // Handle flaky passing tests.
        if (result.flakyFailures > 0 && result.type == TestResultType::Pass)
        {
            expectedResult = "FAIL PASS";
            jsResult.AddMember("is_flaky", true, allocator);
        }

        jsResult.AddMember("actual", actualResult, allocator);
        jsResult.AddMember("expected", expectedResult, allocator);

        if (IsFailedResult(result.type))
        {
            jsResult.AddMember("is_unexpected", true, allocator);
        }

        js::Value times;
        times.SetArray();
        times.PushBack(result.elapsedTimeSeconds, allocator);

        jsResult.AddMember("times", times, allocator);

        char testName[500];
        id.sprintfName(testName);
        js::Value jsName;
        jsName.SetString(testName, allocator);

        tests.AddMember(jsName, jsResult, allocator);
    }

    js::Value numFailuresByType;
    numFailuresByType.SetObject();

    for (const auto &countIter : counts)
    {
        TestResultType type = countIter.first;
        uint32_t count      = countIter.second;

        js::Value jsCount(count);
        numFailuresByType.AddMember(ResultTypeToJSString(type, &allocator), jsCount, allocator);
    }

    doc.AddMember("num_failures_by_type", numFailuresByType, allocator);

    doc.AddMember("tests", tests, allocator);

    printf("Writing test results to %s\n", outputFile.c_str());

    if (!WriteJsonFile(outputFile, &doc))
    {
        printf("Error writing test results file.\n");
    }
}

void WriteHistogramJson(const HistogramWriter &histogramWriter,
                        const std::string &outputFile,
                        const char *testSuiteName)
{
    js::Document doc;
    doc.SetArray();

    histogramWriter.getAsJSON(&doc);

    printf("Writing histogram json to %s\n", outputFile.c_str());

    if (!WriteJsonFile(outputFile, &doc))
    {
        printf("Error writing histogram json file.\n");
    }
}

void WriteOutputFiles(bool interrupted,
                      const TestResults &testResults,
                      const std::string &resultsFile,
                      const HistogramWriter &histogramWriter,
                      const std::string &histogramJsonOutputFile,
                      const char *testSuiteName)
{
    if (!resultsFile.empty())
    {
        WriteResultsFile(interrupted, testResults, resultsFile, testSuiteName);
    }

    if (!histogramJsonOutputFile.empty())
    {
        WriteHistogramJson(histogramWriter, histogramJsonOutputFile, testSuiteName);
    }
}

void UpdateCurrentTestResult(const testing::TestResult &resultIn, TestResults *resultsOut)
{
    TestResult &resultOut = resultsOut->results[resultsOut->currentTest];

    // Note: Crashes and Timeouts are detected by the crash handler and a watchdog thread.
    if (resultIn.Skipped())
    {
        resultOut.type = TestResultType::Skip;
    }
    else if (resultIn.Failed())
    {
        resultOut.type = TestResultType::Fail;
    }
    else
    {
        resultOut.type = TestResultType::Pass;
    }

    resultOut.elapsedTimeSeconds = resultsOut->currentTestTimer.getElapsedWallClockTime();
}

TestIdentifier GetTestIdentifier(const testing::TestInfo &testInfo)
{
    return {testInfo.test_suite_name(), testInfo.name()};
}

class TestEventListener : public testing::EmptyTestEventListener
{
  public:
    // Note: TestResults is owned by the TestSuite. It should outlive TestEventListener.
    TestEventListener(const std::string &resultsFile,
                      const std::string &histogramJsonFile,
                      const char *testSuiteName,
                      TestResults *testResults,
                      HistogramWriter *histogramWriter)
        : mResultsFile(resultsFile),
          mHistogramJsonFile(histogramJsonFile),
          mTestSuiteName(testSuiteName),
          mTestResults(testResults),
          mHistogramWriter(histogramWriter)
    {}

    void OnTestStart(const testing::TestInfo &testInfo) override
    {
        std::lock_guard<std::mutex> guard(mTestResults->currentTestMutex);
        mTestResults->currentTest = GetTestIdentifier(testInfo);
        mTestResults->currentTestTimer.start();
    }

    void OnTestEnd(const testing::TestInfo &testInfo) override
    {
        std::lock_guard<std::mutex> guard(mTestResults->currentTestMutex);
        mTestResults->currentTestTimer.stop();
        const testing::TestResult &resultIn = *testInfo.result();
        UpdateCurrentTestResult(resultIn, mTestResults);
        mTestResults->currentTest = TestIdentifier();
    }

    void OnTestProgramEnd(const testing::UnitTest &testProgramInfo) override
    {
        std::lock_guard<std::mutex> guard(mTestResults->currentTestMutex);
        mTestResults->allDone = true;
        WriteOutputFiles(false, *mTestResults, mResultsFile, *mHistogramWriter, mHistogramJsonFile,
                         mTestSuiteName);
    }

  private:
    std::string mResultsFile;
    std::string mHistogramJsonFile;
    const char *mTestSuiteName;
    TestResults *mTestResults;
    HistogramWriter *mHistogramWriter;
};

bool IsTestDisabled(const testing::TestInfo &testInfo)
{
    return ::strstr(testInfo.name(), "DISABLED_") == testInfo.name();
}

using TestIdentifierFilter = std::function<bool(const TestIdentifier &id)>;

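// Walks every test registered with gtest and returns the identifiers accepted by
// 'filter', optionally recording each test's file/line info in 'fileLinesOut'.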
std::vector<TestIdentifier> FilterTests(std::map<TestIdentifier, FileLine> *fileLinesOut,
                                        TestIdentifierFilter filter,
                                        bool alsoRunDisabledTests)
{
    std::vector<TestIdentifier> tests;

    const testing::UnitTest &testProgramInfo = *testing::UnitTest::GetInstance();
    for (int suiteIndex = 0; suiteIndex < testProgramInfo.total_test_suite_count(); ++suiteIndex)
    {
        const testing::TestSuite &testSuite = *testProgramInfo.GetTestSuite(suiteIndex);
        for (int testIndex = 0; testIndex < testSuite.total_test_count(); ++testIndex)
        {
            const testing::TestInfo &testInfo = *testSuite.GetTestInfo(testIndex);
            TestIdentifier id                 = GetTestIdentifier(testInfo);
            if (filter(id) && (!IsTestDisabled(testInfo) || alsoRunDisabledTests))
            {
                tests.emplace_back(id);

                if (fileLinesOut)
                {
                    (*fileLinesOut)[id] = {testInfo.file(), testInfo.line()};
                }
            }
        }
    }

    return tests;
}

std::vector<TestIdentifier> GetFilteredTests(std::map<TestIdentifier, FileLine> *fileLinesOut,
                                             bool alsoRunDisabledTests)
{
    TestIdentifierFilter gtestIDFilter = [](const TestIdentifier &id) {
        return testing::internal::UnitTestOptions::FilterMatchesTest(id.testSuiteName, id.testName);
    };

    return FilterTests(fileLinesOut, gtestIDFilter, alsoRunDisabledTests);
}

std::vector<TestIdentifier> GetShardTests(const std::vector<TestIdentifier> &allTests,
                                          int shardIndex,
                                          int shardCount,
                                          std::map<TestIdentifier, FileLine> *fileLinesOut,
                                          bool alsoRunDisabledTests)
{
    std::vector<TestIdentifier> shardTests;

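    // Shards are assigned round-robin: shard i takes tests i, i + shardCount,
    // i + 2 * shardCount, and so on.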
    for (int testIndex = shardIndex; testIndex < static_cast<int>(allTests.size());
         testIndex += shardCount)
    {
        shardTests.emplace_back(allTests[testIndex]);
    }

    return shardTests;
}

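// Joins the test names into a single gtest filter argument,
// e.g. "--gtest_filter=Suite.Foo:Suite.Bar".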
std::string GetTestFilter(const std::vector<TestIdentifier> &tests)
{
    std::stringstream filterStream;

    filterStream << "--gtest_filter=";

    for (size_t testIndex = 0; testIndex < tests.size(); ++testIndex)
    {
        if (testIndex != 0)
        {
            filterStream << ":";
        }

        filterStream << tests[testIndex];
    }

    return filterStream.str();
}

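// Derives the suite name from the executable, stripping any directory prefix and the
// platform's executable extension, e.g. "out/angle_end2end_tests.exe" ->
// "angle_end2end_tests" (example name illustrative).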
std::string ParseTestSuiteName(const char *executable)
{
    const char *baseNameStart = strrchr(executable, GetPathSeparator());
    if (!baseNameStart)
    {
        baseNameStart = executable;
    }
    else
    {
        baseNameStart++;
    }

    const char *suffix = GetExecutableExtension();
    size_t suffixLen   = strlen(suffix);
    if (suffixLen == 0)
    {
        return baseNameStart;
    }

    if (!EndsWith(baseNameStart, suffix))
    {
        return baseNameStart;
    }

    return std::string(baseNameStart, baseNameStart + strlen(baseNameStart) - suffixLen);
}

bool GetTestArtifactsFromJSON(const js::Value::ConstObject &obj,
                              std::vector<std::string> *testArtifactPathsOut)
{
    if (!obj.HasMember("artifacts"))
    {
        printf("No artifacts member.\n");
        return false;
    }

    const js::Value &jsArtifacts = obj["artifacts"];
    if (!jsArtifacts.IsObject())
    {
        printf("Artifacts are not an object.\n");
        return false;
    }

    const js::Value::ConstObject &artifacts = jsArtifacts.GetObject();
    for (const auto &artifactMember : artifacts)
    {
        const js::Value &artifact = artifactMember.value;
        if (!artifact.IsArray())
        {
            printf("Artifact is not an array of strings of size 1.\n");
            return false;
        }

        const js::Value::ConstArray &artifactArray = artifact.GetArray();
        if (artifactArray.Size() != 1)
        {
            printf("Artifact is not an array of strings of size 1.\n");
            return false;
        }

        const js::Value &artifactName = artifactArray[0];
        if (!artifactName.IsString())
        {
            printf("Artifact is not an array of strings of size 1.\n");
            return false;
        }

        testArtifactPathsOut->push_back(artifactName.GetString());
    }

    return true;
}

bool GetSingleTestResultFromJSON(const js::Value &name,
                                 const js::Value::ConstObject &obj,
                                 TestResults *resultsOut)
{
    TestIdentifier id;
    if (!TestIdentifier::ParseFromString(name.GetString(), &id))
    {
        printf("Could not parse test identifier.\n");
        return false;
    }

    if (!obj.HasMember("expected") || !obj.HasMember("actual"))
    {
        printf("No expected or actual member.\n");
        return false;
    }

    const js::Value &expected = obj["expected"];
    const js::Value &actual   = obj["actual"];

    if (!expected.IsString() || !actual.IsString())
    {
        printf("Expected or actual member is not a string.\n");
        return false;
    }

    const std::string actualStr = actual.GetString();

    TestResultType resultType = TestResultType::Unknown;
    int flakyFailures         = 0;
    // A space-separated "actual" value (e.g. "FAIL FAIL PASS") records flaky retries. Note
    // that find returns npos (which is truthy) when no space exists, so compare explicitly.
    if (actualStr.find(' ') != std::string::npos)
    {
        std::istringstream strstr(actualStr);
        std::string token;
        while (std::getline(strstr, token, ' '))
        {
            resultType = GetResultTypeFromString(token);
            if (resultType == TestResultType::Unknown)
            {
                printf("Failed to parse result type.\n");
                return false;
            }
            if (IsFailedResult(resultType))
            {
                flakyFailures++;
            }
        }
    }
    else
    {
        resultType = GetResultTypeFromString(actualStr);
        if (resultType == TestResultType::Unknown)
        {
            printf("Failed to parse result type.\n");
            return false;
        }
    }

    double elapsedTimeSeconds = 0.0;
    if (obj.HasMember("times"))
    {
        const js::Value &times = obj["times"];
        if (!times.IsArray())
        {
            return false;
        }

        const js::Value::ConstArray &timesArray = times.GetArray();
        if (timesArray.Size() != 1 || !timesArray[0].IsDouble())
        {
            return false;
        }

        elapsedTimeSeconds = timesArray[0].GetDouble();
    }

    TestResult &result        = resultsOut->results[id];
    result.elapsedTimeSeconds = elapsedTimeSeconds;
    result.type               = resultType;
    result.flakyFailures      = flakyFailures;
    return true;
}

bool GetTestResultsFromJSON(const js::Document &document, TestResults *resultsOut)
{
    if (!document.HasMember("tests") || !document["tests"].IsObject())
    {
        printf("JSON document has no tests member.\n");
        return false;
    }

    const js::Value::ConstObject &tests = document["tests"].GetObject();
    for (const auto &testMember : tests)
    {
        // Get test identifier.
        const js::Value &name = testMember.name;
        if (!name.IsString())
        {
            printf("Name is not a string.\n");
            return false;
        }

        // Get test result.
        const js::Value &value = testMember.value;
        if (!value.IsObject())
        {
            printf("Test result is not an object.\n");
            return false;
        }

        const js::Value::ConstObject &obj = value.GetObject();

        if (BeginsWith(name.GetString(), kArtifactsFakeTestName))
        {
            if (!GetTestArtifactsFromJSON(obj, &resultsOut->testArtifactPaths))
            {
                return false;
            }
        }
        else
        {
            if (!GetSingleTestResultFromJSON(name, obj, resultsOut))
            {
                return false;
            }
        }
    }

    return true;
}

bool MergeTestResults(TestResults *input, TestResults *output, int flakyRetries)
{
    for (auto &resultsIter : input->results)
    {
        const TestIdentifier &id = resultsIter.first;
        TestResult &inputResult  = resultsIter.second;
        TestResult &outputResult = output->results[id];

        if (inputResult.type != TestResultType::NoResult)
        {
            if (outputResult.type != TestResultType::NoResult)
            {
                printf("Warning: duplicate entry for %s.%s.\n", id.testSuiteName.c_str(),
                       id.testName.c_str());
                return false;
            }

            // Mark the tests that haven't exhausted their retries as 'SKIP'. This makes ANGLE
            // attempt the test again.
            uint32_t runCount = outputResult.flakyFailures + 1;
            if (IsFailedResult(inputResult.type) && runCount < static_cast<uint32_t>(flakyRetries))
            {
                printf("Retrying flaky test: %s.%s.\n", id.testSuiteName.c_str(),
                       id.testName.c_str());
                inputResult.type = TestResultType::NoResult;
                outputResult.flakyFailures++;
            }
            else
            {
                outputResult.elapsedTimeSeconds = inputResult.elapsedTimeSeconds;
                outputResult.type               = inputResult.type;
            }
        }
    }

    output->testArtifactPaths.insert(output->testArtifactPaths.end(),
                                     input->testArtifactPaths.begin(),
                                     input->testArtifactPaths.end());

    return true;
}

void PrintTestOutputSnippet(const TestIdentifier &id,
                            const TestResult &result,
                            const std::string &fullOutput)
{
    std::stringstream nameStream;
    nameStream << id;
    std::string fullName = nameStream.str();

    size_t runPos = fullOutput.find(std::string(kStartedTestString) + fullName);
    if (runPos == std::string::npos)
    {
        printf("Cannot locate test output snippet.\n");
        return;
    }

    size_t endPos = fullOutput.find(std::string(kFailedTestString) + fullName, runPos);
    // Only clip the snippet to the "OK" message if the test really
    // succeeded. It still might have e.g. crashed after printing it.
    if (endPos == std::string::npos && result.type == TestResultType::Pass)
    {
        endPos = fullOutput.find(std::string(kPassedTestString) + fullName, runPos);
    }
    if (endPos != std::string::npos)
    {
        size_t newline_pos = fullOutput.find("\n", endPos);
        if (newline_pos != std::string::npos)
            endPos = newline_pos + 1;
    }

    std::cout << "\n";
    if (endPos != std::string::npos)
    {
        std::cout << fullOutput.substr(runPos, endPos - runPos);
    }
    else
    {
        std::cout << fullOutput.substr(runPos);
    }
}

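// Extracts the config suffix from a parameterized test name for batching,
// e.g. "DrawTest.Quad/ES2_Vulkan" -> "ES2_Vulkan" and
// "DrawTest.Quad/ES2_Vulkan__Variant" -> "ES2_Vulkan"; names without a recognized
// config map to "default". (Example names are illustrative.)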
std::string GetConfigNameFromTestIdentifier(const TestIdentifier &id)
{
    size_t slashPos = id.testName.find('/');
    if (slashPos == std::string::npos)
    {
        return "default";
    }

    size_t doubleUnderscorePos = id.testName.find("__");
    if (doubleUnderscorePos == std::string::npos)
    {
        std::string configName = id.testName.substr(slashPos + 1);

        if (!BeginsWith(configName, "ES"))
        {
            return "default";
        }

        return configName;
    }
    else
    {
        return id.testName.substr(slashPos + 1, doubleUnderscorePos - slashPos - 1);
    }
}

TestQueue BatchTests(const std::vector<TestIdentifier> &tests, int batchSize)
{
    // First sort tests by configuration.
    angle::HashMap<std::string, std::vector<TestIdentifier>> testsSortedByConfig;
    for (const TestIdentifier &id : tests)
    {
        std::string config = GetConfigNameFromTestIdentifier(id);
        testsSortedByConfig[config].push_back(id);
    }

    // Then group into batches by 'batchSize'.
    TestQueue testQueue;
    for (const auto &configAndIds : testsSortedByConfig)
    {
        const std::vector<TestIdentifier> &configTests = configAndIds.second;

        // Count the number of batches needed for this config.
        int batchesForConfig = static_cast<int>(configTests.size() + batchSize - 1) / batchSize;
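        // Ceiling division: e.g. 600 tests with a batch size of 256 yields 3 batches.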

        // Create batches with striping to split up slow tests.
        for (int batchIndex = 0; batchIndex < batchesForConfig; ++batchIndex)
        {
            std::vector<TestIdentifier> batchTests;
            for (size_t testIndex = batchIndex; testIndex < configTests.size();
                 testIndex += batchesForConfig)
            {
                batchTests.push_back(configTests[testIndex]);
            }
            testQueue.emplace(std::move(batchTests));
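            // A moved-from std::vector is in a valid but unspecified state; this asserts
            // the expected (empty) case.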
            ASSERT(batchTests.empty());
        }
    }

    return testQueue;
}

void ListTests(const std::map<TestIdentifier, TestResult> &resultsMap)
{
    std::cout << "Tests list:\n";

    for (const auto &resultIt : resultsMap)
    {
        const TestIdentifier &id = resultIt.first;
        std::cout << id << "\n";
    }

    std::cout << "End tests list.\n";
}

// Prints the names of the tests matching the user-specified filter flag.
// This matches the output from googletest/src/gtest.cc but is much, much faster for large
// filters. See http://anglebug.com/5164
void GTestListTests(const std::map<TestIdentifier, TestResult> &resultsMap)
{
    std::map<std::string, std::vector<std::string>> suites;

    for (const auto &resultIt : resultsMap)
    {
        const TestIdentifier &id = resultIt.first;
        suites[id.testSuiteName].push_back(id.testName);
    }

    for (const auto &testSuiteIt : suites)
    {
        bool printedTestSuiteName = false;

        const std::string &suiteName              = testSuiteIt.first;
        const std::vector<std::string> &testNames = testSuiteIt.second;

        for (const std::string &testName : testNames)
        {
            if (!printedTestSuiteName)
            {
                printedTestSuiteName = true;
                printf("%s.\n", suiteName.c_str());
            }
            printf("  %s\n", testName.c_str());
        }
    }
}
}  // namespace

// static
TestSuite *TestSuite::mInstance = nullptr;

TestIdentifier::TestIdentifier() = default;

TestIdentifier::TestIdentifier(const std::string &suiteNameIn, const std::string &nameIn)
    : testSuiteName(suiteNameIn), testName(nameIn)
{}

TestIdentifier::TestIdentifier(const TestIdentifier &other) = default;

TestIdentifier::~TestIdentifier() = default;

TestIdentifier &TestIdentifier::operator=(const TestIdentifier &other) = default;

void TestIdentifier::sprintfName(char *outBuffer) const
{
    sprintf(outBuffer, "%s.%s", testSuiteName.c_str(), testName.c_str());
}

// static
bool TestIdentifier::ParseFromString(const std::string &str, TestIdentifier *idOut)
{
    size_t separator = str.find(".");
    if (separator == std::string::npos)
    {
        return false;
    }

    idOut->testSuiteName = str.substr(0, separator);
    idOut->testName      = str.substr(separator + 1, str.length() - separator - 1);
    return true;
}

TestResults::TestResults() = default;

TestResults::~TestResults() = default;

ProcessInfo::ProcessInfo() = default;

ProcessInfo &ProcessInfo::operator=(ProcessInfo &&rhs)
{
    process         = std::move(rhs.process);
    testsInBatch    = std::move(rhs.testsInBatch);
    resultsFileName = std::move(rhs.resultsFileName);
    filterFileName  = std::move(rhs.filterFileName);
    commandLine     = std::move(rhs.commandLine);
    filterString    = std::move(rhs.filterString);
    return *this;
}

ProcessInfo::~ProcessInfo() = default;

ProcessInfo::ProcessInfo(ProcessInfo &&other)
{
    *this = std::move(other);
}

TestSuite::TestSuite(int *argc, char **argv)
    : mShardCount(-1),
      mShardIndex(-1),
      mBotMode(false),
      mDebugTestGroups(false),
      mGTestListTests(false),
      mListTests(false),
      mPrintTestStdout(false),
      mDisableCrashHandler(false),
      mBatchSize(kDefaultBatchSize),
      mCurrentResultCount(0),
      mTotalResultCount(0),
      mMaxProcesses(std::min(NumberOfProcessors(), kDefaultMaxProcesses)),
      mTestTimeout(kDefaultTestTimeout),
      mBatchTimeout(kDefaultBatchTimeout),
      mBatchId(-1),
      mFlakyRetries(0),
      mMaxFailures(kDefaultMaxFailures),
      mFailureCount(0),
      mModifiedPreferredDevice(false)
{
    ASSERT(mInstance == nullptr);
    mInstance = this;

    Optional<int> filterArgIndex;
    bool alsoRunDisabledTests = false;

#if defined(ANGLE_PLATFORM_MACOS)
    // By default, we should hook file API functions on macOS to avoid slow Metal shader caching
    // file access.
    angle::InitMetalFileAPIHooking(*argc, argv);
#endif

#if defined(ANGLE_PLATFORM_WINDOWS)
    testing::GTEST_FLAG(catch_exceptions) = false;
#endif

    if (*argc <= 0)
    {
        printf("Missing test arguments.\n");
        exit(EXIT_FAILURE);
    }

    mTestExecutableName = argv[0];
    mTestSuiteName      = ParseTestSuiteName(mTestExecutableName.c_str());

    for (int argIndex = 1; argIndex < *argc;)
    {
        if (parseSingleArg(argv[argIndex]))
        {
            DeleteArg(argc, argv, argIndex);
            continue;
        }

        if (ParseFlagValue("--gtest_filter=", argv[argIndex]))
        {
            filterArgIndex = argIndex;
        }
        else
        {
            // Don't include disabled tests in test lists unless the user asks for them.
            if (strcmp("--gtest_also_run_disabled_tests", argv[argIndex]) == 0)
            {
                alsoRunDisabledTests = true;
            }

            mChildProcessArgs.push_back(argv[argIndex]);
        }
        ++argIndex;
    }

    mTestResults.currentTestTimeout = mTestTimeout;

#if defined(ANGLE_PLATFORM_ANDROID)
    // Workaround for the Android test runner requiring a GTest test list.
    if (mListTests && filterArgIndex.valid())
    {
        DeleteArg(argc, argv, filterArgIndex.value());
    }
#endif  // defined(ANGLE_PLATFORM_ANDROID)

    if (!mDisableCrashHandler)
    {
        // Note that the crash callback must be owned and not use global constructors.
        mCrashCallback = [this]() { onCrashOrTimeout(TestResultType::Crash); };
        InitCrashHandler(&mCrashCallback);
    }

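    // Test runners can pass sharding parameters through the standard gtest environment
    // variables; explicit --shard-index=/--shard-count= flags take precedence.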
    std::string envShardIndex = angle::GetEnvironmentVar("GTEST_SHARD_INDEX");
    if (!envShardIndex.empty())
    {
        angle::UnsetEnvironmentVar("GTEST_SHARD_INDEX");
        if (mShardIndex == -1)
        {
            std::stringstream shardIndexStream(envShardIndex);
            shardIndexStream >> mShardIndex;
        }
    }

    std::string envTotalShards = angle::GetEnvironmentVar("GTEST_TOTAL_SHARDS");
    if (!envTotalShards.empty())
    {
        angle::UnsetEnvironmentVar("GTEST_TOTAL_SHARDS");
        if (mShardCount == -1)
        {
            std::stringstream shardCountStream(envTotalShards);
            shardCountStream >> mShardCount;
        }
    }

    // The test harness reads the active GPU from SystemInfo and uses that for test expectations.
    // However, some ANGLE backends don't have a concept of an "active" GPU, and instead use power
    // preference to select GPU. We can use the environment variable ANGLE_PREFERRED_DEVICE to
    // ensure ANGLE's selected GPU matches the GPU expected for this test suite.
    const GPUTestConfig testConfig      = GPUTestConfig();
    const char kPreferredDeviceEnvVar[] = "ANGLE_PREFERRED_DEVICE";
    if (GetEnvironmentVar(kPreferredDeviceEnvVar).empty())
    {
        mModifiedPreferredDevice                        = true;
        const GPUTestConfig::ConditionArray &conditions = testConfig.getConditions();
        if (conditions[GPUTestConfig::kConditionAMD])
        {
            SetEnvironmentVar(kPreferredDeviceEnvVar, "amd");
        }
        else if (conditions[GPUTestConfig::kConditionNVIDIA])
        {
            SetEnvironmentVar(kPreferredDeviceEnvVar, "nvidia");
        }
        else if (conditions[GPUTestConfig::kConditionIntel])
        {
            SetEnvironmentVar(kPreferredDeviceEnvVar, "intel");
        }
        else if (conditions[GPUTestConfig::kConditionApple])
        {
            SetEnvironmentVar(kPreferredDeviceEnvVar, "apple");
        }
    }

    // Special handling for TSAN and UBSAN to force crashes when run in automated testing.
    if (IsTSan())
    {
        std::string tsanOptions = GetEnvironmentVar(kTSanOptionsEnvVar);
        tsanOptions += " halt_on_error=1";
        SetEnvironmentVar(kTSanOptionsEnvVar, tsanOptions.c_str());
    }

    if (IsUBSan())
    {
        std::string ubsanOptions = GetEnvironmentVar(kUBSanOptionsEnvVar);
        ubsanOptions += " halt_on_error=1";
        SetEnvironmentVar(kUBSanOptionsEnvVar, ubsanOptions.c_str());
    }

    if ((mShardIndex == -1) != (mShardCount == -1))
    {
        printf("Shard index and shard count must be specified together.\n");
        exit(EXIT_FAILURE);
    }

    if (!mFilterFile.empty())
    {
        if (filterArgIndex.valid())
        {
            printf("Cannot use gtest_filter in conjunction with a filter file.\n");
            exit(EXIT_FAILURE);
        }

        uint32_t fileSize = 0;
        if (!GetFileSize(mFilterFile.c_str(), &fileSize))
        {
            printf("Error getting filter file size: %s\n", mFilterFile.c_str());
            exit(EXIT_FAILURE);
        }

        std::vector<char> fileContents(fileSize + 1, 0);
        if (!ReadEntireFileToString(mFilterFile.c_str(), fileContents.data(), fileSize))
        {
            printf("Error loading filter file: %s\n", mFilterFile.c_str());
            exit(EXIT_FAILURE);
        }
        mFilterString.assign(fileContents.data());

        if (mFilterString.substr(0, strlen("--gtest_filter=")) != std::string("--gtest_filter="))
        {
            printf("Filter file must start with \"--gtest_filter=\".\n");
            exit(EXIT_FAILURE);
        }

        // Note that we only add a filter string if we previously deleted the filter file
        // argument. So we will have space for the new filter string in argv.
        AddArg(argc, argv, mFilterString.c_str());
    }

    // Call into gtest internals to force parameterized test name registration.
    testing::internal::UnitTestImpl *impl = testing::internal::GetUnitTestImpl();
    impl->RegisterParameterizedTests();

    // Initialize internal GoogleTest filter arguments so we can call "FilterMatchesTest".
    testing::internal::ParseGoogleTestFlagsOnly(argc, argv);

    std::vector<TestIdentifier> testSet = GetFilteredTests(&mTestFileLines, alsoRunDisabledTests);

    if (mShardCount == 0)
    {
        printf("Shard count must be > 0.\n");
        exit(EXIT_FAILURE);
    }
    else if (mShardCount > 0)
    {
        if (mShardIndex >= mShardCount)
        {
            printf("Shard index must be less than shard count.\n");
            exit(EXIT_FAILURE);
        }

        // If there's only one shard, we can use the testSet as defined above.
        if (mShardCount > 1)
        {
            testSet = GetShardTests(testSet, mShardIndex, mShardCount, &mTestFileLines,
                                    alsoRunDisabledTests);

            if (!mBotMode)
            {
                mFilterString = GetTestFilter(testSet);

                if (filterArgIndex.valid())
                {
                    argv[filterArgIndex.value()] = const_cast<char *>(mFilterString.c_str());
                }
                else
                {
                    // Note that we only add a filter string if we previously deleted a shard
                    // index/count argument. So we will have space for the new filter string in
                    // argv.
                    AddArg(argc, argv, mFilterString.c_str());
                }

                // Force-re-initialize GoogleTest flags to load the shard filter.
                testing::internal::ParseGoogleTestFlagsOnly(argc, argv);
            }
        }
    }

    {
        std::stringstream fakeTestName;
        fakeTestName << kArtifactsFakeTestName;
        if (mShardIndex != -1)
        {
            fakeTestName << "-Shard" << std::setfill('0') << std::setw(2) << mShardIndex;
        }
        mTestResults.testArtifactsFakeTestName = fakeTestName.str();
    }

    if (mBotMode)
    {
        // Split up test batches.
        mTestQueue = BatchTests(testSet, mBatchSize);

        if (mDebugTestGroups)
        {
            std::cout << "Test Groups:\n";

            while (!mTestQueue.empty())
            {
                const std::vector<TestIdentifier> &tests = mTestQueue.front();
                std::cout << GetConfigNameFromTestIdentifier(tests[0]) << " ("
                          << static_cast<int>(tests.size()) << ")\n";
                mTestQueue.pop();
            }

            exit(EXIT_SUCCESS);
        }
    }

    testing::InitGoogleTest(argc, argv);

    mTotalResultCount = testSet.size();

    if ((mBotMode || !mResultsDirectory.empty()) && mResultsFile.empty())
    {
        // Create a default output file in bot mode.
        mResultsFile = "output.json";
    }

    if (!mResultsDirectory.empty())
    {
        std::stringstream resultFileName;
        resultFileName << mResultsDirectory << GetPathSeparator() << mResultsFile;
        mResultsFile = resultFileName.str();
    }

    if (!mBotMode)
    {
        testing::TestEventListeners &listeners = testing::UnitTest::GetInstance()->listeners();
        listeners.Append(new TestEventListener(mResultsFile, mHistogramJsonFile,
                                               mTestSuiteName.c_str(), &mTestResults,
                                               &mHistogramWriter));

        for (const TestIdentifier &id : testSet)
        {
            mTestResults.results[id].type = TestResultType::NoResult;
        }
    }
}

TestSuite::~TestSuite()
{
    const char kPreferredDeviceEnvVar[] = "ANGLE_PREFERRED_DEVICE";
    if (mModifiedPreferredDevice && !angle::GetEnvironmentVar(kPreferredDeviceEnvVar).empty())
    {
        angle::UnsetEnvironmentVar(kPreferredDeviceEnvVar);
    }

    if (mWatchdogThread.joinable())
    {
        mWatchdogThread.detach();
    }
    TerminateCrashHandler();
}

bool TestSuite::parseSingleArg(const char *argument)
{
    // Note: Flags should be documented in README.md.
    return (ParseIntArg("--shard-count=", argument, &mShardCount) ||
            ParseIntArg("--shard-index=", argument, &mShardIndex) ||
            ParseIntArg("--batch-size=", argument, &mBatchSize) ||
            ParseIntArg("--max-processes=", argument, &mMaxProcesses) ||
            ParseIntArg(kTestTimeoutArg, argument, &mTestTimeout) ||
            ParseIntArg("--batch-timeout=", argument, &mBatchTimeout) ||
            ParseIntArg(kFlakyRetries, argument, &mFlakyRetries) ||
            ParseIntArg(kMaxFailures, argument, &mMaxFailures) ||
            // Other test functions consume the batch ID, so keep it in the list.
            ParseIntArgNoDelete(kBatchId, argument, &mBatchId) ||
            ParseStringArg("--results-directory=", argument, &mResultsDirectory) ||
            ParseStringArg(kResultFileArg, argument, &mResultsFile) ||
            ParseStringArg("--isolated-script-test-output=", argument, &mResultsFile) ||
            ParseStringArg(kFilterFileArg, argument, &mFilterFile) ||
            ParseStringArg(kHistogramJsonFileArg, argument, &mHistogramJsonFile) ||
            // We need these overloads to work around technical debt in the Android test runner.
            ParseStringArg("--isolated-script-test-perf-output=", argument, &mHistogramJsonFile) ||
            ParseStringArg("--isolated_script_test_perf_output=", argument, &mHistogramJsonFile) ||
            ParseStringArg(kRenderTestOutputDir, argument, &mTestArtifactDirectory) ||
            ParseStringArg(kIsolatedOutDir, argument, &mTestArtifactDirectory) ||
            ParseFlag("--bot-mode", argument, &mBotMode) ||
            ParseFlag("--debug-test-groups", argument, &mDebugTestGroups) ||
            ParseFlag(kGTestListTests, argument, &mGTestListTests) ||
            ParseFlag(kListTests, argument, &mListTests) ||
            ParseFlag(kPrintTestStdout, argument, &mPrintTestStdout) ||
            ParseFlag(kDisableCrashHandler, argument, &mDisableCrashHandler));
}

void TestSuite::onCrashOrTimeout(TestResultType crashOrTimeout)
{
    std::lock_guard<std::mutex> guard(mTestResults.currentTestMutex);
    if (mTestResults.currentTest.valid())
    {
        TestResult &result        = mTestResults.results[mTestResults.currentTest];
        result.type               = crashOrTimeout;
        result.elapsedTimeSeconds = mTestResults.currentTestTimer.getElapsedWallClockTime();
    }

    if (mResultsFile.empty())
    {
        printf("No results file specified.\n");
        return;
    }

    WriteOutputFiles(true, mTestResults, mResultsFile, mHistogramWriter, mHistogramJsonFile,
                     mTestSuiteName.c_str());
}

bool TestSuite::launchChildTestProcess(uint32_t batchId,
                                       const std::vector<TestIdentifier> &testsInBatch)
{
    constexpr uint32_t kMaxPath = 1000;

    // Create a temporary file to store the test list.
    ProcessInfo processInfo;

    char filterBuffer[kMaxPath] = {};
    if (!CreateTemporaryFile(filterBuffer, kMaxPath))
    {
        std::cerr << "Error creating temporary file for test list.\n";
        return false;
    }
    processInfo.filterFileName.assign(filterBuffer);

    std::string filterString = GetTestFilter(testsInBatch);

    FILE *fp = fopen(processInfo.filterFileName.c_str(), "w");
    if (!fp)
    {
        std::cerr << "Error opening temporary file for test list.\n";
        return false;
    }
    fprintf(fp, "%s", filterString.c_str());
    fclose(fp);

    processInfo.filterString = filterString;

    std::string filterFileArg = kFilterFileArg + processInfo.filterFileName;

    // Create a temporary file to store the test output.
    char resultsBuffer[kMaxPath] = {};
    if (!CreateTemporaryFile(resultsBuffer, kMaxPath))
    {
        std::cerr << "Error creating temporary file for test results.\n";
        return false;
    }
    processInfo.resultsFileName.assign(resultsBuffer);

    std::string resultsFileArg = kResultFileArg + processInfo.resultsFileName;

    // Construct command line for child process.
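    // The batch re-runs this same executable, e.g. (paths illustrative):
    //   angle_tests --filter-file=/tmp/f123 --results-file=/tmp/r456 --batch-id=7 ...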
    std::vector<const char *> args;

    args.push_back(mTestExecutableName.c_str());
    args.push_back(filterFileArg.c_str());
    args.push_back(resultsFileArg.c_str());

    std::stringstream batchIdStream;
    batchIdStream << kBatchId << batchId;
    std::string batchIdString = batchIdStream.str();
    args.push_back(batchIdString.c_str());

    for (const std::string &arg : mChildProcessArgs)
    {
        args.push_back(arg.c_str());
    }

    if (mDisableCrashHandler)
    {
        args.push_back(kDisableCrashHandler);
    }

    std::string timeoutStr;
    if (mTestTimeout != kDefaultTestTimeout)
    {
        std::stringstream timeoutStream;
        timeoutStream << kTestTimeoutArg << mTestTimeout;
        timeoutStr = timeoutStream.str();
        args.push_back(timeoutStr.c_str());
    }

    std::string artifactsDir;
    if (!mTestArtifactDirectory.empty())
    {
        std::stringstream artifactsDirStream;
        artifactsDirStream << kIsolatedOutDir << mTestArtifactDirectory;
        artifactsDir = artifactsDirStream.str();
        args.push_back(artifactsDir.c_str());
    }

    // Launch child process and wait for completion.
    processInfo.process = LaunchProcess(args, ProcessOutputCapture::StdoutAndStderrInterleaved);

    if (!processInfo.process->started())
    {
        std::cerr << "Error launching child process.\n";
        return false;
    }

    std::stringstream commandLineStr;
    for (const char *arg : args)
    {
        commandLineStr << arg << " ";
    }

    processInfo.commandLine  = commandLineStr.str();
    processInfo.testsInBatch = testsInBatch;
    mCurrentProcesses.emplace_back(std::move(processInfo));
    return true;
}

void ParseTestIdentifierAndSetResult(const std::string &testName,
                                     TestResultType result,
                                     TestResults *results)
{
    // Trim whitespace and any trailing annotation (e.g. a "(12 ms)" timing suffix) from
    // the test name so that only "Suite.Test" remains.
    std::string modifiedTestName = testName.substr(0, testName.find(' '));
    modifiedTestName             = modifiedTestName.substr(0, modifiedTestName.find('\r'));
    TestIdentifier id;
    bool ok = TestIdentifier::ParseFromString(modifiedTestName, &id);
    ASSERT(ok);
    results->results[id] = {result};
}

bool TestSuite::finishProcess(ProcessInfo *processInfo)
{
    // Get test results and merge into main list.
    TestResults batchResults;

    if (!GetTestResultsFromFile(processInfo->resultsFileName.c_str(), &batchResults))
    {
        std::cerr << "Warning: could not find test results file from child process.\n";

        // First mark every test in the batch as having no result.
        for (const TestIdentifier &id : processInfo->testsInBatch)
        {
            batchResults.results[id] = {TestResultType::NoResult};
        }

        // Attempt to reconstruct individual test results from stdout snippets.
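        // Typical gtest stdout lines being matched (test names illustrative):
        //   [ RUN      ] FooSuite.Bar
        //   [       OK ] FooSuite.Bar (12 ms)
        //   [  FAILED  ] FooSuite.Baz (3 ms)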
        const std::string &batchStdout = processInfo->process->getStdout();
        std::istringstream linesStream(batchStdout);

        std::string line;
        while (std::getline(linesStream, line))
        {
            size_t startPos   = line.find(kStartedTestString);
            size_t failPos    = line.find(kFailedTestString);
            size_t passPos    = line.find(kPassedTestString);
            size_t skippedPos = line.find(kSkippedTestString);

            if (startPos != std::string::npos)
            {
                // Assume a test that started has crashed until we see it complete.
                std::string testName = line.substr(strlen(kStartedTestString));
                ParseTestIdentifierAndSetResult(testName, TestResultType::Crash, &batchResults);
            }
            else if (failPos != std::string::npos)
            {
                std::string testName = line.substr(strlen(kFailedTestString));
                ParseTestIdentifierAndSetResult(testName, TestResultType::Fail, &batchResults);
            }
            else if (passPos != std::string::npos)
            {
                std::string testName = line.substr(strlen(kPassedTestString));
                ParseTestIdentifierAndSetResult(testName, TestResultType::Pass, &batchResults);
            }
            else if (skippedPos != std::string::npos)
            {
                std::string testName = line.substr(strlen(kSkippedTestString));
                ParseTestIdentifierAndSetResult(testName, TestResultType::Skip, &batchResults);
            }
        }
    }

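    // Merge the batch's results into the suite-wide list; mFlakyRetries presumably bounds
    // how many times a failing test may be retried before it counts as a real failure.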
    if (!MergeTestResults(&batchResults, &mTestResults, mFlakyRetries))
    {
        std::cerr << "Error merging batch test results.\n";
        return false;
    }

    if (!batchResults.results.empty())
    {
        const TestIdentifier &id = batchResults.results.begin()->first;
        std::string config       = GetConfigNameFromTestIdentifier(id);
        printf("Completed batch with config: %s\n", config.c_str());

        for (const auto &resultIter : batchResults.results)
        {
            const TestResult &result = resultIter.second;
            if (result.type != TestResultType::NoResult && IsFailedResult(result.type))
            {
                printf("To reproduce the batch, use filter:\n%s\n",
                       processInfo->filterString.c_str());
                break;
            }
        }
    }

    // Process results and print unexpected errors.
    for (const auto &resultIter : batchResults.results)
    {
        const TestIdentifier &id = resultIter.first;
        const TestResult &result = resultIter.second;

        // Tests with no result didn't run in this batch; they are re-queued below rather
        // than processed here.
        if (result.type == TestResultType::NoResult)
        {
            continue;
        }

        mCurrentResultCount++;

        printf("[%d/%d] %s.%s", mCurrentResultCount, mTotalResultCount, id.testSuiteName.c_str(),
               id.testName.c_str());

        if (mPrintTestStdout)
        {
            const std::string &batchStdout = processInfo->process->getStdout();
            PrintTestOutputSnippet(id, result, batchStdout);
        }
        else if (result.type == TestResultType::Pass)
        {
            printf(" (%0.1lf ms)\n", result.elapsedTimeSeconds * 1000.0);
        }
        else if (result.type == TestResultType::Skip)
        {
            printf(" (skipped)\n");
        }
        else if (result.type == TestResultType::Timeout)
        {
            printf(" (TIMEOUT in %0.1lf s)\n", result.elapsedTimeSeconds);
            mFailureCount++;
        }
        else
        {
            printf(" (%s)\n", TestResultTypeToString(result.type));
            mFailureCount++;

            const std::string &batchStdout = processInfo->process->getStdout();
            PrintTestOutputSnippet(id, result, batchStdout);
        }
    }

    // On unexpected exit, re-queue any unfinished tests.
    std::vector<TestIdentifier> unfinishedTests;
    for (const auto &resultIter : batchResults.results)
    {
        const TestIdentifier &id = resultIter.first;
        const TestResult &result = resultIter.second;

        if (result.type == TestResultType::NoResult)
        {
            unfinishedTests.push_back(id);
        }
    }

    if (!unfinishedTests.empty())
    {
        mTestQueue.emplace(std::move(unfinishedTests));
    }

    // Clean up any dirty temporary files.
    for (const std::string &tempFile : {processInfo->filterFileName, processInfo->resultsFileName})
    {
        // Note: we should be aware that this cleanup won't happen if the harness itself
        // crashes. If this situation comes up in the future we should add crash cleanup to the
        // harness.
        if (!angle::DeleteSystemFile(tempFile.c_str()))
        {
            std::cerr << "Warning: Error cleaning up temp file: " << tempFile << "\n";
        }
    }

    processInfo->process.reset();
    return true;
}

int TestSuite::run()
{
#if defined(ANGLE_PLATFORM_ANDROID)
    if (mListTests && mGTestListTests)
    {
        // Workaround for the Android test runner requiring a GTest test list.
        printf("PlaceholderTest.\n  Placeholder\n");
        return EXIT_SUCCESS;
    }
#endif  // defined(ANGLE_PLATFORM_ANDROID)

    if (mListTests)
    {
        ListTests(mTestResults.results);

#if defined(ANGLE_PLATFORM_ANDROID)
        // Because of quirks with the Chromium-provided Android test runner, we need to use a few
        // tricks to get the test list output. We add placeholder output for a single test to trick
        // the test runner into thinking it ran the tests successfully. We also add an end marker
        // for the tests list so we can parse the list from the more spammy Android stdout log.
        static constexpr char kPlaceholderTestTest[] = R"(
[==========] Running 1 test from 1 test suite.
[----------] Global test environment set-up.
[----------] 1 test from PlaceholderTest
[ RUN      ] PlaceholderTest.Placeholder
[       OK ] PlaceholderTest.Placeholder (0 ms)
[----------] 1 test from PlaceholderTest (0 ms total)

[----------] Global test environment tear-down
[==========] 1 test from 1 test suite ran. (24 ms total)
[  PASSED  ] 1 test.
)";
        printf(kPlaceholderTestTest);
#endif  // defined(ANGLE_PLATFORM_ANDROID)

        return EXIT_SUCCESS;
    }

    if (mGTestListTests)
    {
        GTestListTests(mTestResults.results);
        return EXIT_SUCCESS;
    }

    // Run tests serially.
    if (!mBotMode)
    {
        // Only start the watchdog if the debugger is not attached and we're a child process.
        if (!angle::IsDebuggerAttached() && mBatchId != -1)
        {
            startWatchdog();
        }

        int retVal = RUN_ALL_TESTS();
        {
            std::lock_guard<std::mutex> guard(mTestResults.currentTestMutex);
            mTestResults.allDone = true;
        }

        if (mWatchdogThread.joinable())
        {
            mWatchdogThread.join();
        }
        return retVal;
    }

    Timer totalRunTime;
    totalRunTime.start();

    Timer messageTimer;
    messageTimer.start();

    uint32_t batchId = 0;

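    // Bot mode: drain the test queue by running batches in child processes, keeping up to
    // mMaxProcesses children alive at once and polling for completion or batch timeout
    // roughly every 100 ms.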
    while (!mTestQueue.empty() || !mCurrentProcesses.empty())
    {
        bool progress = false;

        // Spawn a process if needed and possible.
        if (static_cast<int>(mCurrentProcesses.size()) < mMaxProcesses && !mTestQueue.empty())
        {
            std::vector<TestIdentifier> testsInBatch = mTestQueue.front();
            mTestQueue.pop();

            if (!launchChildTestProcess(++batchId, testsInBatch))
            {
                return 1;
            }

            progress = true;
        }

        // Check for process completion.
        uint32_t totalTestCount = 0;
        for (auto processIter = mCurrentProcesses.begin(); processIter != mCurrentProcesses.end();)
        {
            ProcessInfo &processInfo = *processIter;
            if (processInfo.process->finished())
            {
                if (!finishProcess(&processInfo))
                {
                    return 1;
                }
                processIter = mCurrentProcesses.erase(processIter);
                progress    = true;
            }
            else if (processInfo.process->getElapsedTimeSeconds() > mBatchTimeout)
            {
                // Terminate the process and record timeouts for the batch.
                // Because we can't determine which sub-test caused a timeout, record the whole
                // batch as a timeout failure. Can be improved by using socket message passing.
                if (!processInfo.process->kill())
                {
                    return 1;
                }

                const std::string &batchStdout = processInfo.process->getStdout();
                std::vector<std::string> lines =
                    SplitString(batchStdout, "\r\n", WhitespaceHandling::TRIM_WHITESPACE,
                                SplitResult::SPLIT_WANT_NONEMPTY);
                constexpr size_t kKeepLines = 10;
                printf("Last %d lines of batch stdout:\n", static_cast<int>(kKeepLines));
                for (size_t lineNo = lines.size() - std::min(lines.size(), kKeepLines);
                     lineNo < lines.size(); ++lineNo)
                {
                    printf("%s\n", lines[lineNo].c_str());
                }

                for (const TestIdentifier &testIdentifier : processInfo.testsInBatch)
                {
                    // Because the whole batch failed we can't know how long each test took.
                    mTestResults.results[testIdentifier].type = TestResultType::Timeout;
                    mFailureCount++;
                }

                processIter = mCurrentProcesses.erase(processIter);
                progress    = true;
            }
            else
            {
                totalTestCount += static_cast<uint32_t>(processInfo.testsInBatch.size());
                processIter++;
            }
        }

        if (progress)
        {
            messageTimer.start();
        }
        else if (messageTimer.getElapsedWallClockTime() > kIdleMessageTimeout)
        {
            const ProcessInfo &processInfo = mCurrentProcesses[0];
            double processTime             = processInfo.process->getElapsedTimeSeconds();
            printf("Running %d tests in %d processes, longest for %d seconds.\n", totalTestCount,
                   static_cast<int>(mCurrentProcesses.size()), static_cast<int>(processTime));
            messageTimer.start();
        }

        // Early exit if we passed the maximum failure threshold. Still wait for current tests.
        if (mFailureCount > mMaxFailures && !mTestQueue.empty())
        {
            printf("Reached maximum failure count (%d), clearing test queue.\n", mMaxFailures);
            TestQueue emptyTestQueue;
            std::swap(mTestQueue, emptyTestQueue);
        }

        // Sleep briefly and continue.
        angle::Sleep(100);
    }

    // Dump combined results.
    if (mFailureCount > mMaxFailures)
    {
        printf(
            "Omitted results files because the failure count (%d) exceeded the maximum number of "
            "failures (%d).\n",
            mFailureCount, mMaxFailures);
    }
    else
    {
        WriteOutputFiles(false, mTestResults, mResultsFile, mHistogramWriter, mHistogramJsonFile,
                         mTestSuiteName.c_str());
    }

    totalRunTime.stop();
    printf("Tests completed in %lf seconds\n", totalRunTime.getElapsedWallClockTime());

    return printFailuresAndReturnCount() == 0 ? 0 : 1;
}

int TestSuite::printFailuresAndReturnCount() const
{
    std::vector<std::string> failures;
    uint32_t skipCount = 0;

    for (const auto &resultIter : mTestResults.results)
    {
        const TestIdentifier &id = resultIter.first;
        const TestResult &result = resultIter.second;

        if (result.type == TestResultType::Skip)
        {
            skipCount++;
        }
        else if (result.type != TestResultType::Pass)
        {
            const FileLine &fileLine = mTestFileLines.find(id)->second;

            std::stringstream failureMessage;
            failureMessage << id << " (" << fileLine.file << ":" << fileLine.line << ") ("
                           << TestResultTypeToString(result.type) << ")";
            failures.emplace_back(failureMessage.str());
        }
    }

    if (failures.empty())
        return 0;

    printf("%zu test%s failed:\n", failures.size(), failures.size() > 1 ? "s" : "");
    for (const std::string &failure : failures)
    {
        printf("    %s\n", failure.c_str());
    }
    if (skipCount > 0)
    {
        printf("%u tests skipped.\n", skipCount);
    }

    return static_cast<int>(failures.size());
}

void TestSuite::startWatchdog()
{
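    // The watchdog polls the current test's elapsed time twice a second. If a single test
    // exceeds its timeout, it reports a Timeout result and hard-exits the process;
    // ::_Exit skips atexit handlers on this deliberately abnormal path.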
    auto watchdogMain = [this]() {
        do
        {
            {
                std::lock_guard<std::mutex> guard(mTestResults.currentTestMutex);
                if (mTestResults.currentTestTimer.getElapsedWallClockTime() >
                    mTestResults.currentTestTimeout)
                {
                    break;
                }

                if (mTestResults.allDone)
                    return;
            }

            angle::Sleep(500);
        } while (true);
        onCrashOrTimeout(TestResultType::Timeout);
        ::_Exit(EXIT_FAILURE);
    };
    mWatchdogThread = std::thread(watchdogMain);
}

void TestSuite::addHistogramSample(const std::string &measurement,
                                   const std::string &story,
                                   double value,
                                   const std::string &units)
{
    mHistogramWriter.addSample(measurement, story, value, units);
}

bool TestSuite::hasTestArtifactsDirectory() const
{
    return !mTestArtifactDirectory.empty();
}

std::string TestSuite::reserveTestArtifactPath(const std::string &artifactName)
{
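    // Record the artifact name and return where the artifact should be written: the bare
    // name when no artifact directory is configured, otherwise a path rooted there. For
    // example (illustrative), with an artifact directory of "/out",
    // reserveTestArtifactPath("frame.png") yields "/out/frame.png".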
    mTestResults.testArtifactPaths.push_back(artifactName);

    if (mTestArtifactDirectory.empty())
    {
        return artifactName;
    }

    std::stringstream pathStream;
    pathStream << mTestArtifactDirectory << GetPathSeparator() << artifactName;
    return pathStream.str();
}

bool GetTestResultsFromFile(const char *fileName, TestResults *resultsOut)
{
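    // Parse the results file the child wrote (JSON, named via --results-file=) with
    // RapidJSON's stream wrapper, then convert the document into a TestResults struct.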
    std::ifstream ifs(fileName);
    if (!ifs.is_open())
    {
        std::cerr << "Error opening " << fileName << "\n";
        return false;
    }

    js::IStreamWrapper ifsWrapper(ifs);
    js::Document document;
    document.ParseStream(ifsWrapper);

    if (document.HasParseError())
    {
        std::cerr << "Parse error reading JSON document: " << document.GetParseError() << "\n";
        return false;
    }

    if (!GetTestResultsFromJSON(document, resultsOut))
    {
        std::cerr << "Error getting test results from JSON.\n";
        return false;
    }

    return true;
}

void TestSuite::dumpTestExpectationsErrorMessages()
{
    std::stringstream errorMsgStream;
    for (const auto &message : mTestExpectationsParser.getErrorMessages())
    {
        errorMsgStream << std::endl << " " << message;
    }

    std::cerr << "Failed to load test expectations." << errorMsgStream.str() << std::endl;
}

bool TestSuite::loadTestExpectationsFromFileWithConfig(const GPUTestConfig &config,
                                                       const std::string &fileName)
{
    if (!mTestExpectationsParser.loadTestExpectationsFromFile(config, fileName))
    {
        dumpTestExpectationsErrorMessages();
        return false;
    }
    return true;
}

bool TestSuite::loadAllTestExpectationsFromFile(const std::string &fileName)
{
    if (!mTestExpectationsParser.loadAllTestExpectationsFromFile(fileName))
    {
        dumpTestExpectationsErrorMessages();
        return false;
    }
    return true;
}

bool TestSuite::logAnyUnusedTestExpectations()
{
    std::stringstream unusedMsgStream;
    bool anyUnused = false;
    for (const auto &message : mTestExpectationsParser.getUnusedExpectationsMessages())
    {
        anyUnused = true;
        unusedMsgStream << std::endl << " " << message;
    }
    if (anyUnused)
    {
        std::cerr << "Failed to validate test expectations." << unusedMsgStream.str() << std::endl;
        return true;
    }
    return false;
}

int32_t TestSuite::getTestExpectation(const std::string &testName)
{
    return mTestExpectationsParser.getTestExpectation(testName);
}

void TestSuite::maybeUpdateTestTimeout(uint32_t testExpectation)
{
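    // Tests annotated with a TIMEOUT expectation get a scaled-up deadline
    // (kSlowTestTimeoutScale times the standard timeout); all others keep mTestTimeout.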
    double testTimeout = (testExpectation == GPUTestExpectationsParser::kGpuTestTimeout)
                             ? getSlowTestTimeout()
                             : mTestTimeout;
    std::lock_guard<std::mutex> guard(mTestResults.currentTestMutex);
    mTestResults.currentTestTimeout = testTimeout;
}

int32_t TestSuite::getTestExpectationWithConfigAndUpdateTimeout(const GPUTestConfig &config,
                                                                const std::string &testName)
{
    uint32_t expectation = mTestExpectationsParser.getTestExpectationWithConfig(config, testName);
    maybeUpdateTestTimeout(expectation);
    return expectation;
}

int TestSuite::getSlowTestTimeout() const
{
    return mTestTimeout * kSlowTestTimeoutScale;
}

const char *TestResultTypeToString(TestResultType type)
{
    switch (type)
    {
        case TestResultType::Crash:
            return "Crash";
        case TestResultType::Fail:
            return "Fail";
        case TestResultType::NoResult:
            return "NoResult";
        case TestResultType::Pass:
            return "Pass";
        case TestResultType::Skip:
            return "Skip";
        case TestResultType::Timeout:
            return "Timeout";
        case TestResultType::Unknown:
        default:
            return "Unknown";
    }
}
}  // namespace angle