1 //
2 // Copyright 2019 The ANGLE Project Authors. All rights reserved.
3 // Use of this source code is governed by a BSD-style license that can be
4 // found in the LICENSE file.
5 //
6 // TestSuite:
7 //   Basic implementation of a test harness in ANGLE.
8 
9 #include "TestSuite.h"
10 
11 #include "common/debug.h"
12 #include "common/platform.h"
13 #include "common/string_utils.h"
14 #include "common/system_utils.h"
15 #include "util/Timer.h"
16 
17 #include <stdlib.h>
18 #include <time.h>
19 
20 #include <fstream>
21 #include <unordered_map>
22 
23 #include <gtest/gtest.h>
24 #include <rapidjson/document.h>
25 #include <rapidjson/filewritestream.h>
26 #include <rapidjson/istreamwrapper.h>
27 #include <rapidjson/prettywriter.h>
28 
29 // We directly call into a function to register the parameterized tests. This saves spinning up
30 // a subprocess with a new gtest filter.
31 #include <gtest/../../src/gtest-internal-inl.h>
32 
33 namespace js = rapidjson;
34 
35 namespace angle
36 {
37 namespace
38 {
39 constexpr char kBatchId[]             = "--batch-id";
40 constexpr char kFilterFileArg[]       = "--filter-file";
41 constexpr char kResultFileArg[]       = "--results-file";
42 constexpr char kTestTimeoutArg[]      = "--test-timeout";
43 constexpr char kDisableCrashHandler[] = "--disable-crash-handler";
44 constexpr char kIsolatedOutDir[]      = "--isolated-outdir";
45 
46 constexpr char kStartedTestString[] = "[ RUN      ] ";
47 constexpr char kPassedTestString[]  = "[       OK ] ";
48 constexpr char kFailedTestString[]  = "[  FAILED  ] ";
49 constexpr char kSkippedTestString[] = "[  SKIPPED ] ";
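// These prefixes mirror gtest's console output markers; they are used below both to locate
// per-test output snippets and to reconstruct results from a crashed child's stdout.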
50 
51 constexpr char kArtifactsFakeTestName[] = "TestArtifactsFakeTest";
52 
53 constexpr char kTSanOptionsEnvVar[]  = "TSAN_OPTIONS";
54 constexpr char kUBSanOptionsEnvVar[] = "UBSAN_OPTIONS";
55 
56 // Note: we use a fairly high test timeout to allow for the first test in a batch to be slow.
57 // Ideally we could use a separate timeout for the slow first test.
58 // Allow sanitized tests to run more slowly.
59 #if defined(NDEBUG) && !defined(ANGLE_WITH_SANITIZER)
60 constexpr int kDefaultTestTimeout  = 60;
61 constexpr int kDefaultBatchTimeout = 300;
62 #else
63 constexpr int kDefaultTestTimeout  = 120;
64 constexpr int kDefaultBatchTimeout = 700;
65 #endif
66 constexpr int kSlowTestTimeoutScale  = 3;
67 constexpr int kDefaultBatchSize      = 256;
68 constexpr double kIdleMessageTimeout = 15.0;
69 constexpr int kDefaultMaxProcesses   = 16;
70 constexpr int kDefaultMaxFailures    = 100;
71 
72 const char *ResultTypeToString(TestResultType type)
73 {
74     switch (type)
75     {
76         case TestResultType::Crash:
77             return "CRASH";
78         case TestResultType::Fail:
79             return "FAIL";
80         case TestResultType::NoResult:
81             return "NOTRUN";
82         case TestResultType::Pass:
83             return "PASS";
84         case TestResultType::Skip:
85             return "SKIP";
86         case TestResultType::Timeout:
87             return "TIMEOUT";
88         case TestResultType::Unknown:
89         default:
90             return "UNKNOWN";
91     }
92 }
93 
94 TestResultType GetResultTypeFromString(const std::string &str)
95 {
96     if (str == "CRASH")
97         return TestResultType::Crash;
98     if (str == "FAIL")
99         return TestResultType::Fail;
100     if (str == "PASS")
101         return TestResultType::Pass;
102     if (str == "NOTRUN")
103         return TestResultType::NoResult;
104     if (str == "SKIP")
105         return TestResultType::Skip;
106     if (str == "TIMEOUT")
107         return TestResultType::Timeout;
108     return TestResultType::Unknown;
109 }
110 
111 bool IsFailedResult(TestResultType resultType)
112 {
113     return resultType != TestResultType::Pass && resultType != TestResultType::Skip;
114 }
115 
116 js::Value ResultTypeToJSString(TestResultType type, js::Document::AllocatorType *allocator)
117 {
118     js::Value jsName;
119     jsName.SetString(ResultTypeToString(type), *allocator);
120     return jsName;
121 }
122 
123 bool WriteJsonFile(const std::string &outputFile, js::Document *doc)
124 {
125     FILE *fp = fopen(outputFile.c_str(), "w");
126     if (!fp)
127     {
128         return false;
129     }
130 
131     constexpr size_t kBufferSize = 0xFFFF;
132     std::vector<char> writeBuffer(kBufferSize);
133     js::FileWriteStream os(fp, writeBuffer.data(), kBufferSize);
134     js::PrettyWriter<js::FileWriteStream> writer(os);
135     if (!doc->Accept(writer))
136     {
137         fclose(fp);
138         return false;
139     }
140     fclose(fp);
141     return true;
142 }
143 
144 // Writes out a TestResults to the Chromium JSON Test Results format.
145 // https://chromium.googlesource.com/chromium/src.git/+/main/docs/testing/json_test_results_format.md
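// For orientation, a rough sketch of the document shape this emits (field names taken from
// the AddMember calls below; values are illustrative only):
//
//   {
//     "interrupted": false,
//     "path_delimiter": ".",
//     "version": 3,
//     "seconds_since_epoch": 1234567890,
//     "num_failures_by_type": { "PASS": 10, "FAIL": 1 },
//     "tests": {
//       "SuiteName.TestName": { "actual": "PASS", "expected": "PASS", "times": [0.1] }
//     }
//   }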
146 void WriteResultsFile(bool interrupted,
147                       const TestResults &testResults,
148                       const std::string &outputFile)
149 {
150     time_t ltime;
151     time(&ltime);
152     struct tm *timeinfo = gmtime(&ltime);
153     ltime               = mktime(timeinfo);
154 
155     uint64_t secondsSinceEpoch = static_cast<uint64_t>(ltime);
156 
157     js::Document doc;
158     doc.SetObject();
159 
160     js::Document::AllocatorType &allocator = doc.GetAllocator();
161 
162     doc.AddMember("interrupted", interrupted, allocator);
163     doc.AddMember("path_delimiter", ".", allocator);
164     doc.AddMember("version", 3, allocator);
165     doc.AddMember("seconds_since_epoch", secondsSinceEpoch, allocator);
166 
167     js::Value tests;
168     tests.SetObject();
169 
170     // If we have any test artifacts, make a fake test to house them.
171     if (!testResults.testArtifactPaths.empty())
172     {
173         js::Value artifactsTest;
174         artifactsTest.SetObject();
175 
176         artifactsTest.AddMember("actual", "PASS", allocator);
177         artifactsTest.AddMember("expected", "PASS", allocator);
178 
179         js::Value artifacts;
180         artifacts.SetObject();
181 
182         for (const std::string &testArtifactPath : testResults.testArtifactPaths)
183         {
184             std::vector<std::string> pieces =
185                 SplitString(testArtifactPath, "/\\", WhitespaceHandling::TRIM_WHITESPACE,
186                             SplitResult::SPLIT_WANT_NONEMPTY);
187             ASSERT(!pieces.empty());
188 
189             js::Value basename;
190             basename.SetString(pieces.back(), allocator);
191 
192             js::Value artifactPath;
193             artifactPath.SetString(testArtifactPath, allocator);
194 
195             js::Value artifactArray;
196             artifactArray.SetArray();
197             artifactArray.PushBack(artifactPath, allocator);
198 
199             artifacts.AddMember(basename, artifactArray, allocator);
200         }
201 
202         artifactsTest.AddMember("artifacts", artifacts, allocator);
203 
204         js::Value fakeTestName;
205         fakeTestName.SetString(testResults.testArtifactsFakeTestName, allocator);
206         tests.AddMember(fakeTestName, artifactsTest, allocator);
207     }
208 
209     std::map<TestResultType, uint32_t> counts;
210 
211     for (const auto &resultIter : testResults.results)
212     {
213         const TestIdentifier &id = resultIter.first;
214         const TestResult &result = resultIter.second;
215 
216         js::Value jsResult;
217         jsResult.SetObject();
218 
219         counts[result.type]++;
220 
221         std::string actualResult;
222         for (uint32_t fail = 0; fail < result.flakyFailures; ++fail)
223         {
224             actualResult += "FAIL ";
225         }
226 
227         actualResult += ResultTypeToString(result.type);
228 
229         std::string expectedResult = "PASS";
230         if (result.type == TestResultType::Skip)
231         {
232             expectedResult = "SKIP";
233         }
234 
235         // Handle flaky passing tests.
236         if (result.flakyFailures > 0 && result.type == TestResultType::Pass)
237         {
238             expectedResult = "FAIL PASS";
239             jsResult.AddMember("is_flaky", true, allocator);
240         }
241 
242         jsResult.AddMember("actual", actualResult, allocator);
243         jsResult.AddMember("expected", expectedResult, allocator);
244 
245         if (IsFailedResult(result.type))
246         {
247             jsResult.AddMember("is_unexpected", true, allocator);
248         }
249 
250         js::Value times;
251         times.SetArray();
252         for (double elapsedTimeSeconds : result.elapsedTimeSeconds)
253         {
254             times.PushBack(elapsedTimeSeconds, allocator);
255         }
256 
257         jsResult.AddMember("times", times, allocator);
258 
259         char testName[500];
260         id.snprintfName(testName, sizeof(testName));
261         js::Value jsName;
262         jsName.SetString(testName, allocator);
263 
264         tests.AddMember(jsName, jsResult, allocator);
265     }
266 
267     js::Value numFailuresByType;
268     numFailuresByType.SetObject();
269 
270     for (const auto &countIter : counts)
271     {
272         TestResultType type = countIter.first;
273         uint32_t count      = countIter.second;
274 
275         js::Value jsCount(count);
276         numFailuresByType.AddMember(ResultTypeToJSString(type, &allocator), jsCount, allocator);
277     }
278 
279     doc.AddMember("num_failures_by_type", numFailuresByType, allocator);
280 
281     doc.AddMember("tests", tests, allocator);
282 
283     printf("Writing test results to %s\n", outputFile.c_str());
284 
285     if (!WriteJsonFile(outputFile, &doc))
286     {
287         printf("Error writing test results file.\n");
288     }
289 }
290 
291 void WriteHistogramJson(const HistogramWriter &histogramWriter, const std::string &outputFile)
292 {
293     js::Document doc;
294     doc.SetArray();
295 
296     histogramWriter.getAsJSON(&doc);
297 
298     printf("Writing histogram json to %s\n", outputFile.c_str());
299 
300     if (!WriteJsonFile(outputFile, &doc))
301     {
302         printf("Error writing histogram json file.\n");
303     }
304 }
305 
306 void UpdateCurrentTestResult(const testing::TestResult &resultIn, TestResults *resultsOut)
307 {
308     TestResult &resultOut = resultsOut->results[resultsOut->currentTest];
309 
310     // Note: Crashes and Timeouts are detected by the crash handler and a watchdog thread.
311     if (resultIn.Skipped())
312     {
313         resultOut.type = TestResultType::Skip;
314     }
315     else if (resultIn.Failed())
316     {
317         resultOut.type = TestResultType::Fail;
318     }
319     else
320     {
321         resultOut.type = TestResultType::Pass;
322     }
323 
324     resultOut.elapsedTimeSeconds.back() = resultsOut->currentTestTimer.getElapsedWallClockTime();
325 }
326 
327 TestIdentifier GetTestIdentifier(const testing::TestInfo &testInfo)
328 {
329     return {testInfo.test_suite_name(), testInfo.name()};
330 }
331 
332 bool IsTestDisabled(const testing::TestInfo &testInfo)
333 {
334     return ::strstr(testInfo.name(), "DISABLED_") == testInfo.name();
335 }
336 
337 using TestIdentifierFilter = std::function<bool(const TestIdentifier &id)>;
338 
339 std::vector<TestIdentifier> FilterTests(std::map<TestIdentifier, FileLine> *fileLinesOut,
340                                         TestIdentifierFilter filter,
341                                         bool alsoRunDisabledTests)
342 {
343     std::vector<TestIdentifier> tests;
344 
345     const testing::UnitTest &testProgramInfo = *testing::UnitTest::GetInstance();
346     for (int suiteIndex = 0; suiteIndex < testProgramInfo.total_test_suite_count(); ++suiteIndex)
347     {
348         const testing::TestSuite &testSuite = *testProgramInfo.GetTestSuite(suiteIndex);
349         for (int testIndex = 0; testIndex < testSuite.total_test_count(); ++testIndex)
350         {
351             const testing::TestInfo &testInfo = *testSuite.GetTestInfo(testIndex);
352             TestIdentifier id                 = GetTestIdentifier(testInfo);
353             if (filter(id) && (!IsTestDisabled(testInfo) || alsoRunDisabledTests))
354             {
355                 tests.emplace_back(id);
356 
357                 if (fileLinesOut)
358                 {
359                     (*fileLinesOut)[id] = {testInfo.file(), testInfo.line()};
360                 }
361             }
362         }
363     }
364 
365     return tests;
366 }
367 
368 std::vector<TestIdentifier> GetFilteredTests(std::map<TestIdentifier, FileLine> *fileLinesOut,
369                                              bool alsoRunDisabledTests)
370 {
371     TestIdentifierFilter gtestIDFilter = [](const TestIdentifier &id) {
372         return testing::internal::UnitTestOptions::FilterMatchesTest(id.testSuiteName, id.testName);
373     };
374 
375     return FilterTests(fileLinesOut, gtestIDFilter, alsoRunDisabledTests);
376 }
377 
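// Shards are striped: shard i takes tests i, i + shardCount, i + 2 * shardCount, and so on.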
378 std::vector<TestIdentifier> GetShardTests(const std::vector<TestIdentifier> &allTests,
379                                           int shardIndex,
380                                           int shardCount,
381                                           std::map<TestIdentifier, FileLine> *fileLinesOut,
382                                           bool alsoRunDisabledTests)
383 {
384     std::vector<TestIdentifier> shardTests;
385 
386     for (int testIndex = shardIndex; testIndex < static_cast<int>(allTests.size());
387          testIndex += shardCount)
388     {
389         shardTests.emplace_back(allTests[testIndex]);
390     }
391 
392     return shardTests;
393 }
394 
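// Builds a --gtest_filter argument selecting exactly the given tests. Dashes are replaced
// with '?' wildcards, presumably because '-' would otherwise start a negative pattern in a
// gtest filter. Illustrative output with hypothetical test names:
//   --gtest_filter=SuiteA.TestOne:SuiteB.TestTwo/some?variant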
395 std::string GetTestFilter(const std::vector<TestIdentifier> &tests)
396 {
397     std::stringstream filterStream;
398 
399     filterStream << "--gtest_filter=";
400 
401     for (size_t testIndex = 0; testIndex < tests.size(); ++testIndex)
402     {
403         if (testIndex != 0)
404         {
405             filterStream << ":";
406         }
407 
408         filterStream << ReplaceDashesWithQuestionMark(tests[testIndex].testSuiteName) << "."
409                      << ReplaceDashesWithQuestionMark(tests[testIndex].testName);
410     }
411 
412     return filterStream.str();
413 }
414 
415 bool GetTestArtifactsFromJSON(const js::Value::ConstObject &obj,
416                               std::vector<std::string> *testArtifactPathsOut)
417 {
418     if (!obj.HasMember("artifacts"))
419     {
420         printf("No artifacts member.\n");
421         return false;
422     }
423 
424     const js::Value &jsArtifacts = obj["artifacts"];
425     if (!jsArtifacts.IsObject())
426     {
427         printf("Artifacts are not an object.\n");
428         return false;
429     }
430 
431     const js::Value::ConstObject &artifacts = jsArtifacts.GetObj();
432     for (const auto &artifactMember : artifacts)
433     {
434         const js::Value &artifact = artifactMember.value;
435         if (!artifact.IsArray())
436         {
437             printf("Artifact is not an array of strings of size 1.\n");
438             return false;
439         }
440 
441         const js::Value::ConstArray &artifactArray = artifact.GetArray();
442         if (artifactArray.Size() != 1)
443         {
444             printf("Artifact is not an array of strings of size 1.\n");
445             return false;
446         }
447 
448         const js::Value &artifactName = artifactArray[0];
449         if (!artifactName.IsString())
450         {
451             printf("Artifact is not an array of strings of size 1.\n");
452             return false;
453         }
454 
455         testArtifactPathsOut->push_back(artifactName.GetString());
456     }
457 
458     return true;
459 }
460 
461 bool GetSingleTestResultFromJSON(const js::Value &name,
462                                  const js::Value::ConstObject &obj,
463                                  TestResults *resultsOut)
464 {
465 
466     TestIdentifier id;
467     if (!TestIdentifier::ParseFromString(name.GetString(), &id))
468     {
469         printf("Could not parse test identifier.\n");
470         return false;
471     }
472 
473     if (!obj.HasMember("expected") || !obj.HasMember("actual"))
474     {
475         printf("No expected or actual member.\n");
476         return false;
477     }
478 
479     const js::Value &expected = obj["expected"];
480     const js::Value &actual   = obj["actual"];
481 
482     if (!expected.IsString() || !actual.IsString())
483     {
484         printf("Expected or actual member is not a string.\n");
485         return false;
486     }
487 
488     const std::string actualStr = actual.GetString();
489 
490     TestResultType resultType = TestResultType::Unknown;
491     int flakyFailures         = 0;
492     if (actualStr.find(' ') != std::string::npos)
493     {
494         std::istringstream strstr(actualStr);
495         std::string token;
496         while (std::getline(strstr, token, ' '))
497         {
498             resultType = GetResultTypeFromString(token);
499             if (resultType == TestResultType::Unknown)
500             {
501                 printf("Failed to parse result type.\n");
502                 return false;
503             }
504             if (IsFailedResult(resultType))
505             {
506                 flakyFailures++;
507             }
508         }
509     }
510     else
511     {
512         resultType = GetResultTypeFromString(actualStr);
513         if (resultType == TestResultType::Unknown)
514         {
515             printf("Failed to parse result type.\n");
516             return false;
517         }
518     }
519 
520     std::vector<double> elapsedTimeSeconds;
521     if (obj.HasMember("times"))
522     {
523         const js::Value &times = obj["times"];
524         if (!times.IsArray())
525         {
526             return false;
527         }
528 
529         const js::Value::ConstArray &timesArray = times.GetArray();
530         if (timesArray.Size() < 1)
531         {
532             return false;
533         }
534         for (const js::Value &time : timesArray)
535         {
536             if (!time.IsDouble())
537             {
538                 return false;
539             }
540 
541             elapsedTimeSeconds.push_back(time.GetDouble());
542         }
543     }
544 
545     TestResult &result        = resultsOut->results[id];
546     result.elapsedTimeSeconds = elapsedTimeSeconds;
547     result.type               = resultType;
548     result.flakyFailures      = flakyFailures;
549     return true;
550 }
551 
552 bool GetTestResultsFromJSON(const js::Document &document, TestResults *resultsOut)
553 {
554     if (!document.HasMember("tests") || !document["tests"].IsObject())
555     {
556         printf("JSON document has no tests member.\n");
557         return false;
558     }
559 
560     const js::Value::ConstObject &tests = document["tests"].GetObj();
561     for (const auto &testMember : tests)
562     {
563         // Get test identifier.
564         const js::Value &name = testMember.name;
565         if (!name.IsString())
566         {
567             printf("Name is not a string.\n");
568             return false;
569         }
570 
571         // Get test result.
572         const js::Value &value = testMember.value;
573         if (!value.IsObject())
574         {
575             printf("Test result is not an object.\n");
576             return false;
577         }
578 
579         const js::Value::ConstObject &obj = value.GetObj();
580 
581         if (BeginsWith(name.GetString(), kArtifactsFakeTestName))
582         {
583             if (!GetTestArtifactsFromJSON(obj, &resultsOut->testArtifactPaths))
584             {
585                 return false;
586             }
587         }
588         else
589         {
590             if (!GetSingleTestResultFromJSON(name, obj, resultsOut))
591             {
592                 return false;
593             }
594         }
595     }
596 
597     return true;
598 }
599 
600 bool MergeTestResults(TestResults *input, TestResults *output, int flakyRetries)
601 {
602     for (auto &resultsIter : input->results)
603     {
604         const TestIdentifier &id = resultsIter.first;
605         TestResult &inputResult  = resultsIter.second;
606         TestResult &outputResult = output->results[id];
607 
608         if (inputResult.type != TestResultType::NoResult)
609         {
610             if (outputResult.type != TestResultType::NoResult)
611             {
612                 printf("Warning: duplicate entry for %s.%s.\n", id.testSuiteName.c_str(),
613                        id.testName.c_str());
614                 return false;
615             }
616 
617             // Mark tests that haven't exhausted their retries as NOTRUN (NoResult) so ANGLE
618             // attempts them again.
619             uint32_t runCount = outputResult.flakyFailures + 1;
620             if (IsFailedResult(inputResult.type) && runCount < static_cast<uint32_t>(flakyRetries))
621             {
622                 printf("Retrying flaky test: %s.%s.\n", id.testSuiteName.c_str(),
623                        id.testName.c_str());
624                 inputResult.type = TestResultType::NoResult;
625                 outputResult.flakyFailures++;
626             }
627             else
628             {
629                 outputResult.type = inputResult.type;
630             }
631             if (runCount == 1)
632             {
633                 outputResult.elapsedTimeSeconds = inputResult.elapsedTimeSeconds;
634             }
635             else
636             {
637                 outputResult.elapsedTimeSeconds.insert(outputResult.elapsedTimeSeconds.end(),
638                                                        inputResult.elapsedTimeSeconds.begin(),
639                                                        inputResult.elapsedTimeSeconds.end());
640             }
641         }
642     }
643 
644     output->testArtifactPaths.insert(output->testArtifactPaths.end(),
645                                      input->testArtifactPaths.begin(),
646                                      input->testArtifactPaths.end());
647 
648     return true;
649 }
650 
651 void PrintTestOutputSnippet(const TestIdentifier &id,
652                             const TestResult &result,
653                             const std::string &fullOutput)
654 {
655     std::stringstream nameStream;
656     nameStream << id;
657     std::string fullName = nameStream.str();
658 
659     size_t runPos = fullOutput.find(std::string(kStartedTestString) + fullName);
660     if (runPos == std::string::npos)
661     {
662         printf("Cannot locate test output snippet.\n");
663         return;
664     }
665 
666     size_t endPos = fullOutput.find(std::string(kFailedTestString) + fullName, runPos);
667     // Only clip the snippet to the "OK" message if the test really
668     // succeeded. It still might have e.g. crashed after printing it.
669     if (endPos == std::string::npos && result.type == TestResultType::Pass)
670     {
671         endPos = fullOutput.find(std::string(kPassedTestString) + fullName, runPos);
672     }
673     if (endPos != std::string::npos)
674     {
675         size_t newline_pos = fullOutput.find("\n", endPos);
676         if (newline_pos != std::string::npos)
677             endPos = newline_pos + 1;
678     }
679 
680     std::cout << "\n";
681     if (endPos != std::string::npos)
682     {
683         std::cout << fullOutput.substr(runPos, endPos - runPos);
684     }
685     else
686     {
687         std::cout << fullOutput.substr(runPos);
688     }
689 }
690 
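// Maps a parameterized test name to a config name. Illustrative, with hypothetical names:
//   "Basic"                  -> "default"     (no '/')
//   "Basic/ES2_VULKAN"       -> "ES2_VULKAN"
//   "Basic/SomeVariant"      -> "default"     (suffix does not start with "ES")
//   "Run/ES2_VULKAN__Suffix" -> "ES2_VULKAN"  (text between '/' and "__")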
691 std::string GetConfigNameFromTestIdentifier(const TestIdentifier &id)
692 {
693     size_t slashPos = id.testName.find('/');
694     if (slashPos == std::string::npos)
695     {
696         return "default";
697     }
698 
699     size_t doubleUnderscorePos = id.testName.find("__");
700     if (doubleUnderscorePos == std::string::npos)
701     {
702         std::string configName = id.testName.substr(slashPos + 1);
703 
704         if (!BeginsWith(configName, "ES"))
705         {
706             return "default";
707         }
708 
709         return configName;
710     }
711     else
712     {
713         return id.testName.substr(slashPos + 1, doubleUnderscorePos - slashPos - 1);
714     }
715 }
716 
717 TestQueue BatchTests(const std::vector<TestIdentifier> &tests, int batchSize)
718 {
719     // First sort tests by configuration.
720     angle::HashMap<std::string, std::vector<TestIdentifier>> testsSortedByConfig;
721     for (const TestIdentifier &id : tests)
722     {
723         std::string config = GetConfigNameFromTestIdentifier(id);
724         testsSortedByConfig[config].push_back(id);
725     }
726 
727     // Then group into batches by 'batchSize'.
728     TestQueue testQueue;
729     for (const auto &configAndIds : testsSortedByConfig)
730     {
731         const std::vector<TestIdentifier> &configTests = configAndIds.second;
732 
733         // Count the number of batches needed for this config.
734         int batchesForConfig = static_cast<int>(configTests.size() + batchSize - 1) / batchSize;
735 
736         // Create batches with striping to split up slow tests.
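        // For example (hypothetical), 7 tests split into 2 batches are striped as
        // {t0, t2, t4, t6} and {t1, t3, t5}, so adjacent (often similar) tests land in
        // different batches.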
737         for (int batchIndex = 0; batchIndex < batchesForConfig; ++batchIndex)
738         {
739             std::vector<TestIdentifier> batchTests;
740             for (size_t testIndex = batchIndex; testIndex < configTests.size();
741                  testIndex += batchesForConfig)
742             {
743                 batchTests.push_back(configTests[testIndex]);
744             }
745             testQueue.emplace(std::move(batchTests));
746             ASSERT(batchTests.empty());
747         }
748     }
749 
750     return testQueue;
751 }
752 
753 void ListTests(const std::map<TestIdentifier, TestResult> &resultsMap)
754 {
755     std::cout << "Tests list:\n";
756 
757     for (const auto &resultIt : resultsMap)
758     {
759         const TestIdentifier &id = resultIt.first;
760         std::cout << id << "\n";
761     }
762 
763     std::cout << "End tests list.\n";
764 }
765 
766 // Prints the names of the tests matching the user-specified filter flag.
767 // This matches the output from googletest/src/gtest.cc but is much, much faster for large filters.
768 // See http://anglebug.com/5164
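// Output shape (hypothetical names), mirroring gtest's own listing format:
//   SuiteName.
//     TestOne
//     TestTwo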
769 void GTestListTests(const std::map<TestIdentifier, TestResult> &resultsMap)
770 {
771     std::map<std::string, std::vector<std::string>> suites;
772 
773     for (const auto &resultIt : resultsMap)
774     {
775         const TestIdentifier &id = resultIt.first;
776         suites[id.testSuiteName].push_back(id.testName);
777     }
778 
779     for (const auto &testSuiteIt : suites)
780     {
781         bool printedTestSuiteName = false;
782 
783         const std::string &suiteName              = testSuiteIt.first;
784         const std::vector<std::string> &testNames = testSuiteIt.second;
785 
786         for (const std::string &testName : testNames)
787         {
788             if (!printedTestSuiteName)
789             {
790                 printedTestSuiteName = true;
791                 printf("%s.\n", suiteName.c_str());
792             }
793             printf("  %s\n", testName.c_str());
794         }
795     }
796 }
797 
798 // On Android, batching is done on the host, i.e. externally.
799 // TestSuite executes on the device and should just pass all args through to GTest.
800 bool UsesExternalBatching()
801 {
802 #if defined(ANGLE_PLATFORM_ANDROID)
803     return true;
804 #else
805     return false;
806 #endif
807 }
808 }  // namespace
809 
810 void MetricWriter::enable(const std::string &testArtifactDirectory)
811 {
812     mPath = testArtifactDirectory + GetPathSeparator() + "angle_metrics";
813 }
814 
815 void MetricWriter::writeInfo(const std::string &name,
816                              const std::string &backend,
817                              const std::string &story,
818                              const std::string &metric,
819                              const std::string &units)
820 {
821     if (mPath.empty())
822     {
823         return;
824     }
825 
826     if (mFile == nullptr)
827     {
828         mFile = fopen(mPath.c_str(), "w");
829     }
830     ASSERT(mFile != nullptr);
831 
832     fprintf(mFile, "{\"name\":\"%s\",", name.c_str());
833     fprintf(mFile, "\"backend\":\"%s\",", backend.c_str());
834     fprintf(mFile, "\"story\":\"%s\",", story.c_str());
835     fprintf(mFile, "\"metric\":\"%s\",", metric.c_str());
836     fprintf(mFile, "\"units\":\"%s\",", units.c_str());
837     // followed by writing value, so no closing bracket yet
838 }
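// A writeInfo() call followed by one of the value writers below yields a single JSON line in
// the "angle_metrics" artifact file, e.g. (hypothetical values):
//   {"name":"draw_calls","backend":"vulkan","story":"story1","metric":"wall_time","units":"ms","value":"1.500000"}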
839 
840 void MetricWriter::writeDoubleValue(double value)
841 {
842     if (mFile != nullptr)
843     {
844         fprintf(mFile, "\"value\":\"%lf\"}\n", value);
845     }
846 }
847 
848 void MetricWriter::writeIntegerValue(size_t value)
849 {
850     if (mFile != nullptr)
851     {
852         fprintf(mFile, "\"value\":\"%zu\"}\n", value);
853     }
854 }
855 
856 void MetricWriter::close()
857 {
858     if (mFile != nullptr)
859     {
860         fclose(mFile);
861         mFile = nullptr;
862     }
863 }
864 
865 // static
866 TestSuite *TestSuite::mInstance = nullptr;
867 
868 TestIdentifier::TestIdentifier() = default;
869 
870 TestIdentifier::TestIdentifier(const std::string &suiteNameIn, const std::string &nameIn)
871     : testSuiteName(suiteNameIn), testName(nameIn)
872 {}
873 
874 TestIdentifier::TestIdentifier(const TestIdentifier &other) = default;
875 
876 TestIdentifier::~TestIdentifier() = default;
877 
878 TestIdentifier &TestIdentifier::operator=(const TestIdentifier &other) = default;
879 
880 void TestIdentifier::snprintfName(char *outBuffer, size_t maxLen) const
881 {
882     snprintf(outBuffer, maxLen, "%s.%s", testSuiteName.c_str(), testName.c_str());
883 }
884 
885 // static
886 bool TestIdentifier::ParseFromString(const std::string &str, TestIdentifier *idOut)
887 {
888     size_t separator = str.find(".");
889     if (separator == std::string::npos)
890     {
891         return false;
892     }
893 
894     idOut->testSuiteName = str.substr(0, separator);
895     idOut->testName      = str.substr(separator + 1, str.length() - separator - 1);
896     return true;
897 }
898 
899 TestResults::TestResults() = default;
900 
901 TestResults::~TestResults() = default;
902 
903 ProcessInfo::ProcessInfo() = default;
904 
905 ProcessInfo &ProcessInfo::operator=(ProcessInfo &&rhs)
906 {
907     process         = std::move(rhs.process);
908     testsInBatch    = std::move(rhs.testsInBatch);
909     resultsFileName = std::move(rhs.resultsFileName);
910     filterFileName  = std::move(rhs.filterFileName);
911     commandLine     = std::move(rhs.commandLine);
912     filterString    = std::move(rhs.filterString);
913     return *this;
914 }
915 
916 ProcessInfo::~ProcessInfo() = default;
917 
918 ProcessInfo::ProcessInfo(ProcessInfo &&other)
919 {
920     *this = std::move(other);
921 }
922 
923 class TestSuite::TestEventListener : public testing::EmptyTestEventListener
924 {
925   public:
926     // Note: TestResults is owned by the TestSuite. It should outlive TestEventListener.
927     TestEventListener(TestSuite *testSuite) : mTestSuite(testSuite) {}
928 
929     void OnTestStart(const testing::TestInfo &testInfo) override
930     {
931         std::lock_guard<std::mutex> guard(mTestSuite->mTestResults.currentTestMutex);
932         mTestSuite->mTestResults.currentTest = GetTestIdentifier(testInfo);
933         mTestSuite->mTestResults.currentTestTimer.start();
934     }
935 
936     void OnTestEnd(const testing::TestInfo &testInfo) override
937     {
938         std::lock_guard<std::mutex> guard(mTestSuite->mTestResults.currentTestMutex);
939         mTestSuite->mTestResults.currentTestTimer.stop();
940         const testing::TestResult &resultIn = *testInfo.result();
941         UpdateCurrentTestResult(resultIn, &mTestSuite->mTestResults);
942         mTestSuite->mTestResults.currentTest = TestIdentifier();
943     }
944 
945     void OnTestProgramEnd(const testing::UnitTest &testProgramInfo) override
946     {
947         std::lock_guard<std::mutex> guard(mTestSuite->mTestResults.currentTestMutex);
948         mTestSuite->mTestResults.allDone = true;
949         mTestSuite->writeOutputFiles(false);
950     }
951 
952   private:
953     TestSuite *mTestSuite;
954 };
955 
956 TestSuite::TestSuite(int *argc, char **argv) : TestSuite(argc, argv, []() {}) {}
957 
958 TestSuite::TestSuite(int *argc, char **argv, std::function<void()> registerTestsCallback)
959     : mShardCount(-1),
960       mShardIndex(-1),
961       mBotMode(false),
962       mDebugTestGroups(false),
963       mGTestListTests(false),
964       mListTests(false),
965       mPrintTestStdout(false),
966       mDisableCrashHandler(false),
967       mBatchSize(kDefaultBatchSize),
968       mCurrentResultCount(0),
969       mTotalResultCount(0),
970       mMaxProcesses(std::min(NumberOfProcessors(), kDefaultMaxProcesses)),
971       mTestTimeout(kDefaultTestTimeout),
972       mBatchTimeout(kDefaultBatchTimeout),
973       mBatchId(-1),
974       mFlakyRetries(0),
975       mMaxFailures(kDefaultMaxFailures),
976       mFailureCount(0),
977       mModifiedPreferredDevice(false)
978 {
979     ASSERT(mInstance == nullptr);
980     mInstance = this;
981 
982     Optional<int> filterArgIndex;
983     bool alsoRunDisabledTests = false;
984 
985 #if defined(ANGLE_PLATFORM_MACOS)
986     // By default, we should hook file API functions on macOS to avoid slow Metal shader caching
987     // file access.
988     angle::InitMetalFileAPIHooking(*argc, argv);
989 #endif
990 
991 #if defined(ANGLE_PLATFORM_WINDOWS)
992     testing::GTEST_FLAG(catch_exceptions) = false;
993 #endif
994 
995     if (*argc <= 0)
996     {
997         printf("Missing test arguments.\n");
998         exit(EXIT_FAILURE);
999     }
1000 
1001     mTestExecutableName = argv[0];
1002 
1003     for (int argIndex = 1; argIndex < *argc;)
1004     {
1005         if (parseSingleArg(argc, argv, argIndex))
1006         {
1007             continue;
1008         }
1009 
1010         if (strstr(argv[argIndex], "--gtest_filter=") == argv[argIndex])
1011         {
1012             filterArgIndex = argIndex;
1013         }
1014         else
1015         {
1016             // Don't include disabled tests in test lists unless the user asks for them.
1017             if (strcmp("--gtest_also_run_disabled_tests", argv[argIndex]) == 0)
1018             {
1019                 alsoRunDisabledTests = true;
1020             }
1021 
1022             mChildProcessArgs.push_back(argv[argIndex]);
1023         }
1024         ++argIndex;
1025     }
1026 
1027     if (mTestArtifactDirectory.empty())
1028     {
1029         mTestArtifactDirectory = GetEnvironmentVar("ISOLATED_OUTDIR");
1030     }
1031 
1032 #if defined(ANGLE_PLATFORM_FUCHSIA)
1033     if (mBotMode)
1034     {
1035         printf("Note: Bot mode is not available on Fuchsia. See http://anglebug.com/7312\n");
1036         mBotMode = false;
1037     }
1038 #endif
1039 
1040     if (UsesExternalBatching() && mBotMode)
1041     {
1042         printf("Bot mode is mutually exclusive with external batching.\n");
1043         exit(EXIT_FAILURE);
1044     }
1045 
1046     mTestResults.currentTestTimeout = mTestTimeout;
1047 
1048     if (!mDisableCrashHandler)
1049     {
1050         // Note that the crash callback must be owned and not use global constructors.
1051         mCrashCallback = [this]() { onCrashOrTimeout(TestResultType::Crash); };
1052         InitCrashHandler(&mCrashCallback);
1053     }
1054 
1055     registerTestsCallback();
1056 
1057     std::string envShardIndex = angle::GetEnvironmentVar("GTEST_SHARD_INDEX");
1058     if (!envShardIndex.empty())
1059     {
1060         angle::UnsetEnvironmentVar("GTEST_SHARD_INDEX");
1061         if (mShardIndex == -1)
1062         {
1063             std::stringstream shardIndexStream(envShardIndex);
1064             shardIndexStream >> mShardIndex;
1065         }
1066     }
1067 
1068     std::string envTotalShards = angle::GetEnvironmentVar("GTEST_TOTAL_SHARDS");
1069     if (!envTotalShards.empty())
1070     {
1071         angle::UnsetEnvironmentVar("GTEST_TOTAL_SHARDS");
1072         if (mShardCount == -1)
1073         {
1074             std::stringstream shardCountStream(envTotalShards);
1075             shardCountStream >> mShardCount;
1076         }
1077     }
1078 
1079     // The test harness reads the active GPU from SystemInfo and uses that for test expectations.
1080     // However, some ANGLE backends don't have a concept of an "active" GPU, and instead use power
1081     // preference to select GPU. We can use the environment variable ANGLE_PREFERRED_DEVICE to
1082     // ensure ANGLE's selected GPU matches the GPU expected for this test suite.
1083     const GPUTestConfig testConfig      = GPUTestConfig();
1084     const char kPreferredDeviceEnvVar[] = "ANGLE_PREFERRED_DEVICE";
1085     if (GetEnvironmentVar(kPreferredDeviceEnvVar).empty())
1086     {
1087         mModifiedPreferredDevice                        = true;
1088         const GPUTestConfig::ConditionArray &conditions = testConfig.getConditions();
1089         if (conditions[GPUTestConfig::kConditionAMD])
1090         {
1091             SetEnvironmentVar(kPreferredDeviceEnvVar, "amd");
1092         }
1093         else if (conditions[GPUTestConfig::kConditionNVIDIA])
1094         {
1095             SetEnvironmentVar(kPreferredDeviceEnvVar, "nvidia");
1096         }
1097         else if (conditions[GPUTestConfig::kConditionIntel])
1098         {
1099             SetEnvironmentVar(kPreferredDeviceEnvVar, "intel");
1100         }
1101         else if (conditions[GPUTestConfig::kConditionApple])
1102         {
1103             SetEnvironmentVar(kPreferredDeviceEnvVar, "apple");
1104         }
1105     }
1106 
1107     // Special handling for TSAN and UBSAN to force crashes when run in automated testing.
1108     if (IsTSan())
1109     {
1110         std::string tsanOptions = GetEnvironmentVar(kTSanOptionsEnvVar);
1111         tsanOptions += " halt_on_error=1";
1112         SetEnvironmentVar(kTSanOptionsEnvVar, tsanOptions.c_str());
1113     }
1114 
1115     if (IsUBSan())
1116     {
1117         std::string ubsanOptions = GetEnvironmentVar(kUBSanOptionsEnvVar);
1118         ubsanOptions += " halt_on_error=1";
1119         SetEnvironmentVar(kUBSanOptionsEnvVar, ubsanOptions.c_str());
1120     }
1121 
1122     if ((mShardIndex == -1) != (mShardCount == -1))
1123     {
1124         printf("Shard index and shard count must be specified together.\n");
1125         exit(EXIT_FAILURE);
1126     }
1127 
1128     if (!mFilterFile.empty())
1129     {
1130         if (filterArgIndex.valid())
1131         {
1132             printf("Cannot use gtest_filter in conjunction with a filter file.\n");
1133             exit(EXIT_FAILURE);
1134         }
1135 
1136         std::string fileContents;
1137         if (!ReadEntireFileToString(mFilterFile.c_str(), &fileContents))
1138         {
1139             printf("Error loading filter file: %s\n", mFilterFile.c_str());
1140             exit(EXIT_FAILURE);
1141         }
1142         mFilterString.assign(fileContents.data());
1143 
1144         if (mFilterString.substr(0, strlen("--gtest_filter=")) != std::string("--gtest_filter="))
1145         {
1146             printf("Filter file must start with \"--gtest_filter=\".\n");
1147             exit(EXIT_FAILURE);
1148         }
1149 
1150         // Note that we only add a filter string if we previously deleted a filter file
1151         // argument. So we will have space for the new filter string in argv.
1152         AddArg(argc, argv, mFilterString.c_str());
1153     }
1154 
1155     // Call into gtest internals to force parameterized test name registration.
1156     testing::internal::UnitTestImpl *impl = testing::internal::GetUnitTestImpl();
1157     impl->RegisterParameterizedTests();
1158 
1159     // Initialize internal GoogleTest filter arguments so we can call "FilterMatchesTest".
1160     testing::internal::ParseGoogleTestFlagsOnly(argc, argv);
1161 
1162     std::vector<TestIdentifier> testSet = GetFilteredTests(&mTestFileLines, alsoRunDisabledTests);
1163 
1164     if (mShardCount == 0)
1165     {
1166         printf("Shard count must be > 0.\n");
1167         exit(EXIT_FAILURE);
1168     }
1169     else if (mShardCount > 0)
1170     {
1171         if (mShardIndex >= mShardCount)
1172         {
1173             printf("Shard index must be less than shard count.\n");
1174             exit(EXIT_FAILURE);
1175         }
1176 
1177         // If there's only one shard, we can use the testSet as defined above.
1178         if (mShardCount > 1)
1179         {
1180             if (!mBotMode && !UsesExternalBatching())
1181             {
1182                 printf("Sharding is only supported in bot mode or external batching.\n");
1183                 exit(EXIT_FAILURE);
1184             }
1185             // With external batching, we must use exactly the testSet as defined externally.
1186             // But when listing tests, we do need to apply sharding ourselves,
1187             // since we use our own implementation for listing tests and not GTest directly.
1188             if (!UsesExternalBatching() || mGTestListTests || mListTests)
1189             {
1190                 testSet = GetShardTests(testSet, mShardIndex, mShardCount, &mTestFileLines,
1191                                         alsoRunDisabledTests);
1192             }
1193         }
1194     }
1195 
1196     if (!testSet.empty())
1197     {
1198         std::stringstream fakeTestName;
1199         fakeTestName << kArtifactsFakeTestName << '-' << testSet[0].testName;
1200         mTestResults.testArtifactsFakeTestName = fakeTestName.str();
1201     }
1202 
1203     if (mBotMode)
1204     {
1205         // Split up test batches.
1206         mTestQueue = BatchTests(testSet, mBatchSize);
1207 
1208         if (mDebugTestGroups)
1209         {
1210             std::cout << "Test Groups:\n";
1211 
1212             while (!mTestQueue.empty())
1213             {
1214                 const std::vector<TestIdentifier> &tests = mTestQueue.front();
1215                 std::cout << GetConfigNameFromTestIdentifier(tests[0]) << " ("
1216                           << static_cast<int>(tests.size()) << ")\n";
1217                 mTestQueue.pop();
1218             }
1219 
1220             exit(EXIT_SUCCESS);
1221         }
1222     }
1223 
1224     testing::InitGoogleTest(argc, argv);
1225 
1226     mTotalResultCount = testSet.size();
1227 
1228     if ((mBotMode || !mResultsDirectory.empty()) && mResultsFile.empty())
1229     {
1230         // Create a default output file in bot mode.
1231         mResultsFile = "output.json";
1232     }
1233 
1234     if (!mResultsDirectory.empty())
1235     {
1236         std::stringstream resultFileName;
1237         resultFileName << mResultsDirectory << GetPathSeparator() << mResultsFile;
1238         mResultsFile = resultFileName.str();
1239     }
1240 
1241     if (!mTestArtifactDirectory.empty())
1242     {
1243         mMetricWriter.enable(mTestArtifactDirectory);
1244     }
1245 
1246     if (!mBotMode)
1247     {
1248         testing::TestEventListeners &listeners = testing::UnitTest::GetInstance()->listeners();
1249         listeners.Append(new TestEventListener(this));
1250 
1251         for (const TestIdentifier &id : testSet)
1252         {
1253             mTestResults.results[id].type = TestResultType::NoResult;
1254         }
1255     }
1256 }
1257 
1258 TestSuite::~TestSuite()
1259 {
1260     const char kPreferredDeviceEnvVar[] = "ANGLE_PREFERRED_DEVICE";
1261     if (mModifiedPreferredDevice && !angle::GetEnvironmentVar(kPreferredDeviceEnvVar).empty())
1262     {
1263         angle::UnsetEnvironmentVar(kPreferredDeviceEnvVar);
1264     }
1265 
1266     if (mWatchdogThread.joinable())
1267     {
1268         mWatchdogThread.detach();
1269     }
1270     TerminateCrashHandler();
1271 }
1272 
1273 bool TestSuite::parseSingleArg(int *argc, char **argv, int argIndex)
1274 {
1275     // Note: Flags should be documented in README.md.
1276     return ParseIntArg("--shard-count", argc, argv, argIndex, &mShardCount) ||
1277            ParseIntArg("--shard-index", argc, argv, argIndex, &mShardIndex) ||
1278            ParseIntArg("--batch-size", argc, argv, argIndex, &mBatchSize) ||
1279            ParseIntArg("--max-processes", argc, argv, argIndex, &mMaxProcesses) ||
1280            ParseIntArg(kTestTimeoutArg, argc, argv, argIndex, &mTestTimeout) ||
1281            ParseIntArg("--batch-timeout", argc, argv, argIndex, &mBatchTimeout) ||
1282            ParseIntArg("--flaky-retries", argc, argv, argIndex, &mFlakyRetries) ||
1283            ParseIntArg("--max-failures", argc, argv, argIndex, &mMaxFailures) ||
1284            // Other test functions consume the batch ID, so keep it in the list.
1285            ParseIntArgWithHandling(kBatchId, argc, argv, argIndex, &mBatchId,
1286                                    ArgHandling::Preserve) ||
1287            ParseStringArg("--results-directory", argc, argv, argIndex, &mResultsDirectory) ||
1288            ParseStringArg(kResultFileArg, argc, argv, argIndex, &mResultsFile) ||
1289            ParseStringArg("--isolated-script-test-output", argc, argv, argIndex, &mResultsFile) ||
1290            ParseStringArg(kFilterFileArg, argc, argv, argIndex, &mFilterFile) ||
1291            ParseStringArg("--histogram-json-file", argc, argv, argIndex, &mHistogramJsonFile) ||
1292            // We need these overloads to work around technical debt in the Android test runner.
1293            ParseStringArg("--isolated-script-test-perf-output", argc, argv, argIndex,
1294                           &mHistogramJsonFile) ||
1295            ParseStringArg("--isolated_script_test_perf_output", argc, argv, argIndex,
1296                           &mHistogramJsonFile) ||
1297            ParseStringArg("--render-test-output-dir", argc, argv, argIndex,
1298                           &mTestArtifactDirectory) ||
1299            ParseStringArg("--isolated-outdir", argc, argv, argIndex, &mTestArtifactDirectory) ||
1300            ParseFlag("--test-launcher-bot-mode", argc, argv, argIndex, &mBotMode) ||
1301            ParseFlag("--bot-mode", argc, argv, argIndex, &mBotMode) ||
1302            ParseFlag("--debug-test-groups", argc, argv, argIndex, &mDebugTestGroups) ||
1303            ParseFlag("--gtest_list_tests", argc, argv, argIndex, &mGTestListTests) ||
1304            ParseFlag("--list-tests", argc, argv, argIndex, &mListTests) ||
1305            ParseFlag("--print-test-stdout", argc, argv, argIndex, &mPrintTestStdout) ||
1306            ParseFlag(kDisableCrashHandler, argc, argv, argIndex, &mDisableCrashHandler);
1307 }
1308 
1309 void TestSuite::onCrashOrTimeout(TestResultType crashOrTimeout)
1310 {
1311     std::lock_guard<std::mutex> guard(mTestResults.currentTestMutex);
1312     if (mTestResults.currentTest.valid())
1313     {
1314         TestResult &result               = mTestResults.results[mTestResults.currentTest];
1315         result.type                      = crashOrTimeout;
1316         result.elapsedTimeSeconds.back() = mTestResults.currentTestTimer.getElapsedWallClockTime();
1317     }
1318 
1319     if (mResultsFile.empty())
1320     {
1321         printf("No results file specified.\n");
1322         return;
1323     }
1324 
1325     writeOutputFiles(true);
1326 }
1327 
1328 bool TestSuite::launchChildTestProcess(uint32_t batchId,
1329                                        const std::vector<TestIdentifier> &testsInBatch)
1330 {
1331     // Create a temporary file to store the test list
1332     ProcessInfo processInfo;
1333 
1334     Optional<std::string> filterBuffer = CreateTemporaryFile();
1335     if (!filterBuffer.valid())
1336     {
1337         std::cerr << "Error creating temporary file for test list.\n";
1338         return false;
1339     }
1340     processInfo.filterFileName.assign(filterBuffer.value());
1341 
1342     std::string filterString = GetTestFilter(testsInBatch);
1343 
1344     FILE *fp = fopen(processInfo.filterFileName.c_str(), "w");
1345     if (!fp)
1346     {
1347         std::cerr << "Error opening temporary file for test list.\n";
1348         return false;
1349     }
1350     fprintf(fp, "%s", filterString.c_str());
1351     fclose(fp);
1352 
1353     processInfo.filterString = filterString;
1354 
1355     std::string filterFileArg = kFilterFileArg + std::string("=") + processInfo.filterFileName;
1356 
1357     // Create a temporary file to store the test output.
1358     Optional<std::string> resultsBuffer = CreateTemporaryFile();
1359     if (!resultsBuffer.valid())
1360     {
1361         std::cerr << "Error creating temporary file for test results.\n";
1362         return false;
1363     }
1364     processInfo.resultsFileName.assign(resultsBuffer.value());
1365 
1366     std::string resultsFileArg = kResultFileArg + std::string("=") + processInfo.resultsFileName;
1367 
1368     // Construct command line for child process.
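    // The resulting invocation looks roughly like (hypothetical executable name and paths):
    //   angle_tests --filter-file=/tmp/fltXXXX --results-file=/tmp/resXXXX --batch-id=7 \
    //       <mChildProcessArgs...> [--disable-crash-handler] [--test-timeout=N] [--isolated-outdir=DIR]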
1369     std::vector<const char *> args;
1370 
1371     args.push_back(mTestExecutableName.c_str());
1372     args.push_back(filterFileArg.c_str());
1373     args.push_back(resultsFileArg.c_str());
1374 
1375     std::stringstream batchIdStream;
1376     batchIdStream << kBatchId << "=" << batchId;
1377     std::string batchIdString = batchIdStream.str();
1378     args.push_back(batchIdString.c_str());
1379 
1380     for (const std::string &arg : mChildProcessArgs)
1381     {
1382         args.push_back(arg.c_str());
1383     }
1384 
1385     if (mDisableCrashHandler)
1386     {
1387         args.push_back(kDisableCrashHandler);
1388     }
1389 
1390     std::string timeoutStr;
1391     if (mTestTimeout != kDefaultTestTimeout)
1392     {
1393         std::stringstream timeoutStream;
1394         timeoutStream << kTestTimeoutArg << "=" << mTestTimeout;
1395         timeoutStr = timeoutStream.str();
1396         args.push_back(timeoutStr.c_str());
1397     }
1398 
1399     std::string artifactsDir;
1400     if (!mTestArtifactDirectory.empty())
1401     {
1402         std::stringstream artifactsDirStream;
1403         artifactsDirStream << kIsolatedOutDir << "=" << mTestArtifactDirectory;
1404         artifactsDir = artifactsDirStream.str();
1405         args.push_back(artifactsDir.c_str());
1406     }
1407 
1408     // Launch child process and wait for completion.
1409     processInfo.process = LaunchProcess(args, ProcessOutputCapture::StdoutAndStderrInterleaved);
1410 
1411     if (!processInfo.process->started())
1412     {
1413         std::cerr << "Error launching child process.\n";
1414         return false;
1415     }
1416 
1417     std::stringstream commandLineStr;
1418     for (const char *arg : args)
1419     {
1420         commandLineStr << arg << " ";
1421     }
1422 
1423     processInfo.commandLine  = commandLineStr.str();
1424     processInfo.testsInBatch = testsInBatch;
1425     mCurrentProcesses.emplace_back(std::move(processInfo));
1426     return true;
1427 }
1428 
1429 void ParseTestIdentifierAndSetResult(const std::string &testName,
1430                                      TestResultType result,
1431                                      TestResults *results)
1432 {
1433     // Trim off any whitespace + extra stuff at the end of the string.
1434     std::string modifiedTestName = testName.substr(0, testName.find(' '));
1435     modifiedTestName             = modifiedTestName.substr(0, modifiedTestName.find('\r'));
1436     TestIdentifier id;
1437     bool ok = TestIdentifier::ParseFromString(modifiedTestName, &id);
1438     ASSERT(ok);
1439     results->results[id] = {result};
1440 }
1441 
1442 bool TestSuite::finishProcess(ProcessInfo *processInfo)
1443 {
1444     // Get test results and merge into main list.
1445     TestResults batchResults;
1446 
1447     if (!GetTestResultsFromFile(processInfo->resultsFileName.c_str(), &batchResults))
1448     {
1449         std::cerr << "Warning: could not find test results file from child process.\n";
1450 
1451         // First assume all tests get skipped.
1452         for (const TestIdentifier &id : processInfo->testsInBatch)
1453         {
1454             batchResults.results[id] = {TestResultType::NoResult};
1455         }
1456 
1457         // Attempt to reconstruct passing list from stdout snippets.
1458         const std::string &batchStdout = processInfo->process->getStdout();
1459         std::istringstream linesStream(batchStdout);
1460 
1461         std::string line;
1462         while (std::getline(linesStream, line))
1463         {
1464             size_t startPos   = line.find(kStartedTestString);
1465             size_t failPos    = line.find(kFailedTestString);
1466             size_t passPos    = line.find(kPassedTestString);
1467             size_t skippedPos = line.find(kSkippedTestString);
1468 
1469             if (startPos != std::string::npos)
1470             {
1471                 // Assume a test that's started crashed until we see it completed.
1472                 std::string testName = line.substr(strlen(kStartedTestString));
1473                 ParseTestIdentifierAndSetResult(testName, TestResultType::Crash, &batchResults);
1474             }
1475             else if (failPos != std::string::npos)
1476             {
1477                 std::string testName = line.substr(strlen(kFailedTestString));
1478                 ParseTestIdentifierAndSetResult(testName, TestResultType::Fail, &batchResults);
1479             }
1480             else if (passPos != std::string::npos)
1481             {
1482                 std::string testName = line.substr(strlen(kPassedTestString));
1483                 ParseTestIdentifierAndSetResult(testName, TestResultType::Pass, &batchResults);
1484             }
1485             else if (skippedPos != std::string::npos)
1486             {
1487                 std::string testName = line.substr(strlen(kSkippedTestString));
1488                 ParseTestIdentifierAndSetResult(testName, TestResultType::Skip, &batchResults);
1489             }
1490         }
1491     }
1492 
1493     if (!MergeTestResults(&batchResults, &mTestResults, mFlakyRetries))
1494     {
1495         std::cerr << "Error merging batch test results.\n";
1496         return false;
1497     }
1498 
1499     if (!batchResults.results.empty())
1500     {
1501         const TestIdentifier &id = batchResults.results.begin()->first;
1502         std::string config       = GetConfigNameFromTestIdentifier(id);
1503         printf("Completed batch with config: %s\n", config.c_str());
1504 
1505         for (const auto &resultIter : batchResults.results)
1506         {
1507             const TestResult &result = resultIter.second;
1508             if (result.type != TestResultType::NoResult && IsFailedResult(result.type))
1509             {
1510                 printf("To reproduce the batch, use filter:\n%s\n",
1511                        processInfo->filterString.c_str());
1512                 break;
1513             }
1514         }
1515     }
1516 
1517     // Process results and print unexpected errors.
1518     for (const auto &resultIter : batchResults.results)
1519     {
1520         const TestIdentifier &id = resultIter.first;
1521         const TestResult &result = resultIter.second;
1522 
1523         // Tests with no result aren't processed here; they're re-queued below.
1524         if (result.type == TestResultType::NoResult)
1525         {
1526             continue;
1527         }
1528 
1529         mCurrentResultCount++;
1530 
1531         printf("[%d/%d] %s.%s", mCurrentResultCount, mTotalResultCount, id.testSuiteName.c_str(),
1532                id.testName.c_str());
1533 
1534         if (mPrintTestStdout)
1535         {
1536             const std::string &batchStdout = processInfo->process->getStdout();
1537             PrintTestOutputSnippet(id, result, batchStdout);
1538         }
1539         else if (result.type == TestResultType::Pass)
1540         {
1541             printf(" (%0.1lf ms)\n", result.elapsedTimeSeconds.back() * 1000.0);
1542         }
1543         else if (result.type == TestResultType::Skip)
1544         {
1545             printf(" (skipped)\n");
1546         }
1547         else if (result.type == TestResultType::Timeout)
1548         {
1549             printf(" (TIMEOUT in %0.1lf s)\n", result.elapsedTimeSeconds.back());
1550             mFailureCount++;
1551         }
1552         else
1553         {
1554             printf(" (%s)\n", ResultTypeToString(result.type));
1555             mFailureCount++;
1556 
1557             const std::string &batchStdout = processInfo->process->getStdout();
1558             PrintTestOutputSnippet(id, result, batchStdout);
1559         }
1560     }
1561 
1562     // On unexpected exit, re-queue any unfinished tests.
1563     std::vector<TestIdentifier> unfinishedTests;
1564     for (const auto &resultIter : batchResults.results)
1565     {
1566         const TestIdentifier &id = resultIter.first;
1567         const TestResult &result = resultIter.second;
1568 
1569         if (result.type == TestResultType::NoResult)
1570         {
1571             unfinishedTests.push_back(id);
1572         }
1573     }
1574 
1575     if (!unfinishedTests.empty())
1576     {
1577         mTestQueue.emplace(std::move(unfinishedTests));
1578     }
1579 
1580     // Clean up any dirty temporary files.
1581     for (const std::string &tempFile : {processInfo->filterFileName, processInfo->resultsFileName})
1582     {
1583         // Note: this cleanup won't happen if the harness itself crashes. If that situation
1584         // comes up in the future, we should consider adding crash cleanup to the
1585         // harness.
1586         if (!angle::DeleteSystemFile(tempFile.c_str()))
1587         {
1588             std::cerr << "Warning: Error cleaning up temp file: " << tempFile << "\n";
1589         }
1590     }
1591 
1592     processInfo->process.reset();
1593     return true;
1594 }
1595 
1596 int TestSuite::run()
1597 {
1598 #if defined(ANGLE_PLATFORM_ANDROID)
1599     if (mListTests && mGTestListTests)
1600     {
1601         // Workaround for the Android test runner requiring a GTest test list.
1602         printf("PlaceholderTest.\n  Placeholder\n");
1603         return EXIT_SUCCESS;
1604     }
1605 #endif  // defined(ANGLE_PLATFORM_ANDROID)
1606 
1607     if (mListTests)
1608     {
1609         ListTests(mTestResults.results);
1610 
1611 #if defined(ANGLE_PLATFORM_ANDROID)
1612         // Because of quirks with the Chromium-provided Android test runner, we need to use a few
1613         // tricks to get the test list output. We add placeholder output for a single test to trick
1614         // the test runner into thinking it ran the tests successfully. We also add an end marker
1615         // for the tests list so we can parse the list from the more spammy Android stdout log.
1616         static constexpr char kPlaceholderTestTest[] = R"(
1617 [==========] Running 1 test from 1 test suite.
1618 [----------] Global test environment set-up.
1619 [----------] 1 test from PlaceholderTest
1620 [ RUN      ] PlaceholderTest.Placeholder
1621 [       OK ] PlaceholderTest.Placeholder (0 ms)
1622 [----------] 1 test from PlaceholderTest (0 ms total)
1623 
1624 [----------] Global test environment tear-down
1625 [==========] 1 test from 1 test suite ran. (24 ms total)
1626 [  PASSED  ] 1 test.
1627 )";
1628         printf("%s", kPlaceholderTestTest);
1629 #endif  // defined(ANGLE_PLATFORM_ANDROID)
1630 
1631         return EXIT_SUCCESS;
1632     }
1633 
1634     if (mGTestListTests)
1635     {
1636         GTestListTests(mTestResults.results);
1637         return EXIT_SUCCESS;
1638     }
1639 
1640     // Run tests serially.
1641     if (!mBotMode)
1642     {
1643         // Only start the watchdog if the debugger is not attached and we're a child process.
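        // (A batch id is only passed to child processes, so mBatchId != -1 identifies a child.)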
1644         if (!angle::IsDebuggerAttached() && mBatchId != -1)
1645         {
1646             startWatchdog();
1647         }
1648 
1649         int retVal = RUN_ALL_TESTS();
1650         {
1651             std::lock_guard<std::mutex> guard(mTestResults.currentTestMutex);
1652             mTestResults.allDone = true;
1653         }
1654 
1655         if (mWatchdogThread.joinable())
1656         {
1657             mWatchdogThread.join();
1658         }
1659         return retVal;
1660     }
1661 
1662     Timer totalRunTime;
1663     totalRunTime.start();
1664 
1665     Timer messageTimer;
1666     messageTimer.start();
1667 
1668     uint32_t batchId = 0;
1669 
1670     while (!mTestQueue.empty() || !mCurrentProcesses.empty())
1671     {
1672         bool progress = false;
1673 
1674         // Spawn a process if needed and possible.
1675         if (static_cast<int>(mCurrentProcesses.size()) < mMaxProcesses && !mTestQueue.empty())
1676         {
1677             std::vector<TestIdentifier> testsInBatch = mTestQueue.front();
1678             mTestQueue.pop();
1679 
1680             if (!launchChildTestProcess(++batchId, testsInBatch))
1681             {
1682                 return 1;
1683             }
1684 
1685             progress = true;
1686         }
1687 
1688         // Check for process completion.
1689         uint32_t totalTestCount = 0;
1690         for (auto processIter = mCurrentProcesses.begin(); processIter != mCurrentProcesses.end();)
1691         {
1692             ProcessInfo &processInfo = *processIter;
1693             if (processInfo.process->finished())
1694             {
1695                 if (!finishProcess(&processInfo))
1696                 {
1697                     return 1;
1698                 }
1699                 processIter = mCurrentProcesses.erase(processIter);
1700                 progress    = true;
1701             }
1702             else if (processInfo.process->getElapsedTimeSeconds() > mBatchTimeout)
1703             {
1704                 // Terminate the process and record timeouts for the batch.
1705                 // Because we can't determine which sub-test caused a timeout, record the whole
1706                 // batch as a timeout failure. Can be improved by using socket message passing.
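                // Every test in the batch is marked Timeout below; none of them are re-queued.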
1707                 if (!processInfo.process->kill())
1708                 {
1709                     return 1;
1710                 }
1711 
1712                 const std::string &batchStdout = processInfo.process->getStdout();
1713                 std::vector<std::string> lines =
1714                     SplitString(batchStdout, "\r\n", WhitespaceHandling::TRIM_WHITESPACE,
1715                                 SplitResult::SPLIT_WANT_NONEMPTY);
1716                 constexpr size_t kKeepLines = 10;
1717                 printf("\nBatch timeout! Last %zu lines of batch stdout:\n", kKeepLines);
1718                 printf("---------------------------------------------\n");
1719                 for (size_t lineNo = lines.size() - std::min(lines.size(), kKeepLines);
1720                      lineNo < lines.size(); ++lineNo)
1721                 {
1722                     printf("%s\n", lines[lineNo].c_str());
1723                 }
1724                 printf("---------------------------------------------\n\n");
1725 
1726                 for (const TestIdentifier &testIdentifier : processInfo.testsInBatch)
1727                 {
1728                     // Because the whole batch failed we can't know how long each test took.
1729                     mTestResults.results[testIdentifier].type = TestResultType::Timeout;
1730                     mFailureCount++;
1731                 }
1732 
1733                 processIter = mCurrentProcesses.erase(processIter);
1734                 progress    = true;
1735             }
1736             else
1737             {
1738                 totalTestCount += static_cast<uint32_t>(processInfo.testsInBatch.size());
1739                 processIter++;
1740             }
1741         }
1742 
1743         if (progress)
1744         {
1745             messageTimer.start();
1746         }
1747         else if (messageTimer.getElapsedWallClockTime() > kIdleMessageTimeout)
1748         {
1749             const ProcessInfo &processInfo = mCurrentProcesses[0];
1750             double processTime             = processInfo.process->getElapsedTimeSeconds();
1751             printf("Running %d tests in %d processes, longest for %d seconds.\n", totalTestCount,
1752                    static_cast<int>(mCurrentProcesses.size()), static_cast<int>(processTime));
1753             messageTimer.start();
1754         }
1755 
1756         // Early exit if we passed the maximum failure threshold. Still wait for current tests.
1757         if (mFailureCount > mMaxFailures && !mTestQueue.empty())
1758         {
1759             printf("Reached maximum failure count (%d), clearing test queue.\n", mMaxFailures);
1760             TestQueue emptyTestQueue;
1761             std::swap(mTestQueue, emptyTestQueue);
1762         }
1763 
1764         // Sleep briefly and continue.
1765         angle::Sleep(100);
1766     }
1767 
1768     // Dump combined results.
1769     if (mFailureCount > mMaxFailures)
1770     {
1771         printf(
1772             "Omitted results files because the failure count (%d) exceeded the maximum number of "
1773             "failures (%d).\n",
1774             mFailureCount, mMaxFailures);
1775     }
1776     else
1777     {
1778         writeOutputFiles(false);
1779     }
1780 
1781     totalRunTime.stop();
1782     printf("Tests completed in %lf seconds\n", totalRunTime.getElapsedWallClockTime());
1783 
1784     return printFailuresAndReturnCount() == 0 ? 0 : 1;
1785 }
1786 
1787 int TestSuite::printFailuresAndReturnCount() const
1788 {
1789     std::vector<std::string> failures;
1790     uint32_t skipCount = 0;
1791 
1792     for (const auto &resultIter : mTestResults.results)
1793     {
1794         const TestIdentifier &id = resultIter.first;
1795         const TestResult &result = resultIter.second;
1796 
1797         if (result.type == TestResultType::Skip)
1798         {
1799             skipCount++;
1800         }
1801         else if (result.type != TestResultType::Pass)
1802         {
1803             const FileLine &fileLine = mTestFileLines.find(id)->second;
1804 
1805             std::stringstream failureMessage;
1806             failureMessage << id << " (" << fileLine.file << ":" << fileLine.line << ") ("
1807                            << ResultTypeToString(result.type) << ")";
1808             failures.emplace_back(failureMessage.str());
1809         }
1810     }
1811 
1812     if (failures.empty())
1813         return 0;
1814 
1815     printf("%zu test%s failed:\n", failures.size(), failures.size() > 1 ? "s" : "");
1816     for (const std::string &failure : failures)
1817     {
1818         printf("    %s\n", failure.c_str());
1819     }
1820     if (skipCount > 0)
1821     {
1822         printf("%u tests skipped.\n", skipCount);
1823     }
1824 
1825     return static_cast<int>(failures.size());
1826 }
1827 
1828 void TestSuite::startWatchdog()
1829 {
1830     auto watchdogMain = [this]() {
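        // Poll every 500 ms: return quietly once all tests are done; if the current test runs
        // past its timeout, break out of the loop to report the timeout and hard-exit.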
1831         do
1832         {
1833             {
1834                 std::lock_guard<std::mutex> guard(mTestResults.currentTestMutex);
1835                 if (mTestResults.currentTestTimer.getElapsedWallClockTime() >
1836                     mTestResults.currentTestTimeout)
1837                 {
1838                     break;
1839                 }
1840 
1841                 if (mTestResults.allDone)
1842                     return;
1843             }
1844 
1845             angle::Sleep(500);
1846         } while (true);
1847         onCrashOrTimeout(TestResultType::Timeout);
1848         ::_Exit(EXIT_FAILURE);
1849     };
1850     mWatchdogThread = std::thread(watchdogMain);
1851 }
1852 
1853 void TestSuite::addHistogramSample(const std::string &measurement,
1854                                    const std::string &story,
1855                                    double value,
1856                                    const std::string &units)
1857 {
1858     mHistogramWriter.addSample(measurement, story, value, units);
1859 }
1860 
1861 bool TestSuite::hasTestArtifactsDirectory() const
1862 {
1863     return !mTestArtifactDirectory.empty();
1864 }
1865 
1866 std::string TestSuite::reserveTestArtifactPath(const std::string &artifactName)
1867 {
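    // Record the artifact name so it is reported with the test results, then return the full
    // path inside the artifact directory (or the bare name if no artifact directory is set).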
1868     mTestResults.testArtifactPaths.push_back(artifactName);
1869 
1870     if (mTestArtifactDirectory.empty())
1871     {
1872         return artifactName;
1873     }
1874 
1875     std::stringstream pathStream;
1876     pathStream << mTestArtifactDirectory << GetPathSeparator() << artifactName;
1877     return pathStream.str();
1878 }
1879 
1880 bool GetTestResultsFromFile(const char *fileName, TestResults *resultsOut)
1881 {
1882     std::ifstream ifs(fileName);
1883     if (!ifs.is_open())
1884     {
1885         std::cerr << "Error opening " << fileName << "\n";
1886         return false;
1887     }
1888 
1889     js::IStreamWrapper ifsWrapper(ifs);
1890     js::Document document;
1891     document.ParseStream(ifsWrapper);
1892 
1893     if (document.HasParseError())
1894     {
1895         std::cerr << "Parse error reading JSON document: " << document.GetParseError() << "\n";
1896         return false;
1897     }
1898 
1899     if (!GetTestResultsFromJSON(document, resultsOut))
1900     {
1901         std::cerr << "Error getting test results from JSON.\n";
1902         return false;
1903     }
1904 
1905     return true;
1906 }
1907 
1908 void TestSuite::dumpTestExpectationsErrorMessages()
1909 {
1910     std::stringstream errorMsgStream;
1911     for (const auto &message : mTestExpectationsParser.getErrorMessages())
1912     {
1913         errorMsgStream << std::endl << " " << message;
1914     }
1915 
1916     std::cerr << "Failed to load test expectations." << errorMsgStream.str() << std::endl;
1917 }
1918 
1919 bool TestSuite::loadTestExpectationsFromFileWithConfig(const GPUTestConfig &config,
1920                                                        const std::string &fileName)
1921 {
1922     if (!mTestExpectationsParser.loadTestExpectationsFromFile(config, fileName))
1923     {
1924         dumpTestExpectationsErrorMessages();
1925         return false;
1926     }
1927     return true;
1928 }
1929 
1930 bool TestSuite::loadAllTestExpectationsFromFile(const std::string &fileName)
1931 {
1932     if (!mTestExpectationsParser.loadAllTestExpectationsFromFile(fileName))
1933     {
1934         dumpTestExpectationsErrorMessages();
1935         return false;
1936     }
1937     return true;
1938 }
1939 
1940 bool TestSuite::logAnyUnusedTestExpectations()
1941 {
1942     std::stringstream unusedMsgStream;
1943     bool anyUnused = false;
1944     for (const auto &message : mTestExpectationsParser.getUnusedExpectationsMessages())
1945     {
1946         anyUnused = true;
1947         unusedMsgStream << std::endl << " " << message;
1948     }
1949     if (anyUnused)
1950     {
1951         std::cerr << "Failed to validate test expectations." << unusedMsgStream.str() << std::endl;
1952         return true;
1953     }
1954     return false;
1955 }
1956 
1957 int32_t TestSuite::getTestExpectation(const std::string &testName)
1958 {
1959     return mTestExpectationsParser.getTestExpectation(testName);
1960 }
1961 
1962 void TestSuite::maybeUpdateTestTimeout(uint32_t testExpectation)
1963 {
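    // Tests expected to be slow get an extended timeout (kSlowTestTimeoutScale times the
    // configured per-test timeout).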
1964     double testTimeout = (testExpectation == GPUTestExpectationsParser::kGpuTestTimeout)
1965                              ? getSlowTestTimeout()
1966                              : mTestTimeout;
1967     std::lock_guard<std::mutex> guard(mTestResults.currentTestMutex);
1968     mTestResults.currentTestTimeout = testTimeout;
1969 }
1970 
1971 int32_t TestSuite::getTestExpectationWithConfigAndUpdateTimeout(const GPUTestConfig &config,
1972                                                                 const std::string &testName)
1973 {
1974     uint32_t expectation = mTestExpectationsParser.getTestExpectationWithConfig(config, testName);
1975     maybeUpdateTestTimeout(expectation);
1976     return expectation;
1977 }
1978 
1979 int TestSuite::getSlowTestTimeout() const
1980 {
1981     return mTestTimeout * kSlowTestTimeoutScale;
1982 }
1983 
1984 void TestSuite::writeOutputFiles(bool interrupted)
1985 {
1986     if (!mResultsFile.empty())
1987     {
1988         WriteResultsFile(interrupted, mTestResults, mResultsFile);
1989     }
1990 
1991     if (!mHistogramJsonFile.empty())
1992     {
1993         WriteHistogramJson(mHistogramWriter, mHistogramJsonFile);
1994     }
1995 
1996     mMetricWriter.close();
1997 }
1998 
1999 const char *TestResultTypeToString(TestResultType type)
2000 {
2001     switch (type)
2002     {
2003         case TestResultType::Crash:
2004             return "Crash";
2005         case TestResultType::Fail:
2006             return "Fail";
2007         case TestResultType::NoResult:
2008             return "NoResult";
2009         case TestResultType::Pass:
2010             return "Pass";
2011         case TestResultType::Skip:
2012             return "Skip";
2013         case TestResultType::Timeout:
2014             return "Timeout";
2015         case TestResultType::Unknown:
2016         default:
2017             return "Unknown";
2018     }
2019 }
2020 
2021 // This code supports using "-" in test names, which happens often in dEQP. GTest uses "-" as a
2022 // marker for the beginning of the exclusion filter. Work around this by replacing "-" with "?",
2023 // which matches any single character.
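// Example (illustrative): "dEQP-GLES31.functional.foo" becomes "dEQP?GLES31.functional.foo",
// which gtest's '?' wildcard still matches against the original test name.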
2024 std::string ReplaceDashesWithQuestionMark(std::string dashesString)
2025 {
2026     std::string noDashesString = dashesString;
2027     ReplaceAllSubstrings(&noDashesString, "-", "?");
2028     return noDashesString;
2029 }
2030 }  // namespace angle
2031