1 //
2 // Copyright 2019 The ANGLE Project Authors. All rights reserved.
3 // Use of this source code is governed by a BSD-style license that can be
4 // found in the LICENSE file.
5 //
6 // TestSuite:
7 // Basic implementation of a test harness in ANGLE.
8
9 #include "TestSuite.h"
10
11 #include "common/debug.h"
12 #include "common/platform.h"
13 #include "common/string_utils.h"
14 #include "common/system_utils.h"
15 #include "util/Timer.h"
16
17 #include <stdlib.h>
18 #include <time.h>
19
20 #include <fstream>
21 #include <unordered_map>
22
23 #include <gtest/gtest.h>
24 #include <rapidjson/document.h>
25 #include <rapidjson/filewritestream.h>
26 #include <rapidjson/istreamwrapper.h>
27 #include <rapidjson/prettywriter.h>
28
29 // We directly call into a function to register the parameterized tests. This saves spinning up
30 // a subprocess with a new gtest filter.
31 #include <gtest/../../src/gtest-internal-inl.h>
32
33 namespace js = rapidjson;
34
35 namespace angle
36 {
37 namespace
38 {
39 constexpr char kBatchId[] = "--batch-id=";
40 constexpr char kFilterFileArg[] = "--filter-file=";
41 constexpr char kFlakyRetries[] = "--flaky-retries=";
42 constexpr char kGTestListTests[] = "--gtest_list_tests";
43 constexpr char kHistogramJsonFileArg[] = "--histogram-json-file=";
44 constexpr char kListTests[] = "--list-tests";
45 constexpr char kPrintTestStdout[] = "--print-test-stdout";
46 constexpr char kResultFileArg[] = "--results-file=";
47 constexpr char kTestTimeoutArg[] = "--test-timeout=";
48 constexpr char kDisableCrashHandler[] = "--disable-crash-handler";
49 constexpr char kIsolatedOutDir[] = "--isolated-outdir=";
50 constexpr char kMaxFailures[] = "--max-failures=";
51 constexpr char kRenderTestOutputDir[] = "--render-test-output-dir=";
52
53 constexpr char kStartedTestString[] = "[ RUN      ] ";
54 constexpr char kPassedTestString[]  = "[       OK ] ";
55 constexpr char kFailedTestString[]  = "[  FAILED  ] ";
56 constexpr char kSkippedTestString[] = "[  SKIPPED ] ";
57
58 constexpr char kArtifactsFakeTestName[] = "TestArtifactsFakeTest";
59
60 constexpr char kTSanOptionsEnvVar[] = "TSAN_OPTIONS";
61 constexpr char kUBSanOptionsEnvVar[] = "UBSAN_OPTIONS";
62
63 // Note: we use a fairly high test timeout to allow for the first test in a batch to be slow.
64 // Ideally we could use a separate timeout for the slow first test.
65 // Allow sanitized tests to run more slowly.
66 #if defined(NDEBUG) && !defined(ANGLE_WITH_SANITIZER)
67 constexpr int kDefaultTestTimeout = 60;
68 constexpr int kDefaultBatchTimeout = 300;
69 #else
70 constexpr int kDefaultTestTimeout = 120;
71 constexpr int kDefaultBatchTimeout = 600;
72 #endif
73 constexpr int kSlowTestTimeoutScale = 3;
74 constexpr int kDefaultBatchSize = 256;
75 constexpr double kIdleMessageTimeout = 15.0;
76 constexpr int kDefaultMaxProcesses = 16;
77 constexpr int kDefaultMaxFailures = 100;
78
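// Returns a pointer to the value portion of an argument that starts with the given flag prefix
// (e.g. "--flag=value" -> "value"), or nullptr if the argument does not start with the flag.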
79 const char *ParseFlagValue(const char *flag, const char *argument)
80 {
81 if (strstr(argument, flag) == argument)
82 {
83 return argument + strlen(flag);
84 }
85
86 return nullptr;
87 }
88
89 bool ParseIntArg(const char *flag, const char *argument, int *valueOut)
90 {
91 const char *value = ParseFlagValue(flag, argument);
92 if (!value)
93 {
94 return false;
95 }
96
97 char *end = nullptr;
98 const long longValue = strtol(value, &end, 10);
99
100 if (*end != '\0')
101 {
102 printf("Error parsing integer flag value.\n");
103 exit(EXIT_FAILURE);
104 }
105
106 if (longValue == LONG_MAX || longValue == LONG_MIN || static_cast<int>(longValue) != longValue)
107 {
108 printf("Overflow when parsing integer flag value.\n");
109 exit(EXIT_FAILURE);
110 }
111
112 *valueOut = static_cast<int>(longValue);
113 return true;
114 }
115
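// Same as ParseIntArg, but always reports the argument as not consumed so it stays in argv for
// later consumers (see the --batch-id handling in TestSuite::parseSingleArg).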
116 bool ParseIntArgNoDelete(const char *flag, const char *argument, int *valueOut)
117 {
118 ParseIntArg(flag, argument, valueOut);
119 return false;
120 }
121
122 bool ParseFlag(const char *expected, const char *actual, bool *flagOut)
123 {
124 if (strcmp(expected, actual) == 0)
125 {
126 *flagOut = true;
127 return true;
128 }
129 return false;
130 }
131
132 bool ParseStringArg(const char *flag, const char *argument, std::string *valueOut)
133 {
134 const char *value = ParseFlagValue(flag, argument);
135 if (!value)
136 {
137 return false;
138 }
139
140 *valueOut = value;
141 return true;
142 }
143
144 void DeleteArg(int *argc, char **argv, int argIndex)
145 {
146 // Shift the remainder of the argv list left by one. Note that argv has (*argc + 1) elements,
147 // the last one always being NULL. The following loop moves the trailing NULL element as well.
148 for (int index = argIndex; index < *argc; ++index)
149 {
150 argv[index] = argv[index + 1];
151 }
152 (*argc)--;
153 }
154
155 void AddArg(int *argc, char **argv, const char *arg)
156 {
157 // This unsafe const_cast is necessary to work around gtest limitations.
158 argv[*argc] = const_cast<char *>(arg);
159 argv[*argc + 1] = nullptr;
160 (*argc)++;
161 }
162
163 const char *ResultTypeToString(TestResultType type)
164 {
165 switch (type)
166 {
167 case TestResultType::Crash:
168 return "CRASH";
169 case TestResultType::Fail:
170 return "FAIL";
171 case TestResultType::NoResult:
172 return "NOTRUN";
173 case TestResultType::Pass:
174 return "PASS";
175 case TestResultType::Skip:
176 return "SKIP";
177 case TestResultType::Timeout:
178 return "TIMEOUT";
179 case TestResultType::Unknown:
180 default:
181 return "UNKNOWN";
182 }
183 }
184
185 TestResultType GetResultTypeFromString(const std::string &str)
186 {
187 if (str == "CRASH")
188 return TestResultType::Crash;
189 if (str == "FAIL")
190 return TestResultType::Fail;
191 if (str == "PASS")
192 return TestResultType::Pass;
193 if (str == "NOTRUN")
194 return TestResultType::NoResult;
195 if (str == "SKIP")
196 return TestResultType::Skip;
197 if (str == "TIMEOUT")
198 return TestResultType::Timeout;
199 return TestResultType::Unknown;
200 }
201
202 bool IsFailedResult(TestResultType resultType)
203 {
204 return resultType != TestResultType::Pass && resultType != TestResultType::Skip;
205 }
206
207 js::Value ResultTypeToJSString(TestResultType type, js::Document::AllocatorType *allocator)
208 {
209 js::Value jsName;
210 jsName.SetString(ResultTypeToString(type), *allocator);
211 return jsName;
212 }
213
214 bool WriteJsonFile(const std::string &outputFile, js::Document *doc)
215 {
216 FILE *fp = fopen(outputFile.c_str(), "w");
217 if (!fp)
218 {
219 return false;
220 }
221
222 constexpr size_t kBufferSize = 0xFFFF;
223 std::vector<char> writeBuffer(kBufferSize);
224 js::FileWriteStream os(fp, writeBuffer.data(), kBufferSize);
225 js::PrettyWriter<js::FileWriteStream> writer(os);
226 if (!doc->Accept(writer))
227 {
228 fclose(fp);
229 return false;
230 }
231 fclose(fp);
232 return true;
233 }
234
235 // Writes out a TestResults to the Chromium JSON Test Results format.
236 // https://chromium.googlesource.com/chromium/src.git/+/main/docs/testing/json_test_results_format.md
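// An abbreviated, illustrative example of the structure emitted below:
//   {
//     "interrupted": false, "path_delimiter": ".", "version": 3, "seconds_since_epoch": 1234567890,
//     "tests": { "Suite.Test/ES2_Vulkan": { "actual": "PASS", "expected": "PASS", "times": [0.1] } },
//     "num_failures_by_type": { "PASS": 1 }
//   }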
237 void WriteResultsFile(bool interrupted,
238 const TestResults &testResults,
239 const std::string &outputFile,
240 const char *testSuiteName)
241 {
242 time_t ltime;
243 time(&ltime);
244 struct tm *timeinfo = gmtime(&ltime);
245 ltime = mktime(timeinfo);
246
247 uint64_t secondsSinceEpoch = static_cast<uint64_t>(ltime);
248
249 js::Document doc;
250 doc.SetObject();
251
252 js::Document::AllocatorType &allocator = doc.GetAllocator();
253
254 doc.AddMember("interrupted", interrupted, allocator);
255 doc.AddMember("path_delimiter", ".", allocator);
256 doc.AddMember("version", 3, allocator);
257 doc.AddMember("seconds_since_epoch", secondsSinceEpoch, allocator);
258
259 js::Value tests;
260 tests.SetObject();
261
262 // If we have any test artifacts, make a fake test to house them.
263 if (!testResults.testArtifactPaths.empty())
264 {
265 js::Value artifactsTest;
266 artifactsTest.SetObject();
267
268 artifactsTest.AddMember("actual", "PASS", allocator);
269 artifactsTest.AddMember("expected", "PASS", allocator);
270
271 js::Value artifacts;
272 artifacts.SetObject();
273
274 for (const std::string &testArtifactPath : testResults.testArtifactPaths)
275 {
276 std::vector<std::string> pieces =
277 SplitString(testArtifactPath, "/\\", WhitespaceHandling::TRIM_WHITESPACE,
278 SplitResult::SPLIT_WANT_NONEMPTY);
279 ASSERT(!pieces.empty());
280
281 js::Value basename;
282 basename.SetString(pieces.back(), allocator);
283
284 js::Value artifactPath;
285 artifactPath.SetString(testArtifactPath, allocator);
286
287 js::Value artifactArray;
288 artifactArray.SetArray();
289 artifactArray.PushBack(artifactPath, allocator);
290
291 artifacts.AddMember(basename, artifactArray, allocator);
292 }
293
294 artifactsTest.AddMember("artifacts", artifacts, allocator);
295
296 js::Value fakeTestName;
297 fakeTestName.SetString(testResults.testArtifactsFakeTestName, allocator);
298 tests.AddMember(fakeTestName, artifactsTest, allocator);
299 }
300
301 std::map<TestResultType, uint32_t> counts;
302
303 for (const auto &resultIter : testResults.results)
304 {
305 const TestIdentifier &id = resultIter.first;
306 const TestResult &result = resultIter.second;
307
308 js::Value jsResult;
309 jsResult.SetObject();
310
311 counts[result.type]++;
312
313 std::string actualResult;
314 for (uint32_t fail = 0; fail < result.flakyFailures; ++fail)
315 {
316 actualResult += "FAIL ";
317 }
318
319 actualResult += ResultTypeToString(result.type);
320
321 std::string expectedResult = "PASS";
322 if (result.type == TestResultType::Skip)
323 {
324 expectedResult = "SKIP";
325 }
326
327 // Handle flaky passing tests.
328 if (result.flakyFailures > 0 && result.type == TestResultType::Pass)
329 {
330 expectedResult = "FAIL PASS";
331 jsResult.AddMember("is_flaky", true, allocator);
332 }
333
334 jsResult.AddMember("actual", actualResult, allocator);
335 jsResult.AddMember("expected", expectedResult, allocator);
336
337 if (IsFailedResult(result.type))
338 {
339 jsResult.AddMember("is_unexpected", true, allocator);
340 }
341
342 js::Value times;
343 times.SetArray();
344 for (double elapsedTimeSeconds : result.elapsedTimeSeconds)
345 {
346 times.PushBack(elapsedTimeSeconds, allocator);
347 }
348
349 jsResult.AddMember("times", times, allocator);
350
351 char testName[500];
352 id.sprintfName(testName);
353 js::Value jsName;
354 jsName.SetString(testName, allocator);
355
356 tests.AddMember(jsName, jsResult, allocator);
357 }
358
359 js::Value numFailuresByType;
360 numFailuresByType.SetObject();
361
362 for (const auto &countIter : counts)
363 {
364 TestResultType type = countIter.first;
365 uint32_t count = countIter.second;
366
367 js::Value jsCount(count);
368 numFailuresByType.AddMember(ResultTypeToJSString(type, &allocator), jsCount, allocator);
369 }
370
371 doc.AddMember("num_failures_by_type", numFailuresByType, allocator);
372
373 doc.AddMember("tests", tests, allocator);
374
375 printf("Writing test results to %s\n", outputFile.c_str());
376
377 if (!WriteJsonFile(outputFile, &doc))
378 {
379 printf("Error writing test results file.\n");
380 }
381 }
382
383 void WriteHistogramJson(const HistogramWriter &histogramWriter,
384 const std::string &outputFile,
385 const char *testSuiteName)
386 {
387 js::Document doc;
388 doc.SetArray();
389
390 histogramWriter.getAsJSON(&doc);
391
392 printf("Writing histogram json to %s\n", outputFile.c_str());
393
394 if (!WriteJsonFile(outputFile, &doc))
395 {
396 printf("Error writing histogram json file.\n");
397 }
398 }
399
400 void WriteOutputFiles(bool interrupted,
401 const TestResults &testResults,
402 const std::string &resultsFile,
403 const HistogramWriter &histogramWriter,
404 const std::string &histogramJsonOutputFile,
405 const char *testSuiteName)
406 {
407 if (!resultsFile.empty())
408 {
409 WriteResultsFile(interrupted, testResults, resultsFile, testSuiteName);
410 }
411
412 if (!histogramJsonOutputFile.empty())
413 {
414 WriteHistogramJson(histogramWriter, histogramJsonOutputFile, testSuiteName);
415 }
416 }
417
418 void UpdateCurrentTestResult(const testing::TestResult &resultIn, TestResults *resultsOut)
419 {
420 TestResult &resultOut = resultsOut->results[resultsOut->currentTest];
421
422 // Note: Crashes and Timeouts are detected by the crash handler and a watchdog thread.
423 if (resultIn.Skipped())
424 {
425 resultOut.type = TestResultType::Skip;
426 }
427 else if (resultIn.Failed())
428 {
429 resultOut.type = TestResultType::Fail;
430 }
431 else
432 {
433 resultOut.type = TestResultType::Pass;
434 }
435
436 resultOut.elapsedTimeSeconds.back() = resultsOut->currentTestTimer.getElapsedWallClockTime();
437 }
438
439 TestIdentifier GetTestIdentifier(const testing::TestInfo &testInfo)
440 {
441 return {testInfo.test_suite_name(), testInfo.name()};
442 }
443
444 class TestEventListener : public testing::EmptyTestEventListener
445 {
446 public:
447 // Note: TestResults is owned by the TestSuite. It should outlive TestEventListener.
448 TestEventListener(const std::string &resultsFile,
449 const std::string &histogramJsonFile,
450 const char *testSuiteName,
451 TestResults *testResults,
452 HistogramWriter *histogramWriter)
453 : mResultsFile(resultsFile),
454 mHistogramJsonFile(histogramJsonFile),
455 mTestSuiteName(testSuiteName),
456 mTestResults(testResults),
457 mHistogramWriter(histogramWriter)
458 {}
459
460 void OnTestStart(const testing::TestInfo &testInfo) override
461 {
462 std::lock_guard<std::mutex> guard(mTestResults->currentTestMutex);
463 mTestResults->currentTest = GetTestIdentifier(testInfo);
464 mTestResults->currentTestTimer.start();
465 }
466
467 void OnTestEnd(const testing::TestInfo &testInfo) override
468 {
469 std::lock_guard<std::mutex> guard(mTestResults->currentTestMutex);
470 mTestResults->currentTestTimer.stop();
471 const testing::TestResult &resultIn = *testInfo.result();
472 UpdateCurrentTestResult(resultIn, mTestResults);
473 mTestResults->currentTest = TestIdentifier();
474 }
475
476 void OnTestProgramEnd(const testing::UnitTest &testProgramInfo) override
477 {
478 std::lock_guard<std::mutex> guard(mTestResults->currentTestMutex);
479 mTestResults->allDone = true;
480 WriteOutputFiles(false, *mTestResults, mResultsFile, *mHistogramWriter, mHistogramJsonFile,
481 mTestSuiteName);
482 }
483
484 private:
485 std::string mResultsFile;
486 std::string mHistogramJsonFile;
487 const char *mTestSuiteName;
488 TestResults *mTestResults;
489 HistogramWriter *mHistogramWriter;
490 };
491
492 bool IsTestDisabled(const testing::TestInfo &testInfo)
493 {
494 return ::strstr(testInfo.name(), "DISABLED_") == testInfo.name();
495 }
496
497 using TestIdentifierFilter = std::function<bool(const TestIdentifier &id)>;
498
499 std::vector<TestIdentifier> FilterTests(std::map<TestIdentifier, FileLine> *fileLinesOut,
500 TestIdentifierFilter filter,
501 bool alsoRunDisabledTests)
502 {
503 std::vector<TestIdentifier> tests;
504
505 const testing::UnitTest &testProgramInfo = *testing::UnitTest::GetInstance();
506 for (int suiteIndex = 0; suiteIndex < testProgramInfo.total_test_suite_count(); ++suiteIndex)
507 {
508 const testing::TestSuite &testSuite = *testProgramInfo.GetTestSuite(suiteIndex);
509 for (int testIndex = 0; testIndex < testSuite.total_test_count(); ++testIndex)
510 {
511 const testing::TestInfo &testInfo = *testSuite.GetTestInfo(testIndex);
512 TestIdentifier id = GetTestIdentifier(testInfo);
513 if (filter(id) && (!IsTestDisabled(testInfo) || alsoRunDisabledTests))
514 {
515 tests.emplace_back(id);
516
517 if (fileLinesOut)
518 {
519 (*fileLinesOut)[id] = {testInfo.file(), testInfo.line()};
520 }
521 }
522 }
523 }
524
525 return tests;
526 }
527
528 std::vector<TestIdentifier> GetFilteredTests(std::map<TestIdentifier, FileLine> *fileLinesOut,
529 bool alsoRunDisabledTests)
530 {
531 TestIdentifierFilter gtestIDFilter = [](const TestIdentifier &id) {
532 return testing::internal::UnitTestOptions::FilterMatchesTest(id.testSuiteName, id.testName);
533 };
534
535 return FilterTests(fileLinesOut, gtestIDFilter, alsoRunDisabledTests);
536 }
537
538 std::vector<TestIdentifier> GetShardTests(const std::vector<TestIdentifier> &allTests,
539 int shardIndex,
540 int shardCount,
541 std::map<TestIdentifier, FileLine> *fileLinesOut,
542 bool alsoRunDisabledTests)
543 {
544 std::vector<TestIdentifier> shardTests;
545
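// Tests are assigned round-robin: shard N takes tests N, N + shardCount, N + 2 * shardCount, ...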
546 for (int testIndex = shardIndex; testIndex < static_cast<int>(allTests.size());
547 testIndex += shardCount)
548 {
549 shardTests.emplace_back(allTests[testIndex]);
550 }
551
552 return shardTests;
553 }
554
555 std::string GetTestFilter(const std::vector<TestIdentifier> &tests)
556 {
557 std::stringstream filterStream;
558
559 filterStream << "--gtest_filter=";
560
561 for (size_t testIndex = 0; testIndex < tests.size(); ++testIndex)
562 {
563 if (testIndex != 0)
564 {
565 filterStream << ":";
566 }
567
568 filterStream << tests[testIndex];
569 }
570
571 return filterStream.str();
572 }
573
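// Derives the test suite name from the executable path by stripping any directory prefix and the
// platform's executable extension (e.g. ".exe" on Windows).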
574 std::string ParseTestSuiteName(const char *executable)
575 {
576 const char *baseNameStart = strrchr(executable, GetPathSeparator());
577 if (!baseNameStart)
578 {
579 baseNameStart = executable;
580 }
581 else
582 {
583 baseNameStart++;
584 }
585
586 const char *suffix = GetExecutableExtension();
587 size_t suffixLen = strlen(suffix);
588 if (suffixLen == 0)
589 {
590 return baseNameStart;
591 }
592
593 if (!EndsWith(baseNameStart, suffix))
594 {
595 return baseNameStart;
596 }
597
598 return std::string(baseNameStart, baseNameStart + strlen(baseNameStart) - suffixLen);
599 }
600
601 bool GetTestArtifactsFromJSON(const js::Value::ConstObject &obj,
602 std::vector<std::string> *testArtifactPathsOut)
603 {
604 if (!obj.HasMember("artifacts"))
605 {
606 printf("No artifacts member.\n");
607 return false;
608 }
609
610 const js::Value &jsArtifacts = obj["artifacts"];
611 if (!jsArtifacts.IsObject())
612 {
613 printf("Artifacts are not an object.\n");
614 return false;
615 }
616
617 const js::Value::ConstObject &artifacts = jsArtifacts.GetObject();
618 for (const auto &artifactMember : artifacts)
619 {
620 const js::Value &artifact = artifactMember.value;
621 if (!artifact.IsArray())
622 {
623 printf("Artifact is not an array of strings of size 1.\n");
624 return false;
625 }
626
627 const js::Value::ConstArray &artifactArray = artifact.GetArray();
628 if (artifactArray.Size() != 1)
629 {
630 printf("Artifact is not an array of strings of size 1.\n");
631 return false;
632 }
633
634 const js::Value &artifactName = artifactArray[0];
635 if (!artifactName.IsString())
636 {
637 printf("Artifact is not an array of strings of size 1.\n");
638 return false;
639 }
640
641 testArtifactPathsOut->push_back(artifactName.GetString());
642 }
643
644 return true;
645 }
646
647 bool GetSingleTestResultFromJSON(const js::Value &name,
648 const js::Value::ConstObject &obj,
649 TestResults *resultsOut)
650 {
651
652 TestIdentifier id;
653 if (!TestIdentifier::ParseFromString(name.GetString(), &id))
654 {
655 printf("Could not parse test identifier.\n");
656 return false;
657 }
658
659 if (!obj.HasMember("expected") || !obj.HasMember("actual"))
660 {
661 printf("No expected or actual member.\n");
662 return false;
663 }
664
665 const js::Value &expected = obj["expected"];
666 const js::Value &actual = obj["actual"];
667
668 if (!expected.IsString() || !actual.IsString())
669 {
670 printf("Expected or actual member is not a string.\n");
671 return false;
672 }
673
674 const std::string actualStr = actual.GetString();
675
676 TestResultType resultType = TestResultType::Unknown;
677 int flakyFailures = 0;
678 if (actualStr.find(' ') != std::string::npos)
679 {
680 std::istringstream strstr(actualStr);
681 std::string token;
682 while (std::getline(strstr, token, ' '))
683 {
684 resultType = GetResultTypeFromString(token);
685 if (resultType == TestResultType::Unknown)
686 {
687 printf("Failed to parse result type.\n");
688 return false;
689 }
690 if (IsFailedResult(resultType))
691 {
692 flakyFailures++;
693 }
694 }
695 }
696 else
697 {
698 resultType = GetResultTypeFromString(actualStr);
699 if (resultType == TestResultType::Unknown)
700 {
701 printf("Failed to parse result type.\n");
702 return false;
703 }
704 }
705
706 std::vector<double> elapsedTimeSeconds;
707 if (obj.HasMember("times"))
708 {
709 const js::Value &times = obj["times"];
710 if (!times.IsArray())
711 {
712 return false;
713 }
714
715 const js::Value::ConstArray &timesArray = times.GetArray();
716 if (timesArray.Size() < 1)
717 {
718 return false;
719 }
720 for (const js::Value &time : timesArray)
721 {
722 if (!time.IsDouble())
723 {
724 return false;
725 }
726
727 elapsedTimeSeconds.push_back(time.GetDouble());
728 }
729 }
730
731 TestResult &result = resultsOut->results[id];
732 result.elapsedTimeSeconds = elapsedTimeSeconds;
733 result.type = resultType;
734 result.flakyFailures = flakyFailures;
735 return true;
736 }
737
738 bool GetTestResultsFromJSON(const js::Document &document, TestResults *resultsOut)
739 {
740 if (!document.HasMember("tests") || !document["tests"].IsObject())
741 {
742 printf("JSON document has no tests member.\n");
743 return false;
744 }
745
746 const js::Value::ConstObject &tests = document["tests"].GetObject();
747 for (const auto &testMember : tests)
748 {
749 // Get test identifier.
750 const js::Value &name = testMember.name;
751 if (!name.IsString())
752 {
753 printf("Name is not a string.\n");
754 return false;
755 }
756
757 // Get test result.
758 const js::Value &value = testMember.value;
759 if (!value.IsObject())
760 {
761 printf("Test result is not an object.\n");
762 return false;
763 }
764
765 const js::Value::ConstObject &obj = value.GetObject();
766
767 if (BeginsWith(name.GetString(), kArtifactsFakeTestName))
768 {
769 if (!GetTestArtifactsFromJSON(obj, &resultsOut->testArtifactPaths))
770 {
771 return false;
772 }
773 }
774 else
775 {
776 if (!GetSingleTestResultFromJSON(name, obj, resultsOut))
777 {
778 return false;
779 }
780 }
781 }
782
783 return true;
784 }
785
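// Folds one batch's (or retry's) results into the aggregate output. Failed tests that still have
// flaky retries remaining are reset to NOTRUN so the harness re-queues and attempts them again.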
786 bool MergeTestResults(TestResults *input, TestResults *output, int flakyRetries)
787 {
788 for (auto &resultsIter : input->results)
789 {
790 const TestIdentifier &id = resultsIter.first;
791 TestResult &inputResult = resultsIter.second;
792 TestResult &outputResult = output->results[id];
793
794 if (inputResult.type != TestResultType::NoResult)
795 {
796 if (outputResult.type != TestResultType::NoResult)
797 {
798 printf("Warning: duplicate entry for %s.%s.\n", id.testSuiteName.c_str(),
799 id.testName.c_str());
800 return false;
801 }
802
803 // Mark the tests that haven't exhausted their retries as 'NOTRUN'. This makes ANGLE
804 // attempt the test again.
805 uint32_t runCount = outputResult.flakyFailures + 1;
806 if (IsFailedResult(inputResult.type) && runCount < static_cast<uint32_t>(flakyRetries))
807 {
808 printf("Retrying flaky test: %s.%s.\n", id.testSuiteName.c_str(),
809 id.testName.c_str());
810 inputResult.type = TestResultType::NoResult;
811 outputResult.flakyFailures++;
812 }
813 else
814 {
815 outputResult.type = inputResult.type;
816 }
817 if (runCount == 1)
818 {
819 outputResult.elapsedTimeSeconds = inputResult.elapsedTimeSeconds;
820 }
821 else
822 {
823 outputResult.elapsedTimeSeconds.insert(outputResult.elapsedTimeSeconds.end(),
824 inputResult.elapsedTimeSeconds.begin(),
825 inputResult.elapsedTimeSeconds.end());
826 }
827 }
828 }
829
830 output->testArtifactPaths.insert(output->testArtifactPaths.end(),
831 input->testArtifactPaths.begin(),
832 input->testArtifactPaths.end());
833
834 return true;
835 }
836
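// Prints the slice of the captured child stdout between this test's "[ RUN      ]" line and its
// terminating status line, or to the end of the output if no terminator is found.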
837 void PrintTestOutputSnippet(const TestIdentifier &id,
838 const TestResult &result,
839 const std::string &fullOutput)
840 {
841 std::stringstream nameStream;
842 nameStream << id;
843 std::string fullName = nameStream.str();
844
845 size_t runPos = fullOutput.find(std::string(kStartedTestString) + fullName);
846 if (runPos == std::string::npos)
847 {
848 printf("Cannot locate test output snippet.\n");
849 return;
850 }
851
852 size_t endPos = fullOutput.find(std::string(kFailedTestString) + fullName, runPos);
853 // Only clip the snippet to the "OK" message if the test really
854 // succeeded. It still might have e.g. crashed after printing it.
855 if (endPos == std::string::npos && result.type == TestResultType::Pass)
856 {
857 endPos = fullOutput.find(std::string(kPassedTestString) + fullName, runPos);
858 }
859 if (endPos != std::string::npos)
860 {
861 size_t newline_pos = fullOutput.find("\n", endPos);
862 if (newline_pos != std::string::npos)
863 endPos = newline_pos + 1;
864 }
865
866 std::cout << "\n";
867 if (endPos != std::string::npos)
868 {
869 std::cout << fullOutput.substr(runPos, endPos - runPos);
870 }
871 else
872 {
873 std::cout << fullOutput.substr(runPos);
874 }
875 }
876
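// Extracts the config portion of a parameterized test name, e.g. "Foo/ES2_Vulkan" and
// "Foo/ES2_Vulkan__Variant" both yield "ES2_Vulkan"; returns "default" when no config is present.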
877 std::string GetConfigNameFromTestIdentifier(const TestIdentifier &id)
878 {
879 size_t slashPos = id.testName.find('/');
880 if (slashPos == std::string::npos)
881 {
882 return "default";
883 }
884
885 size_t doubleUnderscorePos = id.testName.find("__");
886 if (doubleUnderscorePos == std::string::npos)
887 {
888 std::string configName = id.testName.substr(slashPos + 1);
889
890 if (!BeginsWith(configName, "ES"))
891 {
892 return "default";
893 }
894
895 return configName;
896 }
897 else
898 {
899 return id.testName.substr(slashPos + 1, doubleUnderscorePos - slashPos - 1);
900 }
901 }
902
903 TestQueue BatchTests(const std::vector<TestIdentifier> &tests, int batchSize)
904 {
905 // First sort tests by configuration.
906 angle::HashMap<std::string, std::vector<TestIdentifier>> testsSortedByConfig;
907 for (const TestIdentifier &id : tests)
908 {
909 std::string config = GetConfigNameFromTestIdentifier(id);
910 testsSortedByConfig[config].push_back(id);
911 }
912
913 // Then group into batches by 'batchSize'.
914 TestQueue testQueue;
915 for (const auto &configAndIds : testsSortedByConfig)
916 {
917 const std::vector<TestIdentifier> &configTests = configAndIds.second;
918
919 // Count the number of batches needed for this config.
920 int batchesForConfig = static_cast<int>(configTests.size() + batchSize - 1) / batchSize;
921
922 // Create batches with striping to split up slow tests.
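// For example, with 3 batches tests 0, 3, 6, ... land in batch 0 and tests 1, 4, 7, ... in
// batch 1, so neighboring (often similarly slow) tests are spread across batches.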
923 for (int batchIndex = 0; batchIndex < batchesForConfig; ++batchIndex)
924 {
925 std::vector<TestIdentifier> batchTests;
926 for (size_t testIndex = batchIndex; testIndex < configTests.size();
927 testIndex += batchesForConfig)
928 {
929 batchTests.push_back(configTests[testIndex]);
930 }
931 testQueue.emplace(std::move(batchTests));
932 ASSERT(batchTests.empty());
933 }
934 }
935
936 return testQueue;
937 }
938
939 void ListTests(const std::map<TestIdentifier, TestResult> &resultsMap)
940 {
941 std::cout << "Tests list:\n";
942
943 for (const auto &resultIt : resultsMap)
944 {
945 const TestIdentifier &id = resultIt.first;
946 std::cout << id << "\n";
947 }
948
949 std::cout << "End tests list.\n";
950 }
951
952 // Prints the names of the tests matching the user-specified filter flag.
953 // This matches the output from googletest/src/gtest.cc but is much much faster for large filters.
954 // See http://anglebug.com/5164
955 void GTestListTests(const std::map<TestIdentifier, TestResult> &resultsMap)
956 {
957 std::map<std::string, std::vector<std::string>> suites;
958
959 for (const auto &resultIt : resultsMap)
960 {
961 const TestIdentifier &id = resultIt.first;
962 suites[id.testSuiteName].push_back(id.testName);
963 }
964
965 for (const auto &testSuiteIt : suites)
966 {
967 bool printedTestSuiteName = false;
968
969 const std::string &suiteName = testSuiteIt.first;
970 const std::vector<std::string> &testNames = testSuiteIt.second;
971
972 for (const std::string &testName : testNames)
973 {
974 if (!printedTestSuiteName)
975 {
976 printedTestSuiteName = true;
977 printf("%s.\n", suiteName.c_str());
978 }
979 printf("  %s\n", testName.c_str());
980 }
981 }
982 }
983
984 // On Android, batching is done on the host, i.e. externally.
985 // TestSuite executes on the device and should just pass all args through to GTest.
986 bool UsesExternalBatching()
987 {
988 #if defined(ANGLE_PLATFORM_ANDROID)
989 return true;
990 #else
991 return false;
992 #endif
993 }
994 } // namespace
995
996 // static
997 TestSuite *TestSuite::mInstance = nullptr;
998
999 TestIdentifier::TestIdentifier() = default;
1000
1001 TestIdentifier::TestIdentifier(const std::string &suiteNameIn, const std::string &nameIn)
1002 : testSuiteName(suiteNameIn), testName(nameIn)
1003 {}
1004
1005 TestIdentifier::TestIdentifier(const TestIdentifier &other) = default;
1006
1007 TestIdentifier::~TestIdentifier() = default;
1008
1009 TestIdentifier &TestIdentifier::operator=(const TestIdentifier &other) = default;
1010
1011 void TestIdentifier::sprintfName(char *outBuffer) const
1012 {
1013 sprintf(outBuffer, "%s.%s", testSuiteName.c_str(), testName.c_str());
1014 }
1015
1016 // static
1017 bool TestIdentifier::ParseFromString(const std::string &str, TestIdentifier *idOut)
1018 {
1019 size_t separator = str.find(".");
1020 if (separator == std::string::npos)
1021 {
1022 return false;
1023 }
1024
1025 idOut->testSuiteName = str.substr(0, separator);
1026 idOut->testName = str.substr(separator + 1, str.length() - separator - 1);
1027 return true;
1028 }
1029
1030 TestResults::TestResults() = default;
1031
1032 TestResults::~TestResults() = default;
1033
1034 ProcessInfo::ProcessInfo() = default;
1035
1036 ProcessInfo &ProcessInfo::operator=(ProcessInfo &&rhs)
1037 {
1038 process = std::move(rhs.process);
1039 testsInBatch = std::move(rhs.testsInBatch);
1040 resultsFileName = std::move(rhs.resultsFileName);
1041 filterFileName = std::move(rhs.filterFileName);
1042 commandLine = std::move(rhs.commandLine);
1043 filterString = std::move(rhs.filterString);
1044 return *this;
1045 }
1046
1047 ProcessInfo::~ProcessInfo() = default;
1048
1049 ProcessInfo::ProcessInfo(ProcessInfo &&other)
1050 {
1051 *this = std::move(other);
1052 }
1053
1054 TestSuite::TestSuite(int *argc, char **argv)
1055 : mShardCount(-1),
1056 mShardIndex(-1),
1057 mBotMode(false),
1058 mDebugTestGroups(false),
1059 mGTestListTests(false),
1060 mListTests(false),
1061 mPrintTestStdout(false),
1062 mDisableCrashHandler(false),
1063 mBatchSize(kDefaultBatchSize),
1064 mCurrentResultCount(0),
1065 mTotalResultCount(0),
1066 mMaxProcesses(std::min(NumberOfProcessors(), kDefaultMaxProcesses)),
1067 mTestTimeout(kDefaultTestTimeout),
1068 mBatchTimeout(kDefaultBatchTimeout),
1069 mBatchId(-1),
1070 mFlakyRetries(0),
1071 mMaxFailures(kDefaultMaxFailures),
1072 mFailureCount(0),
1073 mModifiedPreferredDevice(false)
1074 {
1075 ASSERT(mInstance == nullptr);
1076 mInstance = this;
1077
1078 Optional<int> filterArgIndex;
1079 bool alsoRunDisabledTests = false;
1080
1081 #if defined(ANGLE_PLATFORM_MACOS)
1082 // By default, we should hook file API functions on macOS to avoid slow Metal shader caching
1083 // file access.
1084 angle::InitMetalFileAPIHooking(*argc, argv);
1085 #endif
1086
1087 #if defined(ANGLE_PLATFORM_WINDOWS)
1088 testing::GTEST_FLAG(catch_exceptions) = false;
1089 #endif
1090
1091 if (*argc <= 0)
1092 {
1093 printf("Missing test arguments.\n");
1094 exit(EXIT_FAILURE);
1095 }
1096
1097 mTestExecutableName = argv[0];
1098 mTestSuiteName = ParseTestSuiteName(mTestExecutableName.c_str());
1099
1100 for (int argIndex = 1; argIndex < *argc;)
1101 {
1102 if (parseSingleArg(argv[argIndex]))
1103 {
1104 DeleteArg(argc, argv, argIndex);
1105 continue;
1106 }
1107
1108 if (ParseFlagValue("--gtest_filter=", argv[argIndex]))
1109 {
1110 filterArgIndex = argIndex;
1111 }
1112 else
1113 {
1114 // Don't include disabled tests in test lists unless the user asks for them.
1115 if (strcmp("--gtest_also_run_disabled_tests", argv[argIndex]) == 0)
1116 {
1117 alsoRunDisabledTests = true;
1118 }
1119
1120 mChildProcessArgs.push_back(argv[argIndex]);
1121 }
1122 ++argIndex;
1123 }
1124
1125 if (UsesExternalBatching() && mBotMode)
1126 {
1127 printf("Bot mode is mutually exclusive with external batching.\n");
1128 exit(EXIT_FAILURE);
1129 }
1130
1131 mTestResults.currentTestTimeout = mTestTimeout;
1132
1133 #if defined(ANGLE_PLATFORM_ANDROID)
1134 // Workaround for the Android test runner requiring a GTest test list.
1135 if (mListTests && filterArgIndex.valid())
1136 {
1137 DeleteArg(argc, argv, filterArgIndex.value());
1138 }
1139 #endif // defined(ANGLE_PLATFORM_ANDROID)
1140
1141 if (!mDisableCrashHandler)
1142 {
1143 // Note that the crash callback must be owned and not use global constructors.
1144 mCrashCallback = [this]() { onCrashOrTimeout(TestResultType::Crash); };
1145 InitCrashHandler(&mCrashCallback);
1146 }
1147
1148 std::string envShardIndex = angle::GetEnvironmentVar("GTEST_SHARD_INDEX");
1149 if (!envShardIndex.empty())
1150 {
1151 angle::UnsetEnvironmentVar("GTEST_SHARD_INDEX");
1152 if (mShardIndex == -1)
1153 {
1154 std::stringstream shardIndexStream(envShardIndex);
1155 shardIndexStream >> mShardIndex;
1156 }
1157 }
1158
1159 std::string envTotalShards = angle::GetEnvironmentVar("GTEST_TOTAL_SHARDS");
1160 if (!envTotalShards.empty())
1161 {
1162 angle::UnsetEnvironmentVar("GTEST_TOTAL_SHARDS");
1163 if (mShardCount == -1)
1164 {
1165 std::stringstream shardCountStream(envTotalShards);
1166 shardCountStream >> mShardCount;
1167 }
1168 }
1169
1170 // The test harness reads the active GPU from SystemInfo and uses that for test expectations.
1171 // However, some ANGLE backends don't have a concept of an "active" GPU, and instead use power
1172 // preference to select GPU. We can use the environment variable ANGLE_PREFERRED_DEVICE to
1173 // ensure ANGLE's selected GPU matches the GPU expected for this test suite.
1174 const GPUTestConfig testConfig = GPUTestConfig();
1175 const char kPreferredDeviceEnvVar[] = "ANGLE_PREFERRED_DEVICE";
1176 if (GetEnvironmentVar(kPreferredDeviceEnvVar).empty())
1177 {
1178 mModifiedPreferredDevice = true;
1179 const GPUTestConfig::ConditionArray &conditions = testConfig.getConditions();
1180 if (conditions[GPUTestConfig::kConditionAMD])
1181 {
1182 SetEnvironmentVar(kPreferredDeviceEnvVar, "amd");
1183 }
1184 else if (conditions[GPUTestConfig::kConditionNVIDIA])
1185 {
1186 SetEnvironmentVar(kPreferredDeviceEnvVar, "nvidia");
1187 }
1188 else if (conditions[GPUTestConfig::kConditionIntel])
1189 {
1190 SetEnvironmentVar(kPreferredDeviceEnvVar, "intel");
1191 }
1192 else if (conditions[GPUTestConfig::kConditionApple])
1193 {
1194 SetEnvironmentVar(kPreferredDeviceEnvVar, "apple");
1195 }
1196 }
1197
1198 // Special handling for TSAN and UBSAN to force crashes when run in automated testing.
1199 if (IsTSan())
1200 {
1201 std::string tsanOptions = GetEnvironmentVar(kTSanOptionsEnvVar);
1202 tsanOptions += " halt_on_error=1";
1203 SetEnvironmentVar(kTSanOptionsEnvVar, tsanOptions.c_str());
1204 }
1205
1206 if (IsUBSan())
1207 {
1208 std::string ubsanOptions = GetEnvironmentVar(kUBSanOptionsEnvVar);
1209 ubsanOptions += " halt_on_error=1";
1210 SetEnvironmentVar(kUBSanOptionsEnvVar, ubsanOptions.c_str());
1211 }
1212
1213 if ((mShardIndex == -1) != (mShardCount == -1))
1214 {
1215 printf("Shard index and shard count must be specified together.\n");
1216 exit(EXIT_FAILURE);
1217 }
1218
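// A filter file contains a single "--gtest_filter=..." line; it is used in place of passing a
// potentially very long filter string directly on the command line.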
1219 if (!mFilterFile.empty())
1220 {
1221 if (filterArgIndex.valid())
1222 {
1223 printf("Cannot use gtest_filter in conjunction with a filter file.\n");
1224 exit(EXIT_FAILURE);
1225 }
1226
1227 uint32_t fileSize = 0;
1228 if (!GetFileSize(mFilterFile.c_str(), &fileSize))
1229 {
1230 printf("Error getting filter file size: %s\n", mFilterFile.c_str());
1231 exit(EXIT_FAILURE);
1232 }
1233
1234 std::vector<char> fileContents(fileSize + 1, 0);
1235 if (!ReadEntireFileToString(mFilterFile.c_str(), fileContents.data(), fileSize))
1236 {
1237 printf("Error loading filter file: %s\n", mFilterFile.c_str());
1238 exit(EXIT_FAILURE);
1239 }
1240 mFilterString.assign(fileContents.data());
1241
1242 if (mFilterString.substr(0, strlen("--gtest_filter=")) != std::string("--gtest_filter="))
1243 {
1244 printf("Filter file must start with \"--gtest_filter=\".\n");
1245 exit(EXIT_FAILURE);
1246 }
1247
1248 // Note that we only add a filter string if we previously deleted a filter file
1249 // argument. So we will have space for the new filter string in argv.
1250 AddArg(argc, argv, mFilterString.c_str());
1251 }
1252
1253 // Call into gtest internals to force parameterized test name registration.
1254 testing::internal::UnitTestImpl *impl = testing::internal::GetUnitTestImpl();
1255 impl->RegisterParameterizedTests();
1256
1257 // Initialize internal GoogleTest filter arguments so we can call "FilterMatchesTest".
1258 testing::internal::ParseGoogleTestFlagsOnly(argc, argv);
1259
1260 std::vector<TestIdentifier> testSet = GetFilteredTests(&mTestFileLines, alsoRunDisabledTests);
1261
1262 if (mShardCount == 0)
1263 {
1264 printf("Shard count must be > 0.\n");
1265 exit(EXIT_FAILURE);
1266 }
1267 else if (mShardCount > 0)
1268 {
1269 if (mShardIndex >= mShardCount)
1270 {
1271 printf("Shard index must be less than shard count.\n");
1272 exit(EXIT_FAILURE);
1273 }
1274
1275 // If there's only one shard, we can use the testSet as defined above.
1276 if (mShardCount > 1)
1277 {
1278 if (!mBotMode && !UsesExternalBatching())
1279 {
1280 printf("Sharding is only supported in bot mode or external batching.\n");
1281 exit(EXIT_FAILURE);
1282 }
1283 // With external batching, we must use exactly the testSet as defined externally.
1284 // But when listing tests, we do need to apply sharding ourselves,
1285 // since we use our own implementation for listing tests and not GTest directly.
1286 if (!UsesExternalBatching() || mGTestListTests || mListTests)
1287 {
1288 testSet = GetShardTests(testSet, mShardIndex, mShardCount, &mTestFileLines,
1289 alsoRunDisabledTests);
1290 }
1291 }
1292 }
1293
1294 if (!testSet.empty())
1295 {
1296 std::stringstream fakeTestName;
1297 fakeTestName << kArtifactsFakeTestName << '-' << testSet[0].testName;
1298 mTestResults.testArtifactsFakeTestName = fakeTestName.str();
1299 }
1300
1301 if (mBotMode)
1302 {
1303 // Split up test batches.
1304 mTestQueue = BatchTests(testSet, mBatchSize);
1305
1306 if (mDebugTestGroups)
1307 {
1308 std::cout << "Test Groups:\n";
1309
1310 while (!mTestQueue.empty())
1311 {
1312 const std::vector<TestIdentifier> &tests = mTestQueue.front();
1313 std::cout << GetConfigNameFromTestIdentifier(tests[0]) << " ("
1314 << static_cast<int>(tests.size()) << ")\n";
1315 mTestQueue.pop();
1316 }
1317
1318 exit(EXIT_SUCCESS);
1319 }
1320 }
1321
1322 testing::InitGoogleTest(argc, argv);
1323
1324 mTotalResultCount = testSet.size();
1325
1326 if ((mBotMode || !mResultsDirectory.empty()) && mResultsFile.empty())
1327 {
1328 // Create a default output file in bot mode.
1329 mResultsFile = "output.json";
1330 }
1331
1332 if (!mResultsDirectory.empty())
1333 {
1334 std::stringstream resultFileName;
1335 resultFileName << mResultsDirectory << GetPathSeparator() << mResultsFile;
1336 mResultsFile = resultFileName.str();
1337 }
1338
1339 if (!mBotMode)
1340 {
1341 testing::TestEventListeners &listeners = testing::UnitTest::GetInstance()->listeners();
1342 listeners.Append(new TestEventListener(mResultsFile, mHistogramJsonFile,
1343 mTestSuiteName.c_str(), &mTestResults,
1344 &mHistogramWriter));
1345
1346 for (const TestIdentifier &id : testSet)
1347 {
1348 mTestResults.results[id].type = TestResultType::NoResult;
1349 }
1350 }
1351 }
1352
1353 TestSuite::~TestSuite()
1354 {
1355 const char kPreferredDeviceEnvVar[] = "ANGLE_PREFERRED_DEVICE";
1356 if (mModifiedPreferredDevice && !angle::GetEnvironmentVar(kPreferredDeviceEnvVar).empty())
1357 {
1358 angle::UnsetEnvironmentVar(kPreferredDeviceEnvVar);
1359 }
1360
1361 if (mWatchdogThread.joinable())
1362 {
1363 mWatchdogThread.detach();
1364 }
1365 TerminateCrashHandler();
1366 }
1367
1368 bool TestSuite::parseSingleArg(const char *argument)
1369 {
1370 // Note: Flags should be documented in README.md.
1371 return (ParseIntArg("--shard-count=", argument, &mShardCount) ||
1372 ParseIntArg("--shard-index=", argument, &mShardIndex) ||
1373 ParseIntArg("--batch-size=", argument, &mBatchSize) ||
1374 ParseIntArg("--max-processes=", argument, &mMaxProcesses) ||
1375 ParseIntArg(kTestTimeoutArg, argument, &mTestTimeout) ||
1376 ParseIntArg("--batch-timeout=", argument, &mBatchTimeout) ||
1377 ParseIntArg(kFlakyRetries, argument, &mFlakyRetries) ||
1378 ParseIntArg(kMaxFailures, argument, &mMaxFailures) ||
1379 // Other test functions consume the batch ID, so keep it in the list.
1380 ParseIntArgNoDelete(kBatchId, argument, &mBatchId) ||
1381 ParseStringArg("--results-directory=", argument, &mResultsDirectory) ||
1382 ParseStringArg(kResultFileArg, argument, &mResultsFile) ||
1383 ParseStringArg("--isolated-script-test-output=", argument, &mResultsFile) ||
1384 ParseStringArg(kFilterFileArg, argument, &mFilterFile) ||
1385 ParseStringArg(kHistogramJsonFileArg, argument, &mHistogramJsonFile) ||
1386 // We need these overloads to work around technical debt in the Android test runner.
1387 ParseStringArg("--isolated-script-test-perf-output=", argument, &mHistogramJsonFile) ||
1388 ParseStringArg("--isolated_script_test_perf_output=", argument, &mHistogramJsonFile) ||
1389 ParseStringArg(kRenderTestOutputDir, argument, &mTestArtifactDirectory) ||
1390 ParseStringArg(kIsolatedOutDir, argument, &mTestArtifactDirectory) ||
1391 ParseFlag("--bot-mode", argument, &mBotMode) ||
1392 ParseFlag("--debug-test-groups", argument, &mDebugTestGroups) ||
1393 ParseFlag(kGTestListTests, argument, &mGTestListTests) ||
1394 ParseFlag(kListTests, argument, &mListTests) ||
1395 ParseFlag(kPrintTestStdout, argument, &mPrintTestStdout) ||
1396 ParseFlag(kDisableCrashHandler, argument, &mDisableCrashHandler));
1397 }
1398
1399 void TestSuite::onCrashOrTimeout(TestResultType crashOrTimeout)
1400 {
1401 std::lock_guard<std::mutex> guard(mTestResults.currentTestMutex);
1402 if (mTestResults.currentTest.valid())
1403 {
1404 TestResult &result = mTestResults.results[mTestResults.currentTest];
1405 result.type = crashOrTimeout;
1406 result.elapsedTimeSeconds.back() = mTestResults.currentTestTimer.getElapsedWallClockTime();
1407 }
1408
1409 if (mResultsFile.empty())
1410 {
1411 printf("No results file specified.\n");
1412 return;
1413 }
1414
1415 WriteOutputFiles(true, mTestResults, mResultsFile, mHistogramWriter, mHistogramJsonFile,
1416 mTestSuiteName.c_str());
1417 }
1418
1419 bool TestSuite::launchChildTestProcess(uint32_t batchId,
1420 const std::vector<TestIdentifier> &testsInBatch)
1421 {
1422 constexpr uint32_t kMaxPath = 1000;
1423
1424 // Create a temporary file to store the test list
1425 ProcessInfo processInfo;
1426
1427 char filterBuffer[kMaxPath] = {};
1428 if (!CreateTemporaryFile(filterBuffer, kMaxPath))
1429 {
1430 std::cerr << "Error creating temporary file for test list.\n";
1431 return false;
1432 }
1433 processInfo.filterFileName.assign(filterBuffer);
1434
1435 std::string filterString = GetTestFilter(testsInBatch);
1436
1437 FILE *fp = fopen(processInfo.filterFileName.c_str(), "w");
1438 if (!fp)
1439 {
1440 std::cerr << "Error opening temporary file for test list.\n";
1441 return false;
1442 }
1443 fprintf(fp, "%s", filterString.c_str());
1444 fclose(fp);
1445
1446 processInfo.filterString = filterString;
1447
1448 std::string filterFileArg = kFilterFileArg + processInfo.filterFileName;
1449
1450 // Create a temporary file to store the test output.
1451 char resultsBuffer[kMaxPath] = {};
1452 if (!CreateTemporaryFile(resultsBuffer, kMaxPath))
1453 {
1454 std::cerr << "Error creating temporary file for test results.\n";
1455 return false;
1456 }
1457 processInfo.resultsFileName.assign(resultsBuffer);
1458
1459 std::string resultsFileArg = kResultFileArg + processInfo.resultsFileName;
1460
1461 // Construct command line for child process.
1462 std::vector<const char *> args;
1463
1464 args.push_back(mTestExecutableName.c_str());
1465 args.push_back(filterFileArg.c_str());
1466 args.push_back(resultsFileArg.c_str());
1467
1468 std::stringstream batchIdStream;
1469 batchIdStream << kBatchId << batchId;
1470 std::string batchIdString = batchIdStream.str();
1471 args.push_back(batchIdString.c_str());
1472
1473 for (const std::string &arg : mChildProcessArgs)
1474 {
1475 args.push_back(arg.c_str());
1476 }
1477
1478 if (mDisableCrashHandler)
1479 {
1480 args.push_back(kDisableCrashHandler);
1481 }
1482
1483 std::string timeoutStr;
1484 if (mTestTimeout != kDefaultTestTimeout)
1485 {
1486 std::stringstream timeoutStream;
1487 timeoutStream << kTestTimeoutArg << mTestTimeout;
1488 timeoutStr = timeoutStream.str();
1489 args.push_back(timeoutStr.c_str());
1490 }
1491
1492 std::string artifactsDir;
1493 if (!mTestArtifactDirectory.empty())
1494 {
1495 std::stringstream artifactsDirStream;
1496 artifactsDirStream << kIsolatedOutDir << mTestArtifactDirectory;
1497 artifactsDir = artifactsDirStream.str();
1498 args.push_back(artifactsDir.c_str());
1499 }
1500
1501 // Launch child process and wait for completion.
1502 processInfo.process = LaunchProcess(args, ProcessOutputCapture::StdoutAndStderrInterleaved);
1503
1504 if (!processInfo.process->started())
1505 {
1506 std::cerr << "Error launching child process.\n";
1507 return false;
1508 }
1509
1510 std::stringstream commandLineStr;
1511 for (const char *arg : args)
1512 {
1513 commandLineStr << arg << " ";
1514 }
1515
1516 processInfo.commandLine = commandLineStr.str();
1517 processInfo.testsInBatch = testsInBatch;
1518 mCurrentProcesses.emplace_back(std::move(processInfo));
1519 return true;
1520 }
1521
1522 void ParseTestIdentifierAndSetResult(const std::string &testName,
1523 TestResultType result,
1524 TestResults *results)
1525 {
1526 // Trim off any whitespace + extra stuff at the end of the string.
1527 std::string modifiedTestName = testName.substr(0, testName.find(' '));
1528 modifiedTestName = modifiedTestName.substr(0, modifiedTestName.find('\r'));
1529 TestIdentifier id;
1530 bool ok = TestIdentifier::ParseFromString(modifiedTestName, &id);
1531 ASSERT(ok);
1532 results->results[id] = {result};
1533 }
1534
1535 bool TestSuite::finishProcess(ProcessInfo *processInfo)
1536 {
1537 // Get test results and merge into main list.
1538 TestResults batchResults;
1539
1540 if (!GetTestResultsFromFile(processInfo->resultsFileName.c_str(), &batchResults))
1541 {
1542 std::cerr << "Warning: could not find test results file from child process.\n";
1543
1544 // First assume that every test in the batch produced no result.
1545 for (const TestIdentifier &id : processInfo->testsInBatch)
1546 {
1547 batchResults.results[id] = {TestResultType::NoResult};
1548 }
1549
1550 // Attempt to reconstruct passing list from stdout snippets.
1551 const std::string &batchStdout = processInfo->process->getStdout();
1552 std::istringstream linesStream(batchStdout);
1553
1554 std::string line;
1555 while (std::getline(linesStream, line))
1556 {
1557 size_t startPos = line.find(kStartedTestString);
1558 size_t failPos = line.find(kFailedTestString);
1559 size_t passPos = line.find(kPassedTestString);
1560 size_t skippedPos = line.find(kSkippedTestString);
1561
1562 if (startPos != std::string::npos)
1563 {
1564 // Assume a test that's started crashed until we see it completed.
1565 std::string testName = line.substr(strlen(kStartedTestString));
1566 ParseTestIdentifierAndSetResult(testName, TestResultType::Crash, &batchResults);
1567 }
1568 else if (failPos != std::string::npos)
1569 {
1570 std::string testName = line.substr(strlen(kFailedTestString));
1571 ParseTestIdentifierAndSetResult(testName, TestResultType::Fail, &batchResults);
1572 }
1573 else if (passPos != std::string::npos)
1574 {
1575 std::string testName = line.substr(strlen(kPassedTestString));
1576 ParseTestIdentifierAndSetResult(testName, TestResultType::Pass, &batchResults);
1577 }
1578 else if (skippedPos != std::string::npos)
1579 {
1580 std::string testName = line.substr(strlen(kSkippedTestString));
1581 ParseTestIdentifierAndSetResult(testName, TestResultType::Skip, &batchResults);
1582 }
1583 }
1584 }
1585
1586 if (!MergeTestResults(&batchResults, &mTestResults, mFlakyRetries))
1587 {
1588 std::cerr << "Error merging batch test results.\n";
1589 return false;
1590 }
1591
1592 if (!batchResults.results.empty())
1593 {
1594 const TestIdentifier &id = batchResults.results.begin()->first;
1595 std::string config = GetConfigNameFromTestIdentifier(id);
1596 printf("Completed batch with config: %s\n", config.c_str());
1597
1598 for (const auto &resultIter : batchResults.results)
1599 {
1600 const TestResult &result = resultIter.second;
1601 if (result.type != TestResultType::NoResult && IsFailedResult(result.type))
1602 {
1603 printf("To reproduce the batch, use filter:\n%s\n",
1604 processInfo->filterString.c_str());
1605 break;
1606 }
1607 }
1608 }
1609
1610 // Process results and print unexpected errors.
1611 for (const auto &resultIter : batchResults.results)
1612 {
1613 const TestIdentifier &id = resultIter.first;
1614 const TestResult &result = resultIter.second;
1615
1616 // Tests with no result aren't processed here since they're added back to the test queue below.
1617 if (result.type == TestResultType::NoResult)
1618 {
1619 continue;
1620 }
1621
1622 mCurrentResultCount++;
1623
1624 printf("[%d/%d] %s.%s", mCurrentResultCount, mTotalResultCount, id.testSuiteName.c_str(),
1625 id.testName.c_str());
1626
1627 if (mPrintTestStdout)
1628 {
1629 const std::string &batchStdout = processInfo->process->getStdout();
1630 PrintTestOutputSnippet(id, result, batchStdout);
1631 }
1632 else if (result.type == TestResultType::Pass)
1633 {
1634 printf(" (%0.1lf ms)\n", result.elapsedTimeSeconds.back() * 1000.0);
1635 }
1636 else if (result.type == TestResultType::Skip)
1637 {
1638 printf(" (skipped)\n");
1639 }
1640 else if (result.type == TestResultType::Timeout)
1641 {
1642 printf(" (TIMEOUT in %0.1lf s)\n", result.elapsedTimeSeconds.back());
1643 mFailureCount++;
1644 }
1645 else
1646 {
1647 printf(" (%s)\n", ResultTypeToString(result.type));
1648 mFailureCount++;
1649
1650 const std::string &batchStdout = processInfo->process->getStdout();
1651 PrintTestOutputSnippet(id, result, batchStdout);
1652 }
1653 }
1654
1655 // On unexpected exit, re-queue any unfinished tests.
1656 std::vector<TestIdentifier> unfinishedTests;
1657 for (const auto &resultIter : batchResults.results)
1658 {
1659 const TestIdentifier &id = resultIter.first;
1660 const TestResult &result = resultIter.second;
1661
1662 if (result.type == TestResultType::NoResult)
1663 {
1664 unfinishedTests.push_back(id);
1665 }
1666 }
1667
1668 if (!unfinishedTests.empty())
1669 {
1670 mTestQueue.emplace(std::move(unfinishedTests));
1671 }
1672
1673 // Clean up any dirty temporary files.
1674 for (const std::string &tempFile : {processInfo->filterFileName, processInfo->resultsFileName})
1675 {
1676 // Note: we should be aware that this cleanup won't happen if the harness itself
1677 // crashes. If this situation comes up in the future we should add crash cleanup to the
1678 // harness.
1679 if (!angle::DeleteSystemFile(tempFile.c_str()))
1680 {
1681 std::cerr << "Warning: Error cleaning up temp file: " << tempFile << "\n";
1682 }
1683 }
1684
1685 processInfo->process.reset();
1686 return true;
1687 }
1688
1689 int TestSuite::run()
1690 {
1691 #if defined(ANGLE_PLATFORM_ANDROID)
1692 if (mListTests && mGTestListTests)
1693 {
1694 // Workaround for the Android test runner requiring a GTest test list.
1695 printf("PlaceholderTest.\n Placeholder\n");
1696 return EXIT_SUCCESS;
1697 }
1698 #endif // defined(ANGLE_PLATFORM_ANDROID)
1699
1700 if (mListTests)
1701 {
1702 ListTests(mTestResults.results);
1703
1704 #if defined(ANGLE_PLATFORM_ANDROID)
1705 // Because of quirks with the Chromium-provided Android test runner, we need to use a few
1706 // tricks to get the test list output. We add placeholder output for a single test to trick
1707 // the test runner into thinking it ran the tests successfully. We also add an end marker
1708 // for the tests list so we can parse the list from the more spammy Android stdout log.
1709 static constexpr char kPlaceholderTestTest[] = R"(
1710 [==========] Running 1 test from 1 test suite.
1711 [----------] Global test environment set-up.
1712 [----------] 1 test from PlaceholderTest
1713 [ RUN      ] PlaceholderTest.Placeholder
1714 [       OK ] PlaceholderTest.Placeholder (0 ms)
1715 [----------] 1 test from APITest (0 ms total)
1716
1717 [----------] Global test environment tear-down
1718 [==========] 1 test from 1 test suite ran. (24 ms total)
1719 [  PASSED  ] 1 test.
1720 )";
1721 printf(kPlaceholderTestTest);
#endif  // defined(ANGLE_PLATFORM_ANDROID)

        return EXIT_SUCCESS;
    }

    if (mGTestListTests)
    {
        GTestListTests(mTestResults.results);
        return EXIT_SUCCESS;
    }

    // Run tests serially.
    if (!mBotMode)
    {
        // Only start the watchdog if the debugger is not attached and we're a child process.
        if (!angle::IsDebuggerAttached() && mBatchId != -1)
        {
            startWatchdog();
        }

        int retVal = RUN_ALL_TESTS();
        {
            std::lock_guard<std::mutex> guard(mTestResults.currentTestMutex);
            mTestResults.allDone = true;
        }

        if (mWatchdogThread.joinable())
        {
            mWatchdogThread.join();
        }
        return retVal;
    }

    Timer totalRunTime;
    totalRunTime.start();

    Timer messageTimer;
    messageTimer.start();

    uint32_t batchId = 0;

    while (!mTestQueue.empty() || !mCurrentProcesses.empty())
    {
        bool progress = false;

        // Spawn a process if needed and possible.
        if (static_cast<int>(mCurrentProcesses.size()) < mMaxProcesses && !mTestQueue.empty())
        {
            std::vector<TestIdentifier> testsInBatch = mTestQueue.front();
            mTestQueue.pop();

            if (!launchChildTestProcess(++batchId, testsInBatch))
            {
                return 1;
            }

            progress = true;
        }

        // Check for process completion.
        uint32_t totalTestCount = 0;
        for (auto processIter = mCurrentProcesses.begin(); processIter != mCurrentProcesses.end();)
        {
            ProcessInfo &processInfo = *processIter;
            if (processInfo.process->finished())
            {
                if (!finishProcess(&processInfo))
                {
                    return 1;
                }
                processIter = mCurrentProcesses.erase(processIter);
                progress = true;
            }
            else if (processInfo.process->getElapsedTimeSeconds() > mBatchTimeout)
            {
                // Terminate the process and record timeouts for the batch.
                // Because we can't determine which sub-test caused a timeout, record the whole
                // batch as a timeout failure. Can be improved by using socket message passing.
                if (!processInfo.process->kill())
                {
                    return 1;
                }

                const std::string &batchStdout = processInfo.process->getStdout();
                std::vector<std::string> lines =
                    SplitString(batchStdout, "\r\n", WhitespaceHandling::TRIM_WHITESPACE,
                                SplitResult::SPLIT_WANT_NONEMPTY);
                constexpr size_t kKeepLines = 10;
                printf("Batch timeout! Last %d lines of batch stdout:\n",
                       static_cast<int>(kKeepLines));
                for (size_t lineNo = lines.size() - std::min(lines.size(), kKeepLines);
                     lineNo < lines.size(); ++lineNo)
                {
                    printf("%s\n", lines[lineNo].c_str());
                }

                for (const TestIdentifier &testIdentifier : processInfo.testsInBatch)
                {
                    // Because the whole batch failed we can't know how long each test took.
                    mTestResults.results[testIdentifier].type = TestResultType::Timeout;
                    mFailureCount++;
                }

                processIter = mCurrentProcesses.erase(processIter);
                progress = true;
            }
            else
            {
                totalTestCount += static_cast<uint32_t>(processInfo.testsInBatch.size());
                processIter++;
            }
        }

        if (progress)
        {
            messageTimer.start();
        }
        else if (messageTimer.getElapsedWallClockTime() > kIdleMessageTimeout)
        {
            const ProcessInfo &processInfo = mCurrentProcesses[0];
            double processTime = processInfo.process->getElapsedTimeSeconds();
            printf("Running %u tests in %d processes, longest for %d seconds.\n", totalTestCount,
                   static_cast<int>(mCurrentProcesses.size()), static_cast<int>(processTime));
            messageTimer.start();
        }

        // Early exit if we passed the maximum failure threshold. Still wait for current tests.
        if (mFailureCount > mMaxFailures && !mTestQueue.empty())
        {
            printf("Reached maximum failure count (%d), clearing test queue.\n", mMaxFailures);
            TestQueue emptyTestQueue;
            std::swap(mTestQueue, emptyTestQueue);
        }

        // Sleep briefly and continue.
        angle::Sleep(100);
    }

    // Dump combined results.
    if (mFailureCount > mMaxFailures)
    {
        printf(
            "Omitted results files because the failure count (%d) exceeded the maximum number of "
            "failures (%d).\n",
            mFailureCount, mMaxFailures);
    }
    else
    {
        WriteOutputFiles(false, mTestResults, mResultsFile, mHistogramWriter, mHistogramJsonFile,
                         mTestSuiteName.c_str());
    }

    totalRunTime.stop();
    printf("Tests completed in %lf seconds\n", totalRunTime.getElapsedWallClockTime());

    return printFailuresAndReturnCount() == 0 ? 0 : 1;
}

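// Prints each non-passing, non-skipped test with its source location and result type, plus a
// count of skipped tests, and returns the number of failures.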
int TestSuite::printFailuresAndReturnCount() const
{
    std::vector<std::string> failures;
    uint32_t skipCount = 0;

    for (const auto &resultIter : mTestResults.results)
    {
        const TestIdentifier &id = resultIter.first;
        const TestResult &result = resultIter.second;

        if (result.type == TestResultType::Skip)
        {
            skipCount++;
        }
        else if (result.type != TestResultType::Pass)
        {
            const FileLine &fileLine = mTestFileLines.find(id)->second;

            std::stringstream failureMessage;
            failureMessage << id << " (" << fileLine.file << ":" << fileLine.line << ") ("
                           << ResultTypeToString(result.type) << ")";
            failures.emplace_back(failureMessage.str());
        }
    }

    if (failures.empty())
        return 0;

    printf("%zu test%s failed:\n", failures.size(), failures.size() > 1 ? "s" : "");
    for (const std::string &failure : failures)
    {
        printf(" %s\n", failure.c_str());
    }
    if (skipCount > 0)
    {
        printf("%u tests skipped.\n", skipCount);
    }

    return static_cast<int>(failures.size());
}

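// Starts a thread that polls the currently running test and force-exits the process with a
// Timeout result if the test exceeds its timeout before all tests are done.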
void TestSuite::startWatchdog()
{
    auto watchdogMain = [this]() {
        do
        {
            {
                std::lock_guard<std::mutex> guard(mTestResults.currentTestMutex);
                if (mTestResults.currentTestTimer.getElapsedWallClockTime() >
                    mTestResults.currentTestTimeout)
                {
                    break;
                }

                if (mTestResults.allDone)
                    return;
            }

            angle::Sleep(500);
        } while (true);
        onCrashOrTimeout(TestResultType::Timeout);
        ::_Exit(EXIT_FAILURE);
    };
    mWatchdogThread = std::thread(watchdogMain);
}

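// Records a performance sample with the histogram writer; samples are emitted in the histogram
// JSON output when results are written.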
void TestSuite::addHistogramSample(const std::string &measurement,
                                   const std::string &story,
                                   double value,
                                   const std::string &units)
{
    mHistogramWriter.addSample(measurement, story, value, units);
}

bool TestSuite::hasTestArtifactsDirectory() const
{
    return !mTestArtifactDirectory.empty();
}

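// Registers an artifact name with the test results and returns its full path inside the test
// artifact directory, or the bare name if no artifact directory was specified.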
std::string TestSuite::reserveTestArtifactPath(const std::string &artifactName)
{
    mTestResults.testArtifactPaths.push_back(artifactName);

    if (mTestArtifactDirectory.empty())
    {
        return artifactName;
    }

    std::stringstream pathStream;
    pathStream << mTestArtifactDirectory << GetPathSeparator() << artifactName;
    return pathStream.str();
}

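// Reads a results JSON file from disk and deserializes it into a TestResults struct.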
bool GetTestResultsFromFile(const char *fileName, TestResults *resultsOut)
{
    std::ifstream ifs(fileName);
    if (!ifs.is_open())
    {
        std::cerr << "Error opening " << fileName << "\n";
        return false;
    }

    js::IStreamWrapper ifsWrapper(ifs);
    js::Document document;
    document.ParseStream(ifsWrapper);

    if (document.HasParseError())
    {
        std::cerr << "Parse error reading JSON document: " << document.GetParseError() << "\n";
        return false;
    }

    if (!GetTestResultsFromJSON(document, resultsOut))
    {
        std::cerr << "Error getting test results from JSON.\n";
        return false;
    }

    return true;
}

void TestSuite::dumpTestExpectationsErrorMessages()
{
    std::stringstream errorMsgStream;
    for (const auto &message : mTestExpectationsParser.getErrorMessages())
    {
        errorMsgStream << std::endl << " " << message;
    }

    std::cerr << "Failed to load test expectations." << errorMsgStream.str() << std::endl;
}

bool TestSuite::loadTestExpectationsFromFileWithConfig(const GPUTestConfig &config,
                                                       const std::string &fileName)
{
    if (!mTestExpectationsParser.loadTestExpectationsFromFile(config, fileName))
    {
        dumpTestExpectationsErrorMessages();
        return false;
    }
    return true;
}

bool TestSuite::loadAllTestExpectationsFromFile(const std::string &fileName)
{
    if (!mTestExpectationsParser.loadAllTestExpectationsFromFile(fileName))
    {
        dumpTestExpectationsErrorMessages();
        return false;
    }
    return true;
}

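// Logs any expectation entries that never matched a test; returns true if any were unused.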
bool TestSuite::logAnyUnusedTestExpectations()
{
    std::stringstream unusedMsgStream;
    bool anyUnused = false;
    for (const auto &message : mTestExpectationsParser.getUnusedExpectationsMessages())
    {
        anyUnused = true;
        unusedMsgStream << std::endl << " " << message;
    }
    if (anyUnused)
    {
        std::cerr << "Failed to validate test expectations." << unusedMsgStream.str() << std::endl;
        return true;
    }
    return false;
}

int32_t TestSuite::getTestExpectation(const std::string &testName)
{
    return mTestExpectationsParser.getTestExpectation(testName);
}

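// Extends the current test's timeout to the slow-test timeout when the expectation marks the
// test as expected to time out; otherwise keeps the default test timeout.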
void TestSuite::maybeUpdateTestTimeout(uint32_t testExpectation)
{
    double testTimeout = (testExpectation == GPUTestExpectationsParser::kGpuTestTimeout)
                             ? getSlowTestTimeout()
                             : mTestTimeout;
    std::lock_guard<std::mutex> guard(mTestResults.currentTestMutex);
    mTestResults.currentTestTimeout = testTimeout;
}

int32_t TestSuite::getTestExpectationWithConfigAndUpdateTimeout(const GPUTestConfig &config,
                                                                const std::string &testName)
{
    uint32_t expectation = mTestExpectationsParser.getTestExpectationWithConfig(config, testName);
    maybeUpdateTestTimeout(expectation);
    return expectation;
}

int TestSuite::getSlowTestTimeout() const
{
    return mTestTimeout * kSlowTestTimeoutScale;
}

const char *TestResultTypeToString(TestResultType type)
{
    switch (type)
    {
        case TestResultType::Crash:
            return "Crash";
        case TestResultType::Fail:
            return "Fail";
        case TestResultType::NoResult:
            return "NoResult";
        case TestResultType::Pass:
            return "Pass";
        case TestResultType::Skip:
            return "Skip";
        case TestResultType::Timeout:
            return "Timeout";
        case TestResultType::Unknown:
        default:
            return "Unknown";
    }
}
}  // namespace angle