//
// Copyright 2019 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// TestSuite:
//   Basic implementation of a test harness in ANGLE.

#include "TestSuite.h"

#include "common/debug.h"
#include "common/platform.h"
#include "common/string_utils.h"
#include "common/system_utils.h"
#include "util/Timer.h"

#include <stdlib.h>
#include <time.h>

#include <fstream>
#include <unordered_map>

#include <gtest/gtest.h>
#include <rapidjson/document.h>
#include <rapidjson/filewritestream.h>
#include <rapidjson/istreamwrapper.h>
#include <rapidjson/prettywriter.h>

// We directly call into a function to register the parameterized tests. This saves spinning up
// a subprocess with a new gtest filter.
#include <gtest/../../src/gtest-internal-inl.h>

namespace js = rapidjson;

namespace angle
{
namespace
{
constexpr char kBatchId[]              = "--batch-id=";
constexpr char kFilterFileArg[]        = "--filter-file=";
constexpr char kFlakyRetries[]         = "--flaky-retries=";
constexpr char kGTestListTests[]       = "--gtest_list_tests";
constexpr char kHistogramJsonFileArg[] = "--histogram-json-file=";
constexpr char kListTests[]            = "--list-tests";
constexpr char kPrintTestStdout[]      = "--print-test-stdout";
constexpr char kResultFileArg[]        = "--results-file=";
constexpr char kTestTimeoutArg[]       = "--test-timeout=";
constexpr char kDisableCrashHandler[]  = "--disable-crash-handler";
constexpr char kIsolatedOutDir[]       = "--isolated-outdir=";
constexpr char kMaxFailures[]          = "--max-failures=";

constexpr char kStartedTestString[] = "[ RUN      ] ";
constexpr char kPassedTestString[]  = "[       OK ] ";
constexpr char kFailedTestString[]  = "[  FAILED  ] ";
constexpr char kSkippedTestString[] = "[  SKIPPED ] ";

constexpr char kArtifactsFakeTestName[] = "TestArtifactsFakeTest";

#if defined(NDEBUG)
constexpr int kDefaultTestTimeout = 20;
#else
constexpr int kDefaultTestTimeout = 60;
#endif
#if defined(NDEBUG)
constexpr int kDefaultBatchTimeout = 240;
#else
constexpr int kDefaultBatchTimeout = 600;
#endif
constexpr int kDefaultBatchSize      = 256;
constexpr double kIdleMessageTimeout = 15.0;
constexpr int kDefaultMaxProcesses   = 16;
constexpr int kDefaultMaxFailures    = 100;

const char *ParseFlagValue(const char *flag, const char *argument)
{
    if (strstr(argument, flag) == argument)
    {
        return argument + strlen(flag);
    }

    return nullptr;
}

bool ParseIntArg(const char *flag, const char *argument, int *valueOut)
{
    const char *value = ParseFlagValue(flag, argument);
    if (!value)
    {
        return false;
    }

    char *end            = nullptr;
    const long longValue = strtol(value, &end, 10);

    if (*end != '\0')
    {
        printf("Error parsing integer flag value.\n");
        exit(EXIT_FAILURE);
    }

    if (longValue == LONG_MAX || longValue == LONG_MIN || static_cast<int>(longValue) != longValue)
    {
        printf("Overflow when parsing integer flag value.\n");
        exit(EXIT_FAILURE);
    }

    *valueOut = static_cast<int>(longValue);
    return true;
}

bool ParseIntArgNoDelete(const char *flag, const char *argument, int *valueOut)
{
    ParseIntArg(flag, argument, valueOut);
    return false;
}

bool ParseFlag(const char *expected, const char *actual, bool *flagOut)
{
    if (strcmp(expected, actual) == 0)
    {
        *flagOut = true;
        return true;
    }
    return false;
}

bool ParseStringArg(const char *flag, const char *argument, std::string *valueOut)
{
    const char *value = ParseFlagValue(flag, argument);
    if (!value)
    {
        return false;
    }

    *valueOut = value;
    return true;
}

void DeleteArg(int *argc, char **argv, int argIndex)
{
    // Shift the remainder of the argv list left by one. Note that argv has (*argc + 1) elements,
    // the last one always being NULL. The following loop moves the trailing NULL element as well.
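    // Illustrative example (hypothetical values): with *argc == 3 and
    // argv == {"test_binary", "--foo", "--bar", NULL}, DeleteArg(argc, argv, 1) leaves
    // argv == {"test_binary", "--bar", NULL} and *argc == 2.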
    for (int index = argIndex; index < *argc; ++index)
    {
        argv[index] = argv[index + 1];
    }
    (*argc)--;
}

void AddArg(int *argc, char **argv, const char *arg)
{
    // This unsafe const_cast is necessary to work around gtest limitations.
    argv[*argc]     = const_cast<char *>(arg);
    argv[*argc + 1] = nullptr;
    (*argc)++;
}

const char *ResultTypeToString(TestResultType type)
{
    switch (type)
    {
        case TestResultType::Crash:
            return "CRASH";
        case TestResultType::Fail:
            return "FAIL";
        case TestResultType::NoResult:
            return "NOTRUN";
        case TestResultType::Pass:
            return "PASS";
        case TestResultType::Skip:
            return "SKIP";
        case TestResultType::Timeout:
            return "TIMEOUT";
        case TestResultType::Unknown:
            return "UNKNOWN";
    }
}

TestResultType GetResultTypeFromString(const std::string &str)
{
    if (str == "CRASH")
        return TestResultType::Crash;
    if (str == "FAIL")
        return TestResultType::Fail;
    if (str == "PASS")
        return TestResultType::Pass;
    if (str == "NOTRUN")
        return TestResultType::NoResult;
    if (str == "SKIP")
        return TestResultType::Skip;
    if (str == "TIMEOUT")
        return TestResultType::Timeout;
    return TestResultType::Unknown;
}

bool IsFailedResult(TestResultType resultType)
{
    return resultType != TestResultType::Pass && resultType != TestResultType::Skip;
}

js::Value ResultTypeToJSString(TestResultType type, js::Document::AllocatorType *allocator)
{
    js::Value jsName;
    jsName.SetString(ResultTypeToString(type), *allocator);
    return jsName;
}

bool WriteJsonFile(const std::string &outputFile, js::Document *doc)
{
    FILE *fp = fopen(outputFile.c_str(), "w");
    if (!fp)
    {
        return false;
    }

    constexpr size_t kBufferSize = 0xFFFF;

    std::vector<char> writeBuffer(kBufferSize);
    js::FileWriteStream os(fp, writeBuffer.data(), kBufferSize);
    js::PrettyWriter<js::FileWriteStream> writer(os);
    if (!doc->Accept(writer))
    {
        fclose(fp);
        return false;
    }
    fclose(fp);
    return true;
}

// Writes out a TestResults to the Chromium JSON Test Results format.
// https://chromium.googlesource.com/chromium/src.git/+/master/docs/testing/json_test_results_format.md
void WriteResultsFile(bool interrupted,
                      const TestResults &testResults,
                      const std::string &outputFile,
                      const char *testSuiteName)
{
    time_t ltime;
    time(&ltime);
    struct tm *timeinfo = gmtime(&ltime);
    ltime               = mktime(timeinfo);

    uint64_t secondsSinceEpoch = static_cast<uint64_t>(ltime);

    js::Document doc;
    doc.SetObject();

    js::Document::AllocatorType &allocator = doc.GetAllocator();

    doc.AddMember("interrupted", interrupted, allocator);
    doc.AddMember("path_delimiter", ".", allocator);
    doc.AddMember("version", 3, allocator);
    doc.AddMember("seconds_since_epoch", secondsSinceEpoch, allocator);

    js::Value tests;
    tests.SetObject();

    // If we have any test artifacts, make a fake test to house them.
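    // Illustrative shape of the fake entry emitted below (file names are hypothetical):
    //   "TestArtifactsFakeTest": {
    //       "actual": "PASS",
    //       "expected": "PASS",
    //       "artifacts": { "screenshot.png": ["artifacts/screenshot.png"] }
    //   }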
if (!testResults.testArtifactPaths.empty()) { js::Value artifactsTest; artifactsTest.SetObject(); artifactsTest.AddMember("actual", "PASS", allocator); artifactsTest.AddMember("expected", "PASS", allocator); js::Value artifacts; artifacts.SetObject(); for (const std::string &testArtifactPath : testResults.testArtifactPaths) { std::vector pieces = SplitString(testArtifactPath, "/\\", WhitespaceHandling::TRIM_WHITESPACE, SplitResult::SPLIT_WANT_NONEMPTY); ASSERT(!pieces.empty()); js::Value basename; basename.SetString(pieces.back(), allocator); js::Value artifactPath; artifactPath.SetString(testArtifactPath, allocator); js::Value artifactArray; artifactArray.SetArray(); artifactArray.PushBack(artifactPath, allocator); artifacts.AddMember(basename, artifactArray, allocator); } artifactsTest.AddMember("artifacts", artifacts, allocator); js::Value fakeTestName; fakeTestName.SetString(testResults.testArtifactsFakeTestName, allocator); tests.AddMember(fakeTestName, artifactsTest, allocator); } std::map counts; for (const auto &resultIter : testResults.results) { const TestIdentifier &id = resultIter.first; const TestResult &result = resultIter.second; js::Value jsResult; jsResult.SetObject(); counts[result.type]++; std::string actualResult; for (uint32_t fail = 0; fail < result.flakyFailures; ++fail) { actualResult += "FAIL "; } actualResult += ResultTypeToString(result.type); std::string expectedResult = "PASS"; if (result.type == TestResultType::Skip) { expectedResult = "SKIP"; } // Handle flaky passing tests. if (result.flakyFailures > 0 && result.type == TestResultType::Pass) { expectedResult = "FAIL PASS"; jsResult.AddMember("is_flaky", true, allocator); } jsResult.AddMember("actual", actualResult, allocator); jsResult.AddMember("expected", expectedResult, allocator); if (IsFailedResult(result.type)) { jsResult.AddMember("is_unexpected", true, allocator); } js::Value times; times.SetArray(); times.PushBack(result.elapsedTimeSeconds, allocator); jsResult.AddMember("times", times, allocator); char testName[500]; id.sprintfName(testName); js::Value jsName; jsName.SetString(testName, allocator); tests.AddMember(jsName, jsResult, allocator); } js::Value numFailuresByType; numFailuresByType.SetObject(); for (const auto &countIter : counts) { TestResultType type = countIter.first; uint32_t count = countIter.second; js::Value jsCount(count); numFailuresByType.AddMember(ResultTypeToJSString(type, &allocator), jsCount, allocator); } doc.AddMember("num_failures_by_type", numFailuresByType, allocator); doc.AddMember("tests", tests, allocator); printf("Writing test results to %s\n", outputFile.c_str()); if (!WriteJsonFile(outputFile, &doc)) { printf("Error writing test results file.\n"); } } void WriteHistogramJson(const HistogramWriter &histogramWriter, const std::string &outputFile, const char *testSuiteName) { js::Document doc; doc.SetArray(); histogramWriter.getAsJSON(&doc); printf("Writing histogram json to %s\n", outputFile.c_str()); if (!WriteJsonFile(outputFile, &doc)) { printf("Error writing histogram json file.\n"); } } void WriteOutputFiles(bool interrupted, const TestResults &testResults, const std::string &resultsFile, const HistogramWriter &histogramWriter, const std::string &histogramJsonOutputFile, const char *testSuiteName) { if (!resultsFile.empty()) { WriteResultsFile(interrupted, testResults, resultsFile, testSuiteName); } if (!histogramJsonOutputFile.empty()) { WriteHistogramJson(histogramWriter, histogramJsonOutputFile, testSuiteName); } } void UpdateCurrentTestResult(const 
testing::TestResult &resultIn, TestResults *resultsOut) { TestResult &resultOut = resultsOut->results[resultsOut->currentTest]; // Note: Crashes and Timeouts are detected by the crash handler and a watchdog thread. if (resultIn.Skipped()) { resultOut.type = TestResultType::Skip; } else if (resultIn.Failed()) { resultOut.type = TestResultType::Fail; } else { resultOut.type = TestResultType::Pass; } resultOut.elapsedTimeSeconds = resultsOut->currentTestTimer.getElapsedTime(); } TestIdentifier GetTestIdentifier(const testing::TestInfo &testInfo) { return {testInfo.test_suite_name(), testInfo.name()}; } bool IsSlowTest(const std::vector &slowTests, const TestIdentifier &testID) { char buffer[200] = {}; testID.sprintfName(buffer); for (const std::string &slowTest : slowTests) { if (NamesMatchWithWildcard(slowTest.c_str(), buffer)) { return true; } } return false; } class TestEventListener : public testing::EmptyTestEventListener { public: // Note: TestResults is owned by the TestSuite. It should outlive TestEventListener. TestEventListener(const std::string &resultsFile, const std::string &histogramJsonFile, const std::vector &slowTests, double fastTestTimeout, double slowTestTimeout, const char *testSuiteName, TestResults *testResults, HistogramWriter *histogramWriter) : mResultsFile(resultsFile), mHistogramJsonFile(histogramJsonFile), mSlowTests(slowTests), mFastTestTimeout(fastTestTimeout), mSlowTestTimeout(slowTestTimeout), mTestSuiteName(testSuiteName), mTestResults(testResults), mHistogramWriter(histogramWriter) {} void OnTestStart(const testing::TestInfo &testInfo) override { std::lock_guard guard(mTestResults->currentTestMutex); mTestResults->currentTest = GetTestIdentifier(testInfo); mTestResults->currentTestTimer.start(); mTestResults->currentTestTimeout = IsSlowTest(mSlowTests, mTestResults->currentTest) ? 
mSlowTestTimeout : mFastTestTimeout; } void OnTestEnd(const testing::TestInfo &testInfo) override { std::lock_guard guard(mTestResults->currentTestMutex); mTestResults->currentTestTimer.stop(); const testing::TestResult &resultIn = *testInfo.result(); UpdateCurrentTestResult(resultIn, mTestResults); mTestResults->currentTest = TestIdentifier(); } void OnTestProgramEnd(const testing::UnitTest &testProgramInfo) override { std::lock_guard guard(mTestResults->currentTestMutex); mTestResults->allDone = true; WriteOutputFiles(false, *mTestResults, mResultsFile, *mHistogramWriter, mHistogramJsonFile, mTestSuiteName); } private: std::string mResultsFile; std::string mHistogramJsonFile; const std::vector &mSlowTests; double mFastTestTimeout; double mSlowTestTimeout; const char *mTestSuiteName; TestResults *mTestResults; HistogramWriter *mHistogramWriter; }; bool IsTestDisabled(const testing::TestInfo &testInfo) { return ::strstr(testInfo.name(), "DISABLED_") == testInfo.name(); } using TestIdentifierFilter = std::function; std::vector FilterTests(std::map *fileLinesOut, TestIdentifierFilter filter, bool alsoRunDisabledTests) { std::vector tests; const testing::UnitTest &testProgramInfo = *testing::UnitTest::GetInstance(); for (int suiteIndex = 0; suiteIndex < testProgramInfo.total_test_suite_count(); ++suiteIndex) { const testing::TestSuite &testSuite = *testProgramInfo.GetTestSuite(suiteIndex); for (int testIndex = 0; testIndex < testSuite.total_test_count(); ++testIndex) { const testing::TestInfo &testInfo = *testSuite.GetTestInfo(testIndex); TestIdentifier id = GetTestIdentifier(testInfo); if (filter(id) && (!IsTestDisabled(testInfo) || alsoRunDisabledTests)) { tests.emplace_back(id); if (fileLinesOut) { (*fileLinesOut)[id] = {testInfo.file(), testInfo.line()}; } } } } return tests; } std::vector GetFilteredTests(std::map *fileLinesOut, bool alsoRunDisabledTests) { TestIdentifierFilter gtestIDFilter = [](const TestIdentifier &id) { return testing::internal::UnitTestOptions::FilterMatchesTest(id.testSuiteName, id.testName); }; return FilterTests(fileLinesOut, gtestIDFilter, alsoRunDisabledTests); } std::vector GetShardTests(const std::vector &allTests, int shardIndex, int shardCount, std::map *fileLinesOut, bool alsoRunDisabledTests) { std::vector shardTests; for (int testIndex = shardIndex; testIndex < static_cast(allTests.size()); testIndex += shardCount) { shardTests.emplace_back(allTests[testIndex]); } return shardTests; } std::string GetTestFilter(const std::vector &tests) { std::stringstream filterStream; filterStream << "--gtest_filter="; for (size_t testIndex = 0; testIndex < tests.size(); ++testIndex) { if (testIndex != 0) { filterStream << ":"; } filterStream << tests[testIndex]; } return filterStream.str(); } std::string ParseTestSuiteName(const char *executable) { const char *baseNameStart = strrchr(executable, GetPathSeparator()); if (!baseNameStart) { baseNameStart = executable; } else { baseNameStart++; } const char *suffix = GetExecutableExtension(); size_t suffixLen = strlen(suffix); if (suffixLen == 0) { return baseNameStart; } if (!EndsWith(baseNameStart, suffix)) { return baseNameStart; } return std::string(baseNameStart, baseNameStart + strlen(baseNameStart) - suffixLen); } bool GetTestArtifactsFromJSON(const js::Value::ConstObject &obj, std::vector *testArtifactPathsOut) { if (!obj.HasMember("artifacts")) { printf("No artifacts member.\n"); return false; } const js::Value &jsArtifacts = obj["artifacts"]; if (!jsArtifacts.IsObject()) { printf("Artifacts are not an 
object.\n");
        return false;
    }

    const js::Value::ConstObject &artifacts = jsArtifacts.GetObject();
    for (const auto &artifactMember : artifacts)
    {
        const js::Value &artifact = artifactMember.value;
        if (!artifact.IsArray())
        {
            printf("Artifact is not an array of strings of size 1.\n");
            return false;
        }

        const js::Value::ConstArray &artifactArray = artifact.GetArray();
        if (artifactArray.Size() != 1)
        {
            printf("Artifact is not an array of strings of size 1.\n");
            return false;
        }

        const js::Value &artifactName = artifactArray[0];
        if (!artifactName.IsString())
        {
            printf("Artifact is not an array of strings of size 1.\n");
            return false;
        }

        testArtifactPathsOut->push_back(artifactName.GetString());
    }

    return true;
}

bool GetSingleTestResultFromJSON(const js::Value &name,
                                 const js::Value::ConstObject &obj,
                                 TestResults *resultsOut)
{
    TestIdentifier id;
    if (!TestIdentifier::ParseFromString(name.GetString(), &id))
    {
        printf("Could not parse test identifier.\n");
        return false;
    }

    if (!obj.HasMember("expected") || !obj.HasMember("actual"))
    {
        printf("No expected or actual member.\n");
        return false;
    }

    const js::Value &expected = obj["expected"];
    const js::Value &actual   = obj["actual"];

    if (!expected.IsString() || !actual.IsString())
    {
        printf("Expected or actual member is not a string.\n");
        return false;
    }

    const std::string actualStr = actual.GetString();

    TestResultType resultType = TestResultType::Unknown;
    int flakyFailures         = 0;
    if (actualStr.find(' ') != std::string::npos)
    {
        std::istringstream strstr(actualStr);
        std::string token;
        while (std::getline(strstr, token, ' '))
        {
            resultType = GetResultTypeFromString(token);
            if (resultType == TestResultType::Unknown)
            {
                printf("Failed to parse result type.\n");
                return false;
            }

            if (IsFailedResult(resultType))
            {
                flakyFailures++;
            }
        }
    }
    else
    {
        resultType = GetResultTypeFromString(actualStr);
        if (resultType == TestResultType::Unknown)
        {
            printf("Failed to parse result type.\n");
            return false;
        }
    }

    double elapsedTimeSeconds = 0.0;
    if (obj.HasMember("times"))
    {
        const js::Value &times = obj["times"];
        if (!times.IsArray())
        {
            return false;
        }

        const js::Value::ConstArray &timesArray = times.GetArray();
        if (timesArray.Size() != 1 || !timesArray[0].IsDouble())
        {
            return false;
        }

        elapsedTimeSeconds = timesArray[0].GetDouble();
    }

    TestResult &result        = resultsOut->results[id];
    result.elapsedTimeSeconds = elapsedTimeSeconds;
    result.type               = resultType;
    result.flakyFailures      = flakyFailures;
    return true;
}

bool GetTestResultsFromJSON(const js::Document &document, TestResults *resultsOut)
{
    if (!document.HasMember("tests") || !document["tests"].IsObject())
    {
        printf("JSON document has no tests member.\n");
        return false;
    }

    const js::Value::ConstObject &tests = document["tests"].GetObject();
    for (const auto &testMember : tests)
    {
        // Get test identifier.
        const js::Value &name = testMember.name;
        if (!name.IsString())
        {
            printf("Name is not a string.\n");
            return false;
        }

        // Get test result.
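        // Each entry value is expected to be an object roughly of the form (illustrative):
        //   { "expected": "PASS", "actual": "FAIL PASS", "times": [1.5], "is_flaky": true }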
const js::Value &value = testMember.value; if (!value.IsObject()) { printf("Test result is not an object.\n"); return false; } const js::Value::ConstObject &obj = value.GetObject(); if (BeginsWith(name.GetString(), kArtifactsFakeTestName)) { if (!GetTestArtifactsFromJSON(obj, &resultsOut->testArtifactPaths)) { return false; } } else { if (!GetSingleTestResultFromJSON(name, obj, resultsOut)) { return false; } } } return true; } bool MergeTestResults(TestResults *input, TestResults *output, int flakyRetries) { for (auto &resultsIter : input->results) { const TestIdentifier &id = resultsIter.first; TestResult &inputResult = resultsIter.second; TestResult &outputResult = output->results[id]; if (inputResult.type != TestResultType::NoResult) { if (outputResult.type != TestResultType::NoResult) { printf("Warning: duplicate entry for %s.%s.\n", id.testSuiteName.c_str(), id.testName.c_str()); return false; } // Mark the tests that haven't exhausted their retries as 'SKIP'. This makes ANGLE // attempt the test again. uint32_t runCount = outputResult.flakyFailures + 1; if (IsFailedResult(inputResult.type) && runCount < static_cast(flakyRetries)) { printf("Retrying flaky test: %s.%s.\n", id.testSuiteName.c_str(), id.testName.c_str()); inputResult.type = TestResultType::NoResult; outputResult.flakyFailures++; } else { outputResult.elapsedTimeSeconds = inputResult.elapsedTimeSeconds; outputResult.type = inputResult.type; } } } output->testArtifactPaths.insert(output->testArtifactPaths.end(), input->testArtifactPaths.begin(), input->testArtifactPaths.end()); return true; } void PrintTestOutputSnippet(const TestIdentifier &id, const TestResult &result, const std::string &fullOutput) { std::stringstream nameStream; nameStream << id; std::string fullName = nameStream.str(); size_t runPos = fullOutput.find(std::string(kStartedTestString) + fullName); if (runPos == std::string::npos) { printf("Cannot locate test output snippet.\n"); return; } size_t endPos = fullOutput.find(std::string(kFailedTestString) + fullName, runPos); // Only clip the snippet to the "OK" message if the test really // succeeded. It still might have e.g. crashed after printing it. if (endPos == std::string::npos && result.type == TestResultType::Pass) { endPos = fullOutput.find(std::string(kPassedTestString) + fullName, runPos); } if (endPos != std::string::npos) { size_t newline_pos = fullOutput.find("\n", endPos); if (newline_pos != std::string::npos) endPos = newline_pos + 1; } std::cout << "\n"; if (endPos != std::string::npos) { std::cout << fullOutput.substr(runPos, endPos - runPos); } else { std::cout << fullOutput.substr(runPos); } } std::string GetConfigNameFromTestIdentifier(const TestIdentifier &id) { size_t slashPos = id.testName.find('/'); if (slashPos == std::string::npos) { return "default"; } size_t doubleUnderscorePos = id.testName.find("__"); if (doubleUnderscorePos == std::string::npos) { std::string configName = id.testName.substr(slashPos + 1); if (!BeginsWith(configName, "ES")) { return "default"; } return configName; } else { return id.testName.substr(slashPos + 1, doubleUnderscorePos - slashPos - 1); } } TestQueue BatchTests(const std::vector &tests, int batchSize) { // First sort tests by configuration. angle::HashMap> testsSortedByConfig; for (const TestIdentifier &id : tests) { std::string config = GetConfigNameFromTestIdentifier(id); testsSortedByConfig[config].push_back(id); } // Then group into batches by 'batchSize'. 
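    // Illustrative example: 6 tests for one config with a batch size of 4 give
    // batchesForConfig == 2; striping then puts tests {0, 2, 4} in the first batch and
    // {1, 3, 5} in the second, spreading neighbouring (often similarly slow) tests across batches.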
TestQueue testQueue; for (const auto &configAndIds : testsSortedByConfig) { const std::vector &configTests = configAndIds.second; // Count the number of batches needed for this config. int batchesForConfig = static_cast(configTests.size() + batchSize - 1) / batchSize; // Create batches with striping to split up slow tests. for (int batchIndex = 0; batchIndex < batchesForConfig; ++batchIndex) { std::vector batchTests; for (size_t testIndex = batchIndex; testIndex < configTests.size(); testIndex += batchesForConfig) { batchTests.push_back(configTests[testIndex]); } testQueue.emplace(std::move(batchTests)); ASSERT(batchTests.empty()); } } return testQueue; } void ListTests(const std::map &resultsMap) { std::map> suites; std::cout << "Tests list:\n"; for (const auto &resultIt : resultsMap) { const TestIdentifier &id = resultIt.first; std::cout << id << "\n"; } } // Prints the names of the tests matching the user-specified filter flag. // This matches the output from googletest/src/gtest.cc but is much much faster for large filters. // See http://anglebug.com/5164 void GTestListTests(const std::map &resultsMap) { std::map> suites; for (const auto &resultIt : resultsMap) { const TestIdentifier &id = resultIt.first; suites[id.testSuiteName].push_back(id.testName); } for (const auto &testSuiteIt : suites) { bool printedTestSuiteName = false; const std::string &suiteName = testSuiteIt.first; const std::vector &testNames = testSuiteIt.second; for (const std::string &testName : testNames) { if (!printedTestSuiteName) { printedTestSuiteName = true; printf("%s.\n", suiteName.c_str()); } printf(" %s\n", testName.c_str()); } } } } // namespace // static TestSuite *TestSuite::mInstance = nullptr; TestIdentifier::TestIdentifier() = default; TestIdentifier::TestIdentifier(const std::string &suiteNameIn, const std::string &nameIn) : testSuiteName(suiteNameIn), testName(nameIn) {} TestIdentifier::TestIdentifier(const TestIdentifier &other) = default; TestIdentifier::~TestIdentifier() = default; TestIdentifier &TestIdentifier::operator=(const TestIdentifier &other) = default; void TestIdentifier::sprintfName(char *outBuffer) const { sprintf(outBuffer, "%s.%s", testSuiteName.c_str(), testName.c_str()); } // static bool TestIdentifier::ParseFromString(const std::string &str, TestIdentifier *idOut) { size_t separator = str.find("."); if (separator == std::string::npos) { return false; } idOut->testSuiteName = str.substr(0, separator); idOut->testName = str.substr(separator + 1, str.length() - separator - 1); return true; } TestResults::TestResults() = default; TestResults::~TestResults() = default; ProcessInfo::ProcessInfo() = default; ProcessInfo &ProcessInfo::operator=(ProcessInfo &&rhs) { process = std::move(rhs.process); testsInBatch = std::move(rhs.testsInBatch); resultsFileName = std::move(rhs.resultsFileName); filterFileName = std::move(rhs.filterFileName); commandLine = std::move(rhs.commandLine); filterString = std::move(rhs.filterString); return *this; } ProcessInfo::~ProcessInfo() = default; ProcessInfo::ProcessInfo(ProcessInfo &&other) { *this = std::move(other); } TestSuite::TestSuite(int *argc, char **argv) : mShardCount(-1), mShardIndex(-1), mBotMode(false), mDebugTestGroups(false), mGTestListTests(false), mListTests(false), mPrintTestStdout(false), mDisableCrashHandler(false), mBatchSize(kDefaultBatchSize), mCurrentResultCount(0), mTotalResultCount(0), mMaxProcesses(std::min(NumberOfProcessors(), kDefaultMaxProcesses)), mTestTimeout(kDefaultTestTimeout), mBatchTimeout(kDefaultBatchTimeout), 
mBatchId(-1), mFlakyRetries(0), mMaxFailures(kDefaultMaxFailures), mFailureCount(0) { ASSERT(mInstance == nullptr); mInstance = this; Optional filterArgIndex; bool alsoRunDisabledTests = false; #if defined(ANGLE_PLATFORM_MACOS) // By default, we should hook file API functions on macOS to avoid slow Metal shader caching // file access. // TODO(anglebug.com/5505): in the angle_end2end_tests suite, // disabling the shader cache makes the tests run more slowly than // leaving it enabled. // angle::InitMetalFileAPIHooking(*argc, argv); #endif #if defined(ANGLE_PLATFORM_WINDOWS) testing::GTEST_FLAG(catch_exceptions) = false; #endif if (*argc <= 0) { printf("Missing test arguments.\n"); exit(EXIT_FAILURE); } mTestExecutableName = argv[0]; mTestSuiteName = ParseTestSuiteName(mTestExecutableName.c_str()); for (int argIndex = 1; argIndex < *argc;) { if (parseSingleArg(argv[argIndex])) { DeleteArg(argc, argv, argIndex); continue; } if (ParseFlagValue("--gtest_filter=", argv[argIndex])) { filterArgIndex = argIndex; } else { // Don't include disabled tests in test lists unless the user asks for them. if (strcmp("--gtest_also_run_disabled_tests", argv[argIndex]) == 0) { alsoRunDisabledTests = true; } mChildProcessArgs.push_back(argv[argIndex]); } ++argIndex; } if (!mDisableCrashHandler) { // Note that the crash callback must be owned and not use global constructors. mCrashCallback = [this]() { onCrashOrTimeout(TestResultType::Crash); }; InitCrashHandler(&mCrashCallback); } std::string envShardIndex = angle::GetEnvironmentVar("GTEST_SHARD_INDEX"); if (!envShardIndex.empty()) { angle::UnsetEnvironmentVar("GTEST_SHARD_INDEX"); if (mShardIndex == -1) { std::stringstream shardIndexStream(envShardIndex); shardIndexStream >> mShardIndex; } } std::string envTotalShards = angle::GetEnvironmentVar("GTEST_TOTAL_SHARDS"); if (!envTotalShards.empty()) { angle::UnsetEnvironmentVar("GTEST_TOTAL_SHARDS"); if (mShardCount == -1) { std::stringstream shardCountStream(envTotalShards); shardCountStream >> mShardCount; } } if ((mShardIndex == -1) != (mShardCount == -1)) { printf("Shard index and shard count must be specified together.\n"); exit(EXIT_FAILURE); } if (!mFilterFile.empty()) { if (filterArgIndex.valid()) { printf("Cannot use gtest_filter in conjunction with a filter file.\n"); exit(EXIT_FAILURE); } uint32_t fileSize = 0; if (!GetFileSize(mFilterFile.c_str(), &fileSize)) { printf("Error getting filter file size: %s\n", mFilterFile.c_str()); exit(EXIT_FAILURE); } std::vector fileContents(fileSize + 1, 0); if (!ReadEntireFileToString(mFilterFile.c_str(), fileContents.data(), fileSize)) { printf("Error loading filter file: %s\n", mFilterFile.c_str()); exit(EXIT_FAILURE); } mFilterString.assign(fileContents.data()); if (mFilterString.substr(0, strlen("--gtest_filter=")) != std::string("--gtest_filter=")) { printf("Filter file must start with \"--gtest_filter=\".\n"); exit(EXIT_FAILURE); } // Note that we only add a filter string if we previously deleted a shader filter file // argument. So we will have space for the new filter string in argv. AddArg(argc, argv, mFilterString.c_str()); } // Call into gtest internals to force parameterized test name registration. testing::internal::UnitTestImpl *impl = testing::internal::GetUnitTestImpl(); impl->RegisterParameterizedTests(); // Initialize internal GoogleTest filter arguments so we can call "FilterMatchesTest". 
testing::internal::ParseGoogleTestFlagsOnly(argc, argv); std::vector testSet = GetFilteredTests(&mTestFileLines, alsoRunDisabledTests); if (mShardCount == 0) { printf("Shard count must be > 0.\n"); exit(EXIT_FAILURE); } else if (mShardCount > 0) { if (mShardIndex >= mShardCount) { printf("Shard index must be less than shard count.\n"); exit(EXIT_FAILURE); } // If there's only one shard, we can use the testSet as defined above. if (mShardCount > 1) { testSet = GetShardTests(testSet, mShardIndex, mShardCount, &mTestFileLines, alsoRunDisabledTests); if (!mBotMode) { mFilterString = GetTestFilter(testSet); if (filterArgIndex.valid()) { argv[filterArgIndex.value()] = const_cast(mFilterString.c_str()); } else { // Note that we only add a filter string if we previously deleted a shard // index/count argument. So we will have space for the new filter string in // argv. AddArg(argc, argv, mFilterString.c_str()); } // Force-re-initialize GoogleTest flags to load the shard filter. testing::internal::ParseGoogleTestFlagsOnly(argc, argv); } } } { std::stringstream fakeTestName; fakeTestName << kArtifactsFakeTestName; if (mShardIndex != -1) { fakeTestName << "-Shard" << std::setfill('0') << std::setw(2) << mShardIndex; } mTestResults.testArtifactsFakeTestName = fakeTestName.str(); } if (mBotMode) { // Split up test batches. mTestQueue = BatchTests(testSet, mBatchSize); if (mDebugTestGroups) { std::cout << "Test Groups:\n"; while (!mTestQueue.empty()) { const std::vector &tests = mTestQueue.front(); std::cout << GetConfigNameFromTestIdentifier(tests[0]) << " (" << static_cast(tests.size()) << ")\n"; mTestQueue.pop(); } exit(EXIT_SUCCESS); } } testing::InitGoogleTest(argc, argv); mTotalResultCount = testSet.size(); if ((mBotMode || !mResultsDirectory.empty()) && mResultsFile.empty()) { // Create a default output file in bot mode. mResultsFile = "output.json"; } if (!mResultsDirectory.empty()) { std::stringstream resultFileName; resultFileName << mResultsDirectory << GetPathSeparator() << mResultsFile; mResultsFile = resultFileName.str(); } if (!mBotMode) { testing::TestEventListeners &listeners = testing::UnitTest::GetInstance()->listeners(); listeners.Append(new TestEventListener( mResultsFile, mHistogramJsonFile, mSlowTests, mTestTimeout, mTestTimeout * 3.0, mTestSuiteName.c_str(), &mTestResults, &mHistogramWriter)); for (const TestIdentifier &id : testSet) { mTestResults.results[id].type = TestResultType::NoResult; } } } TestSuite::~TestSuite() { if (mWatchdogThread.joinable()) { mWatchdogThread.detach(); } TerminateCrashHandler(); } bool TestSuite::parseSingleArg(const char *argument) { // Note: Flags should be documented in README.md. return (ParseIntArg("--shard-count=", argument, &mShardCount) || ParseIntArg("--shard-index=", argument, &mShardIndex) || ParseIntArg("--batch-size=", argument, &mBatchSize) || ParseIntArg("--max-processes=", argument, &mMaxProcesses) || ParseIntArg(kTestTimeoutArg, argument, &mTestTimeout) || ParseIntArg("--batch-timeout=", argument, &mBatchTimeout) || ParseIntArg(kFlakyRetries, argument, &mFlakyRetries) || ParseIntArg(kMaxFailures, argument, &mMaxFailures) || // Other test functions consume the batch ID, so keep it in the list. 
ParseIntArgNoDelete(kBatchId, argument, &mBatchId) || ParseStringArg("--results-directory=", argument, &mResultsDirectory) || ParseStringArg(kResultFileArg, argument, &mResultsFile) || ParseStringArg("--isolated-script-test-output=", argument, &mResultsFile) || ParseStringArg(kFilterFileArg, argument, &mFilterFile) || ParseStringArg(kHistogramJsonFileArg, argument, &mHistogramJsonFile) || ParseStringArg("--isolated-script-test-perf-output=", argument, &mHistogramJsonFile) || ParseStringArg(kIsolatedOutDir, argument, &mTestArtifactDirectory) || ParseFlag("--bot-mode", argument, &mBotMode) || ParseFlag("--debug-test-groups", argument, &mDebugTestGroups) || ParseFlag(kGTestListTests, argument, &mGTestListTests) || ParseFlag(kListTests, argument, &mListTests) || ParseFlag(kPrintTestStdout, argument, &mPrintTestStdout) || ParseFlag(kDisableCrashHandler, argument, &mDisableCrashHandler)); } void TestSuite::onCrashOrTimeout(TestResultType crashOrTimeout) { std::lock_guard guard(mTestResults.currentTestMutex); if (mTestResults.currentTest.valid()) { TestResult &result = mTestResults.results[mTestResults.currentTest]; result.type = crashOrTimeout; result.elapsedTimeSeconds = mTestResults.currentTestTimer.getElapsedTime(); } if (mResultsFile.empty()) { printf("No results file specified.\n"); return; } WriteOutputFiles(true, mTestResults, mResultsFile, mHistogramWriter, mHistogramJsonFile, mTestSuiteName.c_str()); } bool TestSuite::launchChildTestProcess(uint32_t batchId, const std::vector &testsInBatch) { constexpr uint32_t kMaxPath = 1000; // Create a temporary file to store the test list ProcessInfo processInfo; char filterBuffer[kMaxPath] = {}; if (!CreateTemporaryFile(filterBuffer, kMaxPath)) { std::cerr << "Error creating temporary file for test list.\n"; return false; } processInfo.filterFileName.assign(filterBuffer); std::string filterString = GetTestFilter(testsInBatch); FILE *fp = fopen(processInfo.filterFileName.c_str(), "w"); if (!fp) { std::cerr << "Error opening temporary file for test list.\n"; return false; } fprintf(fp, "%s", filterString.c_str()); fclose(fp); processInfo.filterString = filterString; std::string filterFileArg = kFilterFileArg + processInfo.filterFileName; // Create a temporary file to store the test output. char resultsBuffer[kMaxPath] = {}; if (!CreateTemporaryFile(resultsBuffer, kMaxPath)) { std::cerr << "Error creating temporary file for test list.\n"; return false; } processInfo.resultsFileName.assign(resultsBuffer); std::string resultsFileArg = kResultFileArg + processInfo.resultsFileName; // Construct commandline for child process. 
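    // The resulting child command line looks roughly like this (hypothetical paths):
    //   <test executable> --filter-file=/tmp/filterXXXX --results-file=/tmp/resultsYYYY
    //       --batch-id=7 <forwarded args> [--disable-crash-handler] [--test-timeout=N]
    //       [--isolated-outdir=DIR]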
std::vector args; args.push_back(mTestExecutableName.c_str()); args.push_back(filterFileArg.c_str()); args.push_back(resultsFileArg.c_str()); std::stringstream batchIdStream; batchIdStream << kBatchId << batchId; std::string batchIdString = batchIdStream.str(); args.push_back(batchIdString.c_str()); for (const std::string &arg : mChildProcessArgs) { args.push_back(arg.c_str()); } if (mDisableCrashHandler) { args.push_back(kDisableCrashHandler); } std::string timeoutStr; if (mTestTimeout != kDefaultTestTimeout) { std::stringstream timeoutStream; timeoutStream << kTestTimeoutArg << mTestTimeout; timeoutStr = timeoutStream.str(); args.push_back(timeoutStr.c_str()); } std::string artifactsDir; if (!mTestArtifactDirectory.empty()) { std::stringstream artifactsDirStream; artifactsDirStream << kIsolatedOutDir << mTestArtifactDirectory; artifactsDir = artifactsDirStream.str(); args.push_back(artifactsDir.c_str()); } // Launch child process and wait for completion. processInfo.process = LaunchProcess(args, ProcessOutputCapture::StdoutAndStderrInterleaved); if (!processInfo.process->started()) { std::cerr << "Error launching child process.\n"; return false; } std::stringstream commandLineStr; for (const char *arg : args) { commandLineStr << arg << " "; } processInfo.commandLine = commandLineStr.str(); processInfo.testsInBatch = testsInBatch; mCurrentProcesses.emplace_back(std::move(processInfo)); return true; } void ParseTestIdentifierAndSetResult(const std::string &testName, TestResultType result, TestResults *results) { // Trim off any whitespace + extra stuff at the end of the string. std::string modifiedTestName = testName.substr(0, testName.find(' ')); modifiedTestName = modifiedTestName.substr(0, testName.find('\r')); TestIdentifier id; bool ok = TestIdentifier::ParseFromString(modifiedTestName, &id); ASSERT(ok); results->results[id] = {result}; } bool TestSuite::finishProcess(ProcessInfo *processInfo) { // Get test results and merge into master list. TestResults batchResults; if (!GetTestResultsFromFile(processInfo->resultsFileName.c_str(), &batchResults)) { std::cerr << "Warning: could not find test results file from child process.\n"; // First assume all tests get skipped. for (const TestIdentifier &id : processInfo->testsInBatch) { batchResults.results[id] = {TestResultType::NoResult}; } // Attempt to reconstruct passing list from stdout snippets. const std::string &batchStdout = processInfo->process->getStdout(); std::istringstream linesStream(batchStdout); std::string line; while (std::getline(linesStream, line)) { size_t startPos = line.find(kStartedTestString); size_t failPos = line.find(kFailedTestString); size_t passPos = line.find(kPassedTestString); size_t skippedPos = line.find(kSkippedTestString); if (startPos != std::string::npos) { // Assume a test that's started crashed until we see it completed. 
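                // For example (hypothetical test name), a line such as
                // "[ RUN      ] FooTest.Bar" yields "FooTest.Bar" once trailing text is
                // trimmed in ParseTestIdentifierAndSetResult.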
std::string testName = line.substr(strlen(kStartedTestString)); ParseTestIdentifierAndSetResult(testName, TestResultType::Crash, &batchResults); } else if (failPos != std::string::npos) { std::string testName = line.substr(strlen(kFailedTestString)); ParseTestIdentifierAndSetResult(testName, TestResultType::Fail, &batchResults); } else if (passPos != std::string::npos) { std::string testName = line.substr(strlen(kPassedTestString)); ParseTestIdentifierAndSetResult(testName, TestResultType::Pass, &batchResults); } else if (skippedPos != std::string::npos) { std::string testName = line.substr(strlen(kSkippedTestString)); ParseTestIdentifierAndSetResult(testName, TestResultType::Skip, &batchResults); } } } if (!MergeTestResults(&batchResults, &mTestResults, mFlakyRetries)) { std::cerr << "Error merging batch test results.\n"; return false; } if (!batchResults.results.empty()) { const TestIdentifier &id = batchResults.results.begin()->first; std::string config = GetConfigNameFromTestIdentifier(id); printf("Completed batch with config: %s\n", config.c_str()); for (const auto &resultIter : batchResults.results) { const TestResult &result = resultIter.second; if (result.type != TestResultType::NoResult && IsFailedResult(result.type)) { printf("To reproduce the batch, use filter:\n%s\n", processInfo->filterString.c_str()); break; } } } // Process results and print unexpected errors. for (const auto &resultIter : batchResults.results) { const TestIdentifier &id = resultIter.first; const TestResult &result = resultIter.second; // Skip results aren't procesed since they're added back to the test queue below. if (result.type == TestResultType::NoResult) { continue; } mCurrentResultCount++; printf("[%d/%d] %s.%s", mCurrentResultCount, mTotalResultCount, id.testSuiteName.c_str(), id.testName.c_str()); if (mPrintTestStdout) { const std::string &batchStdout = processInfo->process->getStdout(); PrintTestOutputSnippet(id, result, batchStdout); } else if (result.type == TestResultType::Pass) { printf(" (%0.1lf ms)\n", result.elapsedTimeSeconds * 1000.0); } else if (result.type == TestResultType::Skip) { printf(" (skipped)\n"); } else if (result.type == TestResultType::Timeout) { printf(" (TIMEOUT in %0.1lf s)\n", result.elapsedTimeSeconds); mFailureCount++; } else { printf(" (%s)\n", ResultTypeToString(result.type)); mFailureCount++; const std::string &batchStdout = processInfo->process->getStdout(); PrintTestOutputSnippet(id, result, batchStdout); } } // On unexpected exit, re-queue any unfinished tests. std::vector unfinishedTests; for (const auto &resultIter : batchResults.results) { const TestIdentifier &id = resultIter.first; const TestResult &result = resultIter.second; if (result.type == TestResultType::NoResult) { unfinishedTests.push_back(id); } } if (!unfinishedTests.empty()) { mTestQueue.emplace(std::move(unfinishedTests)); } // Clean up any dirty temporary files. for (const std::string &tempFile : {processInfo->filterFileName, processInfo->resultsFileName}) { // Note: we should be aware that this cleanup won't happen if the harness itself // crashes. If this situation comes up in the future we should add crash cleanup to the // harness. 
if (!angle::DeleteFile(tempFile.c_str())) { std::cerr << "Warning: Error cleaning up temp file: " << tempFile << "\n"; } } processInfo->process.reset(); return true; } int TestSuite::run() { if (mListTests) { ListTests(mTestResults.results); return EXIT_SUCCESS; } if (mGTestListTests) { GTestListTests(mTestResults.results); return EXIT_SUCCESS; } // Run tests serially. if (!mBotMode) { // Only start the watchdog if the debugger is not attached and we're a child process. if (!angle::IsDebuggerAttached() && mBatchId != -1) { startWatchdog(); } int retVal = RUN_ALL_TESTS(); { std::lock_guard guard(mTestResults.currentTestMutex); mTestResults.allDone = true; } if (mWatchdogThread.joinable()) { mWatchdogThread.join(); } return retVal; } Timer totalRunTime; totalRunTime.start(); Timer messageTimer; messageTimer.start(); uint32_t batchId = 0; while (!mTestQueue.empty() || !mCurrentProcesses.empty()) { bool progress = false; // Spawn a process if needed and possible. if (static_cast(mCurrentProcesses.size()) < mMaxProcesses && !mTestQueue.empty()) { std::vector testsInBatch = mTestQueue.front(); mTestQueue.pop(); if (!launchChildTestProcess(++batchId, testsInBatch)) { return 1; } progress = true; } // Check for process completion. uint32_t totalTestCount = 0; for (auto processIter = mCurrentProcesses.begin(); processIter != mCurrentProcesses.end();) { ProcessInfo &processInfo = *processIter; if (processInfo.process->finished()) { if (!finishProcess(&processInfo)) { return 1; } processIter = mCurrentProcesses.erase(processIter); progress = true; } else if (processInfo.process->getElapsedTimeSeconds() > mBatchTimeout) { // Terminate the process and record timeouts for the batch. // Because we can't determine which sub-test caused a timeout, record the whole // batch as a timeout failure. Can be improved by using socket message passing. if (!processInfo.process->kill()) { return 1; } for (const TestIdentifier &testIdentifier : processInfo.testsInBatch) { // Because the whole batch failed we can't know how long each test took. mTestResults.results[testIdentifier].type = TestResultType::Timeout; mFailureCount++; } processIter = mCurrentProcesses.erase(processIter); progress = true; } else { totalTestCount += static_cast(processInfo.testsInBatch.size()); processIter++; } } if (progress) { messageTimer.start(); } else if (messageTimer.getElapsedTime() > kIdleMessageTimeout) { const ProcessInfo &processInfo = mCurrentProcesses[0]; double processTime = processInfo.process->getElapsedTimeSeconds(); printf("Running %d tests in %d processes, longest for %d seconds.\n", totalTestCount, static_cast(mCurrentProcesses.size()), static_cast(processTime)); messageTimer.start(); } // Early exit if we passed the maximum failure threshold. Still wait for current tests. if (mFailureCount > mMaxFailures && !mTestQueue.empty()) { printf("Reached maximum failure count (%d), clearing test queue.\n", mMaxFailures); TestQueue emptyTestQueue; std::swap(mTestQueue, emptyTestQueue); } // Sleep briefly and continue. angle::Sleep(100); } // Dump combined results. if (mFailureCount > mMaxFailures) { printf( "Omitted results files because the failure count (%d) exceeded the maximum number of " "failures (%d).\n", mFailureCount, mMaxFailures); } else { WriteOutputFiles(false, mTestResults, mResultsFile, mHistogramWriter, mHistogramJsonFile, mTestSuiteName.c_str()); } totalRunTime.stop(); printf("Tests completed in %lf seconds\n", totalRunTime.getElapsedTime()); return printFailuresAndReturnCount() == 0 ? 
0 : 1; } int TestSuite::printFailuresAndReturnCount() const { std::vector failures; uint32_t skipCount = 0; for (const auto &resultIter : mTestResults.results) { const TestIdentifier &id = resultIter.first; const TestResult &result = resultIter.second; if (result.type == TestResultType::Skip) { skipCount++; } else if (result.type != TestResultType::Pass) { const FileLine &fileLine = mTestFileLines.find(id)->second; std::stringstream failureMessage; failureMessage << id << " (" << fileLine.file << ":" << fileLine.line << ") (" << ResultTypeToString(result.type) << ")"; failures.emplace_back(failureMessage.str()); } } if (failures.empty()) return 0; printf("%zu test%s failed:\n", failures.size(), failures.size() > 1 ? "s" : ""); for (const std::string &failure : failures) { printf(" %s\n", failure.c_str()); } if (skipCount > 0) { printf("%u tests skipped.\n", skipCount); } return static_cast(failures.size()); } void TestSuite::startWatchdog() { auto watchdogMain = [this]() { do { { std::lock_guard guard(mTestResults.currentTestMutex); if (mTestResults.currentTestTimer.getElapsedTime() > mTestResults.currentTestTimeout) { break; } if (mTestResults.allDone) return; } angle::Sleep(500); } while (true); onCrashOrTimeout(TestResultType::Timeout); ::_Exit(EXIT_FAILURE); }; mWatchdogThread = std::thread(watchdogMain); } void TestSuite::addHistogramSample(const std::string &measurement, const std::string &story, double value, const std::string &units) { mHistogramWriter.addSample(measurement, story, value, units); } void TestSuite::registerSlowTests(const char *slowTests[], size_t numSlowTests) { for (size_t slowTestIndex = 0; slowTestIndex < numSlowTests; ++slowTestIndex) { mSlowTests.push_back(slowTests[slowTestIndex]); } } std::string TestSuite::addTestArtifact(const std::string &artifactName) { mTestResults.testArtifactPaths.push_back(artifactName); if (mTestArtifactDirectory.empty()) { return artifactName; } std::stringstream pathStream; pathStream << mTestArtifactDirectory << GetPathSeparator() << artifactName; return pathStream.str(); } bool GetTestResultsFromFile(const char *fileName, TestResults *resultsOut) { std::ifstream ifs(fileName); if (!ifs.is_open()) { std::cerr << "Error opening " << fileName << "\n"; return false; } js::IStreamWrapper ifsWrapper(ifs); js::Document document; document.ParseStream(ifsWrapper); if (document.HasParseError()) { std::cerr << "Parse error reading JSON document: " << document.GetParseError() << "\n"; return false; } if (!GetTestResultsFromJSON(document, resultsOut)) { std::cerr << "Error getting test results from JSON.\n"; return false; } return true; } void TestSuite::dumpTestExpectationsErrorMessages() { std::stringstream errorMsgStream; for (const auto &message : mTestExpectationsParser.getErrorMessages()) { errorMsgStream << std::endl << " " << message; } std::cerr << "Failed to load test expectations." 
<< errorMsgStream.str() << std::endl; } bool TestSuite::loadTestExpectationsFromFileWithConfig(const GPUTestConfig &config, const std::string &fileName) { if (!mTestExpectationsParser.loadTestExpectationsFromFile(config, fileName)) { dumpTestExpectationsErrorMessages(); return false; } return true; } bool TestSuite::loadAllTestExpectationsFromFile(const std::string &fileName) { if (!mTestExpectationsParser.loadAllTestExpectationsFromFile(fileName)) { dumpTestExpectationsErrorMessages(); return false; } return true; } bool TestSuite::logAnyUnusedTestExpectations() { std::stringstream unusedMsgStream; bool anyUnused = false; for (const auto &message : mTestExpectationsParser.getUnusedExpectationsMessages()) { anyUnused = true; unusedMsgStream << std::endl << " " << message; } if (anyUnused) { std::cerr << "Failed to validate test expectations." << unusedMsgStream.str() << std::endl; return true; } return false; } int32_t TestSuite::getTestExpectation(const std::string &testName) { return mTestExpectationsParser.getTestExpectation(testName); } int32_t TestSuite::getTestExpectationWithConfig(const GPUTestConfig &config, const std::string &testName) { return mTestExpectationsParser.getTestExpectationWithConfig(config, testName); } const char *TestResultTypeToString(TestResultType type) { switch (type) { case TestResultType::Crash: return "Crash"; case TestResultType::Fail: return "Fail"; case TestResultType::NoResult: return "NoResult"; case TestResultType::Pass: return "Pass"; case TestResultType::Skip: return "Skip"; case TestResultType::Timeout: return "Timeout"; case TestResultType::Unknown: return "Unknown"; } } } // namespace angle