1 //
2 // Copyright 2019 The ANGLE Project Authors. All rights reserved.
3 // Use of this source code is governed by a BSD-style license that can be
4 // found in the LICENSE file.
5 //
6 // TestSuite:
7 // Basic implementation of a test harness in ANGLE.
8
9 #include "TestSuite.h"
10
11 #include "common/debug.h"
12 #include "common/platform.h"
13 #include "common/string_utils.h"
14 #include "common/system_utils.h"
15 #include "util/Timer.h"
16
17 #include <stdlib.h>
18 #include <time.h>
19
20 #include <fstream>
21 #include <unordered_map>
22
23 #include <gtest/gtest.h>
24 #include <rapidjson/document.h>
25 #include <rapidjson/filewritestream.h>
26 #include <rapidjson/istreamwrapper.h>
27 #include <rapidjson/prettywriter.h>
28
29 // We directly call into a function to register the parameterized tests. This saves spinning up
30 // a subprocess with a new gtest filter.
31 #include <gtest/../../src/gtest-internal-inl.h>
32
33 namespace js = rapidjson;
34
35 namespace angle
36 {
37 namespace
38 {
39 constexpr char kBatchId[] = "--batch-id";
40 constexpr char kFilterFileArg[] = "--filter-file";
41 constexpr char kResultFileArg[] = "--results-file";
42 constexpr char kTestTimeoutArg[] = "--test-timeout";
43 constexpr char kDisableCrashHandler[] = "--disable-crash-handler";
44 constexpr char kIsolatedOutDir[] = "--isolated-outdir";
45
46 constexpr char kStartedTestString[] = "[ RUN ] ";
47 constexpr char kPassedTestString[] = "[ OK ] ";
48 constexpr char kFailedTestString[] = "[ FAILED ] ";
49 constexpr char kSkippedTestString[] = "[ SKIPPED ] ";
50
51 constexpr char kArtifactsFakeTestName[] = "TestArtifactsFakeTest";
52
53 constexpr char kTSanOptionsEnvVar[] = "TSAN_OPTIONS";
54 constexpr char kUBSanOptionsEnvVar[] = "UBSAN_OPTIONS";
55
56 [[maybe_unused]] constexpr char kVkLoaderDisableDLLUnloadingEnvVar[] =
57 "VK_LOADER_DISABLE_DYNAMIC_LIBRARY_UNLOADING";
58
59 // Note: we use a fairly high test timeout to allow for the first test in a batch to be slow.
60 // Ideally we could use a separate timeout for the slow first test.
61 // Allow sanitized tests to run more slowly.
62 #if defined(NDEBUG) && !defined(ANGLE_WITH_SANITIZER)
63 constexpr int kDefaultTestTimeout = 60;
64 constexpr int kDefaultBatchTimeout = 300;
65 #else
66 constexpr int kDefaultTestTimeout = 120;
67 constexpr int kDefaultBatchTimeout = 700;
68 #endif
69 constexpr int kSlowTestTimeoutScale = 3;
70 constexpr int kDefaultBatchSize = 256;
71 constexpr double kIdleMessageTimeout = 15.0;
72 constexpr int kDefaultMaxProcesses = 16;
73 constexpr int kDefaultMaxFailures = 100;
74
75 const char *ResultTypeToString(TestResultType type)
76 {
77 switch (type)
78 {
79 case TestResultType::Crash:
80 return "CRASH";
81 case TestResultType::Fail:
82 return "FAIL";
83 case TestResultType::NoResult:
84 return "NOTRUN";
85 case TestResultType::Pass:
86 return "PASS";
87 case TestResultType::Skip:
88 return "SKIP";
89 case TestResultType::Timeout:
90 return "TIMEOUT";
91 case TestResultType::Unknown:
92 default:
93 return "UNKNOWN";
94 }
95 }
96
97 TestResultType GetResultTypeFromString(const std::string &str)
98 {
99 if (str == "CRASH")
100 return TestResultType::Crash;
101 if (str == "FAIL")
102 return TestResultType::Fail;
103 if (str == "PASS")
104 return TestResultType::Pass;
105 if (str == "NOTRUN")
106 return TestResultType::NoResult;
107 if (str == "SKIP")
108 return TestResultType::Skip;
109 if (str == "TIMEOUT")
110 return TestResultType::Timeout;
111 return TestResultType::Unknown;
112 }
113
114 bool IsFailedResult(TestResultType resultType)
115 {
116 return resultType != TestResultType::Pass && resultType != TestResultType::Skip;
117 }
118
119 js::Value ResultTypeToJSString(TestResultType type, js::Document::AllocatorType *allocator)
120 {
121 js::Value jsName;
122 jsName.SetString(ResultTypeToString(type), *allocator);
123 return jsName;
124 }
125
126 bool WriteJsonFile(const std::string &outputFile, js::Document *doc)
127 {
128 FILE *fp = fopen(outputFile.c_str(), "w");
129 if (!fp)
130 {
131 return false;
132 }
133
134 constexpr size_t kBufferSize = 0xFFFF;
135 std::vector<char> writeBuffer(kBufferSize);
136 js::FileWriteStream os(fp, writeBuffer.data(), kBufferSize);
137 js::PrettyWriter<js::FileWriteStream> writer(os);
138 if (!doc->Accept(writer))
139 {
140 fclose(fp);
141 return false;
142 }
143 fclose(fp);
144 return true;
145 }
146
147 // Writes out a TestResults to the Chromium JSON Test Results format.
148 // https://chromium.googlesource.com/chromium/src.git/+/main/docs/testing/json_test_results_format.md
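// The document produced below has roughly the following shape (values here are purely
// illustrative):
//
//   {
//     "interrupted": false,
//     "path_delimiter": ".",
//     "version": 3,
//     "seconds_since_epoch": 1234567890,
//     "tests": { "Suite.Test": { "actual": "PASS", "expected": "PASS", "times": [0.1] } },
//     "num_failures_by_type": { "PASS": 10, "FAIL": 1 }
//   }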
149 void WriteResultsFile(bool interrupted,
150 const TestResults &testResults,
151 const std::string &outputFile)
152 {
153 time_t ltime;
154 time(&ltime);
155 struct tm *timeinfo = gmtime(&ltime);
156 ltime = mktime(timeinfo);
157
158 uint64_t secondsSinceEpoch = static_cast<uint64_t>(ltime);
159
160 js::Document doc;
161 doc.SetObject();
162
163 js::Document::AllocatorType &allocator = doc.GetAllocator();
164
165 doc.AddMember("interrupted", interrupted, allocator);
166 doc.AddMember("path_delimiter", ".", allocator);
167 doc.AddMember("version", 3, allocator);
168 doc.AddMember("seconds_since_epoch", secondsSinceEpoch, allocator);
169
170 js::Value tests;
171 tests.SetObject();
172
173 // If we have any test artifacts, make a fake test to house them.
174 if (!testResults.testArtifactPaths.empty())
175 {
176 js::Value artifactsTest;
177 artifactsTest.SetObject();
178
179 artifactsTest.AddMember("actual", "PASS", allocator);
180 artifactsTest.AddMember("expected", "PASS", allocator);
181
182 js::Value artifacts;
183 artifacts.SetObject();
184
185 for (const std::string &testArtifactPath : testResults.testArtifactPaths)
186 {
187 std::vector<std::string> pieces =
188 SplitString(testArtifactPath, "/\\", WhitespaceHandling::TRIM_WHITESPACE,
189 SplitResult::SPLIT_WANT_NONEMPTY);
190 ASSERT(!pieces.empty());
191
192 js::Value basename;
193 basename.SetString(pieces.back(), allocator);
194
195 js::Value artifactPath;
196 artifactPath.SetString(testArtifactPath, allocator);
197
198 js::Value artifactArray;
199 artifactArray.SetArray();
200 artifactArray.PushBack(artifactPath, allocator);
201
202 artifacts.AddMember(basename, artifactArray, allocator);
203 }
204
205 artifactsTest.AddMember("artifacts", artifacts, allocator);
206
207 js::Value fakeTestName;
208 fakeTestName.SetString(testResults.testArtifactsFakeTestName, allocator);
209 tests.AddMember(fakeTestName, artifactsTest, allocator);
210 }
211
212 std::map<TestResultType, uint32_t> counts;
213
214 for (const auto &resultIter : testResults.results)
215 {
216 const TestIdentifier &id = resultIter.first;
217 const TestResult &result = resultIter.second;
218
219 js::Value jsResult;
220 jsResult.SetObject();
221
222 counts[result.type]++;
223
224 std::string actualResult;
225 for (uint32_t fail = 0; fail < result.flakyFailures; ++fail)
226 {
227 actualResult += "FAIL ";
228 }
229
230 actualResult += ResultTypeToString(result.type);
231
232 std::string expectedResult = "PASS";
233 if (result.type == TestResultType::Skip)
234 {
235 expectedResult = "SKIP";
236 }
237
238 // Handle flaky passing tests.
239 if (result.flakyFailures > 0 && result.type == TestResultType::Pass)
240 {
241 expectedResult = "FAIL PASS";
242 jsResult.AddMember("is_flaky", true, allocator);
243 }
244
245 jsResult.AddMember("actual", actualResult, allocator);
246 jsResult.AddMember("expected", expectedResult, allocator);
247
248 if (IsFailedResult(result.type))
249 {
250 jsResult.AddMember("is_unexpected", true, allocator);
251 }
252
253 js::Value times;
254 times.SetArray();
255 for (double elapsedTimeSeconds : result.elapsedTimeSeconds)
256 {
257 times.PushBack(elapsedTimeSeconds, allocator);
258 }
259
260 jsResult.AddMember("times", times, allocator);
261
262 char testName[500];
263 id.snprintfName(testName, sizeof(testName));
264 js::Value jsName;
265 jsName.SetString(testName, allocator);
266
267 tests.AddMember(jsName, jsResult, allocator);
268 }
269
270 js::Value numFailuresByType;
271 numFailuresByType.SetObject();
272
273 for (const auto &countIter : counts)
274 {
275 TestResultType type = countIter.first;
276 uint32_t count = countIter.second;
277
278 js::Value jsCount(count);
279 numFailuresByType.AddMember(ResultTypeToJSString(type, &allocator), jsCount, allocator);
280 }
281
282 doc.AddMember("num_failures_by_type", numFailuresByType, allocator);
283
284 doc.AddMember("tests", tests, allocator);
285
286 printf("Writing test results to %s\n", outputFile.c_str());
287
288 if (!WriteJsonFile(outputFile, &doc))
289 {
290 printf("Error writing test results file.\n");
291 }
292 }
293
294 void WriteHistogramJson(const HistogramWriter &histogramWriter, const std::string &outputFile)
295 {
296 js::Document doc;
297 doc.SetArray();
298
299 histogramWriter.getAsJSON(&doc);
300
301 printf("Writing histogram json to %s\n", outputFile.c_str());
302
303 if (!WriteJsonFile(outputFile, &doc))
304 {
305 printf("Error writing histogram json file.\n");
306 }
307 }
308
309 void UpdateCurrentTestResult(const testing::TestResult &resultIn, TestResults *resultsOut)
310 {
311 TestResult &resultOut = resultsOut->results[resultsOut->currentTest];
312
313 // Note: Crashes and Timeouts are detected by the crash handler and a watchdog thread.
314 if (resultIn.Skipped())
315 {
316 resultOut.type = TestResultType::Skip;
317 }
318 else if (resultIn.Failed())
319 {
320 resultOut.type = TestResultType::Fail;
321 }
322 else
323 {
324 resultOut.type = TestResultType::Pass;
325 }
326
327 resultOut.elapsedTimeSeconds.back() = resultsOut->currentTestTimer.getElapsedWallClockTime();
328 }
329
330 TestIdentifier GetTestIdentifier(const testing::TestInfo &testInfo)
331 {
332 return {testInfo.test_suite_name(), testInfo.name()};
333 }
334
335 bool IsTestDisabled(const testing::TestInfo &testInfo)
336 {
337 return ::strstr(testInfo.name(), "DISABLED_") == testInfo.name();
338 }
339
340 using TestIdentifierFilter = std::function<bool(const TestIdentifier &id)>;
341
342 std::vector<TestIdentifier> FilterTests(std::map<TestIdentifier, FileLine> *fileLinesOut,
343 TestIdentifierFilter filter,
344 bool alsoRunDisabledTests)
345 {
346 std::vector<TestIdentifier> tests;
347
348 const testing::UnitTest &testProgramInfo = *testing::UnitTest::GetInstance();
349 for (int suiteIndex = 0; suiteIndex < testProgramInfo.total_test_suite_count(); ++suiteIndex)
350 {
351 const testing::TestSuite &testSuite = *testProgramInfo.GetTestSuite(suiteIndex);
352 for (int testIndex = 0; testIndex < testSuite.total_test_count(); ++testIndex)
353 {
354 const testing::TestInfo &testInfo = *testSuite.GetTestInfo(testIndex);
355 TestIdentifier id = GetTestIdentifier(testInfo);
356 if (filter(id) && (!IsTestDisabled(testInfo) || alsoRunDisabledTests))
357 {
358 tests.emplace_back(id);
359
360 if (fileLinesOut)
361 {
362 (*fileLinesOut)[id] = {testInfo.file(), testInfo.line()};
363 }
364 }
365 }
366 }
367
368 return tests;
369 }
370
371 std::vector<TestIdentifier> GetFilteredTests(std::map<TestIdentifier, FileLine> *fileLinesOut,
372 bool alsoRunDisabledTests)
373 {
374 TestIdentifierFilter gtestIDFilter = [](const TestIdentifier &id) {
375 return testing::internal::UnitTestOptions::FilterMatchesTest(id.testSuiteName, id.testName);
376 };
377
378 return FilterTests(fileLinesOut, gtestIDFilter, alsoRunDisabledTests);
379 }
380
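// Builds the test list for one shard by striding through the full list: shard N takes tests
// N, N + shardCount, N + 2 * shardCount, and so on. (The fileLinesOut and alsoRunDisabledTests
// parameters are not used by this function.)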
381 std::vector<TestIdentifier> GetShardTests(const std::vector<TestIdentifier> &allTests,
382 int shardIndex,
383 int shardCount,
384 std::map<TestIdentifier, FileLine> *fileLinesOut,
385 bool alsoRunDisabledTests)
386 {
387 std::vector<TestIdentifier> shardTests;
388
389 for (int testIndex = shardIndex; testIndex < static_cast<int>(allTests.size());
390 testIndex += shardCount)
391 {
392 shardTests.emplace_back(allTests[testIndex]);
393 }
394
395 return shardTests;
396 }
397
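// Builds a gtest filter that selects exactly the given tests, e.g. (illustrative names):
//   --gtest_filter=MySuite.TestA:MySuite.TestB
// Dashes in names are replaced with '?' single-character wildcards because gtest treats '-' as
// the start of the negative filter section.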
398 std::string GetTestFilter(const std::vector<TestIdentifier> &tests)
399 {
400 std::stringstream filterStream;
401
402 filterStream << "--gtest_filter=";
403
404 for (size_t testIndex = 0; testIndex < tests.size(); ++testIndex)
405 {
406 if (testIndex != 0)
407 {
408 filterStream << ":";
409 }
410
411 filterStream << ReplaceDashesWithQuestionMark(tests[testIndex].testSuiteName) << "."
412 << ReplaceDashesWithQuestionMark(tests[testIndex].testName);
413 }
414
415 return filterStream.str();
416 }
417
418 bool GetTestArtifactsFromJSON(const js::Value::ConstObject &obj,
419 std::vector<std::string> *testArtifactPathsOut)
420 {
421 if (!obj.HasMember("artifacts"))
422 {
423 printf("No artifacts member.\n");
424 return false;
425 }
426
427 const js::Value &jsArtifacts = obj["artifacts"];
428 if (!jsArtifacts.IsObject())
429 {
430 printf("Artifacts are not an object.\n");
431 return false;
432 }
433
434 const js::Value::ConstObject &artifacts = jsArtifacts.GetObj();
435 for (const auto &artifactMember : artifacts)
436 {
437 const js::Value &artifact = artifactMember.value;
438 if (!artifact.IsArray())
439 {
440 printf("Artifact is not an array of strings of size 1.\n");
441 return false;
442 }
443
444 const js::Value::ConstArray &artifactArray = artifact.GetArray();
445 if (artifactArray.Size() != 1)
446 {
447 printf("Artifact is not an array of strings of size 1.\n");
448 return false;
449 }
450
451 const js::Value &artifactName = artifactArray[0];
452 if (!artifactName.IsString())
453 {
454 printf("Artifact is not an array of strings of size 1.\n");
455 return false;
456 }
457
458 testArtifactPathsOut->push_back(artifactName.GetString());
459 }
460
461 return true;
462 }
463
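// Parses one entry of the "tests" object back into a TestResult. The "actual" field may hold
// several space-separated attempts for a flaky test (e.g. "FAIL FAIL PASS"); each failed attempt
// is counted as a flaky failure and the last token becomes the final result type.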
464 bool GetSingleTestResultFromJSON(const js::Value &name,
465 const js::Value::ConstObject &obj,
466 TestResults *resultsOut)
467 {
468
469 TestIdentifier id;
470 if (!TestIdentifier::ParseFromString(name.GetString(), &id))
471 {
472 printf("Could not parse test identifier.\n");
473 return false;
474 }
475
476 if (!obj.HasMember("expected") || !obj.HasMember("actual"))
477 {
478 printf("No expected or actual member.\n");
479 return false;
480 }
481
482 const js::Value &expected = obj["expected"];
483 const js::Value &actual = obj["actual"];
484
485 if (!expected.IsString() || !actual.IsString())
486 {
487 printf("Expected or actual member is not a string.\n");
488 return false;
489 }
490
491 const std::string actualStr = actual.GetString();
492
493 TestResultType resultType = TestResultType::Unknown;
494 int flakyFailures = 0;
495 if (actualStr.find(' ') != std::string::npos)
496 {
497 std::istringstream strstr(actualStr);
498 std::string token;
499 while (std::getline(strstr, token, ' '))
500 {
501 resultType = GetResultTypeFromString(token);
502 if (resultType == TestResultType::Unknown)
503 {
504 printf("Failed to parse result type.\n");
505 return false;
506 }
507 if (IsFailedResult(resultType))
508 {
509 flakyFailures++;
510 }
511 }
512 }
513 else
514 {
515 resultType = GetResultTypeFromString(actualStr);
516 if (resultType == TestResultType::Unknown)
517 {
518 printf("Failed to parse result type.\n");
519 return false;
520 }
521 }
522
523 std::vector<double> elapsedTimeSeconds;
524 if (obj.HasMember("times"))
525 {
526 const js::Value &times = obj["times"];
527 if (!times.IsArray())
528 {
529 return false;
530 }
531
532 const js::Value::ConstArray &timesArray = times.GetArray();
533 if (timesArray.Size() < 1)
534 {
535 return false;
536 }
537 for (const js::Value &time : timesArray)
538 {
539 if (!time.IsDouble())
540 {
541 return false;
542 }
543
544 elapsedTimeSeconds.push_back(time.GetDouble());
545 }
546 }
547
548 TestResult &result = resultsOut->results[id];
549 result.elapsedTimeSeconds = elapsedTimeSeconds;
550 result.type = resultType;
551 result.flakyFailures = flakyFailures;
552 return true;
553 }
554
555 bool GetTestResultsFromJSON(const js::Document &document, TestResults *resultsOut)
556 {
557 if (!document.HasMember("tests") || !document["tests"].IsObject())
558 {
559 printf("JSON document has no tests member.\n");
560 return false;
561 }
562
563 const js::Value::ConstObject &tests = document["tests"].GetObj();
564 for (const auto &testMember : tests)
565 {
566 // Get test identifier.
567 const js::Value &name = testMember.name;
568 if (!name.IsString())
569 {
570 printf("Name is not a string.\n");
571 return false;
572 }
573
574 // Get test result.
575 const js::Value &value = testMember.value;
576 if (!value.IsObject())
577 {
578 printf("Test result is not an object.\n");
579 return false;
580 }
581
582 const js::Value::ConstObject &obj = value.GetObj();
583
584 if (BeginsWith(name.GetString(), kArtifactsFakeTestName))
585 {
586 if (!GetTestArtifactsFromJSON(obj, &resultsOut->testArtifactPaths))
587 {
588 return false;
589 }
590 }
591 else
592 {
593 if (!GetSingleTestResultFromJSON(name, obj, resultsOut))
594 {
595 return false;
596 }
597 }
598 }
599
600 return true;
601 }
602
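// Folds one batch's results ('input') into the accumulated results ('output'). A failed test
// that still has flaky retries remaining is reset to NoResult in 'input' so the caller re-queues
// it, and the failure is recorded in output->flakyFailures instead of as a final result.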
603 bool MergeTestResults(TestResults *input, TestResults *output, int flakyRetries)
604 {
605 for (auto &resultsIter : input->results)
606 {
607 const TestIdentifier &id = resultsIter.first;
608 TestResult &inputResult = resultsIter.second;
609 TestResult &outputResult = output->results[id];
610
611 if (inputResult.type != TestResultType::NoResult)
612 {
613 if (outputResult.type != TestResultType::NoResult)
614 {
615 printf("Warning: duplicate entry for %s.%s.\n", id.testSuiteName.c_str(),
616 id.testName.c_str());
617 return false;
618 }
619
620 // Mark the tests that haven't exhausted their retries as 'SKIP'. This makes ANGLE
621 // attempt the test again.
622 uint32_t runCount = outputResult.flakyFailures + 1;
623 if (IsFailedResult(inputResult.type) && runCount < static_cast<uint32_t>(flakyRetries))
624 {
625 printf("Retrying flaky test: %s.%s.\n", id.testSuiteName.c_str(),
626 id.testName.c_str());
627 inputResult.type = TestResultType::NoResult;
628 outputResult.flakyFailures++;
629 }
630 else
631 {
632 outputResult.type = inputResult.type;
633 }
634 if (runCount == 1)
635 {
636 outputResult.elapsedTimeSeconds = inputResult.elapsedTimeSeconds;
637 }
638 else
639 {
640 outputResult.elapsedTimeSeconds.insert(outputResult.elapsedTimeSeconds.end(),
641 inputResult.elapsedTimeSeconds.begin(),
642 inputResult.elapsedTimeSeconds.end());
643 }
644 }
645 }
646
647 output->testArtifactPaths.insert(output->testArtifactPaths.end(),
648 input->testArtifactPaths.begin(),
649 input->testArtifactPaths.end());
650
651 return true;
652 }
653
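// Prints the slice of a batch's captured stdout that belongs to a single test: from its
// "[ RUN      ]" line up to the matching "[  FAILED  ]" (or "[       OK ]" for passing tests)
// line, or to the end of the output if no terminating line is found.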
654 void PrintTestOutputSnippet(const TestIdentifier &id,
655 const TestResult &result,
656 const std::string &fullOutput)
657 {
658 std::stringstream nameStream;
659 nameStream << id;
660 std::string fullName = nameStream.str();
661
662 size_t runPos = fullOutput.find(std::string(kStartedTestString) + fullName);
663 if (runPos == std::string::npos)
664 {
665 printf("Cannot locate test output snippet.\n");
666 return;
667 }
668
669 size_t endPos = fullOutput.find(std::string(kFailedTestString) + fullName, runPos);
670 // Only clip the snippet to the "OK" message if the test really
671 // succeeded. It still might have e.g. crashed after printing it.
672 if (endPos == std::string::npos && result.type == TestResultType::Pass)
673 {
674 endPos = fullOutput.find(std::string(kPassedTestString) + fullName, runPos);
675 }
676 if (endPos != std::string::npos)
677 {
678 size_t newline_pos = fullOutput.find("\n", endPos);
679 if (newline_pos != std::string::npos)
680 endPos = newline_pos + 1;
681 }
682
683 std::cout << "\n";
684 if (endPos != std::string::npos)
685 {
686 std::cout << fullOutput.substr(runPos, endPos - runPos);
687 }
688 else
689 {
690 std::cout << fullOutput.substr(runPos);
691 }
692 }
693
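// Extracts the config suffix of a parameterized test name, for example (illustrative names):
//   "Basic/ES2_Vulkan"            -> "ES2_Vulkan"
//   "Basic/ES2_Vulkan__Variant"   -> "ES2_Vulkan"
//   "Basic" or "Basic/NotAConfig" -> "default"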
694 std::string GetConfigNameFromTestIdentifier(const TestIdentifier &id)
695 {
696 size_t slashPos = id.testName.find('/');
697 if (slashPos == std::string::npos)
698 {
699 return "default";
700 }
701
702 size_t doubleUnderscorePos = id.testName.find("__");
703 if (doubleUnderscorePos == std::string::npos)
704 {
705 std::string configName = id.testName.substr(slashPos + 1);
706
707 if (!BeginsWith(configName, "ES"))
708 {
709 return "default";
710 }
711
712 return configName;
713 }
714 else
715 {
716 return id.testName.substr(slashPos + 1, doubleUnderscorePos - slashPos - 1);
717 }
718 }
719
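// Groups tests by config name and splits each config's tests into batches of at most
// 'batchSize', striping tests across the batches so neighboring (often similarly slow) tests
// land in different batches.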
720 TestQueue BatchTests(const std::vector<TestIdentifier> &tests, int batchSize)
721 {
722 // First sort tests by configuration.
723 angle::HashMap<std::string, std::vector<TestIdentifier>> testsSortedByConfig;
724 for (const TestIdentifier &id : tests)
725 {
726 std::string config = GetConfigNameFromTestIdentifier(id);
727 testsSortedByConfig[config].push_back(id);
728 }
729
730 // Then group into batches by 'batchSize'.
731 TestQueue testQueue;
732 for (const auto &configAndIds : testsSortedByConfig)
733 {
734 const std::vector<TestIdentifier> &configTests = configAndIds.second;
735
736 // Count the number of batches needed for this config.
737 int batchesForConfig = static_cast<int>(configTests.size() + batchSize - 1) / batchSize;
738
739 // Create batches with striping to split up slow tests.
740 for (int batchIndex = 0; batchIndex < batchesForConfig; ++batchIndex)
741 {
742 std::vector<TestIdentifier> batchTests;
743 for (size_t testIndex = batchIndex; testIndex < configTests.size();
744 testIndex += batchesForConfig)
745 {
746 batchTests.push_back(configTests[testIndex]);
747 }
748 testQueue.emplace(std::move(batchTests));
749 ASSERT(batchTests.empty());
750 }
751 }
752
753 return testQueue;
754 }
755
756 void ListTests(const std::map<TestIdentifier, TestResult> &resultsMap)
757 {
758 std::cout << "Tests list:\n";
759
760 for (const auto &resultIt : resultsMap)
761 {
762 const TestIdentifier &id = resultIt.first;
763 std::cout << id << "\n";
764 }
765
766 std::cout << "End tests list.\n";
767 }
768
769 // Prints the names of the tests matching the user-specified filter flag.
770 // This matches the output from googletest/src/gtest.cc but is much much faster for large filters.
771 // See http://anglebug.com/5164
772 void GTestListTests(const std::map<TestIdentifier, TestResult> &resultsMap)
773 {
774 std::map<std::string, std::vector<std::string>> suites;
775
776 for (const auto &resultIt : resultsMap)
777 {
778 const TestIdentifier &id = resultIt.first;
779 suites[id.testSuiteName].push_back(id.testName);
780 }
781
782 for (const auto &testSuiteIt : suites)
783 {
784 bool printedTestSuiteName = false;
785
786 const std::string &suiteName = testSuiteIt.first;
787 const std::vector<std::string> &testNames = testSuiteIt.second;
788
789 for (const std::string &testName : testNames)
790 {
791 if (!printedTestSuiteName)
792 {
793 printedTestSuiteName = true;
794 printf("%s.\n", suiteName.c_str());
795 }
796 printf(" %s\n", testName.c_str());
797 }
798 }
799 }
800
801 // On Android, batching is done on the host, i.e. externally.
802 // TestSuite executes on the device and should just passthrough all args to GTest.
803 bool UsesExternalBatching()
804 {
805 #if defined(ANGLE_PLATFORM_ANDROID)
806 return true;
807 #else
808 return false;
809 #endif
810 }
811 } // namespace
812
813 void MetricWriter::enable(const std::string &testArtifactDirectory)
814 {
815 mPath = testArtifactDirectory + GetPathSeparator() + "angle_metrics";
816 }
817
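// Each writeInfo() call followed by a write*Value() call appends one JSON object per line to
// the angle_metrics file, e.g. (illustrative values):
//   {"name":"N","backend":"vulkan","story":"S","metric":"wall_time","units":"ms","value":"1.5"}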
818 void MetricWriter::writeInfo(const std::string &name,
819 const std::string &backend,
820 const std::string &story,
821 const std::string &metric,
822 const std::string &units)
823 {
824 if (mPath.empty())
825 {
826 return;
827 }
828
829 if (mFile == nullptr)
830 {
831 mFile = fopen(mPath.c_str(), "w");
832 }
833 ASSERT(mFile != nullptr);
834
835 fprintf(mFile, "{\"name\":\"%s\",", name.c_str());
836 fprintf(mFile, "\"backend\":\"%s\",", backend.c_str());
837 fprintf(mFile, "\"story\":\"%s\",", story.c_str());
838 fprintf(mFile, "\"metric\":\"%s\",", metric.c_str());
839 fprintf(mFile, "\"units\":\"%s\",", units.c_str());
840 // followed by writing value, so no closing bracket yet
841 }
842
843 void MetricWriter::writeDoubleValue(double value)
844 {
845 if (mFile != nullptr)
846 {
847 fprintf(mFile, "\"value\":\"%lf\"}\n", value);
848 }
849 }
850
851 void MetricWriter::writeIntegerValue(size_t value)
852 {
853 if (mFile != nullptr)
854 {
855 fprintf(mFile, "\"value\":\"%zu\"}\n", value);
856 }
857 }
858
859 void MetricWriter::close()
860 {
861 if (mFile != nullptr)
862 {
863 fclose(mFile);
864 mFile = nullptr;
865 }
866 }
867
868 // static
869 TestSuite *TestSuite::mInstance = nullptr;
870
871 TestIdentifier::TestIdentifier() = default;
872
873 TestIdentifier::TestIdentifier(const std::string &suiteNameIn, const std::string &nameIn)
874 : testSuiteName(suiteNameIn), testName(nameIn)
875 {}
876
877 TestIdentifier::TestIdentifier(const TestIdentifier &other) = default;
878
879 TestIdentifier::~TestIdentifier() = default;
880
881 TestIdentifier &TestIdentifier::operator=(const TestIdentifier &other) = default;
882
883 void TestIdentifier::snprintfName(char *outBuffer, size_t maxLen) const
884 {
885 snprintf(outBuffer, maxLen, "%s.%s", testSuiteName.c_str(), testName.c_str());
886 }
887
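// Splits a "SuiteName.TestName" string at the first '.' into its suite and test components.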
888 // static
889 bool TestIdentifier::ParseFromString(const std::string &str, TestIdentifier *idOut)
890 {
891 size_t separator = str.find(".");
892 if (separator == std::string::npos)
893 {
894 return false;
895 }
896
897 idOut->testSuiteName = str.substr(0, separator);
898 idOut->testName = str.substr(separator + 1, str.length() - separator - 1);
899 return true;
900 }
901
902 TestResults::TestResults() = default;
903
904 TestResults::~TestResults() = default;
905
906 ProcessInfo::ProcessInfo() = default;
907
908 ProcessInfo &ProcessInfo::operator=(ProcessInfo &&rhs)
909 {
910 process = std::move(rhs.process);
911 testsInBatch = std::move(rhs.testsInBatch);
912 resultsFileName = std::move(rhs.resultsFileName);
913 filterFileName = std::move(rhs.filterFileName);
914 commandLine = std::move(rhs.commandLine);
915 filterString = std::move(rhs.filterString);
916 return *this;
917 }
918
919 ProcessInfo::~ProcessInfo() = default;
920
921 ProcessInfo::ProcessInfo(ProcessInfo &&other)
922 {
923 *this = std::move(other);
924 }
925
926 class TestSuite::TestEventListener : public testing::EmptyTestEventListener
927 {
928 public:
929 // Note: TestResults is owned by the TestSuite. It should outlive TestEventListener.
930 TestEventListener(TestSuite *testSuite) : mTestSuite(testSuite) {}
931
932 void OnTestStart(const testing::TestInfo &testInfo) override
933 {
934 std::lock_guard<std::mutex> guard(mTestSuite->mTestResults.currentTestMutex);
935 mTestSuite->mTestResults.currentTest = GetTestIdentifier(testInfo);
936 mTestSuite->mTestResults.currentTestTimer.start();
937 }
938
939 void OnTestEnd(const testing::TestInfo &testInfo) override
940 {
941 std::lock_guard<std::mutex> guard(mTestSuite->mTestResults.currentTestMutex);
942 mTestSuite->mTestResults.currentTestTimer.stop();
943 const testing::TestResult &resultIn = *testInfo.result();
944 UpdateCurrentTestResult(resultIn, &mTestSuite->mTestResults);
945 mTestSuite->mTestResults.currentTest = TestIdentifier();
946 }
947
948 void OnTestProgramEnd(const testing::UnitTest &testProgramInfo) override
949 {
950 std::lock_guard<std::mutex> guard(mTestSuite->mTestResults.currentTestMutex);
951 mTestSuite->mTestResults.allDone = true;
952 mTestSuite->writeOutputFiles(false);
953 }
954
955 private:
956 TestSuite *mTestSuite;
957 };
958
959 TestSuite::TestSuite(int *argc, char **argv) : TestSuite(argc, argv, []() {}) {}
960
961 TestSuite::TestSuite(int *argc, char **argv, std::function<void()> registerTestsCallback)
962 : mShardCount(-1),
963 mShardIndex(-1),
964 mBotMode(false),
965 mDebugTestGroups(false),
966 mGTestListTests(false),
967 mListTests(false),
968 mPrintTestStdout(false),
969 mDisableCrashHandler(false),
970 mBatchSize(kDefaultBatchSize),
971 mCurrentResultCount(0),
972 mTotalResultCount(0),
973 mMaxProcesses(std::min(NumberOfProcessors(), kDefaultMaxProcesses)),
974 mTestTimeout(kDefaultTestTimeout),
975 mBatchTimeout(kDefaultBatchTimeout),
976 mBatchId(-1),
977 mFlakyRetries(0),
978 mMaxFailures(kDefaultMaxFailures),
979 mFailureCount(0),
980 mModifiedPreferredDevice(false)
981 {
982 ASSERT(mInstance == nullptr);
983 mInstance = this;
984
985 Optional<int> filterArgIndex;
986 bool alsoRunDisabledTests = false;
987
988 #if defined(ANGLE_PLATFORM_MACOS)
989 // By default, we should hook file API functions on macOS to avoid slow Metal shader caching
990 // file access.
991 angle::InitMetalFileAPIHooking(*argc, argv);
992 #endif
993
994 #if defined(ANGLE_PLATFORM_WINDOWS)
995 GTEST_FLAG_SET(catch_exceptions, false);
996 #endif
997
998 if (*argc <= 0)
999 {
1000 printf("Missing test arguments.\n");
1001 exit(EXIT_FAILURE);
1002 }
1003
1004 mTestExecutableName = argv[0];
1005
1006 for (int argIndex = 1; argIndex < *argc;)
1007 {
1008 if (parseSingleArg(argc, argv, argIndex))
1009 {
1010 continue;
1011 }
1012
1013 if (strstr(argv[argIndex], "--gtest_filter=") == argv[argIndex])
1014 {
1015 filterArgIndex = argIndex;
1016 }
1017 else
1018 {
1019 // Don't include disabled tests in test lists unless the user asks for them.
1020 if (strcmp("--gtest_also_run_disabled_tests", argv[argIndex]) == 0)
1021 {
1022 alsoRunDisabledTests = true;
1023 }
1024
1025 mChildProcessArgs.push_back(argv[argIndex]);
1026 }
1027 ++argIndex;
1028 }
1029
1030 if (mTestArtifactDirectory.empty())
1031 {
1032 mTestArtifactDirectory = GetEnvironmentVar("ISOLATED_OUTDIR");
1033 }
1034
1035 #if defined(ANGLE_PLATFORM_FUCHSIA)
1036 if (mBotMode)
1037 {
1038 printf("Note: Bot mode is not available on Fuchsia. See http://anglebug.com/7312\n");
1039 mBotMode = false;
1040 }
1041 #endif
1042
1043 if (UsesExternalBatching() && mBotMode)
1044 {
1045 printf("Bot mode is mutually exclusive with external batching.\n");
1046 exit(EXIT_FAILURE);
1047 }
1048
1049 mTestResults.currentTestTimeout = mTestTimeout;
1050
1051 if (!mDisableCrashHandler)
1052 {
1053 // Note that the crash callback must be owned and not use global constructors.
1054 mCrashCallback = [this]() { onCrashOrTimeout(TestResultType::Crash); };
1055 InitCrashHandler(&mCrashCallback);
1056 }
1057
1058 #if defined(ANGLE_PLATFORM_WINDOWS) || defined(ANGLE_PLATFORM_LINUX)
1059 if (IsASan())
1060 {
1061 // Set before `registerTestsCallback()` call
1062 SetEnvironmentVar(kVkLoaderDisableDLLUnloadingEnvVar, "1");
1063 }
1064 #endif
1065
1066 registerTestsCallback();
1067
1068 std::string envShardIndex = angle::GetEnvironmentVar("GTEST_SHARD_INDEX");
1069 if (!envShardIndex.empty())
1070 {
1071 angle::UnsetEnvironmentVar("GTEST_SHARD_INDEX");
1072 if (mShardIndex == -1)
1073 {
1074 std::stringstream shardIndexStream(envShardIndex);
1075 shardIndexStream >> mShardIndex;
1076 }
1077 }
1078
1079 std::string envTotalShards = angle::GetEnvironmentVar("GTEST_TOTAL_SHARDS");
1080 if (!envTotalShards.empty())
1081 {
1082 angle::UnsetEnvironmentVar("GTEST_TOTAL_SHARDS");
1083 if (mShardCount == -1)
1084 {
1085 std::stringstream shardCountStream(envTotalShards);
1086 shardCountStream >> mShardCount;
1087 }
1088 }
1089
1090 // The test harness reads the active GPU from SystemInfo and uses that for test expectations.
1091 // However, some ANGLE backends don't have a concept of an "active" GPU, and instead use power
1092 // preference to select GPU. We can use the environment variable ANGLE_PREFERRED_DEVICE to
1093 // ensure ANGLE's selected GPU matches the GPU expected for this test suite.
1094 const GPUTestConfig testConfig = GPUTestConfig();
1095 const char kPreferredDeviceEnvVar[] = "ANGLE_PREFERRED_DEVICE";
1096 if (GetEnvironmentVar(kPreferredDeviceEnvVar).empty())
1097 {
1098 mModifiedPreferredDevice = true;
1099 const GPUTestConfig::ConditionArray &conditions = testConfig.getConditions();
1100 if (conditions[GPUTestConfig::kConditionAMD])
1101 {
1102 SetEnvironmentVar(kPreferredDeviceEnvVar, "amd");
1103 }
1104 else if (conditions[GPUTestConfig::kConditionNVIDIA])
1105 {
1106 SetEnvironmentVar(kPreferredDeviceEnvVar, "nvidia");
1107 }
1108 else if (conditions[GPUTestConfig::kConditionIntel])
1109 {
1110 SetEnvironmentVar(kPreferredDeviceEnvVar, "intel");
1111 }
1112 else if (conditions[GPUTestConfig::kConditionApple])
1113 {
1114 SetEnvironmentVar(kPreferredDeviceEnvVar, "apple");
1115 }
1116 }
1117
1118 // Special handling for TSAN and UBSAN to force crashes when run in automated testing.
1119 if (IsTSan())
1120 {
1121 std::string tsanOptions = GetEnvironmentVar(kTSanOptionsEnvVar);
1122 tsanOptions += " halt_on_error=1";
1123 SetEnvironmentVar(kTSanOptionsEnvVar, tsanOptions.c_str());
1124 }
1125
1126 if (IsUBSan())
1127 {
1128 std::string ubsanOptions = GetEnvironmentVar(kUBSanOptionsEnvVar);
1129 ubsanOptions += " halt_on_error=1";
1130 SetEnvironmentVar(kUBSanOptionsEnvVar, ubsanOptions.c_str());
1131 }
1132
1133 if ((mShardIndex == -1) != (mShardCount == -1))
1134 {
1135 printf("Shard index and shard count must be specified together.\n");
1136 exit(EXIT_FAILURE);
1137 }
1138
1139 if (!mFilterFile.empty())
1140 {
1141 if (filterArgIndex.valid())
1142 {
1143 printf("Cannot use gtest_filter in conjunction with a filter file.\n");
1144 exit(EXIT_FAILURE);
1145 }
1146
1147 std::string fileContents;
1148 if (!ReadEntireFileToString(mFilterFile.c_str(), &fileContents))
1149 {
1150 printf("Error loading filter file: %s\n", mFilterFile.c_str());
1151 exit(EXIT_FAILURE);
1152 }
1153 mFilterString.assign(fileContents.data());
1154
1155 if (mFilterString.substr(0, strlen("--gtest_filter=")) != std::string("--gtest_filter="))
1156 {
1157 printf("Filter file must start with \"--gtest_filter=\".\n");
1158 exit(EXIT_FAILURE);
1159 }
1160
1161 // Note that we only add a filter string if we previously deleted a shader filter file
1162 // argument. So we will have space for the new filter string in argv.
1163 AddArg(argc, argv, mFilterString.c_str());
1164 }
1165
1166 // Call into gtest internals to force parameterized test name registration.
1167 testing::internal::UnitTestImpl *impl = testing::internal::GetUnitTestImpl();
1168 impl->RegisterParameterizedTests();
1169
1170 // Initialize internal GoogleTest filter arguments so we can call "FilterMatchesTest".
1171 testing::internal::ParseGoogleTestFlagsOnly(argc, argv);
1172
1173 std::vector<TestIdentifier> testSet = GetFilteredTests(&mTestFileLines, alsoRunDisabledTests);
1174
1175 if (mShardCount == 0)
1176 {
1177 printf("Shard count must be > 0.\n");
1178 exit(EXIT_FAILURE);
1179 }
1180 else if (mShardCount > 0)
1181 {
1182 if (mShardIndex >= mShardCount)
1183 {
1184 printf("Shard index must be less than shard count.\n");
1185 exit(EXIT_FAILURE);
1186 }
1187
1188 // If there's only one shard, we can use the testSet as defined above.
1189 if (mShardCount > 1)
1190 {
1191 if (!mBotMode && !UsesExternalBatching())
1192 {
1193 printf("Sharding is only supported in bot mode or external batching.\n");
1194 exit(EXIT_FAILURE);
1195 }
1196 // With external batching, we must use exactly the testSet as defined externally.
1197 // But when listing tests, we do need to apply sharding ourselves,
1198 // since we use our own implementation for listing tests and not GTest directly.
1199 if (!UsesExternalBatching() || mGTestListTests || mListTests)
1200 {
1201 testSet = GetShardTests(testSet, mShardIndex, mShardCount, &mTestFileLines,
1202 alsoRunDisabledTests);
1203 }
1204 }
1205 }
1206
1207 if (!testSet.empty())
1208 {
1209 std::stringstream fakeTestName;
1210 fakeTestName << kArtifactsFakeTestName << '-' << testSet[0].testName;
1211 mTestResults.testArtifactsFakeTestName = fakeTestName.str();
1212 }
1213
1214 if (mBotMode)
1215 {
1216 // Split up test batches.
1217 mTestQueue = BatchTests(testSet, mBatchSize);
1218
1219 if (mDebugTestGroups)
1220 {
1221 std::cout << "Test Groups:\n";
1222
1223 while (!mTestQueue.empty())
1224 {
1225 const std::vector<TestIdentifier> &tests = mTestQueue.front();
1226 std::cout << GetConfigNameFromTestIdentifier(tests[0]) << " ("
1227 << static_cast<int>(tests.size()) << ")\n";
1228 mTestQueue.pop();
1229 }
1230
1231 exit(EXIT_SUCCESS);
1232 }
1233 }
1234
1235 testing::InitGoogleTest(argc, argv);
1236
1237 mTotalResultCount = testSet.size();
1238
1239 if ((mBotMode || !mResultsDirectory.empty()) && mResultsFile.empty())
1240 {
1241 // Create a default output file in bot mode.
1242 mResultsFile = "output.json";
1243 }
1244
1245 if (!mResultsDirectory.empty())
1246 {
1247 std::stringstream resultFileName;
1248 resultFileName << mResultsDirectory << GetPathSeparator() << mResultsFile;
1249 mResultsFile = resultFileName.str();
1250 }
1251
1252 if (!mTestArtifactDirectory.empty())
1253 {
1254 mMetricWriter.enable(mTestArtifactDirectory);
1255 }
1256
1257 if (!mBotMode)
1258 {
1259 testing::TestEventListeners &listeners = testing::UnitTest::GetInstance()->listeners();
1260 listeners.Append(new TestEventListener(this));
1261
1262 for (const TestIdentifier &id : testSet)
1263 {
1264 mTestResults.results[id].type = TestResultType::NoResult;
1265 }
1266 }
1267 }
1268
1269 TestSuite::~TestSuite()
1270 {
1271 const char kPreferredDeviceEnvVar[] = "ANGLE_PREFERRED_DEVICE";
1272 if (mModifiedPreferredDevice && !angle::GetEnvironmentVar(kPreferredDeviceEnvVar).empty())
1273 {
1274 angle::UnsetEnvironmentVar(kPreferredDeviceEnvVar);
1275 }
1276
1277 if (mWatchdogThread.joinable())
1278 {
1279 mWatchdogThread.detach();
1280 }
1281 TerminateCrashHandler();
1282 }
1283
1284 bool TestSuite::parseSingleArg(int *argc, char **argv, int argIndex)
1285 {
1286 // Note: Flags should be documented in README.md.
1287 return ParseIntArg("--shard-count", argc, argv, argIndex, &mShardCount) ||
1288 ParseIntArg("--shard-index", argc, argv, argIndex, &mShardIndex) ||
1289 ParseIntArg("--batch-size", argc, argv, argIndex, &mBatchSize) ||
1290 ParseIntArg("--max-processes", argc, argv, argIndex, &mMaxProcesses) ||
1291 ParseIntArg(kTestTimeoutArg, argc, argv, argIndex, &mTestTimeout) ||
1292 ParseIntArg("--batch-timeout", argc, argv, argIndex, &mBatchTimeout) ||
1293 ParseIntArg("--flaky-retries", argc, argv, argIndex, &mFlakyRetries) ||
1294 ParseIntArg("--max-failures", argc, argv, argIndex, &mMaxFailures) ||
1295 // Other test functions consume the batch ID, so keep it in the list.
1296 ParseIntArgWithHandling(kBatchId, argc, argv, argIndex, &mBatchId,
1297 ArgHandling::Preserve) ||
1298 ParseStringArg("--results-directory", argc, argv, argIndex, &mResultsDirectory) ||
1299 ParseStringArg(kResultFileArg, argc, argv, argIndex, &mResultsFile) ||
1300 ParseStringArg("--isolated-script-test-output", argc, argv, argIndex, &mResultsFile) ||
1301 ParseStringArg(kFilterFileArg, argc, argv, argIndex, &mFilterFile) ||
1302 ParseStringArg("--histogram-json-file", argc, argv, argIndex, &mHistogramJsonFile) ||
1303 // We need these overloads to work around technical debt in the Android test runner.
1304 ParseStringArg("--isolated-script-test-perf-output", argc, argv, argIndex,
1305 &mHistogramJsonFile) ||
1306 ParseStringArg("--isolated_script_test_perf_output", argc, argv, argIndex,
1307 &mHistogramJsonFile) ||
1308 ParseStringArg("--render-test-output-dir", argc, argv, argIndex,
1309 &mTestArtifactDirectory) ||
1310 ParseStringArg("--isolated-outdir", argc, argv, argIndex, &mTestArtifactDirectory) ||
1311 ParseFlag("--test-launcher-bot-mode", argc, argv, argIndex, &mBotMode) ||
1312 ParseFlag("--bot-mode", argc, argv, argIndex, &mBotMode) ||
1313 ParseFlag("--debug-test-groups", argc, argv, argIndex, &mDebugTestGroups) ||
1314 ParseFlag("--gtest_list_tests", argc, argv, argIndex, &mGTestListTests) ||
1315 ParseFlag("--list-tests", argc, argv, argIndex, &mListTests) ||
1316 ParseFlag("--print-test-stdout", argc, argv, argIndex, &mPrintTestStdout) ||
1317 ParseFlag(kDisableCrashHandler, argc, argv, argIndex, &mDisableCrashHandler);
1318 }
1319
1320 void TestSuite::onCrashOrTimeout(TestResultType crashOrTimeout)
1321 {
1322 std::lock_guard<std::mutex> guard(mTestResults.currentTestMutex);
1323 if (mTestResults.currentTest.valid())
1324 {
1325 TestResult &result = mTestResults.results[mTestResults.currentTest];
1326 result.type = crashOrTimeout;
1327 result.elapsedTimeSeconds.back() = mTestResults.currentTestTimer.getElapsedWallClockTime();
1328 }
1329
1330 if (mResultsFile.empty())
1331 {
1332 printf("No results file specified.\n");
1333 return;
1334 }
1335
1336 writeOutputFiles(true);
1337 }
1338
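// Launches one child process for a batch of tests. The child runs this same executable with a
// --filter-file pointing at a temporary gtest filter, a --results-file for its JSON output, the
// --batch-id, and any pass-through arguments collected during argument parsing.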
1339 bool TestSuite::launchChildTestProcess(uint32_t batchId,
1340 const std::vector<TestIdentifier> &testsInBatch)
1341 {
1342 // Create a temporary file to store the test list
1343 ProcessInfo processInfo;
1344
1345 Optional<std::string> filterBuffer = CreateTemporaryFile();
1346 if (!filterBuffer.valid())
1347 {
1348 std::cerr << "Error creating temporary file for test list.\n";
1349 return false;
1350 }
1351 processInfo.filterFileName.assign(filterBuffer.value());
1352
1353 std::string filterString = GetTestFilter(testsInBatch);
1354
1355 FILE *fp = fopen(processInfo.filterFileName.c_str(), "w");
1356 if (!fp)
1357 {
1358 std::cerr << "Error opening temporary file for test list.\n";
1359 return false;
1360 }
1361 fprintf(fp, "%s", filterString.c_str());
1362 fclose(fp);
1363
1364 processInfo.filterString = filterString;
1365
1366 std::string filterFileArg = kFilterFileArg + std::string("=") + processInfo.filterFileName;
1367
1368 // Create a temporary file to store the test output.
1369 Optional<std::string> resultsBuffer = CreateTemporaryFile();
1370 if (!resultsBuffer.valid())
1371 {
1372 std::cerr << "Error creating temporary file for test list.\n";
1373 return false;
1374 }
1375 processInfo.resultsFileName.assign(resultsBuffer.value());
1376
1377 std::string resultsFileArg = kResultFileArg + std::string("=") + processInfo.resultsFileName;
1378
1379 // Construct command line for child process.
1380 std::vector<const char *> args;
1381
1382 args.push_back(mTestExecutableName.c_str());
1383 args.push_back(filterFileArg.c_str());
1384 args.push_back(resultsFileArg.c_str());
1385
1386 std::stringstream batchIdStream;
1387 batchIdStream << kBatchId << "=" << batchId;
1388 std::string batchIdString = batchIdStream.str();
1389 args.push_back(batchIdString.c_str());
1390
1391 for (const std::string &arg : mChildProcessArgs)
1392 {
1393 args.push_back(arg.c_str());
1394 }
1395
1396 if (mDisableCrashHandler)
1397 {
1398 args.push_back(kDisableCrashHandler);
1399 }
1400
1401 std::string timeoutStr;
1402 if (mTestTimeout != kDefaultTestTimeout)
1403 {
1404 std::stringstream timeoutStream;
1405 timeoutStream << kTestTimeoutArg << "=" << mTestTimeout;
1406 timeoutStr = timeoutStream.str();
1407 args.push_back(timeoutStr.c_str());
1408 }
1409
1410 std::string artifactsDir;
1411 if (!mTestArtifactDirectory.empty())
1412 {
1413 std::stringstream artifactsDirStream;
1414 artifactsDirStream << kIsolatedOutDir << "=" << mTestArtifactDirectory;
1415 artifactsDir = artifactsDirStream.str();
1416 args.push_back(artifactsDir.c_str());
1417 }
1418
1419 // Launch child process and wait for completion.
1420 processInfo.process = LaunchProcess(args, ProcessOutputCapture::StdoutAndStderrInterleaved);
1421
1422 if (!processInfo.process->started())
1423 {
1424 std::cerr << "Error launching child process.\n";
1425 return false;
1426 }
1427
1428 std::stringstream commandLineStr;
1429 for (const char *arg : args)
1430 {
1431 commandLineStr << arg << " ";
1432 }
1433
1434 processInfo.commandLine = commandLineStr.str();
1435 processInfo.testsInBatch = testsInBatch;
1436 mCurrentProcesses.emplace_back(std::move(processInfo));
1437 return true;
1438 }
1439
1440 void ParseTestIdentifierAndSetResult(const std::string &testName,
1441 TestResultType result,
1442 TestResults *results)
1443 {
1444 // Trim off any whitespace + extra stuff at the end of the string.
1445 std::string modifiedTestName = testName.substr(0, testName.find(' '));
1446 modifiedTestName = modifiedTestName.substr(0, testName.find('\r'));
1447 TestIdentifier id;
1448 bool ok = TestIdentifier::ParseFromString(modifiedTestName, &id);
1449 ASSERT(ok);
1450 results->results[id] = {result};
1451 }
1452
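// Harvests a finished child process: loads its JSON results (falling back to parsing its stdout
// if the file is missing), merges them into mTestResults, prints per-test status, re-queues any
// unfinished tests, and deletes the temporary filter and results files.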
1453 bool TestSuite::finishProcess(ProcessInfo *processInfo)
1454 {
1455 // Get test results and merge into main list.
1456 TestResults batchResults;
1457
1458 if (!GetTestResultsFromFile(processInfo->resultsFileName.c_str(), &batchResults))
1459 {
1460 std::cerr << "Warning: could not find test results file from child process.\n";
1461
1462 // First assume all tests get skipped.
1463 for (const TestIdentifier &id : processInfo->testsInBatch)
1464 {
1465 batchResults.results[id] = {TestResultType::NoResult};
1466 }
1467
1468 // Attempt to reconstruct passing list from stdout snippets.
1469 const std::string &batchStdout = processInfo->process->getStdout();
1470 std::istringstream linesStream(batchStdout);
1471
1472 std::string line;
1473 while (std::getline(linesStream, line))
1474 {
1475 size_t startPos = line.find(kStartedTestString);
1476 size_t failPos = line.find(kFailedTestString);
1477 size_t passPos = line.find(kPassedTestString);
1478 size_t skippedPos = line.find(kSkippedTestString);
1479
1480 if (startPos != std::string::npos)
1481 {
1482 // Assume a test that's started crashed until we see it completed.
1483 std::string testName = line.substr(strlen(kStartedTestString));
1484 ParseTestIdentifierAndSetResult(testName, TestResultType::Crash, &batchResults);
1485 }
1486 else if (failPos != std::string::npos)
1487 {
1488 std::string testName = line.substr(strlen(kFailedTestString));
1489 ParseTestIdentifierAndSetResult(testName, TestResultType::Fail, &batchResults);
1490 }
1491 else if (passPos != std::string::npos)
1492 {
1493 std::string testName = line.substr(strlen(kPassedTestString));
1494 ParseTestIdentifierAndSetResult(testName, TestResultType::Pass, &batchResults);
1495 }
1496 else if (skippedPos != std::string::npos)
1497 {
1498 std::string testName = line.substr(strlen(kSkippedTestString));
1499 ParseTestIdentifierAndSetResult(testName, TestResultType::Skip, &batchResults);
1500 }
1501 }
1502 }
1503
1504 if (!MergeTestResults(&batchResults, &mTestResults, mFlakyRetries))
1505 {
1506 std::cerr << "Error merging batch test results.\n";
1507 return false;
1508 }
1509
1510 if (!batchResults.results.empty())
1511 {
1512 const TestIdentifier &id = batchResults.results.begin()->first;
1513 std::string config = GetConfigNameFromTestIdentifier(id);
1514 printf("Completed batch with config: %s\n", config.c_str());
1515
1516 for (const auto &resultIter : batchResults.results)
1517 {
1518 const TestResult &result = resultIter.second;
1519 if (result.type != TestResultType::NoResult && IsFailedResult(result.type))
1520 {
1521 printf("To reproduce the batch, use filter:\n%s\n",
1522 processInfo->filterString.c_str());
1523 break;
1524 }
1525 }
1526 }
1527
1528 // Process results and print unexpected errors.
1529 for (const auto &resultIter : batchResults.results)
1530 {
1531 const TestIdentifier &id = resultIter.first;
1532 const TestResult &result = resultIter.second;
1533
1534 // NoResult entries aren't processed here since they're added back to the test queue below.
1535 if (result.type == TestResultType::NoResult)
1536 {
1537 continue;
1538 }
1539
1540 mCurrentResultCount++;
1541
1542 printf("[%d/%d] %s.%s", mCurrentResultCount, mTotalResultCount, id.testSuiteName.c_str(),
1543 id.testName.c_str());
1544
1545 if (mPrintTestStdout)
1546 {
1547 const std::string &batchStdout = processInfo->process->getStdout();
1548 PrintTestOutputSnippet(id, result, batchStdout);
1549 }
1550 else if (result.type == TestResultType::Pass)
1551 {
1552 printf(" (%0.1lf ms)\n", result.elapsedTimeSeconds.back() * 1000.0);
1553 }
1554 else if (result.type == TestResultType::Skip)
1555 {
1556 printf(" (skipped)\n");
1557 }
1558 else if (result.type == TestResultType::Timeout)
1559 {
1560 printf(" (TIMEOUT in %0.1lf s)\n", result.elapsedTimeSeconds.back());
1561 mFailureCount++;
1562 }
1563 else
1564 {
1565 printf(" (%s)\n", ResultTypeToString(result.type));
1566 mFailureCount++;
1567
1568 const std::string &batchStdout = processInfo->process->getStdout();
1569 PrintTestOutputSnippet(id, result, batchStdout);
1570 }
1571 }
1572
1573 // On unexpected exit, re-queue any unfinished tests.
1574 std::vector<TestIdentifier> unfinishedTests;
1575 for (const auto &resultIter : batchResults.results)
1576 {
1577 const TestIdentifier &id = resultIter.first;
1578 const TestResult &result = resultIter.second;
1579
1580 if (result.type == TestResultType::NoResult)
1581 {
1582 unfinishedTests.push_back(id);
1583 }
1584 }
1585
1586 if (!unfinishedTests.empty())
1587 {
1588 mTestQueue.emplace(std::move(unfinishedTests));
1589 }
1590
1591 // Clean up any dirty temporary files.
1592 for (const std::string &tempFile : {processInfo->filterFileName, processInfo->resultsFileName})
1593 {
1594 // Note: we should be aware that this cleanup won't happen if the harness itself
1595 // crashes. If this situation comes up in the future we should add crash cleanup to the
1596 // harness.
1597 if (!angle::DeleteSystemFile(tempFile.c_str()))
1598 {
1599 std::cerr << "Warning: Error cleaning up temp file: " << tempFile << "\n";
1600 }
1601 }
1602
1603 processInfo->process.reset();
1604 return true;
1605 }
1606
1607 int TestSuite::run()
1608 {
1609 #if defined(ANGLE_PLATFORM_ANDROID)
1610 if (mListTests && mGTestListTests)
1611 {
1612 // Workaround for the Android test runner requiring a GTest test list.
1613 printf("PlaceholderTest.\n Placeholder\n");
1614 return EXIT_SUCCESS;
1615 }
1616 #endif // defined(ANGLE_PLATFORM_ANDROID)
1617
1618 if (mListTests)
1619 {
1620 ListTests(mTestResults.results);
1621
1622 #if defined(ANGLE_PLATFORM_ANDROID)
1623 // Because of quirks with the Chromium-provided Android test runner, we need to use a few
1624 // tricks to get the test list output. We add placeholder output for a single test to trick
1625 // the test runner into thinking it ran the tests successfully. We also add an end marker
1626 // for the tests list so we can parse the list from the more spammy Android stdout log.
1627 static constexpr char kPlaceholderTestTest[] = R"(
1628 [==========] Running 1 test from 1 test suite.
1629 [----------] Global test environment set-up.
1630 [----------] 1 test from PlaceholderTest
1631 [ RUN ] PlaceholderTest.Placeholder
1632 [ OK ] PlaceholderTest.Placeholder (0 ms)
1633 [----------] 1 test from APITest (0 ms total)
1634
1635 [----------] Global test environment tear-down
1636 [==========] 1 test from 1 test suite ran. (24 ms total)
1637 [ PASSED ] 1 test.
1638 )";
1639 printf(kPlaceholderTestTest);
1640 #endif // defined(ANGLE_PLATFORM_ANDROID)
1641
1642 return EXIT_SUCCESS;
1643 }
1644
1645 if (mGTestListTests)
1646 {
1647 GTestListTests(mTestResults.results);
1648 return EXIT_SUCCESS;
1649 }
1650
1651 // Run tests serially.
1652 if (!mBotMode)
1653 {
1654 // Only start the watchdog if the debugger is not attached and we're a child process.
1655 if (!angle::IsDebuggerAttached() && mBatchId != -1)
1656 {
1657 startWatchdog();
1658 }
1659
1660 int retVal = RUN_ALL_TESTS();
1661 {
1662 std::lock_guard<std::mutex> guard(mTestResults.currentTestMutex);
1663 mTestResults.allDone = true;
1664 }
1665
1666 if (mWatchdogThread.joinable())
1667 {
1668 mWatchdogThread.join();
1669 }
1670 return retVal;
1671 }
1672
1673 Timer totalRunTime;
1674 totalRunTime.start();
1675
1676 Timer messageTimer;
1677 messageTimer.start();
1678
1679 uint32_t batchId = 0;
1680
1681 while (!mTestQueue.empty() || !mCurrentProcesses.empty())
1682 {
1683 bool progress = false;
1684
1685 // Spawn a process if needed and possible.
1686 if (static_cast<int>(mCurrentProcesses.size()) < mMaxProcesses && !mTestQueue.empty())
1687 {
1688 std::vector<TestIdentifier> testsInBatch = mTestQueue.front();
1689 mTestQueue.pop();
1690
1691 if (!launchChildTestProcess(++batchId, testsInBatch))
1692 {
1693 return 1;
1694 }
1695
1696 progress = true;
1697 }
1698
1699 // Check for process completion.
1700 uint32_t totalTestCount = 0;
1701 for (auto processIter = mCurrentProcesses.begin(); processIter != mCurrentProcesses.end();)
1702 {
1703 ProcessInfo &processInfo = *processIter;
1704 if (processInfo.process->finished())
1705 {
1706 if (!finishProcess(&processInfo))
1707 {
1708 return 1;
1709 }
1710 processIter = mCurrentProcesses.erase(processIter);
1711 progress = true;
1712 }
1713 else if (processInfo.process->getElapsedTimeSeconds() > mBatchTimeout)
1714 {
1715 // Terminate the process and record timeouts for the batch.
1716 // Because we can't determine which sub-test caused a timeout, record the whole
1717 // batch as a timeout failure. Can be improved by using socket message passing.
1718 if (!processInfo.process->kill())
1719 {
1720 return 1;
1721 }
1722
1723 const std::string &batchStdout = processInfo.process->getStdout();
1724 std::vector<std::string> lines =
1725 SplitString(batchStdout, "\r\n", WhitespaceHandling::TRIM_WHITESPACE,
1726 SplitResult::SPLIT_WANT_NONEMPTY);
1727 constexpr size_t kKeepLines = 10;
1728 printf("\nBatch timeout! Last %zu lines of batch stdout:\n", kKeepLines);
1729 printf("---------------------------------------------\n");
1730 for (size_t lineNo = lines.size() - std::min(lines.size(), kKeepLines);
1731 lineNo < lines.size(); ++lineNo)
1732 {
1733 printf("%s\n", lines[lineNo].c_str());
1734 }
1735 printf("---------------------------------------------\n\n");
1736
1737 for (const TestIdentifier &testIdentifier : processInfo.testsInBatch)
1738 {
1739 // Because the whole batch failed we can't know how long each test took.
1740 mTestResults.results[testIdentifier].type = TestResultType::Timeout;
1741 mFailureCount++;
1742 }
1743
1744 processIter = mCurrentProcesses.erase(processIter);
1745 progress = true;
1746 }
1747 else
1748 {
1749 totalTestCount += static_cast<uint32_t>(processInfo.testsInBatch.size());
1750 processIter++;
1751 }
1752 }
1753
        if (progress)
        {
            messageTimer.start();
        }
        else if (messageTimer.getElapsedWallClockTime() > kIdleMessageTimeout)
        {
            const ProcessInfo &processInfo = mCurrentProcesses[0];
            double processTime = processInfo.process->getElapsedTimeSeconds();
            printf("Running %d tests in %d processes, longest for %d seconds.\n", totalTestCount,
                   static_cast<int>(mCurrentProcesses.size()), static_cast<int>(processTime));
            messageTimer.start();
        }

        // Early exit if we passed the maximum failure threshold. Still wait for current tests.
        if (mFailureCount > mMaxFailures && !mTestQueue.empty())
        {
            printf("Reached maximum failure count (%d), clearing test queue.\n", mMaxFailures);
            TestQueue emptyTestQueue;
            std::swap(mTestQueue, emptyTestQueue);
        }

        // Sleep briefly and continue.
        angle::Sleep(100);
    }

    // Dump combined results.
    if (mFailureCount > mMaxFailures)
    {
        printf(
            "Omitted results files because the failure count (%d) exceeded the maximum number of "
            "failures (%d).\n",
            mFailureCount, mMaxFailures);
    }
    else
    {
        writeOutputFiles(false);
    }

    totalRunTime.stop();
    printf("Tests completed in %lf seconds\n", totalRunTime.getElapsedWallClockTime());

    return printFailuresAndReturnCount() == 0 ? 0 : 1;
}

int TestSuite::printFailuresAndReturnCount() const
{
    std::vector<std::string> failures;
    uint32_t skipCount = 0;

    for (const auto &resultIter : mTestResults.results)
    {
        const TestIdentifier &id = resultIter.first;
        const TestResult &result = resultIter.second;

        if (result.type == TestResultType::Skip)
        {
            skipCount++;
        }
        else if (result.type != TestResultType::Pass)
        {
            const FileLine &fileLine = mTestFileLines.find(id)->second;

            std::stringstream failureMessage;
            failureMessage << id << " (" << fileLine.file << ":" << fileLine.line << ") ("
                           << ResultTypeToString(result.type) << ")";
            failures.emplace_back(failureMessage.str());
        }
    }

    if (failures.empty())
        return 0;

    printf("%zu test%s failed:\n", failures.size(), failures.size() > 1 ? "s" : "");
    for (const std::string &failure : failures)
    {
        printf(" %s\n", failure.c_str());
    }
    if (skipCount > 0)
    {
        printf("%u tests skipped.\n", skipCount);
    }

    return static_cast<int>(failures.size());
}

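// Starts a thread that polls the current test's elapsed time and force-exits the process (after
// recording a Timeout result) if the per-test timeout is exceeded. The thread returns quietly
// once all tests are done.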
void TestSuite::startWatchdog()
{
    auto watchdogMain = [this]() {
        do
        {
            {
                std::lock_guard<std::mutex> guard(mTestResults.currentTestMutex);
                if (mTestResults.currentTestTimer.getElapsedWallClockTime() >
                    mTestResults.currentTestTimeout)
                {
                    break;
                }

                if (mTestResults.allDone)
                    return;
            }

            angle::Sleep(500);
        } while (true);
        onCrashOrTimeout(TestResultType::Timeout);
        ::_Exit(EXIT_FAILURE);
    };
    mWatchdogThread = std::thread(watchdogMain);
}

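// Forwards a single measurement sample to the histogram writer.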
void TestSuite::addHistogramSample(const std::string &measurement,
                                   const std::string &story,
                                   double value,
                                   const std::string &units)
{
    mHistogramWriter.addSample(measurement, story, value, units);
}

bool TestSuite::hasTestArtifactsDirectory() const
{
    return !mTestArtifactDirectory.empty();
}

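// Records an artifact name in the test results and returns the path it should be written to:
// inside the artifact directory when one is configured, otherwise the bare name.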
std::string TestSuite::reserveTestArtifactPath(const std::string &artifactName)
{
    mTestResults.testArtifactPaths.push_back(artifactName);

    if (mTestArtifactDirectory.empty())
    {
        return artifactName;
    }

    std::stringstream pathStream;
    pathStream << mTestArtifactDirectory << GetPathSeparator() << artifactName;
    return pathStream.str();
}

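// Parses a results JSON file from disk into a TestResults structure.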
bool GetTestResultsFromFile(const char *fileName, TestResults *resultsOut)
{
    std::ifstream ifs(fileName);
    if (!ifs.is_open())
    {
        std::cerr << "Error opening " << fileName << "\n";
        return false;
    }

    js::IStreamWrapper ifsWrapper(ifs);
    js::Document document;
    document.ParseStream(ifsWrapper);

    if (document.HasParseError())
    {
        std::cerr << "Parse error reading JSON document: " << document.GetParseError() << "\n";
        return false;
    }

    if (!GetTestResultsFromJSON(document, resultsOut))
    {
        std::cerr << "Error getting test results from JSON.\n";
        return false;
    }

    return true;
}

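// Prints any error messages accumulated by the test expectations parser to stderr.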
void TestSuite::dumpTestExpectationsErrorMessages()
{
    std::stringstream errorMsgStream;
    for (const auto &message : mTestExpectationsParser.getErrorMessages())
    {
        errorMsgStream << std::endl << " " << message;
    }

    std::cerr << "Failed to load test expectations." << errorMsgStream.str() << std::endl;
}

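// Loads expectations that apply to the given GPU config; logs parser errors on failure.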
bool TestSuite::loadTestExpectationsFromFileWithConfig(const GPUTestConfig &config,
                                                       const std::string &fileName)
{
    if (!mTestExpectationsParser.loadTestExpectationsFromFile(config, fileName))
    {
        dumpTestExpectationsErrorMessages();
        return false;
    }
    return true;
}

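// Loads every expectation in the file, regardless of GPU config; logs parser errors on failure.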
bool TestSuite::loadAllTestExpectationsFromFile(const std::string &fileName)
{
    if (!mTestExpectationsParser.loadAllTestExpectationsFromFile(fileName))
    {
        dumpTestExpectationsErrorMessages();
        return false;
    }
    return true;
}

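// Returns true (and logs the entries) if any loaded expectations never matched a test.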
bool TestSuite::logAnyUnusedTestExpectations()
{
    std::stringstream unusedMsgStream;
    bool anyUnused = false;
    for (const auto &message : mTestExpectationsParser.getUnusedExpectationsMessages())
    {
        anyUnused = true;
        unusedMsgStream << std::endl << " " << message;
    }
    if (anyUnused)
    {
        std::cerr << "Found unused test expectations:" << unusedMsgStream.str() << std::endl;
        return true;
    }
    return false;
}

int32_t TestSuite::getTestExpectation(const std::string &testName)
{
    return mTestExpectationsParser.getTestExpectation(testName);
}

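// Tests expected to time out get the longer "slow test" timeout; everything else keeps the
// default per-test timeout.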
void TestSuite::maybeUpdateTestTimeout(uint32_t testExpectation)
{
    double testTimeout = (testExpectation == GPUTestExpectationsParser::kGpuTestTimeout)
                             ? getSlowTestTimeout()
                             : mTestTimeout;
    std::lock_guard<std::mutex> guard(mTestResults.currentTestMutex);
    mTestResults.currentTestTimeout = testTimeout;
}

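// Looks up the expectation for this test under the given config and adjusts the watchdog
// timeout accordingly.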
int32_t TestSuite::getTestExpectationWithConfigAndUpdateTimeout(const GPUTestConfig &config,
                                                                const std::string &testName)
{
    uint32_t expectation = mTestExpectationsParser.getTestExpectationWithConfig(config, testName);
    maybeUpdateTestTimeout(expectation);
    return expectation;
}

int TestSuite::getSlowTestTimeout() const
{
    return mTestTimeout * kSlowTestTimeoutScale;
}

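// Writes the results JSON and histogram JSON files when they are configured and closes the
// metric writer. The "interrupted" flag is passed through to the results writer.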
void TestSuite::writeOutputFiles(bool interrupted)
{
    if (!mResultsFile.empty())
    {
        WriteResultsFile(interrupted, mTestResults, mResultsFile);
    }

    if (!mHistogramJsonFile.empty())
    {
        WriteHistogramJson(mHistogramWriter, mHistogramJsonFile);
    }

    mMetricWriter.close();
}

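// Human-readable names for test result types.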
const char *TestResultTypeToString(TestResultType type)
{
    switch (type)
    {
        case TestResultType::Crash:
            return "Crash";
        case TestResultType::Fail:
            return "Fail";
        case TestResultType::NoResult:
            return "NoResult";
        case TestResultType::Pass:
            return "Pass";
        case TestResultType::Skip:
            return "Skip";
        case TestResultType::Timeout:
            return "Timeout";
        case TestResultType::Unknown:
        default:
            return "Unknown";
    }
}

// This code supports using "-" in test names, which happens often in dEQP. GTest uses "-" as a
// marker for the beginning of the exclusion filter. Work around this by replacing "-" with "?",
// which matches any single character.
std::string ReplaceDashesWithQuestionMark(std::string dashesString)
{
    std::string noDashesString = dashesString;
    ReplaceAllSubstrings(&noDashesString, "-", "?");
    return noDashesString;
}
}  // namespace angle