1 //
2 // Copyright 2019 The ANGLE Project Authors. All rights reserved.
3 // Use of this source code is governed by a BSD-style license that can be
4 // found in the LICENSE file.
5 //
6 // TestSuite:
7 // Basic implementation of a test harness in ANGLE.
8
9 #include "TestSuite.h"
10
11 #include "common/debug.h"
12 #include "common/hash_containers.h"
13 #include "common/platform.h"
14 #include "common/string_utils.h"
15 #include "common/system_utils.h"
16 #include "util/Timer.h"
17
18 #include <stdlib.h>
19 #include <time.h>
20
21 #include <fstream>
22 #include <unordered_map>
23
24 #include <gtest/gtest.h>
25 #include <rapidjson/document.h>
26 #include <rapidjson/filewritestream.h>
27 #include <rapidjson/istreamwrapper.h>
28 #include <rapidjson/prettywriter.h>
29
30 // We directly call into a function to register the parameterized tests. This saves spinning up
31 // a subprocess with a new gtest filter.
32 #include <gtest/../../src/gtest-internal-inl.h>
33
34 namespace js = rapidjson;
35
36 namespace angle
37 {
38 namespace
39 {
40 constexpr char kBatchId[] = "--batch-id";
41 constexpr char kFilterFileArg[] = "--filter-file";
42 constexpr char kResultFileArg[] = "--results-file";
43 constexpr char kTestTimeoutArg[] = "--test-timeout";
44 constexpr char kDisableCrashHandler[] = "--disable-crash-handler";
45 constexpr char kIsolatedOutDir[] = "--isolated-outdir";
46
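// gtest console output markers. These are used both to clip per-test output snippets and to
// recover per-test results by scanning a child process's captured stdout when its results file
// is missing.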
47 constexpr char kStartedTestString[] = "[ RUN      ] ";
48 constexpr char kPassedTestString[] = "[       OK ] ";
49 constexpr char kFailedTestString[] = "[  FAILED  ] ";
50 constexpr char kSkippedTestString[] = "[  SKIPPED ] ";
51
52 constexpr char kArtifactsFakeTestName[] = "TestArtifactsFakeTest";
53
54 constexpr char kTSanOptionsEnvVar[] = "TSAN_OPTIONS";
55 constexpr char kUBSanOptionsEnvVar[] = "UBSAN_OPTIONS";
56
57 [[maybe_unused]] constexpr char kVkLoaderDisableDLLUnloadingEnvVar[] =
58 "VK_LOADER_DISABLE_DYNAMIC_LIBRARY_UNLOADING";
59
60 // Note: we use a fairly high test timeout to allow for the first test in a batch to be slow.
61 // Ideally we could use a separate timeout for the slow first test.
62 // Allow sanitized tests to run more slowly.
63 #if defined(NDEBUG) && !defined(ANGLE_WITH_SANITIZER)
64 constexpr int kDefaultTestTimeout = 60;
65 constexpr int kDefaultBatchTimeout = 300;
66 #else
67 constexpr int kDefaultTestTimeout = 120;
68 constexpr int kDefaultBatchTimeout = 700;
69 #endif
70 constexpr int kSlowTestTimeoutScale = 3;
71 constexpr int kDefaultBatchSize = 256;
72 constexpr double kIdleMessageTimeout = 15.0;
73 constexpr int kDefaultMaxProcesses = 16;
74 constexpr int kDefaultMaxFailures = 100;
75
76 const char *ResultTypeToString(TestResultType type)
77 {
78 switch (type)
79 {
80 case TestResultType::Crash:
81 return "CRASH";
82 case TestResultType::Fail:
83 return "FAIL";
84 case TestResultType::NoResult:
85 return "NOTRUN";
86 case TestResultType::Pass:
87 return "PASS";
88 case TestResultType::Skip:
89 return "SKIP";
90 case TestResultType::Timeout:
91 return "TIMEOUT";
92 case TestResultType::Unknown:
93 default:
94 return "UNKNOWN";
95 }
96 }
97
98 TestResultType GetResultTypeFromString(const std::string &str)
99 {
100 if (str == "CRASH")
101 return TestResultType::Crash;
102 if (str == "FAIL")
103 return TestResultType::Fail;
104 if (str == "PASS")
105 return TestResultType::Pass;
106 if (str == "NOTRUN")
107 return TestResultType::NoResult;
108 if (str == "SKIP")
109 return TestResultType::Skip;
110 if (str == "TIMEOUT")
111 return TestResultType::Timeout;
112 return TestResultType::Unknown;
113 }
114
115 bool IsFailedResult(TestResultType resultType)
116 {
117 return resultType != TestResultType::Pass && resultType != TestResultType::Skip;
118 }
119
120 js::Value ResultTypeToJSString(TestResultType type, js::Document::AllocatorType *allocator)
121 {
122 js::Value jsName;
123 jsName.SetString(ResultTypeToString(type), *allocator);
124 return jsName;
125 }
126
127 bool WriteJsonFile(const std::string &outputFile, js::Document *doc)
128 {
129 FILE *fp = fopen(outputFile.c_str(), "w");
130 if (!fp)
131 {
132 return false;
133 }
134
135 constexpr size_t kBufferSize = 0xFFFF;
136 std::vector<char> writeBuffer(kBufferSize);
137 js::FileWriteStream os(fp, writeBuffer.data(), kBufferSize);
138 js::PrettyWriter<js::FileWriteStream> writer(os);
139 if (!doc->Accept(writer))
140 {
141 fclose(fp);
142 return false;
143 }
144 fclose(fp);
145 return true;
146 }
147
148 // Writes out a TestResults to the Chromium JSON Test Results format.
149 // https://chromium.googlesource.com/chromium/src.git/+/main/docs/testing/json_test_results_format.md
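// Illustrative, abbreviated example of the document written below (values are made up):
//   {
//     "interrupted": false,
//     "path_delimiter": ".",
//     "version": 3,
//     "seconds_since_epoch": 1700000000,
//     "num_failures_by_type": { "PASS": 2, "FAIL": 1 },
//     "tests": {
//       "SuiteName.TestName": { "actual": "PASS", "expected": "PASS", "times": [ 0.1 ] }
//     }
//   }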
150 void WriteResultsFile(bool interrupted,
151 const TestResults &testResults,
152 const std::string &outputFile)
153 {
154 time_t ltime;
155 time(&ltime);
156 struct tm *timeinfo = gmtime(&ltime);
157 ltime = mktime(timeinfo);
158
159 uint64_t secondsSinceEpoch = static_cast<uint64_t>(ltime);
160
161 js::Document doc;
162 doc.SetObject();
163
164 js::Document::AllocatorType &allocator = doc.GetAllocator();
165
166 doc.AddMember("interrupted", interrupted, allocator);
167 doc.AddMember("path_delimiter", ".", allocator);
168 doc.AddMember("version", 3, allocator);
169 doc.AddMember("seconds_since_epoch", secondsSinceEpoch, allocator);
170
171 js::Value tests;
172 tests.SetObject();
173
174 // If we have any test artifacts, make a fake test to house them.
175 if (!testResults.testArtifactPaths.empty())
176 {
177 js::Value artifactsTest;
178 artifactsTest.SetObject();
179
180 artifactsTest.AddMember("actual", "PASS", allocator);
181 artifactsTest.AddMember("expected", "PASS", allocator);
182
183 js::Value artifacts;
184 artifacts.SetObject();
185
186 for (const std::string &testArtifactPath : testResults.testArtifactPaths)
187 {
188 std::vector<std::string> pieces =
189 SplitString(testArtifactPath, "/\\", WhitespaceHandling::TRIM_WHITESPACE,
190 SplitResult::SPLIT_WANT_NONEMPTY);
191 ASSERT(!pieces.empty());
192
193 js::Value basename;
194 basename.SetString(pieces.back(), allocator);
195
196 js::Value artifactPath;
197 artifactPath.SetString(testArtifactPath, allocator);
198
199 js::Value artifactArray;
200 artifactArray.SetArray();
201 artifactArray.PushBack(artifactPath, allocator);
202
203 artifacts.AddMember(basename, artifactArray, allocator);
204 }
205
206 artifactsTest.AddMember("artifacts", artifacts, allocator);
207
208 js::Value fakeTestName;
209 fakeTestName.SetString(testResults.testArtifactsFakeTestName, allocator);
210 tests.AddMember(fakeTestName, artifactsTest, allocator);
211 }
212
213 std::map<TestResultType, uint32_t> counts;
214
215 for (const auto &resultIter : testResults.results)
216 {
217 const TestIdentifier &id = resultIter.first;
218 const TestResult &result = resultIter.second;
219
220 js::Value jsResult;
221 jsResult.SetObject();
222
223 counts[result.type]++;
224
225 std::string actualResult;
226 for (uint32_t fail = 0; fail < result.flakyFailures; ++fail)
227 {
228 actualResult += "FAIL ";
229 }
230
231 actualResult += ResultTypeToString(result.type);
232
233 std::string expectedResult = "PASS";
234 if (result.type == TestResultType::Skip)
235 {
236 expectedResult = "SKIP";
237 }
238
239 // Handle flaky passing tests.
240 if (result.flakyFailures > 0 && result.type == TestResultType::Pass)
241 {
242 expectedResult = "FAIL PASS";
243 jsResult.AddMember("is_flaky", true, allocator);
244 }
245
246 jsResult.AddMember("actual", actualResult, allocator);
247 jsResult.AddMember("expected", expectedResult, allocator);
248
249 if (IsFailedResult(result.type))
250 {
251 jsResult.AddMember("is_unexpected", true, allocator);
252 }
253
254 js::Value times;
255 times.SetArray();
256 for (double elapsedTimeSeconds : result.elapsedTimeSeconds)
257 {
258 times.PushBack(elapsedTimeSeconds, allocator);
259 }
260
261 jsResult.AddMember("times", times, allocator);
262
263 char testName[500];
264 id.snprintfName(testName, sizeof(testName));
265 js::Value jsName;
266 jsName.SetString(testName, allocator);
267
268 tests.AddMember(jsName, jsResult, allocator);
269 }
270
271 js::Value numFailuresByType;
272 numFailuresByType.SetObject();
273
274 for (const auto &countIter : counts)
275 {
276 TestResultType type = countIter.first;
277 uint32_t count = countIter.second;
278
279 js::Value jsCount(count);
280 numFailuresByType.AddMember(ResultTypeToJSString(type, &allocator), jsCount, allocator);
281 }
282
283 doc.AddMember("num_failures_by_type", numFailuresByType, allocator);
284
285 doc.AddMember("tests", tests, allocator);
286
287 printf("Writing test results to %s\n", outputFile.c_str());
288
289 if (!WriteJsonFile(outputFile, &doc))
290 {
291 printf("Error writing test results file.\n");
292 }
293 }
294
295 void WriteHistogramJson(const HistogramWriter &histogramWriter, const std::string &outputFile)
296 {
297 js::Document doc;
298 doc.SetArray();
299
300 histogramWriter.getAsJSON(&doc);
301
302 printf("Writing histogram json to %s\n", outputFile.c_str());
303
304 if (!WriteJsonFile(outputFile, &doc))
305 {
306 printf("Error writing histogram json file.\n");
307 }
308 }
309
310 void UpdateCurrentTestResult(const testing::TestResult &resultIn, TestResults *resultsOut)
311 {
312 TestResult &resultOut = resultsOut->results[resultsOut->currentTest];
313
314 // Note: Crashes and Timeouts are detected by the crash handler and a watchdog thread.
315 if (resultIn.Skipped())
316 {
317 resultOut.type = TestResultType::Skip;
318 }
319 else if (resultIn.Failed())
320 {
321 resultOut.type = TestResultType::Fail;
322 }
323 else
324 {
325 // With --gtest_repeat the same test is seen multiple times, so resultOut.type may have been
326 // previously set to e.g. ::Fail. Only set to ::Pass if there was no other result yet.
327 if (resultOut.type == TestResultType::NoResult)
328 {
329 resultOut.type = TestResultType::Pass;
330 }
331 }
332
333 resultOut.elapsedTimeSeconds.back() = resultsOut->currentTestTimer.getElapsedWallClockTime();
334 }
335
336 TestIdentifier GetTestIdentifier(const testing::TestInfo &testInfo)
337 {
338 return {testInfo.test_suite_name(), testInfo.name()};
339 }
340
341 bool IsTestDisabled(const testing::TestInfo &testInfo)
342 {
343 return ::strstr(testInfo.name(), "DISABLED_") == testInfo.name();
344 }
345
346 using TestIdentifierFilter = std::function<bool(const TestIdentifier &id)>;
347
348 std::vector<TestIdentifier> FilterTests(std::map<TestIdentifier, FileLine> *fileLinesOut,
349 TestIdentifierFilter filter,
350 bool alsoRunDisabledTests)
351 {
352 std::vector<TestIdentifier> tests;
353
354 const testing::UnitTest &testProgramInfo = *testing::UnitTest::GetInstance();
355 for (int suiteIndex = 0; suiteIndex < testProgramInfo.total_test_suite_count(); ++suiteIndex)
356 {
357 const testing::TestSuite &testSuite = *testProgramInfo.GetTestSuite(suiteIndex);
358 for (int testIndex = 0; testIndex < testSuite.total_test_count(); ++testIndex)
359 {
360 const testing::TestInfo &testInfo = *testSuite.GetTestInfo(testIndex);
361 TestIdentifier id = GetTestIdentifier(testInfo);
362 if (filter(id) && (!IsTestDisabled(testInfo) || alsoRunDisabledTests))
363 {
364 tests.emplace_back(id);
365
366 if (fileLinesOut)
367 {
368 (*fileLinesOut)[id] = {testInfo.file(), testInfo.line()};
369 }
370 }
371 }
372 }
373
374 return tests;
375 }
376
377 std::vector<TestIdentifier> GetFilteredTests(std::map<TestIdentifier, FileLine> *fileLinesOut,
378 bool alsoRunDisabledTests)
379 {
380 TestIdentifierFilter gtestIDFilter = [](const TestIdentifier &id) {
381 return testing::internal::UnitTestOptions::FilterMatchesTest(id.testSuiteName, id.testName);
382 };
383
384 return FilterTests(fileLinesOut, gtestIDFilter, alsoRunDisabledTests);
385 }
386
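// Striped sharding: shard N takes tests N, N + shardCount, N + 2 * shardCount, ... from the
// already-filtered list.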
387 std::vector<TestIdentifier> GetShardTests(const std::vector<TestIdentifier> &allTests,
388 int shardIndex,
389 int shardCount,
390 std::map<TestIdentifier, FileLine> *fileLinesOut,
391 bool alsoRunDisabledTests)
392 {
393 std::vector<TestIdentifier> shardTests;
394
395 for (int testIndex = shardIndex; testIndex < static_cast<int>(allTests.size());
396 testIndex += shardCount)
397 {
398 shardTests.emplace_back(allTests[testIndex]);
399 }
400
401 return shardTests;
402 }
403
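// Builds a --gtest_filter argument from an explicit test list, e.g. (illustrative)
// "--gtest_filter=SuiteA.Test1:SuiteA.Test2". Dashes are replaced with '?' (a single-character
// wildcard) because '-' marks the start of the exclusion list in gtest filter syntax.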
404 std::string GetTestFilter(const std::vector<TestIdentifier> &tests)
405 {
406 std::stringstream filterStream;
407
408 filterStream << "--gtest_filter=";
409
410 for (size_t testIndex = 0; testIndex < tests.size(); ++testIndex)
411 {
412 if (testIndex != 0)
413 {
414 filterStream << ":";
415 }
416
417 filterStream << ReplaceDashesWithQuestionMark(tests[testIndex].testSuiteName) << "."
418 << ReplaceDashesWithQuestionMark(tests[testIndex].testName);
419 }
420
421 return filterStream.str();
422 }
423
424 bool GetTestArtifactsFromJSON(const js::Value::ConstObject &obj,
425 std::vector<std::string> *testArtifactPathsOut)
426 {
427 if (!obj.HasMember("artifacts"))
428 {
429 printf("No artifacts member.\n");
430 return false;
431 }
432
433 const js::Value &jsArtifacts = obj["artifacts"];
434 if (!jsArtifacts.IsObject())
435 {
436 printf("Artifacts are not an object.\n");
437 return false;
438 }
439
440 const js::Value::ConstObject &artifacts = jsArtifacts.GetObj();
441 for (const auto &artifactMember : artifacts)
442 {
443 const js::Value &artifact = artifactMember.value;
444 if (!artifact.IsArray())
445 {
446 printf("Artifact is not an array of strings of size 1.\n");
447 return false;
448 }
449
450 const js::Value::ConstArray &artifactArray = artifact.GetArray();
451 if (artifactArray.Size() != 1)
452 {
453 printf("Artifact is not an array of strings of size 1.\n");
454 return false;
455 }
456
457 const js::Value &artifactName = artifactArray[0];
458 if (!artifactName.IsString())
459 {
460 printf("Artifact is not an array of strings of size 1.\n");
461 return false;
462 }
463
464 testArtifactPathsOut->push_back(artifactName.GetString());
465 }
466
467 return true;
468 }
469
470 bool GetSingleTestResultFromJSON(const js::Value &name,
471 const js::Value::ConstObject &obj,
472 TestResults *resultsOut)
473 {
474
475 TestIdentifier id;
476 if (!TestIdentifier::ParseFromString(name.GetString(), &id))
477 {
478 printf("Could not parse test identifier.\n");
479 return false;
480 }
481
482 if (!obj.HasMember("expected") || !obj.HasMember("actual"))
483 {
484 printf("No expected or actual member.\n");
485 return false;
486 }
487
488 const js::Value &expected = obj["expected"];
489 const js::Value &actual = obj["actual"];
490
491 if (!expected.IsString() || !actual.IsString())
492 {
493 printf("Expected or actual member is not a string.\n");
494 return false;
495 }
496
497 const std::string actualStr = actual.GetString();
498
499 TestResultType resultType = TestResultType::Unknown;
500 int flakyFailures = 0;
501 if (actualStr.find(' ') != std::string::npos)
502 {
503 std::istringstream strstr(actualStr);
504 std::string token;
505 while (std::getline(strstr, token, ' '))
506 {
507 resultType = GetResultTypeFromString(token);
508 if (resultType == TestResultType::Unknown)
509 {
510 printf("Failed to parse result type.\n");
511 return false;
512 }
513 if (IsFailedResult(resultType))
514 {
515 flakyFailures++;
516 }
517 }
518 }
519 else
520 {
521 resultType = GetResultTypeFromString(actualStr);
522 if (resultType == TestResultType::Unknown)
523 {
524 printf("Failed to parse result type.\n");
525 return false;
526 }
527 }
528
529 std::vector<double> elapsedTimeSeconds;
530 if (obj.HasMember("times"))
531 {
532 const js::Value &times = obj["times"];
533 if (!times.IsArray())
534 {
535 return false;
536 }
537
538 const js::Value::ConstArray &timesArray = times.GetArray();
539 if (timesArray.Size() < 1)
540 {
541 return false;
542 }
543 for (const js::Value &time : timesArray)
544 {
545 if (!time.IsDouble())
546 {
547 return false;
548 }
549
550 elapsedTimeSeconds.push_back(time.GetDouble());
551 }
552 }
553
554 TestResult &result = resultsOut->results[id];
555 result.elapsedTimeSeconds = elapsedTimeSeconds;
556 result.type = resultType;
557 result.flakyFailures = flakyFailures;
558 return true;
559 }
560
561 bool GetTestResultsFromJSON(const js::Document &document, TestResults *resultsOut)
562 {
563 if (!document.HasMember("tests") || !document["tests"].IsObject())
564 {
565 printf("JSON document has no tests member.\n");
566 return false;
567 }
568
569 const js::Value::ConstObject &tests = document["tests"].GetObj();
570 for (const auto &testMember : tests)
571 {
572 // Get test identifier.
573 const js::Value &name = testMember.name;
574 if (!name.IsString())
575 {
576 printf("Name is not a string.\n");
577 return false;
578 }
579
580 // Get test result.
581 const js::Value &value = testMember.value;
582 if (!value.IsObject())
583 {
584 printf("Test result is not an object.\n");
585 return false;
586 }
587
588 const js::Value::ConstObject &obj = value.GetObj();
589
590 if (BeginsWith(name.GetString(), kArtifactsFakeTestName))
591 {
592 if (!GetTestArtifactsFromJSON(obj, &resultsOut->testArtifactPaths))
593 {
594 return false;
595 }
596 }
597 else
598 {
599 if (!GetSingleTestResultFromJSON(name, obj, resultsOut))
600 {
601 return false;
602 }
603 }
604 }
605
606 return true;
607 }
608
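// Merges one batch's results ('input') into the accumulated results ('output'). Failed tests
// that still have flaky retries remaining are reset to NoResult in 'input' so the caller can
// re-queue them.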
609 bool MergeTestResults(TestResults *input, TestResults *output, int flakyRetries)
610 {
611 for (auto &resultsIter : input->results)
612 {
613 const TestIdentifier &id = resultsIter.first;
614 TestResult &inputResult = resultsIter.second;
615 TestResult &outputResult = output->results[id];
616
617 if (inputResult.type != TestResultType::NoResult)
618 {
619 if (outputResult.type != TestResultType::NoResult)
620 {
621 printf("Warning: duplicate entry for %s.%s.\n", id.testSuiteName.c_str(),
622 id.testName.c_str());
623 return false;
624 }
625
626 // Mark the tests that haven't exhausted their retries as 'SKIP'. This makes ANGLE
627 // attempt the test again.
628 uint32_t runCount = outputResult.flakyFailures + 1;
629 if (IsFailedResult(inputResult.type) && runCount < static_cast<uint32_t>(flakyRetries))
630 {
631 printf("Retrying flaky test: %s.%s.\n", id.testSuiteName.c_str(),
632 id.testName.c_str());
633 inputResult.type = TestResultType::NoResult;
634 outputResult.flakyFailures++;
635 }
636 else
637 {
638 outputResult.type = inputResult.type;
639 }
640 if (runCount == 1)
641 {
642 outputResult.elapsedTimeSeconds = inputResult.elapsedTimeSeconds;
643 }
644 else
645 {
646 outputResult.elapsedTimeSeconds.insert(outputResult.elapsedTimeSeconds.end(),
647 inputResult.elapsedTimeSeconds.begin(),
648 inputResult.elapsedTimeSeconds.end());
649 }
650 }
651 }
652
653 output->testArtifactPaths.insert(output->testArtifactPaths.end(),
654 input->testArtifactPaths.begin(),
655 input->testArtifactPaths.end());
656
657 return true;
658 }
659
660 void PrintTestOutputSnippet(const TestIdentifier &id,
661 const TestResult &result,
662 const std::string &fullOutput)
663 {
664 std::stringstream nameStream;
665 nameStream << id;
666 std::string fullName = nameStream.str();
667
668 size_t runPos = fullOutput.find(std::string(kStartedTestString) + fullName);
669 if (runPos == std::string::npos)
670 {
671 printf("Cannot locate test output snippet.\n");
672 return;
673 }
674
675 size_t endPos = fullOutput.find(std::string(kFailedTestString) + fullName, runPos);
676 // Only clip the snippet to the "OK" message if the test really
677 // succeeded. It still might have e.g. crashed after printing it.
678 if (endPos == std::string::npos && result.type == TestResultType::Pass)
679 {
680 endPos = fullOutput.find(std::string(kPassedTestString) + fullName, runPos);
681 }
682 if (endPos != std::string::npos)
683 {
684 size_t newline_pos = fullOutput.find("\n", endPos);
685 if (newline_pos != std::string::npos)
686 endPos = newline_pos + 1;
687 }
688
689 std::cout << "\n";
690 if (endPos != std::string::npos)
691 {
692 std::cout << fullOutput.substr(runPos, endPos - runPos);
693 }
694 else
695 {
696 std::cout << fullOutput.substr(runPos);
697 }
698 }
699
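// Extracts the configuration suffix of a parameterized test name, e.g. (illustrative)
// "Draws/ES2_Vulkan" -> "ES2_Vulkan" and "Draws/ES2_Vulkan__SomeVariant" -> "ES2_Vulkan".
// Names without a '/' map to "default", as do configs that don't start with "ES" (when no
// "__" marker is present).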
700 std::string GetConfigNameFromTestIdentifier(const TestIdentifier &id)
701 {
702 size_t slashPos = id.testName.find('/');
703 if (slashPos == std::string::npos)
704 {
705 return "default";
706 }
707
708 size_t doubleUnderscorePos = id.testName.find("__");
709 if (doubleUnderscorePos == std::string::npos)
710 {
711 std::string configName = id.testName.substr(slashPos + 1);
712
713 if (!BeginsWith(configName, "ES"))
714 {
715 return "default";
716 }
717
718 return configName;
719 }
720 else
721 {
722 return id.testName.substr(slashPos + 1, doubleUnderscorePos - slashPos - 1);
723 }
724 }
725
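// Illustrative example of the striping below: with batchSize = 4, ten tests of one config yield
// three batches containing indices {0, 3, 6, 9}, {1, 4, 7} and {2, 5, 8}.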
726 TestQueue BatchTests(const std::vector<TestIdentifier> &tests, int batchSize)
727 {
728 // First sort tests by configuration.
729 angle::HashMap<std::string, std::vector<TestIdentifier>> testsSortedByConfig;
730 for (const TestIdentifier &id : tests)
731 {
732 std::string config = GetConfigNameFromTestIdentifier(id);
733 testsSortedByConfig[config].push_back(id);
734 }
735
736 // Then group into batches by 'batchSize'.
737 TestQueue testQueue;
738 for (const auto &configAndIds : testsSortedByConfig)
739 {
740 const std::vector<TestIdentifier> &configTests = configAndIds.second;
741
742 // Count the number of batches needed for this config.
743 int batchesForConfig = static_cast<int>(configTests.size() + batchSize - 1) / batchSize;
744
745 // Create batches with striping to split up slow tests.
746 for (int batchIndex = 0; batchIndex < batchesForConfig; ++batchIndex)
747 {
748 std::vector<TestIdentifier> batchTests;
749 for (size_t testIndex = batchIndex; testIndex < configTests.size();
750 testIndex += batchesForConfig)
751 {
752 batchTests.push_back(configTests[testIndex]);
753 }
754 testQueue.emplace(std::move(batchTests));
755 ASSERT(batchTests.empty());
756 }
757 }
758
759 return testQueue;
760 }
761
762 void ListTests(const std::map<TestIdentifier, TestResult> &resultsMap)
763 {
764 std::cout << "Tests list:\n";
765
766 for (const auto &resultIt : resultsMap)
767 {
768 const TestIdentifier &id = resultIt.first;
769 std::cout << id << "\n";
770 }
771
772 std::cout << "End tests list.\n";
773 }
774
775 // Prints the names of the tests matching the user-specified filter flag.
776 // This matches the output from googletest/src/gtest.cc but is much much faster for large filters.
777 // See http://anglebug.com/42263725
778 void GTestListTests(const std::map<TestIdentifier, TestResult> &resultsMap)
779 {
780 std::map<std::string, std::vector<std::string>> suites;
781
782 for (const auto &resultIt : resultsMap)
783 {
784 const TestIdentifier &id = resultIt.first;
785 suites[id.testSuiteName].push_back(id.testName);
786 }
787
788 for (const auto &testSuiteIt : suites)
789 {
790 bool printedTestSuiteName = false;
791
792 const std::string &suiteName = testSuiteIt.first;
793 const std::vector<std::string> &testNames = testSuiteIt.second;
794
795 for (const std::string &testName : testNames)
796 {
797 if (!printedTestSuiteName)
798 {
799 printedTestSuiteName = true;
800 printf("%s.\n", suiteName.c_str());
801 }
802 printf(" %s\n", testName.c_str());
803 }
804 }
805 }
806
807 // On Android, batching is done on the host, i.e. externally.
808 // TestSuite executes on the device and should just passthrough all args to GTest.
809 bool UsesExternalBatching()
810 {
811 #if defined(ANGLE_PLATFORM_ANDROID)
812 return true;
813 #else
814 return false;
815 #endif
816 }
817 } // namespace
818
819 void MetricWriter::enable(const std::string &testArtifactDirectory)
820 {
821 mPath = testArtifactDirectory + GetPathSeparator() + "angle_metrics";
822 }
823
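// Each writeInfo() call followed by one write*Value() call emits a single JSON line into the
// angle_metrics file, roughly (illustrative):
//   {"name":"...","backend":"...","story":"...","metric":"...","units":"...","value":"..."}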
824 void MetricWriter::writeInfo(const std::string &name,
825 const std::string &backend,
826 const std::string &story,
827 const std::string &metric,
828 const std::string &units)
829 {
830 if (mPath.empty())
831 {
832 return;
833 }
834
835 if (mFile == nullptr)
836 {
837 mFile = fopen(mPath.c_str(), "w");
838 }
839 ASSERT(mFile != nullptr);
840
841 fprintf(mFile, "{\"name\":\"%s\",", name.c_str());
842 fprintf(mFile, "\"backend\":\"%s\",", backend.c_str());
843 fprintf(mFile, "\"story\":\"%s\",", story.c_str());
844 fprintf(mFile, "\"metric\":\"%s\",", metric.c_str());
845 fprintf(mFile, "\"units\":\"%s\",", units.c_str());
846 // followed by writing value, so no closing bracket yet
847 }
848
849 void MetricWriter::writeDoubleValue(double value)
850 {
851 if (mFile != nullptr)
852 {
853 fprintf(mFile, "\"value\":\"%lf\"}\n", value);
854 }
855 }
856
857 void MetricWriter::writeIntegerValue(size_t value)
858 {
859 if (mFile != nullptr)
860 {
861 fprintf(mFile, "\"value\":\"%zu\"}\n", value);
862 }
863 }
864
865 void MetricWriter::close()
866 {
867 if (mFile != nullptr)
868 {
869 fclose(mFile);
870 mFile = nullptr;
871 }
872 }
873
874 // static
875 TestSuite *TestSuite::mInstance = nullptr;
876
877 TestIdentifier::TestIdentifier() = default;
878
879 TestIdentifier::TestIdentifier(const std::string &suiteNameIn, const std::string &nameIn)
880 : testSuiteName(suiteNameIn), testName(nameIn)
881 {}
882
883 TestIdentifier::TestIdentifier(const TestIdentifier &other) = default;
884
885 TestIdentifier::~TestIdentifier() = default;
886
887 TestIdentifier &TestIdentifier::operator=(const TestIdentifier &other) = default;
888
889 void TestIdentifier::snprintfName(char *outBuffer, size_t maxLen) const
890 {
891 snprintf(outBuffer, maxLen, "%s.%s", testSuiteName.c_str(), testName.c_str());
892 }
893
894 // static
895 bool TestIdentifier::ParseFromString(const std::string &str, TestIdentifier *idOut)
896 {
897 size_t separator = str.find(".");
898 if (separator == std::string::npos)
899 {
900 return false;
901 }
902
903 idOut->testSuiteName = str.substr(0, separator);
904 idOut->testName = str.substr(separator + 1, str.length() - separator - 1);
905 return true;
906 }
907
908 TestResults::TestResults() = default;
909
910 TestResults::~TestResults() = default;
911
912 ProcessInfo::ProcessInfo() = default;
913
914 ProcessInfo &ProcessInfo::operator=(ProcessInfo &&rhs)
915 {
916 process = std::move(rhs.process);
917 testsInBatch = std::move(rhs.testsInBatch);
918 resultsFileName = std::move(rhs.resultsFileName);
919 filterFileName = std::move(rhs.filterFileName);
920 commandLine = std::move(rhs.commandLine);
921 filterString = std::move(rhs.filterString);
922 return *this;
923 }
924
925 ProcessInfo::~ProcessInfo() = default;
926
927 ProcessInfo::ProcessInfo(ProcessInfo &&other)
928 {
929 *this = std::move(other);
930 }
931
932 class TestSuite::TestEventListener : public testing::EmptyTestEventListener
933 {
934 public:
935 // Note: TestResults is owned by the TestSuite. It should outlive TestEventListener.
936 TestEventListener(TestSuite *testSuite) : mTestSuite(testSuite) {}
937
938 void OnTestStart(const testing::TestInfo &testInfo) override
939 {
940 std::lock_guard<std::mutex> guard(mTestSuite->mTestResults.currentTestMutex);
941 mTestSuite->mTestResults.currentTest = GetTestIdentifier(testInfo);
942 mTestSuite->mTestResults.currentTestTimer.start();
943 }
944
945 void OnTestEnd(const testing::TestInfo &testInfo) override
946 {
947 std::lock_guard<std::mutex> guard(mTestSuite->mTestResults.currentTestMutex);
948 mTestSuite->mTestResults.currentTestTimer.stop();
949 const testing::TestResult &resultIn = *testInfo.result();
950 UpdateCurrentTestResult(resultIn, &mTestSuite->mTestResults);
951 mTestSuite->mTestResults.currentTest = TestIdentifier();
952 }
953
954 void OnTestProgramEnd(const testing::UnitTest &testProgramInfo) override
955 {
956 std::lock_guard<std::mutex> guard(mTestSuite->mTestResults.currentTestMutex);
957 mTestSuite->mTestResults.allDone = true;
958 mTestSuite->writeOutputFiles(false);
959 }
960
961 private:
962 TestSuite *mTestSuite;
963 };
964
965 TestSuite::TestSuite(int *argc, char **argv) : TestSuite(argc, argv, []() {}) {}
966
967 TestSuite::TestSuite(int *argc, char **argv, std::function<void()> registerTestsCallback)
968 : mShardCount(-1),
969 mShardIndex(-1),
970 mBotMode(false),
971 mDebugTestGroups(false),
972 mGTestListTests(false),
973 mListTests(false),
974 mPrintTestStdout(false),
975 mDisableCrashHandler(false),
976 mBatchSize(kDefaultBatchSize),
977 mCurrentResultCount(0),
978 mTotalResultCount(0),
979 mMaxProcesses(std::min(NumberOfProcessors(), kDefaultMaxProcesses)),
980 mTestTimeout(kDefaultTestTimeout),
981 mBatchTimeout(kDefaultBatchTimeout),
982 mBatchId(-1),
983 mFlakyRetries(0),
984 mMaxFailures(kDefaultMaxFailures),
985 mFailureCount(0),
986 mModifiedPreferredDevice(false)
987 {
988 ASSERT(mInstance == nullptr);
989 mInstance = this;
990
991 Optional<int> filterArgIndex;
992 bool alsoRunDisabledTests = false;
993
994 #if defined(ANGLE_PLATFORM_MACOS)
995 // By default, we should hook file API functions on macOS to avoid slow Metal shader caching
996 // file access.
997 angle::InitMetalFileAPIHooking(*argc, argv);
998 #endif
999
1000 #if defined(ANGLE_PLATFORM_WINDOWS)
1001 GTEST_FLAG_SET(catch_exceptions, false);
1002 #endif
1003
1004 if (*argc <= 0)
1005 {
1006 printf("Missing test arguments.\n");
1007 exit(EXIT_FAILURE);
1008 }
1009
1010 mTestExecutableName = argv[0];
1011
1012 for (int argIndex = 1; argIndex < *argc;)
1013 {
1014 if (parseSingleArg(argc, argv, argIndex))
1015 {
1016 continue;
1017 }
1018
1019 if (strstr(argv[argIndex], "--gtest_filter=") == argv[argIndex])
1020 {
1021 filterArgIndex = argIndex;
1022 }
1023 else
1024 {
1025 // Don't include disabled tests in test lists unless the user asks for them.
1026 if (strcmp("--gtest_also_run_disabled_tests", argv[argIndex]) == 0)
1027 {
1028 alsoRunDisabledTests = true;
1029 }
1030
1031 mChildProcessArgs.push_back(argv[argIndex]);
1032 }
1033 ++argIndex;
1034 }
1035
1036 if (mTestArtifactDirectory.empty())
1037 {
1038 mTestArtifactDirectory = GetEnvironmentVar("ISOLATED_OUTDIR");
1039 }
1040
1041 #if defined(ANGLE_PLATFORM_FUCHSIA)
1042 if (mBotMode)
1043 {
1044 printf("Note: Bot mode is not available on Fuchsia. See http://anglebug.com/42265786\n");
1045 mBotMode = false;
1046 }
1047 #endif
1048
1049 if (UsesExternalBatching() && mBotMode)
1050 {
1051 printf("Bot mode is mutually exclusive with external batching.\n");
1052 exit(EXIT_FAILURE);
1053 }
1054
1055 mTestResults.currentTestTimeout = mTestTimeout;
1056
1057 if (!mDisableCrashHandler)
1058 {
1059 // Note that the crash callback must be owned and not use global constructors.
1060 mCrashCallback = [this]() { onCrashOrTimeout(TestResultType::Crash); };
1061 InitCrashHandler(&mCrashCallback);
1062 }
1063
1064 #if defined(ANGLE_PLATFORM_WINDOWS) || defined(ANGLE_PLATFORM_LINUX)
1065 if (IsASan())
1066 {
1067 // Set before `registerTestsCallback()` call
1068 SetEnvironmentVar(kVkLoaderDisableDLLUnloadingEnvVar, "1");
1069 }
1070 #endif
1071
1072 registerTestsCallback();
1073
1074 std::string envShardIndex = angle::GetEnvironmentVar("GTEST_SHARD_INDEX");
1075 if (!envShardIndex.empty())
1076 {
1077 angle::UnsetEnvironmentVar("GTEST_SHARD_INDEX");
1078 if (mShardIndex == -1)
1079 {
1080 std::stringstream shardIndexStream(envShardIndex);
1081 shardIndexStream >> mShardIndex;
1082 }
1083 }
1084
1085 std::string envTotalShards = angle::GetEnvironmentVar("GTEST_TOTAL_SHARDS");
1086 if (!envTotalShards.empty())
1087 {
1088 angle::UnsetEnvironmentVar("GTEST_TOTAL_SHARDS");
1089 if (mShardCount == -1)
1090 {
1091 std::stringstream shardCountStream(envTotalShards);
1092 shardCountStream >> mShardCount;
1093 }
1094 }
1095
1096 // The test harness reads the active GPU from SystemInfo and uses that for test expectations.
1097 // However, some ANGLE backends don't have a concept of an "active" GPU, and instead use power
1098 // preference to select GPU. We can use the environment variable ANGLE_PREFERRED_DEVICE to
1099 // ensure ANGLE's selected GPU matches the GPU expected for this test suite.
1100 const GPUTestConfig testConfig = GPUTestConfig();
1101 const char kPreferredDeviceEnvVar[] = "ANGLE_PREFERRED_DEVICE";
1102 if (GetEnvironmentVar(kPreferredDeviceEnvVar).empty())
1103 {
1104 mModifiedPreferredDevice = true;
1105 const GPUTestConfig::ConditionArray &conditions = testConfig.getConditions();
1106 if (conditions[GPUTestConfig::kConditionAMD])
1107 {
1108 SetEnvironmentVar(kPreferredDeviceEnvVar, "amd");
1109 }
1110 else if (conditions[GPUTestConfig::kConditionNVIDIA])
1111 {
1112 SetEnvironmentVar(kPreferredDeviceEnvVar, "nvidia");
1113 }
1114 else if (conditions[GPUTestConfig::kConditionIntel])
1115 {
1116 SetEnvironmentVar(kPreferredDeviceEnvVar, "intel");
1117 }
1118 else if (conditions[GPUTestConfig::kConditionApple])
1119 {
1120 SetEnvironmentVar(kPreferredDeviceEnvVar, "apple");
1121 }
1122 else if (conditions[GPUTestConfig::kConditionQualcomm])
1123 {
1124 SetEnvironmentVar(kPreferredDeviceEnvVar, "qualcomm");
1125 }
1126 }
1127
1128 // Special handling for TSAN and UBSAN to force crashes when run in automated testing.
1129 if (IsTSan())
1130 {
1131 std::string tsanOptions = GetEnvironmentVar(kTSanOptionsEnvVar);
1132 tsanOptions += " halt_on_error=1";
1133 SetEnvironmentVar(kTSanOptionsEnvVar, tsanOptions.c_str());
1134 }
1135
1136 if (IsUBSan())
1137 {
1138 std::string ubsanOptions = GetEnvironmentVar(kUBSanOptionsEnvVar);
1139 ubsanOptions += " halt_on_error=1";
1140 SetEnvironmentVar(kUBSanOptionsEnvVar, ubsanOptions.c_str());
1141 }
1142
1143 if ((mShardIndex == -1) != (mShardCount == -1))
1144 {
1145 printf("Shard index and shard count must be specified together.\n");
1146 exit(EXIT_FAILURE);
1147 }
1148
1149 if (!mFilterFile.empty())
1150 {
1151 if (filterArgIndex.valid())
1152 {
1153 printf("Cannot use gtest_filter in conjunction with a filter file.\n");
1154 exit(EXIT_FAILURE);
1155 }
1156
1157 std::string fileContents;
1158 if (!ReadEntireFileToString(mFilterFile.c_str(), &fileContents))
1159 {
1160 printf("Error loading filter file: %s\n", mFilterFile.c_str());
1161 exit(EXIT_FAILURE);
1162 }
1163 mFilterString.assign(fileContents.data());
1164
1165 if (mFilterString.substr(0, strlen("--gtest_filter=")) != std::string("--gtest_filter="))
1166 {
1167 printf("Filter file must start with \"--gtest_filter=\".\n");
1168 exit(EXIT_FAILURE);
1169 }
1170
1171 // Note that we only add a filter string if we previously deleted a filter file argument,
1172 // so we will have space for the new filter string in argv.
1173 AddArg(argc, argv, mFilterString.c_str());
1174 }
1175
1176 // Call into gtest internals to force parameterized test name registration.
1177 testing::internal::UnitTestImpl *impl = testing::internal::GetUnitTestImpl();
1178 impl->RegisterParameterizedTests();
1179
1180 // Initialize internal GoogleTest filter arguments so we can call "FilterMatchesTest".
1181 testing::internal::ParseGoogleTestFlagsOnly(argc, argv);
1182
1183 std::vector<TestIdentifier> testSet = GetFilteredTests(&mTestFileLines, alsoRunDisabledTests);
1184
1185 if (mShardCount == 0)
1186 {
1187 printf("Shard count must be > 0.\n");
1188 exit(EXIT_FAILURE);
1189 }
1190 else if (mShardCount > 0)
1191 {
1192 if (mShardIndex >= mShardCount)
1193 {
1194 printf("Shard index must be less than shard count.\n");
1195 exit(EXIT_FAILURE);
1196 }
1197
1198 // If there's only one shard, we can use the testSet as defined above.
1199 if (mShardCount > 1)
1200 {
1201 if (!mBotMode && !UsesExternalBatching())
1202 {
1203 printf("Sharding is only supported in bot mode or external batching.\n");
1204 exit(EXIT_FAILURE);
1205 }
1206 // With external batching, we must use exactly the testSet as defined externally.
1207 // But when listing tests, we do need to apply sharding ourselves,
1208 // since we use our own implementation for listing tests and not GTest directly.
1209 if (!UsesExternalBatching() || mGTestListTests || mListTests)
1210 {
1211 testSet = GetShardTests(testSet, mShardIndex, mShardCount, &mTestFileLines,
1212 alsoRunDisabledTests);
1213 }
1214 }
1215 }
1216
1217 if (!testSet.empty())
1218 {
1219 std::stringstream fakeTestName;
1220 fakeTestName << kArtifactsFakeTestName << '-' << testSet[0].testName;
1221 mTestResults.testArtifactsFakeTestName = fakeTestName.str();
1222 }
1223
1224 if (mBotMode)
1225 {
1226 // Split up test batches.
1227 mTestQueue = BatchTests(testSet, mBatchSize);
1228
1229 if (mDebugTestGroups)
1230 {
1231 std::cout << "Test Groups:\n";
1232
1233 while (!mTestQueue.empty())
1234 {
1235 const std::vector<TestIdentifier> &tests = mTestQueue.front();
1236 std::cout << GetConfigNameFromTestIdentifier(tests[0]) << " ("
1237 << static_cast<int>(tests.size()) << ")\n";
1238 mTestQueue.pop();
1239 }
1240
1241 exit(EXIT_SUCCESS);
1242 }
1243 }
1244
1245 testing::InitGoogleTest(argc, argv);
1246
1247 mTotalResultCount = testSet.size();
1248
1249 if ((mBotMode || !mResultsDirectory.empty()) && mResultsFile.empty())
1250 {
1251 // Create a default output file in bot mode.
1252 mResultsFile = "output.json";
1253 }
1254
1255 if (!mResultsDirectory.empty())
1256 {
1257 std::stringstream resultFileName;
1258 resultFileName << mResultsDirectory << GetPathSeparator() << mResultsFile;
1259 mResultsFile = resultFileName.str();
1260 }
1261
1262 if (!mTestArtifactDirectory.empty())
1263 {
1264 mMetricWriter.enable(mTestArtifactDirectory);
1265 }
1266
1267 if (!mBotMode)
1268 {
1269 testing::TestEventListeners &listeners = testing::UnitTest::GetInstance()->listeners();
1270 listeners.Append(new TestEventListener(this));
1271
1272 for (const TestIdentifier &id : testSet)
1273 {
1274 mTestResults.results[id].type = TestResultType::NoResult;
1275 }
1276 }
1277 }
1278
1279 TestSuite::~TestSuite()
1280 {
1281 const char kPreferredDeviceEnvVar[] = "ANGLE_PREFERRED_DEVICE";
1282 if (mModifiedPreferredDevice && !angle::GetEnvironmentVar(kPreferredDeviceEnvVar).empty())
1283 {
1284 angle::UnsetEnvironmentVar(kPreferredDeviceEnvVar);
1285 }
1286
1287 if (mWatchdogThread.joinable())
1288 {
1289 mWatchdogThread.detach();
1290 }
1291 TerminateCrashHandler();
1292 }
1293
1294 bool TestSuite::parseSingleArg(int *argc, char **argv, int argIndex)
1295 {
1296 // Note: Flags should be documented in README.md.
1297 return ParseIntArg("--shard-count", argc, argv, argIndex, &mShardCount) ||
1298 ParseIntArg("--shard-index", argc, argv, argIndex, &mShardIndex) ||
1299 ParseIntArg("--batch-size", argc, argv, argIndex, &mBatchSize) ||
1300 ParseIntArg("--max-processes", argc, argv, argIndex, &mMaxProcesses) ||
1301 ParseIntArg(kTestTimeoutArg, argc, argv, argIndex, &mTestTimeout) ||
1302 ParseIntArg("--batch-timeout", argc, argv, argIndex, &mBatchTimeout) ||
1303 ParseIntArg("--flaky-retries", argc, argv, argIndex, &mFlakyRetries) ||
1304 ParseIntArg("--max-failures", argc, argv, argIndex, &mMaxFailures) ||
1305 // Other test functions consume the batch ID, so keep it in the list.
1306 ParseIntArgWithHandling(kBatchId, argc, argv, argIndex, &mBatchId,
1307 ArgHandling::Preserve) ||
1308 ParseStringArg("--results-directory", argc, argv, argIndex, &mResultsDirectory) ||
1309 ParseStringArg(kResultFileArg, argc, argv, argIndex, &mResultsFile) ||
1310 ParseStringArg("--isolated-script-test-output", argc, argv, argIndex, &mResultsFile) ||
1311 ParseStringArg(kFilterFileArg, argc, argv, argIndex, &mFilterFile) ||
1312 ParseStringArg("--histogram-json-file", argc, argv, argIndex, &mHistogramJsonFile) ||
1313 // We need these overloads to work around technical debt in the Android test runner.
1314 ParseStringArg("--isolated-script-test-perf-output", argc, argv, argIndex,
1315 &mHistogramJsonFile) ||
1316 ParseStringArg("--isolated_script_test_perf_output", argc, argv, argIndex,
1317 &mHistogramJsonFile) ||
1318 ParseStringArg("--render-test-output-dir", argc, argv, argIndex,
1319 &mTestArtifactDirectory) ||
1320 ParseStringArg("--isolated-outdir", argc, argv, argIndex, &mTestArtifactDirectory) ||
1321 ParseFlag("--test-launcher-bot-mode", argc, argv, argIndex, &mBotMode) ||
1322 ParseFlag("--bot-mode", argc, argv, argIndex, &mBotMode) ||
1323 ParseFlag("--debug-test-groups", argc, argv, argIndex, &mDebugTestGroups) ||
1324 ParseFlag("--gtest_list_tests", argc, argv, argIndex, &mGTestListTests) ||
1325 ParseFlag("--list-tests", argc, argv, argIndex, &mListTests) ||
1326 ParseFlag("--print-test-stdout", argc, argv, argIndex, &mPrintTestStdout) ||
1327 ParseFlag(kDisableCrashHandler, argc, argv, argIndex, &mDisableCrashHandler);
1328 }
1329
1330 void TestSuite::onCrashOrTimeout(TestResultType crashOrTimeout)
1331 {
1332 std::lock_guard<std::mutex> guard(mTestResults.currentTestMutex);
1333 if (mTestResults.currentTest.valid())
1334 {
1335 TestResult &result = mTestResults.results[mTestResults.currentTest];
1336 result.type = crashOrTimeout;
1337 result.elapsedTimeSeconds.back() = mTestResults.currentTestTimer.getElapsedWallClockTime();
1338 }
1339
1340 if (mResultsFile.empty())
1341 {
1342 printf("No results file specified.\n");
1343 return;
1344 }
1345
1346 writeOutputFiles(true);
1347 }
1348
1349 bool TestSuite::launchChildTestProcess(uint32_t batchId,
1350 const std::vector<TestIdentifier> &testsInBatch)
1351 {
1352 // Create a temporary file to store the test list
1353 ProcessInfo processInfo;
1354
1355 Optional<std::string> filterBuffer = CreateTemporaryFile();
1356 if (!filterBuffer.valid())
1357 {
1358 std::cerr << "Error creating temporary file for test list.\n";
1359 return false;
1360 }
1361 processInfo.filterFileName.assign(filterBuffer.value());
1362
1363 std::string filterString = GetTestFilter(testsInBatch);
1364
1365 FILE *fp = fopen(processInfo.filterFileName.c_str(), "w");
1366 if (!fp)
1367 {
1368 std::cerr << "Error opening temporary file for test list.\n";
1369 return false;
1370 }
1371 fprintf(fp, "%s", filterString.c_str());
1372 fclose(fp);
1373
1374 processInfo.filterString = filterString;
1375
1376 std::string filterFileArg = kFilterFileArg + std::string("=") + processInfo.filterFileName;
1377
1378 // Create a temporary file to store the test output.
1379 Optional<std::string> resultsBuffer = CreateTemporaryFile();
1380 if (!resultsBuffer.valid())
1381 {
1382 std::cerr << "Error creating temporary file for test list.\n";
1383 return false;
1384 }
1385 processInfo.resultsFileName.assign(resultsBuffer.value());
1386
1387 std::string resultsFileArg = kResultFileArg + std::string("=") + processInfo.resultsFileName;
1388
1389 // Construct command line for child process.
1390 std::vector<const char *> args;
1391
1392 args.push_back(mTestExecutableName.c_str());
1393 args.push_back(filterFileArg.c_str());
1394 args.push_back(resultsFileArg.c_str());
1395
1396 std::stringstream batchIdStream;
1397 batchIdStream << kBatchId << "=" << batchId;
1398 std::string batchIdString = batchIdStream.str();
1399 args.push_back(batchIdString.c_str());
1400
1401 for (const std::string &arg : mChildProcessArgs)
1402 {
1403 args.push_back(arg.c_str());
1404 }
1405
1406 if (mDisableCrashHandler)
1407 {
1408 args.push_back(kDisableCrashHandler);
1409 }
1410
1411 std::string timeoutStr;
1412 if (mTestTimeout != kDefaultTestTimeout)
1413 {
1414 std::stringstream timeoutStream;
1415 timeoutStream << kTestTimeoutArg << "=" << mTestTimeout;
1416 timeoutStr = timeoutStream.str();
1417 args.push_back(timeoutStr.c_str());
1418 }
1419
1420 std::string artifactsDir;
1421 if (!mTestArtifactDirectory.empty())
1422 {
1423 std::stringstream artifactsDirStream;
1424 artifactsDirStream << kIsolatedOutDir << "=" << mTestArtifactDirectory;
1425 artifactsDir = artifactsDirStream.str();
1426 args.push_back(artifactsDir.c_str());
1427 }
1428
1429 // Launch child process and wait for completion.
1430 processInfo.process = LaunchProcess(args, ProcessOutputCapture::StdoutAndStderrInterleaved);
1431
1432 if (!processInfo.process->started())
1433 {
1434 std::cerr << "Error launching child process.\n";
1435 return false;
1436 }
1437
1438 std::stringstream commandLineStr;
1439 for (const char *arg : args)
1440 {
1441 commandLineStr << arg << " ";
1442 }
1443
1444 processInfo.commandLine = commandLineStr.str();
1445 processInfo.testsInBatch = testsInBatch;
1446 mCurrentProcesses.emplace_back(std::move(processInfo));
1447 return true;
1448 }
1449
1450 void ParseTestIdentifierAndSetResult(const std::string &testName,
1451 TestResultType result,
1452 TestResults *results)
1453 {
1454 // Trim off any whitespace + extra stuff at the end of the string.
1455 std::string modifiedTestName = testName.substr(0, testName.find(' '));
1456 modifiedTestName = modifiedTestName.substr(0, testName.find('\r'));
1457 TestIdentifier id;
1458 bool ok = TestIdentifier::ParseFromString(modifiedTestName, &id);
1459 ASSERT(ok);
1460 results->results[id] = {result};
1461 }
1462
1463 bool TestSuite::finishProcess(ProcessInfo *processInfo)
1464 {
1465 // Get test results and merge into main list.
1466 TestResults batchResults;
1467
1468 if (!GetTestResultsFromFile(processInfo->resultsFileName.c_str(), &batchResults))
1469 {
1470 std::cerr << "Warning: could not find test results file from child process.\n";
1471
1472 // First assume all tests get skipped.
1473 for (const TestIdentifier &id : processInfo->testsInBatch)
1474 {
1475 batchResults.results[id] = {TestResultType::NoResult};
1476 }
1477
1478 // Attempt to reconstruct passing list from stdout snippets.
1479 const std::string &batchStdout = processInfo->process->getStdout();
1480 std::istringstream linesStream(batchStdout);
1481
1482 std::string line;
1483 while (std::getline(linesStream, line))
1484 {
1485 size_t startPos = line.find(kStartedTestString);
1486 size_t failPos = line.find(kFailedTestString);
1487 size_t passPos = line.find(kPassedTestString);
1488 size_t skippedPos = line.find(kSkippedTestString);
1489
1490 if (startPos != std::string::npos)
1491 {
1492 // Assume a test that's started crashed until we see it completed.
1493 std::string testName = line.substr(strlen(kStartedTestString));
1494 ParseTestIdentifierAndSetResult(testName, TestResultType::Crash, &batchResults);
1495 }
1496 else if (failPos != std::string::npos)
1497 {
1498 std::string testName = line.substr(strlen(kFailedTestString));
1499 ParseTestIdentifierAndSetResult(testName, TestResultType::Fail, &batchResults);
1500 }
1501 else if (passPos != std::string::npos)
1502 {
1503 std::string testName = line.substr(strlen(kPassedTestString));
1504 ParseTestIdentifierAndSetResult(testName, TestResultType::Pass, &batchResults);
1505 }
1506 else if (skippedPos != std::string::npos)
1507 {
1508 std::string testName = line.substr(strlen(kSkippedTestString));
1509 ParseTestIdentifierAndSetResult(testName, TestResultType::Skip, &batchResults);
1510 }
1511 }
1512 }
1513
1514 if (!MergeTestResults(&batchResults, &mTestResults, mFlakyRetries))
1515 {
1516 std::cerr << "Error merging batch test results.\n";
1517 return false;
1518 }
1519
1520 if (!batchResults.results.empty())
1521 {
1522 const TestIdentifier &id = batchResults.results.begin()->first;
1523 std::string config = GetConfigNameFromTestIdentifier(id);
1524 printf("Completed batch with config: %s\n", config.c_str());
1525
1526 for (const auto &resultIter : batchResults.results)
1527 {
1528 const TestResult &result = resultIter.second;
1529 if (result.type != TestResultType::NoResult && IsFailedResult(result.type))
1530 {
1531 printf("To reproduce the batch, use filter:\n%s\n",
1532 processInfo->filterString.c_str());
1533 break;
1534 }
1535 }
1536 }
1537
1538 // Process results and print unexpected errors.
1539 for (const auto &resultIter : batchResults.results)
1540 {
1541 const TestIdentifier &id = resultIter.first;
1542 const TestResult &result = resultIter.second;
1543
1544 // Tests with no result aren't processed here since they're added back to the test queue below.
1545 if (result.type == TestResultType::NoResult)
1546 {
1547 continue;
1548 }
1549
1550 mCurrentResultCount++;
1551
1552 printf("[%d/%d] %s.%s", mCurrentResultCount, mTotalResultCount, id.testSuiteName.c_str(),
1553 id.testName.c_str());
1554
1555 if (mPrintTestStdout)
1556 {
1557 const std::string &batchStdout = processInfo->process->getStdout();
1558 PrintTestOutputSnippet(id, result, batchStdout);
1559 }
1560 else if (result.type == TestResultType::Pass)
1561 {
1562 printf(" (%0.1lf ms)\n", result.elapsedTimeSeconds.back() * 1000.0);
1563 }
1564 else if (result.type == TestResultType::Skip)
1565 {
1566 printf(" (skipped)\n");
1567 }
1568 else if (result.type == TestResultType::Timeout)
1569 {
1570 printf(" (TIMEOUT in %0.1lf s)\n", result.elapsedTimeSeconds.back());
1571 mFailureCount++;
1572
1573 const std::string &batchStdout = processInfo->process->getStdout();
1574 PrintTestOutputSnippet(id, result, batchStdout);
1575 }
1576 else
1577 {
1578 printf(" (%s)\n", ResultTypeToString(result.type));
1579 mFailureCount++;
1580
1581 const std::string &batchStdout = processInfo->process->getStdout();
1582 PrintTestOutputSnippet(id, result, batchStdout);
1583 }
1584 }
1585
1586 // On unexpected exit, re-queue any unfinished tests.
1587 std::vector<TestIdentifier> unfinishedTests;
1588 for (const auto &resultIter : batchResults.results)
1589 {
1590 const TestIdentifier &id = resultIter.first;
1591 const TestResult &result = resultIter.second;
1592
1593 if (result.type == TestResultType::NoResult)
1594 {
1595 unfinishedTests.push_back(id);
1596 }
1597 }
1598
1599 if (!unfinishedTests.empty())
1600 {
1601 mTestQueue.emplace(std::move(unfinishedTests));
1602 }
1603
1604 // Clean up any dirty temporary files.
1605 for (const std::string &tempFile : {processInfo->filterFileName, processInfo->resultsFileName})
1606 {
1607 // Note: we should be aware that this cleanup won't happen if the harness itself
1608 // crashes. If this situation comes up in the future we should add crash cleanup to the
1609 // harness.
1610 if (!angle::DeleteSystemFile(tempFile.c_str()))
1611 {
1612 std::cerr << "Warning: Error cleaning up temp file: " << tempFile << "\n";
1613 }
1614 }
1615
1616 processInfo->process.reset();
1617 return true;
1618 }
1619
1620 int TestSuite::run()
1621 {
1622 #if defined(ANGLE_PLATFORM_ANDROID)
1623 if (mListTests && mGTestListTests)
1624 {
1625 // Workaround for the Android test runner requiring a GTest test list.
1626 printf("PlaceholderTest.\n Placeholder\n");
1627 return EXIT_SUCCESS;
1628 }
1629 #endif // defined(ANGLE_PLATFORM_ANDROID)
1630
1631 if (mListTests)
1632 {
1633 ListTests(mTestResults.results);
1634
1635 #if defined(ANGLE_PLATFORM_ANDROID)
1636 // Because of quirks with the Chromium-provided Android test runner, we need to use a few
1637 // tricks to get the test list output. We add placeholder output for a single test to trick
1638 // the test runner into thinking it ran the tests successfully. We also add an end marker
1639 // for the tests list so we can parse the list from the more spammy Android stdout log.
1640 static constexpr char kPlaceholderTestTest[] = R"(
1641 [==========] Running 1 test from 1 test suite.
1642 [----------] Global test environment set-up.
1643 [----------] 1 test from PlaceholderTest
1644 [ RUN ] PlaceholderTest.Placeholder
1645 [ OK ] PlaceholderTest.Placeholder (0 ms)
1646 [----------] 1 test from APITest (0 ms total)
1647
1648 [----------] Global test environment tear-down
1649 [==========] 1 test from 1 test suite ran. (24 ms total)
1650 [ PASSED ] 1 test.
1651 )";
1652 printf(kPlaceholderTestTest);
1653 #endif // defined(ANGLE_PLATFORM_ANDROID)
1654
1655 return EXIT_SUCCESS;
1656 }
1657
1658 if (mGTestListTests)
1659 {
1660 GTestListTests(mTestResults.results);
1661 return EXIT_SUCCESS;
1662 }
1663
1664 // Run tests serially.
1665 if (!mBotMode)
1666 {
1667 // Only start the watchdog if the debugger is not attached and we're a child process.
1668 if (!angle::IsDebuggerAttached() && mBatchId != -1)
1669 {
1670 startWatchdog();
1671 }
1672
1673 int retVal = RUN_ALL_TESTS();
1674 {
1675 std::lock_guard<std::mutex> guard(mTestResults.currentTestMutex);
1676 mTestResults.allDone = true;
1677 }
1678
1679 if (mWatchdogThread.joinable())
1680 {
1681 mWatchdogThread.join();
1682 }
1683 return retVal;
1684 }
1685
1686 Timer totalRunTime;
1687 totalRunTime.start();
1688
1689 Timer messageTimer;
1690 messageTimer.start();
1691
1692 uint32_t batchId = 0;
1693
    while (!mTestQueue.empty() || !mCurrentProcesses.empty())
    {
        bool progress = false;

        // Spawn a process if needed and possible.
        if (static_cast<int>(mCurrentProcesses.size()) < mMaxProcesses && !mTestQueue.empty())
        {
            std::vector<TestIdentifier> testsInBatch = mTestQueue.front();
            mTestQueue.pop();

            if (!launchChildTestProcess(++batchId, testsInBatch))
            {
                return 1;
            }

            progress = true;
        }

        // Check for process completion.
        uint32_t totalTestCount = 0;
        for (auto processIter = mCurrentProcesses.begin(); processIter != mCurrentProcesses.end();)
        {
            ProcessInfo &processInfo = *processIter;
            if (processInfo.process->finished())
            {
                if (!finishProcess(&processInfo))
                {
                    return 1;
                }
                processIter = mCurrentProcesses.erase(processIter);
                progress = true;
            }
            else if (processInfo.process->getElapsedTimeSeconds() > mBatchTimeout)
            {
                // Terminate the process and record timeouts for the batch.
                // Because we can't determine which sub-test caused a timeout, record the whole
                // batch as a timeout failure. Can be improved by using socket message passing.
                if (!processInfo.process->kill())
                {
                    return 1;
                }

                const std::string &batchStdout = processInfo.process->getStdout();
                std::vector<std::string> lines =
                    SplitString(batchStdout, "\r\n", WhitespaceHandling::TRIM_WHITESPACE,
                                SplitResult::SPLIT_WANT_NONEMPTY);
                constexpr size_t kKeepLines = 10;
                printf("\nBatch timeout! Last %zu lines of batch stdout:\n", kKeepLines);
                printf("---------------------------------------------\n");
                for (size_t lineNo = lines.size() - std::min(lines.size(), kKeepLines);
                     lineNo < lines.size(); ++lineNo)
                {
                    printf("%s\n", lines[lineNo].c_str());
                }
                printf("---------------------------------------------\n\n");

                for (const TestIdentifier &testIdentifier : processInfo.testsInBatch)
                {
                    // Because the whole batch failed we can't know how long each test took.
                    mTestResults.results[testIdentifier].type = TestResultType::Timeout;
                    mFailureCount++;
                }

                processIter = mCurrentProcesses.erase(processIter);
                progress = true;
            }
            else
            {
                totalTestCount += static_cast<uint32_t>(processInfo.testsInBatch.size());
                processIter++;
            }
        }

        if (progress)
        {
            messageTimer.start();
        }
        else if (messageTimer.getElapsedWallClockTime() > kIdleMessageTimeout)
        {
            const ProcessInfo &processInfo = mCurrentProcesses[0];
            double processTime = processInfo.process->getElapsedTimeSeconds();
            printf("Running %d tests in %d processes, longest for %d seconds.\n", totalTestCount,
                   static_cast<int>(mCurrentProcesses.size()), static_cast<int>(processTime));
            messageTimer.start();
        }

        // Early exit if we passed the maximum failure threshold. Still wait for current tests.
        if (mFailureCount > mMaxFailures && !mTestQueue.empty())
        {
            printf("Reached maximum failure count (%d), clearing test queue.\n", mMaxFailures);
            TestQueue emptyTestQueue;
            std::swap(mTestQueue, emptyTestQueue);
        }

        // Sleep briefly and continue.
        angle::Sleep(100);
    }

    // Dump combined results.
    if (mFailureCount > mMaxFailures)
    {
        printf(
            "Omitted results files because the failure count (%d) exceeded the maximum number of "
            "failures (%d).\n",
            mFailureCount, mMaxFailures);
    }
    else
    {
        writeOutputFiles(false);
    }

    totalRunTime.stop();
    printf("Tests completed in %lf seconds\n", totalRunTime.getElapsedWallClockTime());

    return printFailuresAndReturnCount() == 0 ? 0 : 1;
}

int TestSuite::printFailuresAndReturnCount() const
{
    std::vector<std::string> failures;
    uint32_t skipCount = 0;

    for (const auto &resultIter : mTestResults.results)
    {
        const TestIdentifier &id = resultIter.first;
        const TestResult &result = resultIter.second;

        if (result.type == TestResultType::Skip)
        {
            skipCount++;
        }
        else if (result.type != TestResultType::Pass)
        {
            const FileLine &fileLine = mTestFileLines.find(id)->second;

            std::stringstream failureMessage;
            failureMessage << id << " (" << fileLine.file << ":" << fileLine.line << ") ("
                           << ResultTypeToString(result.type) << ")";
            failures.emplace_back(failureMessage.str());
        }
    }

    if (failures.empty())
        return 0;

    printf("%zu test%s failed:\n", failures.size(), failures.size() > 1 ? "s" : "");
    for (const std::string &failure : failures)
    {
        printf(" %s\n", failure.c_str());
    }
    if (skipCount > 0)
    {
        printf("%u tests skipped.\n", skipCount);
    }

    return static_cast<int>(failures.size());
}

void TestSuite::startWatchdog()
{
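    // The watchdog thread polls the per-test timer under the results lock every 500 ms. If the
    // current test exceeds its timeout, record a Timeout result and hard-exit the process; if the
    // run finishes normally (allDone), the thread simply returns.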
    auto watchdogMain = [this]() {
        do
        {
            {
                std::lock_guard<std::mutex> guard(mTestResults.currentTestMutex);
                if (mTestResults.currentTestTimer.getElapsedWallClockTime() >
                    mTestResults.currentTestTimeout)
                {
                    break;
                }

                if (mTestResults.allDone)
                    return;
            }

            angle::Sleep(500);
        } while (true);
        onCrashOrTimeout(TestResultType::Timeout);
        ::_Exit(EXIT_FAILURE);
    };
    mWatchdogThread = std::thread(watchdogMain);
}

void TestSuite::addHistogramSample(const std::string &measurement,
                                   const std::string &story,
                                   double value,
                                   const std::string &units)
{
    mHistogramWriter.addSample(measurement, story, value, units);
}

bool TestSuite::hasTestArtifactsDirectory() const
{
    return !mTestArtifactDirectory.empty();
}

std::string TestSuite::reserveTestArtifactPath(const std::string &artifactName)
{
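    // Record the artifact name unconditionally so it is tracked in the test results. When no
    // artifact directory is configured, return the bare name, which resolves relative to the
    // current working directory.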
    mTestResults.testArtifactPaths.push_back(artifactName);

    if (mTestArtifactDirectory.empty())
    {
        return artifactName;
    }

    std::stringstream pathStream;
    pathStream << mTestArtifactDirectory << GetPathSeparator() << artifactName;
    return pathStream.str();
}

bool GetTestResultsFromFile(const char *fileName, TestResults *resultsOut)
{
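    // Parse a JSON test-results file into *resultsOut using RapidJSON, logging to stderr and
    // returning false on I/O or parse errors.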
    std::ifstream ifs(fileName);
    if (!ifs.is_open())
    {
        std::cerr << "Error opening " << fileName << "\n";
        return false;
    }

    js::IStreamWrapper ifsWrapper(ifs);
    js::Document document;
    document.ParseStream(ifsWrapper);

    if (document.HasParseError())
    {
        std::cerr << "Parse error reading JSON document: " << document.GetParseError() << "\n";
        return false;
    }

    if (!GetTestResultsFromJSON(document, resultsOut))
    {
        std::cerr << "Error getting test results from JSON.\n";
        return false;
    }

    return true;
}

void TestSuite::dumpTestExpectationsErrorMessages()
{
    std::stringstream errorMsgStream;
    for (const auto &message : mTestExpectationsParser.getErrorMessages())
    {
        errorMsgStream << std::endl << " " << message;
    }

    std::cerr << "Failed to load test expectations." << errorMsgStream.str() << std::endl;
}

bool TestSuite::loadTestExpectationsFromFileWithConfig(const GPUTestConfig &config,
                                                       const std::string &fileName)
{
    if (!mTestExpectationsParser.loadTestExpectationsFromFile(config, fileName))
    {
        dumpTestExpectationsErrorMessages();
        return false;
    }
    return true;
}

bool TestSuite::loadAllTestExpectationsFromFile(const std::string &fileName)
{
    if (!mTestExpectationsParser.loadAllTestExpectationsFromFile(fileName))
    {
        dumpTestExpectationsErrorMessages();
        return false;
    }
    return true;
}

bool TestSuite::logAnyUnusedTestExpectations()
{
    std::stringstream unusedMsgStream;
    bool anyUnused = false;
    for (const auto &message : mTestExpectationsParser.getUnusedExpectationsMessages())
    {
        anyUnused = true;
        unusedMsgStream << std::endl << " " << message;
    }
    if (anyUnused)
    {
        std::cerr << "Found unused test expectations:" << unusedMsgStream.str() << std::endl;
        return true;
    }
    return false;
}

int32_t TestSuite::getTestExpectation(const std::string &testName)
{
    return mTestExpectationsParser.getTestExpectation(testName);
}

void TestSuite::maybeUpdateTestTimeout(uint32_t testExpectation)
{
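    // Tests marked with the timeout expectation (kGpuTestTimeout) get the scaled slow-test
    // timeout; all other tests keep the default per-test timeout.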
    double testTimeout = (testExpectation == GPUTestExpectationsParser::kGpuTestTimeout)
                             ? getSlowTestTimeout()
                             : mTestTimeout;
    std::lock_guard<std::mutex> guard(mTestResults.currentTestMutex);
    mTestResults.currentTestTimeout = testTimeout;
}

int32_t TestSuite::getTestExpectationWithConfigAndUpdateTimeout(const GPUTestConfig &config,
                                                                const std::string &testName)
{
    uint32_t expectation = mTestExpectationsParser.getTestExpectationWithConfig(config, testName);
    maybeUpdateTestTimeout(expectation);
    return expectation;
}

int TestSuite::getSlowTestTimeout() const
{
    return mTestTimeout * kSlowTestTimeoutScale;
}

void TestSuite::writeOutputFiles(bool interrupted)
{
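    // Write whichever output files were requested: the JSON results file and the histogram JSON
    // (each only if a path was provided), then close the metric writer.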
    if (!mResultsFile.empty())
    {
        WriteResultsFile(interrupted, mTestResults, mResultsFile);
    }

    if (!mHistogramJsonFile.empty())
    {
        WriteHistogramJson(mHistogramWriter, mHistogramJsonFile);
    }

    mMetricWriter.close();
}

const char *TestResultTypeToString(TestResultType type)
{
    switch (type)
    {
        case TestResultType::Crash:
            return "Crash";
        case TestResultType::Fail:
            return "Fail";
        case TestResultType::NoResult:
            return "NoResult";
        case TestResultType::Pass:
            return "Pass";
        case TestResultType::Skip:
            return "Skip";
        case TestResultType::Timeout:
            return "Timeout";
        case TestResultType::Unknown:
        default:
            return "Unknown";
    }
}

// This code supports using "-" in test names, which happens often in dEQP. GTest uses "-" as a
// marker for the beginning of the exclusion filter. Work around this by replacing "-" with "?",
// which matches any single character.
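// For example (illustrative): a filter such as "dEQP-GLES2.functional.*" becomes
// "dEQP?GLES2.functional.*"; because '?' matches any single character in a gtest filter, the
// rewritten pattern still matches the intended tests.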
std::string ReplaceDashesWithQuestionMark(std::string dashesString)
{
    std::string noDashesString = dashesString;
    ReplaceAllSubstrings(&noDashesString, "-", "?");
    return noDashesString;
}
}  // namespace angle
