// Copyright 2019 The SwiftShader Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// regres is a tool that detects test regressions with SwiftShader changes.
//
// Regres monitors changes that have been put up for review with Gerrit.
// Once a new patchset has been found, regres will checkout, build and test the
// change against the parent changelist. Any differences in results are reported
// as a review comment on the change.
//
// Once a day regres will also test another, larger set of tests, and post the
// full test results as a Gerrit changelist. The CI test lists can be based from
// this daily test list, so testing can be limited to tests that were known to
// pass.
package main

import (
	"bytes"
	"crypto/sha1"
	"encoding/hex"
	"encoding/json"
	"errors"
	"flag"
	"fmt"
	"io/ioutil"
	"log"
	"math"
	"os"
	"os/exec"
	"path"
	"path/filepath"
	"regexp"
	"runtime"
	"sort"
	"strings"
	"time"

	"../../cause"
	"../../consts"
	"../../cov"
	"../../deqp"
	"../../git"
	"../../llvm"
	"../../shell"
	"../../testlist"
	"../../util"

	gerrit "github.com/andygrunwald/go-gerrit"
)

const (
	gitURL                = "https://swiftshader.googlesource.com/SwiftShader"
	gitDailyBranch        = "HEAD"
	gerritURL             = "https://swiftshader-review.googlesource.com/"
	coverageURL           = "https://$USERNAME:$PASSWORD@github.com/swiftshader-regres/swiftshader-coverage.git"
	coverageBranch        = "gh-pages"
	coveragePath          = "coverage/coverage.zip"
	reportHeader          = "Regres report:"
	changeUpdateFrequency = time.Minute * 5
	changeQueryFrequency  = time.Minute * 5
	testTimeout           = time.Minute * 2  // timeout for a single test
	buildTimeout          = time.Minute * 10 // timeout for a build
	fullTestListRelPath   = "tests/regres/full-tests.json"
	ciTestListRelPath     = "tests/regres/ci-tests.json"
	deqpConfigRelPath     = "tests/regres/deqp.json"
	swsTestLists          = "tests/regres/testlists"
	deqpTestLists         = "external/vulkancts/mustpass/main"
)

var (
	numParallelTests = runtime.NumCPU()
	llvmVersion      = llvm.Version{Major: 10}

	cacheDir      = flag.String("cache", "cache", "path to the output cache directory")
	gerritEmail   = flag.String("email", "$SS_REGRES_EMAIL", "gerrit email address for posting regres results")
	gerritUser    = flag.String("user", "$SS_REGRES_USER", "gerrit username for posting regres results")
	gerritPass    = flag.String("pass", "$SS_REGRES_PASS", "gerrit password for posting regres results")
	githubUser    = flag.String("gh-user", "$SS_GITHUB_USER", "github user for posting coverage results")
	githubPass    = flag.String("gh-pass", "$SS_GITHUB_PASS", "github password for posting coverage results")
	keepCheckouts = flag.Bool("keep", false, "don't delete checkout directories after use")
	dryRun        = flag.Bool("dry", false, "don't post regres reports to gerrit")
	maxProcMemory = flag.Uint64("max-proc-mem", shell.MaxProcMemory, "maximum virtual memory per child process")
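
	// Flags controlling the daily pass. The daily pass normally runs once
	// per calendar day against HEAD (see run()); these flags force,
	// restrict, or redirect it.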
	dailyNow    = flag.Bool("dailynow", false, "start by running the daily pass")
	dailyOnly   = flag.Bool("dailyonly", false, "run only the daily pass")
	dailyChange = flag.String("dailychange", "", "change hash to use for the daily pass, HEAD if not provided")
	priority    = flag.Int("priority", 0, "prioritize a single change with the given number")
	limit       = flag.Int("limit", 0, "only run a maximum of this number of tests")
)

func main() {
	flag.ErrHelp = errors.New("regres is a tool to detect regressions between versions of SwiftShader")
	flag.Parse()

	shell.MaxProcMemory = *maxProcMemory

	r := regres{
		cacheRoot:     *cacheDir,
		gerritEmail:   os.ExpandEnv(*gerritEmail),
		gerritUser:    os.ExpandEnv(*gerritUser),
		gerritPass:    os.ExpandEnv(*gerritPass),
		githubUser:    os.ExpandEnv(*githubUser),
		githubPass:    os.ExpandEnv(*githubPass),
		keepCheckouts: *keepCheckouts,
		dryRun:        *dryRun,
		dailyNow:      *dailyNow,
		dailyOnly:     *dailyOnly,
		dailyChange:   *dailyChange,
		priority:      *priority,
	}

	if err := r.run(); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(-1)
	}
}

type regres struct {
	cmake         string          // path to cmake executable
	make          string          // path to make executable
	python        string          // path to python executable
	tar           string          // path to tar executable
	cacheRoot     string          // path to the regres cache directory
	toolchain     *llvm.Toolchain // the LLVM toolchain used to build SwiftShader
	gerritEmail   string          // gerrit email address used for posting results
	gerritUser    string          // gerrit username used for posting results
	gerritPass    string          // gerrit password used for posting results
	githubUser    string          // github username used for posting results
	githubPass    string          // github password used for posting results
	keepCheckouts bool            // don't delete source & build checkouts after testing
	dryRun        bool            // don't post any reviews
	maxProcMemory uint64          // max virtual memory for child processes
	dailyNow      bool            // start with a daily run
	dailyOnly     bool            // run only the daily run
	dailyChange   string          // change hash to use for the daily pass, HEAD if not provided
	priority      int             // prioritize a single change with the given number
}

// getToolchain returns the LLVM toolchain, possibly downloading and
// decompressing it if it wasn't found in the cache directory.
func getToolchain(tarExe, cacheRoot string) (*llvm.Toolchain, error) {
	path := filepath.Join(cacheRoot, "llvm")

	if toolchain := llvm.Search(path).Find(llvmVersion); toolchain != nil {
		return toolchain, nil
	}

	// The LLVM toolchain may have been updated; remove the directory if it exists.
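	// (A stale or partially extracted toolchain left here would otherwise
	// shadow the freshly downloaded version moved into place below.)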
	os.RemoveAll(path)

	log.Printf("Downloading LLVM %v toolchain...\n", llvmVersion)
	tar, err := llvmVersion.Download()
	if err != nil {
		return nil, fmt.Errorf("Couldn't download LLVM %v: %v", llvmVersion, err)
	}

	tarFile := filepath.Join(cacheRoot, "llvm.tar.xz")
	if err := ioutil.WriteFile(tarFile, tar, 0666); err != nil {
		return nil, fmt.Errorf("Couldn't write '%v': %v", tarFile, err)
	}
	defer os.Remove(tarFile)

	log.Printf("Decompressing LLVM %v toolchain...\n", llvmVersion)
	target := filepath.Join(cacheRoot, "llvm-tmp")
	os.MkdirAll(target, 0755)
	defer os.RemoveAll(target)
	if err := exec.Command(tarExe, "-xf", tarFile, "-C", target).Run(); err != nil {
		return nil, fmt.Errorf("Couldn't decompress LLVM tar download: %v", err)
	}

	// The tar, once decompressed, holds a single root directory with a name
	// starting with 'clang+llvm'. Move this to path.
	files, err := filepath.Glob(filepath.Join(target, "*"))
	if err != nil {
		return nil, fmt.Errorf("Couldn't glob decompressed files: %v", err)
	}
	if len(files) != 1 || !util.IsDir(files[0]) {
		return nil, fmt.Errorf("Unexpected decompressed files: %+v", files)
	}
	if err := os.Rename(files[0], path); err != nil {
		return nil, fmt.Errorf("Couldn't move %v to %v: %v", files[0], path, err)
	}

	// We should now have everything in the right place.
	toolchain := llvm.Search(path).Find(llvmVersion)
	if toolchain == nil {
		return nil, fmt.Errorf("Couldn't find LLVM toolchain after downloading")
	}

	return toolchain, nil
}

// toolchainEnv returns the environment variables for executing CMake commands.
func (r *regres) toolchainEnv() []string {
	return append([]string{
		"CC=" + r.toolchain.Clang(),
		"CXX=" + r.toolchain.ClangXX(),
	}, os.Environ()...)
}

// resolveDirs ensures that the necessary directories used can be found, and
// expands them to absolute paths.
func (r *regres) resolveDirs() error {
	allDirs := []*string{
		&r.cacheRoot,
	}

	for _, path := range allDirs {
		abs, err := filepath.Abs(*path)
		if err != nil {
			return cause.Wrap(err, "Couldn't find path '%v'", *path)
		}
		*path = abs
	}

	if err := os.MkdirAll(r.cacheRoot, 0777); err != nil {
		return cause.Wrap(err, "Couldn't create cache root directory")
	}

	for _, path := range allDirs {
		if !util.IsDir(*path) {
			return fmt.Errorf("Couldn't find path '%v'", *path)
		}
	}

	return nil
}

// resolveExes resolves all external executables used by regres.
func (r *regres) resolveExes() error {
	type exe struct {
		name string
		path *string
	}
	for _, e := range []exe{
		{"cmake", &r.cmake},
		{"make", &r.make},
		{"python", &r.python},
		{"tar", &r.tar},
	} {
		path, err := exec.LookPath(e.name)
		if err != nil {
			return cause.Wrap(err, "Couldn't find path to %s", e.name)
		}
		*e.path = path
	}
	return nil
}

// run performs the main processing loop for the regres tool. It:
// * Scans for open and recently updated changes in gerrit using queryChanges()
//   and changeInfo.update().
// * Builds and tests the most recent patchset and its parent changelist using
//   r.test(), caching results where possible.
// * Compares the results of the tests using compare().
// * Posts the results of the compare to gerrit as a review.
// * Repeats the above steps until the process is interrupted.
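// Per-change failures inside the loop are logged and the loop continues;
// setup failures and failed review posts abort run() with an error.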
func (r *regres) run() error {
	if err := r.resolveExes(); err != nil {
		return cause.Wrap(err, "Couldn't resolve all exes")
	}

	if err := r.resolveDirs(); err != nil {
		return cause.Wrap(err, "Couldn't resolve all directories")
	}

	toolchain, err := getToolchain(r.tar, r.cacheRoot)
	if err != nil {
		return cause.Wrap(err, "Couldn't download LLVM toolchain")
	}
	r.toolchain = toolchain

	client, err := gerrit.NewClient(gerritURL, nil)
	if err != nil {
		return cause.Wrap(err, "Couldn't create gerrit client")
	}
	if r.gerritUser != "" {
		client.Authentication.SetBasicAuth(r.gerritUser, r.gerritPass)
	}

	changes := map[int]*changeInfo{} // Change number -> changeInfo
	lastUpdatedTestLists := toDate(time.Now())
	lastQueriedChanges := time.Time{}

	if r.dailyNow || r.dailyOnly {
		lastUpdatedTestLists = date{}
	}

	for {
		if now := time.Now(); toDate(now) != lastUpdatedTestLists {
			lastUpdatedTestLists = toDate(now)
			if err := r.runDaily(client, backendLLVM, false); err != nil {
				log.Println(err.Error())
			}
			if err := r.runDaily(client, backendSubzero, true); err != nil {
				log.Println(err.Error())
			}
		}

		if r.dailyOnly {
			log.Println("Daily finished with --dailyonly. Stopping")
			return nil
		}

		// Update list of tracked changes.
		if time.Since(lastQueriedChanges) > changeQueryFrequency {
			lastQueriedChanges = time.Now()
			if err := queryChanges(client, changes); err != nil {
				log.Println(err.Error())
			}
		}

		// Update change info.
		for _, change := range changes {
			if time.Since(change.lastUpdated) > changeUpdateFrequency {
				change.lastUpdated = time.Now()
				err := change.update(client)
				if err != nil {
					log.Println(cause.Wrap(err, "Couldn't update info for change '%v'", change.number))
				}
			}
		}

		for _, c := range changes {
			if c.pending && r.priority == c.number {
				log.Printf("Prioritizing change '%v'\n", c.number)
				c.priority = 1e6
			}
		}

		// Find the change with the highest priority.
		var change *changeInfo
		numPending := 0
		for _, c := range changes {
			if c.pending {
				numPending++
				if change == nil || c.priority > change.priority {
					change = c
				}
			}
		}

		if change == nil {
			// Everything up to date. Take a break.
			log.Println("Nothing to do. Sleeping")
			time.Sleep(time.Minute)
			continue
		}

		log.Printf("%d changes queued for testing\n", numPending)

		log.Printf("Testing change '%v'\n", change.number)

		// Test the latest patchset in the change, diff against parent change.
		msg, alert, err := r.test(change)
		if err != nil {
			log.Println(cause.Wrap(err, "Failed to test changelist '%s'", change.latest))
			time.Sleep(time.Minute)
			change.pending = false
			continue
		}

		// Always include the reportHeader in the message.
		// changeInfo.update() uses this header to detect whether a patchset has
		// already got a test result.
		msg = reportHeader + "\n\n" + msg

		// Limit the message length to prevent a '400 Bad Request' response.
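		// The tail is replaced with a truncation marker so the total stays
		// within maxMsgLength bytes.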
		maxMsgLength := 16000
		if len(msg) > maxMsgLength {
			trunc := " [truncated]\n"
			msg = msg[0:maxMsgLength-len(trunc)] + trunc
		}

		if r.dryRun {
			log.Printf("DRY RUN: add review to change '%v':\n%v\n", change.number, msg)
		} else {
			log.Printf("Posting review to '%v'\n", change.number)
			notify := "OWNER"
			if alert {
				notify = "OWNER_REVIEWERS"
			}
			_, _, err = client.Changes.SetReview(fmt.Sprintf("%v", change.number), change.latest.String(), &gerrit.ReviewInput{
				Message: msg,
				Tag:     "autogenerated:regress",
				Notify:  notify,
			})
			if err != nil {
				return cause.Wrap(err, "Failed to post comments on change '%v'", change.number)
			}
		}
		change.pending = false
	}
}

// test checks out, builds and tests the latest patchset of the change and its
// parent changelist, returning the comparison report message and whether the
// differences warrant alerting reviewers.
func (r *regres) test(change *changeInfo) (string, bool, error) {
	latest := r.newTest(change.latest)
	defer latest.cleanup()

	if err := latest.checkout(); err != nil {
		return "", true, cause.Wrap(err, "Failed to checkout '%s'", change.latest)
	}

	deqpBuild, err := r.getOrBuildDEQP(latest)
	if err != nil {
		return "", true, cause.Wrap(err, "Failed to build dEQP '%v' for change", change.number)
	}

	log.Printf("Testing latest patchset for change '%v'\n", change.number)
	latestResults, testlists, err := r.testLatest(change, latest, deqpBuild)
	if err != nil {
		return "", true, cause.Wrap(err, "Failed to test latest change of '%v'", change.number)
	}

	log.Printf("Testing parent of change '%v'\n", change.number)
	parentResults, err := r.testParent(change, testlists, deqpBuild)
	if err != nil {
		return "", true, cause.Wrap(err, "Failed to test parent change of '%v'", change.number)
	}

	log.Println("Comparing latest patchset's results with parent")
	msg, alert := compare(parentResults, latestResults)

	return msg, alert, nil
}

type deqpBuild struct {
	path string // path to deqp directory
	hash string // hash of the deqp config
}

// getOrBuildDEQP returns a dEQP build for the test's deqp.json config,
// building and caching it first if it isn't already in the cache.
func (r *regres) getOrBuildDEQP(test *test) (deqpBuild, error) {
	checkoutDir := test.checkoutDir
	if p := path.Join(checkoutDir, deqpConfigRelPath); !util.IsFile(p) {
		checkoutDir, _ = os.Getwd()
		log.Printf("Couldn't open dEQP config file from change (%v), falling back to internal version\n", p)
	} else {
		log.Println("Using dEQP config file from change")
	}
	file, err := os.Open(path.Join(checkoutDir, deqpConfigRelPath))
	if err != nil {
		return deqpBuild{}, cause.Wrap(err, "Couldn't open dEQP config file")
	}
	defer file.Close()

	cfg := struct {
		Remote  string   `json:"remote"`
		Branch  string   `json:"branch"`
		SHA     string   `json:"sha"`
		Patches []string `json:"patches"`
	}{}
	if err := json.NewDecoder(file).Decode(&cfg); err != nil {
		return deqpBuild{}, cause.Wrap(err, "Couldn't parse %s", deqpConfigRelPath)
	}

	hasher := sha1.New()
	if err := json.NewEncoder(hasher).Encode(&cfg); err != nil {
		return deqpBuild{}, cause.Wrap(err, "Couldn't re-encode %s", deqpConfigRelPath)
	}
	hash := hex.EncodeToString(hasher.Sum(nil))
	cacheDir := path.Join(r.cacheRoot, "deqp", hash)
	buildDir := path.Join(cacheDir, "build")
	if !util.IsDir(cacheDir) {
		if err := os.MkdirAll(cacheDir, 0777); err != nil {
			return deqpBuild{}, cause.Wrap(err, "Couldn't make deqp cache directory '%s'", cacheDir)
		}

		success := false
		defer func() {
			if !success {
				os.RemoveAll(cacheDir)
			}
		}()
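
		// Everything below runs only on a cache miss: the checkout and build
		// are keyed by the SHA-1 of the decoded config, so changing the
		// remote, branch, SHA or patch list produces a new cache entry.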
		if cfg.Branch != "" {
			// If a branch is specified, fetch the branch and then checkout the
			// commit by SHA. This is a workaround for git repos that error when
			// attempting to directly checkout a remote commit.
			log.Printf("Checking out deqp %v branch %v into %v\n", cfg.Remote, cfg.Branch, cacheDir)
			if err := git.CheckoutRemoteBranch(cacheDir, cfg.Remote, cfg.Branch); err != nil {
				return deqpBuild{}, cause.Wrap(err, "Couldn't checkout deqp branch %v @ %v", cfg.Remote, cfg.Branch)
			}
			log.Printf("Checking out deqp %v commit %v\n", cfg.Remote, cfg.SHA)
			if err := git.CheckoutCommit(cacheDir, git.ParseHash(cfg.SHA)); err != nil {
				return deqpBuild{}, cause.Wrap(err, "Couldn't checkout deqp commit %v @ %v", cfg.Remote, cfg.SHA)
			}
		} else {
			log.Printf("Checking out deqp %v @ %v into %v\n", cfg.Remote, cfg.SHA, cacheDir)
			if err := git.CheckoutRemoteCommit(cacheDir, cfg.Remote, git.ParseHash(cfg.SHA)); err != nil {
				return deqpBuild{}, cause.Wrap(err, "Couldn't checkout deqp commit %v @ %v", cfg.Remote, cfg.SHA)
			}
		}

		log.Println("Fetching deqp dependencies")
		if err := shell.Shell(buildTimeout, r.python, cacheDir, "external/fetch_sources.py"); err != nil {
			return deqpBuild{}, cause.Wrap(err, "Couldn't fetch deqp sources %v @ %v", cfg.Remote, cfg.SHA)
		}

		log.Println("Applying deqp patches")
		for _, patch := range cfg.Patches {
			fullPath := path.Join(checkoutDir, patch)
			if err := git.Apply(cacheDir, fullPath); err != nil {
				return deqpBuild{}, cause.Wrap(err, "Couldn't apply deqp patch %v for %v @ %v", patch, cfg.Remote, cfg.SHA)
			}
		}

		log.Printf("Building deqp into %v\n", buildDir)
		if err := os.MkdirAll(buildDir, 0777); err != nil {
			return deqpBuild{}, cause.Wrap(err, "Couldn't make deqp build directory '%v'", buildDir)
		}

		if err := shell.Shell(buildTimeout, r.cmake, buildDir,
			"-DDEQP_TARGET=default",
			"-DCMAKE_BUILD_TYPE=Release",
			".."); err != nil {
			return deqpBuild{}, cause.Wrap(err, "Couldn't generate build rules for deqp %v @ %v", cfg.Remote, cfg.SHA)
		}

		if err := shell.Shell(buildTimeout, r.make, buildDir,
			fmt.Sprintf("-j%d", runtime.NumCPU()),
			"deqp-vk"); err != nil {
			return deqpBuild{}, cause.Wrap(err, "Couldn't build deqp %v @ %v", cfg.Remote, cfg.SHA)
		}

		success = true
	}

	return deqpBuild{
		path: cacheDir,
		hash: hash,
	}, nil
}

// additionalTestsRE matches a 'Test:' or 'Tests:' line in a commit message,
// capturing the test pattern that follows it.
var additionalTestsRE = regexp.MustCompile(`\n\s*Test[s]?:\s*([^\s]+)[^\n]*`)

// testLatest runs the CI test lists (plus any additional tests requested in
// the change's commit message) against the latest patchset, returning the
// results and the test lists used.
func (r *regres) testLatest(change *changeInfo, test *test, d deqpBuild) (*deqp.Results, testlist.Lists, error) {
	// Get the test results for the latest patchset in the change.
	testlists, err := test.loadTestLists(ciTestListRelPath)
	if err != nil {
		return nil, nil, cause.Wrap(err, "Failed to load '%s'", change.latest)
	}

	if matches := additionalTestsRE.FindAllStringSubmatch(change.commitMessage, -1); len(matches) > 0 {
		log.Println("Change description contains additional test patterns")

		// Change specifies additional tests to try. Load the full test list.
		fullTestLists, err := test.loadTestLists(fullTestListRelPath)
		if err != nil {
			return nil, nil, cause.Wrap(err, "Failed to load '%s'", change.latest)
		}

		// Add any tests in the full list that match the pattern to the list to test.
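		// For example, a commit message containing the line:
		//   Tests: dEQP-VK.spirv_assembly.*
		// adds all tests from the full list matching that glob pattern.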
		for _, match := range matches {
			if len(match) > 1 {
				pattern := match[1]
				log.Printf("Adding custom tests with pattern '%s'\n", pattern)
				filtered := fullTestLists.Filter(func(name string) bool {
					ok, _ := filepath.Match(pattern, name)
					return ok
				})
				testlists = append(testlists, filtered...)
			}
		}
	}

	cachePath := test.resultsCachePath(testlists, d)

	if results, err := deqp.LoadResults(cachePath); err == nil {
		return results, testlists, nil // Use cached results
	}

	// Build the change and test it.
	results := test.buildAndRun(testlists, d)

	// Cache the results for future tests.
	if err := results.Save(cachePath); err != nil {
		log.Printf("Warning: Couldn't save results of test to '%v'\n", cachePath)
	}

	return results, testlists, nil
}

// testParent returns the results for the change's parent changelist over the
// given test lists, using cached results when available.
func (r *regres) testParent(change *changeInfo, testlists testlist.Lists, d deqpBuild) (*deqp.Results, error) {
	// Get the test results for the change's parent changelist.
	test := r.newTest(change.parent)
	defer test.cleanup()

	cachePath := test.resultsCachePath(testlists, d)

	if results, err := deqp.LoadResults(cachePath); err == nil {
		return results, nil // Use cached results
	}

	// Couldn't load cached results. Have to build them.
	if err := test.checkout(); err != nil {
		return nil, cause.Wrap(err, "Failed to checkout '%s'", change.parent)
	}

	// Build the parent change and test it.
	results := test.buildAndRun(testlists, d)

	// Store the results of the parent change to the cache.
	if err := results.Save(cachePath); err != nil {
		log.Printf("Warning: Couldn't save results of test to '%v'\n", cachePath)
	}

	return results, nil
}

// runDaily runs a full deqp run on the HEAD change, posting the results to a
// new or existing gerrit change. If genCov is true, then coverage
// information will be generated for the run, and committed to the
// coverageBranch.
func (r *regres) runDaily(client *gerrit.Client, reactorBackend reactorBackend, genCov bool) error {
	// TODO(b/152192800): Generating coverage data is currently broken.
	genCov = false

	log.Printf("Updating test lists (Backend: %v)\n", reactorBackend)

	if genCov {
		if r.githubUser == "" {
			log.Println("--gh-user not specified and SS_GITHUB_USER not set. Disabling code coverage generation")
			genCov = false
		} else if r.githubPass == "" {
			log.Println("--gh-pass not specified and SS_GITHUB_PASS not set. Disabling code coverage generation")
			genCov = false
		}
	}

	dailyHash := git.Hash{}
	if r.dailyChange == "" {
		headHash, err := git.FetchRefHash(gitDailyBranch, gitURL)
		if err != nil {
			return cause.Wrap(err, "Could not get hash of master HEAD")
		}
		dailyHash = headHash
	} else {
		dailyHash = git.ParseHash(r.dailyChange)
	}

	return r.runDailyTest(dailyHash, reactorBackend, genCov,
		func(test *test, testLists testlist.Lists, results *deqp.Results) error {
			errs := []error{}

			if err := r.postDailyResults(client, test, testLists, results, reactorBackend, dailyHash); err != nil {
				errs = append(errs, err)
			}

			if genCov {
				if err := r.postCoverageResults(results.Coverage, dailyHash); err != nil {
					errs = append(errs, err)
				}
			}

			return cause.Merge(errs...)
		})
}

// runDailyTest performs the full deqp run on the HEAD change, calling
// withResults with the test results.
func (r *regres) runDailyTest(dailyHash git.Hash, reactorBackend reactorBackend, genCov bool, withResults func(*test, testlist.Lists, *deqp.Results) error) error {
	// Get the full test results.
	test := r.newTest(dailyHash).setReactorBackend(reactorBackend)
	defer test.cleanup()

	// Always need to checkout the change.
	if err := test.checkout(); err != nil {
		return cause.Wrap(err, "Failed to checkout '%s'", dailyHash)
	}

	d, err := r.getOrBuildDEQP(test)
	if err != nil {
		return cause.Wrap(err, "Failed to build deqp for '%s'", dailyHash)
	}

	// Load the test lists.
	testLists, err := test.loadTestLists(fullTestListRelPath)
	if err != nil {
		return cause.Wrap(err, "Failed to load full test lists for '%s'", dailyHash)
	}

	if genCov {
		test.coverageEnv = &cov.Env{
			LLVM:     *r.toolchain,
			RootDir:  test.checkoutDir,
			ExePath:  filepath.Join(test.buildDir, "libvk_swiftshader.so"),
			TurboCov: filepath.Join(test.buildDir, "turbo-cov"),
		}
	}

	// Build the change.
	if err := test.build(); err != nil {
		return cause.Wrap(err, "Failed to build '%s'", dailyHash)
	}

	// Run the tests on the change.
	results, err := test.run(testLists, d)
	if err != nil {
		return cause.Wrap(err, "Failed to test '%s'", dailyHash)
	}

	return withResults(test, testLists, results)
}

// copyFileIfDifferent copies src to dst if dst doesn't exist or if there are
// differences between the files.
func copyFileIfDifferent(dst, src string) error {
	srcFileInfo, err := os.Stat(src)
	if err != nil {
		return err
	}
	srcContents, err := os.ReadFile(src)
	if err != nil {
		return err
	}

	dstContents, err := os.ReadFile(dst)
	if err != nil && !errors.Is(err, os.ErrNotExist) {
		return err
	}

	if !bytes.Equal(srcContents, dstContents) {
		if err := os.WriteFile(dst, srcContents, srcFileInfo.Mode()); err != nil {
			return err
		}
	}
	return nil
}

// updateLocalDeqpFiles sets the SHA in deqp.json to the latest dEQP revision,
// then uses getOrBuildDEQP to checkout that revision and copy over its test
// lists.
func (r *regres) updateLocalDeqpFiles(test *test) ([]string, error) {
	out := []string{}
	// Update deqp.json.
	deqpJsonPath := path.Join(test.checkoutDir, deqpConfigRelPath)
	if !util.IsFile(deqpJsonPath) {
		return nil, fmt.Errorf("Failed to locate %s while trying to update the dEQP SHA", deqpConfigRelPath)
	}
	file, err := os.Open(deqpJsonPath)
	if err != nil {
		return nil, cause.Wrap(err, "Couldn't open dEQP config file")
	}
	defer file.Close()

	cfg := struct {
		Remote  string   `json:"remote"`
		Branch  string   `json:"branch"`
		SHA     string   `json:"sha"`
		Patches []string `json:"patches"`
	}{}
	if err := json.NewDecoder(file).Decode(&cfg); err != nil {
		return nil, cause.Wrap(err, "Couldn't parse %s", deqpConfigRelPath)
	}

	hash, err := git.FetchRefHash("HEAD", cfg.Remote)
	if err != nil {
		return nil, cause.Wrap(err, "Failed to fetch dEQP ref")
	}
	cfg.SHA = hash.String()
	log.Printf("New dEQP revision: %s\n", cfg.SHA)

	newFile, err := os.Create(deqpJsonPath)
	if err != nil {
		return nil, cause.Wrap(err, "Failed to open %s for encoding", deqpConfigRelPath)
	}
	defer newFile.Close()
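
	// os.Create truncated deqp.json above; write the updated config back in
	// place with stable indentation to keep the diff minimal.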
	encoder := json.NewEncoder(newFile)
	// Make the encoder emit each field on its own line with space-based indents.
	encoder.SetIndent("", "    ")
	if err := encoder.Encode(&cfg); err != nil {
		return nil, cause.Wrap(err, "Failed to re-encode %s", deqpConfigRelPath)
	}
	out = append(out, deqpJsonPath)

	// Use getOrBuildDEQP as it'll prevent us from copying data from a revision
	// of dEQP that has build errors.
	deqpBuild, err := r.getOrBuildDEQP(test)
	if err != nil {
		return nil, cause.Wrap(err, "Failed to retrieve dEQP build information")
	}

	log.Printf("Copying deqp's vulkan testlist to checkout %s\n", test.commit)
	deqpTestlistDir := path.Join(deqpBuild.path, deqpTestLists)
	swsTestlistDir := path.Join(test.checkoutDir, swsTestLists)

	deqpDefault := path.Join(deqpTestlistDir, "vk-default.txt")
	swsDefault := path.Join(swsTestlistDir, "vk-master.txt")

	if err := copyFileIfDifferent(swsDefault, deqpDefault); err != nil {
		return nil, cause.Wrap(err, "Failed to copy '%s' to '%s'", deqpDefault, swsDefault)
	}

	out = append(out, swsDefault)

	files, err := ioutil.ReadDir(path.Join(deqpTestlistDir, "vk-default"))
	if err != nil {
		return nil, cause.Wrap(err, "Could not read files from %s/vk-default/", deqpTestlistDir)
	}

	for _, f := range files {
		if f.IsDir() {
			continue
		}

		swsFile := path.Join(swsTestlistDir, "vk-default", f.Name())
		deqpFile := path.Join(deqpTestlistDir, "vk-default", f.Name())

		if err := copyFileIfDifferent(swsFile, deqpFile); err != nil {
			return nil, cause.Wrap(err, "Failed to copy '%s' to '%s'", deqpFile, swsFile)
		}
		out = append(out, swsFile)
	}
	return out, nil
}

// postDailyResults posts the results of the daily full deqp run to gerrit as
// a new change, or reuses an old, unsubmitted change.
// This change contains the updated test lists, an updated deqp.json that
// points to the latest dEQP commit, and updated dEQP test files, along with a
// summary of the test results.
func (r *regres) postDailyResults(
	client *gerrit.Client,
	test *test,
	testLists testlist.Lists,
	results *deqp.Results,
	reactorBackend reactorBackend,
	dailyHash git.Hash) error {

	// Write out the test list status files.
	filePaths, err := test.writeTestListsByStatus(testLists, results)
	if err != nil {
		return cause.Wrap(err, "Failed to write test lists by status")
	}

	newPaths, err := r.updateLocalDeqpFiles(test)
	if err != nil {
		return cause.Wrap(err, "Failed to update test lists from dEQP")
	}

	filePaths = append(filePaths, newPaths...)

	// Stage all the updated test files.
	for _, path := range filePaths {
		log.Println("Staging", path)
		if err := git.Add(test.checkoutDir, path); err != nil {
			return err
		}
	}

	log.Println("Checking for existing test list")
	existingChange, err := r.findTestListChange(client)
	if err != nil {
		return err
	}

	commitMsg := strings.Builder{}
	commitMsg.WriteString(consts.TestListUpdateCommitSubjectPrefix + dailyHash.String()[:8])
	commitMsg.WriteString("\n\nReactor backend: " + string(reactorBackend))
	if existingChange != nil {
		// Reuse the gerrit change ID if there's already a change up for review.
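		// Gerrit uses the Change-Id footer to attach the commit to the
		// existing review as a new patchset rather than opening a new change.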
		commitMsg.WriteString("\n\n")
		commitMsg.WriteString("Change-Id: " + existingChange.ChangeID + "\n")
	}

	if err := git.Commit(test.checkoutDir, commitMsg.String(), git.CommitFlags{
		Name:  "SwiftShader Regression Bot",
		Email: r.gerritEmail,
	}); err != nil {
		return cause.Wrap(err, "Failed to commit test results")
	}

	if r.dryRun {
		log.Printf("DRY RUN: post results for review")
	} else {
		log.Println("Pushing test results for review")
		if err := git.Push(test.checkoutDir, gitURL, "HEAD", "refs/for/master", git.PushFlags{
			Username: r.gerritUser,
			Password: r.gerritPass,
		}); err != nil {
			return cause.Wrap(err, "Failed to push test results for review")
		}
		log.Println("Test results posted for review")
	}

	// We've just pushed a new commit. Reset back to the parent commit
	// (dailyHash), so that we can run runDaily again for another backend,
	// and have it update the commit with the same change-id.
	if err := git.CheckoutCommit(test.checkoutDir, dailyHash); err != nil {
		return cause.Wrap(err, "Failed to checkout parent commit")
	}
	log.Println("Checked out parent commit")

	change, err := r.findTestListChange(client)
	if err != nil {
		return err
	}

	if err := r.postMostCommonFailures(client, change, results); err != nil {
		return err
	}

	return nil
}

// postCoverageResults commits the coverage tree for the given revision to the
// coverage repository's coverageBranch.
func (r *regres) postCoverageResults(cov *cov.Tree, revision git.Hash) error {
	log.Printf("Committing coverage for %v\n", revision.String())

	url := coverageURL
	url = strings.ReplaceAll(url, "$USERNAME", r.githubUser)
	url = strings.ReplaceAll(url, "$PASSWORD", r.githubPass)

	dir := filepath.Join(r.cacheRoot, "coverage")
	defer os.RemoveAll(dir)
	if err := git.CheckoutRemoteBranch(dir, url, coverageBranch); err != nil {
		return cause.Wrap(err, "Failed to checkout gh-pages branch")
	}

	filePath := filepath.Join(dir, "coverage.dat")
	file, err := os.Create(filePath)
	if err != nil {
		return cause.Wrap(err, "Failed to create file '%s'", filePath)
	}
	defer file.Close()

	if err := cov.Encode(revision.String(), file); err != nil {
		return cause.Wrap(err, "Failed to encode coverage")
	}
	file.Close()

	if err := git.Add(dir, filePath); err != nil {
		return cause.Wrap(err, "Failed to git add '%s'", filePath)
	}

	shortHash := revision.String()[:8]

	err = git.Commit(dir, "Update coverage data @ "+shortHash, git.CommitFlags{
		Name:  "SwiftShader Regression Bot",
		Email: r.gerritEmail,
	})
	if err != nil {
		return cause.Wrap(err, "Failed to git commit")
	}

	if !r.dryRun {
		err = git.Push(dir, url, coverageBranch, coverageBranch, git.PushFlags{})
		if err != nil {
			return cause.Wrap(err, "Failed to 'git push'")
		}
		log.Printf("Coverage for %v pushed to Github\n", shortHash)
	}

	return nil
}

// postMostCommonFailures posts the most common failure cases as a review
// comment on the given change.
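// At most the top 25 distinct failure signatures are reported (see the limit
// constant below).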
func (r *regres) postMostCommonFailures(client *gerrit.Client, change *gerrit.ChangeInfo, results *deqp.Results) error {
	const limit = 25

	failures := commonFailures(results)
	if len(failures) > limit {
		failures = failures[:limit]
	}
	sb := strings.Builder{}
	sb.WriteString(fmt.Sprintf("Top %v most common failures:\n", len(failures)))
	for _, f := range failures {
		lines := strings.Split(f.error, "\n")
		if len(lines) == 1 {
			line := lines[0]
			if line != "" {
				sb.WriteString(fmt.Sprintf("  • %d occurrences: %v: %v\n", f.count, f.status, line))
			} else {
				sb.WriteString(fmt.Sprintf("  • %d occurrences: %v\n", f.count, f.status))
			}
		} else {
			sb.WriteString(fmt.Sprintf("  • %d occurrences: %v:\n", f.count, f.status))
			for _, l := range lines {
				sb.WriteString("    > ")
				sb.WriteString(l)
				sb.WriteString("\n")
			}
		}
		sb.WriteString(fmt.Sprintf("    Example test: %v\n", f.exampleTest))
	}
	msg := sb.String()

	if r.dryRun {
		log.Printf("DRY RUN: add most common failures: %v\n", msg)
	} else {
		log.Printf("Posting most common failures to '%v'\n", change.Number)
		_, _, err := client.Changes.SetReview(fmt.Sprintf("%v", change.Number), change.CurrentRevision, &gerrit.ReviewInput{
			Message: msg,
			Tag:     "autogenerated:regress",
		})
		if err != nil {
			return cause.Wrap(err, "Failed to post comments on change '%v'", change.Number)
		}
	}
	return nil
}

// findTestListChange returns the open gerrit change owned by gerritEmail that
// holds the test list updates, or nil if there is none.
func (r *regres) findTestListChange(client *gerrit.Client) (*gerrit.ChangeInfo, error) {
	log.Println("Checking for existing test list change")
	changes, _, err := client.Changes.QueryChanges(&gerrit.QueryChangeOptions{
		QueryOptions: gerrit.QueryOptions{
			Query: []string{fmt.Sprintf(`status:open+owner:"%v"`, r.gerritEmail)},
			Limit: 1,
		},
		ChangeOptions: gerrit.ChangeOptions{
			AdditionalFields: []string{"CURRENT_REVISION"},
		},
	})
	if err != nil {
		return nil, cause.Wrap(err, "Failed to check for an existing test list")
	}
	if len(*changes) > 0 {
		// TODO: This currently assumes that the only changes from
		// gerritEmail are test list updates. This may not always be true.
		return &(*changes)[0], nil
	}
	return nil, nil
}

// changeInfo holds the important information about a single, open change in
// gerrit.
type changeInfo struct {
	pending       bool      // Is this change awaiting a test for the latest patchset?
	priority      int       // Calculated priority based on Gerrit labels.
	latest        git.Hash  // Git hash of the latest patchset in the change.
	parent        git.Hash  // Git hash of the changelist this change is based on.
	lastUpdated   time.Time // Time the change was last fetched.
	number        int       // The number gerrit assigned to the change.
	commitMessage string    // The commit message of the latest patchset.
}

// queryChanges updates the changes map by querying gerrit for the latest open
// changes.
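// Newly seen changes are added to the map, and changes no longer returned by
// the query are removed from it.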
func queryChanges(client *gerrit.Client, changes map[int]*changeInfo) error {
	log.Println("Checking for latest changes")
	results, _, err := client.Changes.QueryChanges(&gerrit.QueryChangeOptions{
		QueryOptions: gerrit.QueryOptions{
			Query: []string{"status:open+-age:3d"},
			Limit: 100,
		},
	})
	if err != nil {
		return cause.Wrap(err, "Failed to get list of changes")
	}

	ids := map[int]bool{}
	for _, r := range *results {
		ids[r.Number] = true
	}

	// Add new changes.
	for number := range ids {
		if _, found := changes[number]; !found {
			log.Printf("Tracking new change '%v'\n", number)
			changes[number] = &changeInfo{number: number}
		}
	}

	// Remove old changes.
	for number := range changes {
		if _, found := ids[number]; !found {
			log.Printf("Untracking change '%v'\n", number)
			delete(changes, number)
		}
	}

	return nil
}

// update queries gerrit for information about the given change.
func (c *changeInfo) update(client *gerrit.Client) error {
	change, _, err := client.Changes.GetChange(fmt.Sprintf("%v", c.number), &gerrit.ChangeOptions{
		AdditionalFields: []string{"CURRENT_REVISION", "CURRENT_COMMIT", "MESSAGES", "LABELS", "DETAILED_ACCOUNTS"},
	})
	if err != nil {
		return cause.Wrap(err, "Getting info for change '%v'", c.number)
	}

	current, ok := change.Revisions[change.CurrentRevision]
	if !ok {
		return fmt.Errorf("Couldn't find current revision for change '%v'", c.number)
	}

	if len(current.Commit.Parents) == 0 {
		return fmt.Errorf("Current commit for change '%v' has no parents(?)", c.number)
	}

	kokoroPresubmit := change.Labels["Kokoro-Presubmit"].Approved.AccountID != 0
	codeReviewScore := change.Labels["Code-Review"].Value
	codeReviewApproved := change.Labels["Code-Review"].Approved.AccountID != 0
	presubmitReady := change.Labels["Presubmit-Ready"].Approved.AccountID != 0
	verifiedScore := change.Labels["Verified"].Value

	c.priority = 0
	if presubmitReady {
		c.priority += 10
	}
	c.priority += codeReviewScore
	if codeReviewApproved {
		c.priority += 2
	}
	if kokoroPresubmit {
		c.priority++
	}

	// Is the change from a Googler or reviewed by a Googler?
	canTest := strings.HasSuffix(current.Commit.Committer.Email, "@google.com") ||
		strings.HasSuffix(change.Labels["Code-Review"].Approved.Email, "@google.com") ||
		strings.HasSuffix(change.Labels["Code-Review"].Recommended.Email, "@google.com") ||
		strings.HasSuffix(change.Labels["Presubmit-Ready"].Approved.Email, "@google.com")

	// Don't test if the change has negative scores.
	if canTest {
		if codeReviewScore < 0 || verifiedScore < 0 {
			canTest = false
		}
	}

	// Has the latest patchset already been tested?
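	// A previous regres report is detected by the reportHeader marker that
	// run() prefixes to every review message it posts.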
	if canTest {
		for _, msg := range change.Messages {
			if msg.RevisionNumber == current.Number &&
				strings.Contains(msg.Message, reportHeader) {
				canTest = false
				break
			}
		}
	}

	c.pending = canTest
	c.latest = git.ParseHash(change.CurrentRevision)
	c.parent = git.ParseHash(current.Commit.Parents[0].Commit)
	c.commitMessage = current.Commit.Message

	return nil
}

// newTest returns a new test for the given commit, rooted in the regres
// cache directory.
func (r *regres) newTest(commit git.Hash) *test {
	checkoutDir := filepath.Join(r.cacheRoot, "checkout", commit.String())
	resDir := filepath.Join(r.cacheRoot, "res", commit.String())
	return &test{
		r:              r,
		commit:         commit,
		checkoutDir:    checkoutDir,
		resDir:         resDir,
		buildDir:       filepath.Join(checkoutDir, "build"),
		reactorBackend: backendSubzero,
	}
}

// setReactorBackend sets the Reactor backend used for the build, and returns
// the test to allow chaining.
func (t *test) setReactorBackend(reactorBackend reactorBackend) *test {
	t.reactorBackend = reactorBackend
	return t
}

// reactorBackend is the name of a Reactor JIT backend used to build
// SwiftShader.
type reactorBackend string

const (
	backendLLVM    reactorBackend = "LLVM"
	backendSubzero reactorBackend = "Subzero"
)

type test struct {
	r              *regres
	commit         git.Hash       // hash of the commit to test
	checkoutDir    string         // directory for the SwiftShader checkout
	resDir         string         // directory for the test results
	buildDir       string         // directory for SwiftShader build
	toolchain      llvm.Toolchain // the toolchain used for building
	reactorBackend reactorBackend // backend for SwiftShader build
	coverageEnv    *cov.Env       // coverage generation environment (optional)
}

// cleanup removes any temporary files used by the test.
func (t *test) cleanup() {
	if t.checkoutDir != "" && !t.r.keepCheckouts {
		os.RemoveAll(t.checkoutDir)
	}
}

// checkout clones the test's source commit into t.checkoutDir.
func (t *test) checkout() error {
	if util.IsDir(t.checkoutDir) && t.r.keepCheckouts {
		log.Printf("Reusing source cache for commit '%s'\n", t.commit)
		return nil
	}
	log.Printf("Checking out '%s'\n", t.commit)
	os.RemoveAll(t.checkoutDir)
	if err := git.CheckoutRemoteCommit(t.checkoutDir, gitURL, t.commit); err != nil {
		return cause.Wrap(err, "Checking out commit '%s'", t.commit)
	}
	log.Printf("Checked out commit '%s'\n", t.commit)
	return nil
}

// buildAndRun calls t.build() followed by t.run(). Errors are logged and
// reported in the returned deqp.Results.Error field.
func (t *test) buildAndRun(testLists testlist.Lists, d deqpBuild) *deqp.Results {
	// Build the change.
	if err := t.build(); err != nil {
		msg := fmt.Sprintf("Failed to build '%s'", t.commit)
		log.Println(cause.Wrap(err, msg))
		return &deqp.Results{Error: msg}
	}

	// Run the tests on the change.
	results, err := t.run(testLists, d)
	if err != nil {
		msg := fmt.Sprintf("Failed to test change '%s'", t.commit)
		log.Println(cause.Wrap(err, msg))
		return &deqp.Results{Error: msg}
	}

	return results
}

// build builds the SwiftShader source into t.buildDir.
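// Extra verification (always-on DCHECKs, Reactor LLVM IR validation) is
// enabled so that the test run catches more defects.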
func (t *test) build() error {
	log.Printf("Building '%s'\n", t.commit)

	if err := os.MkdirAll(t.buildDir, 0777); err != nil {
		return cause.Wrap(err, "Failed to create build directory")
	}

	args := []string{
		`..`,
		`-DCMAKE_BUILD_TYPE=Release`,
		`-DSWIFTSHADER_DCHECK_ALWAYS_ON=1`,
		`-DREACTOR_VERIFY_LLVM_IR=1`,
		`-DREACTOR_BACKEND=` + string(t.reactorBackend),
		`-DSWIFTSHADER_LLVM_VERSION=10.0`,
		`-DSWIFTSHADER_WARNINGS_AS_ERRORS=0`,
	}

	if t.coverageEnv != nil {
		args = append(args, "-DSWIFTSHADER_EMIT_COVERAGE=1")
	}

	if err := shell.Env(buildTimeout, t.r.cmake, t.buildDir, t.r.toolchainEnv(), args...); err != nil {
		return err
	}

	if err := shell.Shell(buildTimeout, t.r.make, t.buildDir, fmt.Sprintf("-j%d", runtime.NumCPU())); err != nil {
		return err
	}

	return nil
}

// run runs the given test lists against the built SwiftShader using the given
// dEQP build, and returns the results.
func (t *test) run(testLists testlist.Lists, d deqpBuild) (*deqp.Results, error) {
	log.Printf("Running tests for '%s'\n", t.commit)

	swiftshaderICDSo := filepath.Join(t.buildDir, "libvk_swiftshader.so")
	if !util.IsFile(swiftshaderICDSo) {
		return nil, fmt.Errorf("Couldn't find '%s'", swiftshaderICDSo)
	}

	swiftshaderICDJSON := filepath.Join(t.buildDir, "Linux", "vk_swiftshader_icd.json")
	if !util.IsFile(swiftshaderICDJSON) {
		return nil, fmt.Errorf("Couldn't find '%s'", swiftshaderICDJSON)
	}

	if *limit != 0 {
		log.Printf("Limiting tests to %d\n", *limit)
		testLists = append(testlist.Lists{}, testLists...)
		for i := range testLists {
			testLists[i] = testLists[i].Limit(*limit)
		}
	}

	// Directory for per-test small transient files, such as log files,
	// coverage output, etc.
	// TODO(bclayton): consider using tmpfs here.
	tempDir := filepath.Join(t.buildDir, "temp")
	os.MkdirAll(tempDir, 0777)

	// Path to SwiftShader's libvulkan.so.1, which can be loaded directly by
	// dEQP without use of the Vulkan Loader.
	swiftshaderLibvulkanPath := filepath.Join(t.buildDir, "Linux")

	config := deqp.Config{
		ExeEgl:    filepath.Join(d.path, "build", "modules", "egl", "deqp-egl"),
		ExeGles2:  filepath.Join(d.path, "build", "modules", "gles2", "deqp-gles2"),
		ExeGles3:  filepath.Join(d.path, "build", "modules", "gles3", "deqp-gles3"),
		ExeVulkan: filepath.Join(d.path, "build", "external", "vulkancts", "modules", "vulkan", "deqp-vk"),
		TempDir:   tempDir,
		TestLists: testLists,
		Env: []string{
			"LD_LIBRARY_PATH=" + os.Getenv("LD_LIBRARY_PATH") + ":" + swiftshaderLibvulkanPath,
			"VK_ICD_FILENAMES=" + swiftshaderICDJSON,
			"DISPLAY=" + os.Getenv("DISPLAY"),
			"LIBC_FATAL_STDERR_=1", // Put libc explosions into logs.
		},
		LogReplacements: map[string]string{
			t.checkoutDir: "<SwiftShader>",
		},
		NumParallelTests: numParallelTests,
		TestTimeout:      testTimeout,
		CoverageEnv:      t.coverageEnv,
	}

	return config.Run()
}

// writeTestListsByStatus writes out one test list file per status for each
// test list, and returns the paths of the files written.
func (t *test) writeTestListsByStatus(testLists testlist.Lists, results *deqp.Results) ([]string, error) {
	out := []string{}

	for _, list := range testLists {
		files := map[testlist.Status]*os.File{}
		for _, status := range testlist.Statuses {
			path := testlist.FilePathWithStatus(filepath.Join(t.checkoutDir, list.File), status)
			dir := filepath.Dir(path)
			os.MkdirAll(dir, 0777)
			f, err := os.Create(path)
			if err != nil {
				return nil, cause.Wrap(err, "Couldn't create file '%v'", path)
			}
			defer f.Close()
			files[status] = f

			out = append(out, path)
		}

		for _, testName := range list.Tests {
			if r, found := results.Tests[testName]; found {
				fmt.Fprintln(files[r.Status], testName)
			}
		}
	}

	return out, nil
}

// resultsCachePath returns the path to the cached results file for the given
// test, test lists and deqpBuild.
func (t *test) resultsCachePath(testLists testlist.Lists, d deqpBuild) string {
	return filepath.Join(t.resDir, testLists.Hash(), d.hash)
}

// testStatusAndError is a test status and error message pair, used to group
// test failures into common signatures.
type testStatusAndError struct {
	status testlist.Status
	error  string
}

// commonFailure is a failure signature along with its occurrence count and an
// example test that exhibits it.
type commonFailure struct {
	count int
	testStatusAndError
	exampleTest string
}

// commonFailures returns the failure signatures found in results, ordered by
// the number of tests they affect, most common first.
func commonFailures(results *deqp.Results) []commonFailure {
	failures := map[testStatusAndError]int{}
	examples := map[testStatusAndError]string{}
	for name, test := range results.Tests {
		if !test.Status.Failing() {
			continue
		}
		key := testStatusAndError{test.Status, test.Err}
		if count, ok := failures[key]; ok {
			failures[key] = count + 1
		} else {
			failures[key] = 1
			examples[key] = name
		}
	}
	out := make([]commonFailure, 0, len(failures))
	for failure, count := range failures {
		out = append(out, commonFailure{count, failure, examples[failure]})
	}
	sort.Slice(out, func(i, j int) bool { return out[i].count > out[j].count })
	return out
}

// compare returns a string describing all differences between two
// deqp.Results, and a boolean indicating that there are differences that are
// considered important.
// This string is used as the report message posted to the gerrit code review.
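// 'alert' reports differences considered important (newly broken, still
// failing, or altered tests); run() uses it to notify reviewers as well as
// the change owner.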
func compare(old, new *deqp.Results) (msg string, alert bool) {
	if old.Error != "" {
		return old.Error, false
	}
	if new.Error != "" {
		return new.Error, true
	}

	oldStatusCounts, newStatusCounts := map[testlist.Status]int{}, map[testlist.Status]int{}
	totalTests := 0

	broken, fixed, failing, removed, changed := []string{}, []string{}, []string{}, []string{}, []string{}

	for test, new := range new.Tests {
		old, found := old.Tests[test]
		if !found {
			log.Printf("Test result for '%s' not found on old change\n", test)
			continue
		}
		switch {
		case !old.Status.Failing() && new.Status.Failing():
			broken = append(broken, test)
			alert = true
		case !old.Status.Passing() && new.Status.Passing():
			fixed = append(fixed, test)
		case old.Status != new.Status:
			changed = append(changed, test)
			alert = true
		case old.Status.Failing() && new.Status.Failing():
			failing = append(failing, test) // Still broken
			alert = true
		}
		totalTests++
		oldStatusCounts[old.Status]++
		newStatusCounts[new.Status]++
	}

	for test := range old.Tests {
		if _, found := new.Tests[test]; !found {
			removed = append(removed, test)
		}
	}

	sb := strings.Builder{}

	// list prints the list l to sb, truncating after a limit.
	list := func(l []string) {
		const max = 10
		for i, s := range l {
			sb.WriteString("  ")
			if i == max {
				sb.WriteString(fmt.Sprintf("> %d more\n", len(l)-i))
				break
			}
			sb.WriteString(fmt.Sprintf("> %s", s))
			if n, ok := new.Tests[s]; ok {
				if o, ok := old.Tests[s]; ok && n != o {
					sb.WriteString(fmt.Sprintf(" - [%s -> %s]", o.Status, n.Status))
				} else {
					sb.WriteString(fmt.Sprintf(" - [%s]", n.Status))
				}
				sb.WriteString("\n")
				for _, line := range strings.Split(n.Err, "\n") {
					if line != "" {
						sb.WriteString(fmt.Sprintf("    %v\n", line))
					}
				}
			} else {
				sb.WriteString("\n")
			}
		}
	}

	if n := len(broken); n > 0 {
		sort.Strings(broken)
		sb.WriteString(fmt.Sprintf("\n--- This change breaks %d tests: ---\n", n))
		list(broken)
	}
	if n := len(fixed); n > 0 {
		sort.Strings(fixed)
		sb.WriteString(fmt.Sprintf("\n--- This change fixes %d tests: ---\n", n))
		list(fixed)
	}
	if n := len(removed); n > 0 {
		sort.Strings(removed)
		sb.WriteString(fmt.Sprintf("\n--- This change removes %d tests: ---\n", n))
		list(removed)
	}
	if n := len(changed); n > 0 {
		sort.Strings(changed)
		sb.WriteString(fmt.Sprintf("\n--- This change alters %d tests: ---\n", n))
		list(changed)
	}

	if len(broken) == 0 && len(fixed) == 0 && len(removed) == 0 && len(changed) == 0 {
		sb.WriteString("\n--- No change in test results ---\n")
	}

	sb.WriteString(fmt.Sprintf("          Total tests: %d\n", totalTests))
	for _, s := range []struct {
		label  string
		status testlist.Status
	}{
		{"                 Pass", testlist.Pass},
		{"                 Fail", testlist.Fail},
		{"              Timeout", testlist.Timeout},
		{"      UNIMPLEMENTED()", testlist.Unimplemented},
		{"        UNSUPPORTED()", testlist.Unsupported},
		{"        UNREACHABLE()", testlist.Unreachable},
		{"             ASSERT()", testlist.Assert},
		{"              ABORT()", testlist.Abort},
		{"                Crash", testlist.Crash},
		{"        Not Supported", testlist.NotSupported},
		{"Compatibility Warning", testlist.CompatibilityWarning},
		{"      Quality Warning", testlist.QualityWarning},
	} {
		old, new := oldStatusCounts[s.status], newStatusCounts[s.status]
		if old == 0 && new == 0 {
			continue
		}
		change := util.Percent64(int64(new-old), int64(old))
		switch {
		case old == new:
			sb.WriteString(fmt.Sprintf("%s: %v\n", s.label, new))
		case change == 0:
			sb.WriteString(fmt.Sprintf("%s: %v -> %v (%+d)\n", s.label, old, new, new-old))
		default:
			sb.WriteString(fmt.Sprintf("%s: %v -> %v (%+d %+d%%)\n", s.label, old, new, new-old, change))
		}
	}

	if old, new := old.Duration, new.Duration; old != 0 && new != 0 {
		label := "           Time taken"
		change := util.Percent64(int64(new-old), int64(old))
		switch {
		case old == new:
			sb.WriteString(fmt.Sprintf("%s: %v\n", label, new))
		case change == 0:
			sb.WriteString(fmt.Sprintf("%s: %v -> %v\n", label, old, new))
		default:
			sb.WriteString(fmt.Sprintf("%s: %v -> %v (%+d%%)\n", label, old, new, change))
		}
	}

	type timingDiff struct {
		old      time.Duration
		new      time.Duration
		relDelta float64
		name     string
	}

	timingDiffs := []timingDiff{}
	for name, new := range new.Tests {
		if old, ok := old.Tests[name]; ok {
			old, new := old.TimeTaken, new.TimeTaken
			delta := new.Seconds() - old.Seconds()
			absDelta := math.Abs(delta)
			relDelta := delta / old.Seconds()
			if absDelta > 2.0 && math.Abs(relDelta) > 0.05 { // If the change is > ±2s and > ±5% of the old time...
				timingDiffs = append(timingDiffs, timingDiff{
					old:      old,
					new:      new,
					name:     name,
					relDelta: relDelta,
				})
			}
		}
	}
	if len(timingDiffs) > 0 {
		sb.WriteString("\n--- Test duration changes ---\n")
		const limit = 10
		if len(timingDiffs) > limit {
			sort.Slice(timingDiffs, func(i, j int) bool { return math.Abs(timingDiffs[i].relDelta) > math.Abs(timingDiffs[j].relDelta) })
			timingDiffs = timingDiffs[:limit]
		}
		sort.Slice(timingDiffs, func(i, j int) bool { return timingDiffs[i].relDelta < timingDiffs[j].relDelta })
		for _, d := range timingDiffs {
			percent := util.Percent64(int64(d.new-d.old), int64(d.old))
			sb.WriteString(fmt.Sprintf("  > %v: %v -> %v (%+d%%)\n", d.name, d.old, d.new, percent))
		}
	}

	return sb.String(), alert
}

// loadTestLists loads the full test lists from the json file.
// The file is first searched at {t.checkoutDir}/{relPath}.
// If it cannot be found there, the file is searched at the fallback path
// {CWD}/{relPath}.
// This allows CLs to alter the list of tests to be run, as well as providing
// a default set.
func (t *test) loadTestLists(relPath string) (testlist.Lists, error) {
	// Search for the test list json file in the checked out source directory.
	if path := filepath.Join(t.checkoutDir, relPath); util.IsFile(path) {
		log.Printf("Loading test list '%v' from commit\n", relPath)
		return testlist.Load(t.checkoutDir, path)
	}

	// Not found there. Search locally.
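	// (This is the fallback "default set" case: the checked-out commit
	// doesn't carry the file, so use the copy in the regres working
	// directory.)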
	wd, err := os.Getwd()
	if err != nil {
		return testlist.Lists{}, cause.Wrap(err, "Couldn't get current working directory")
	}
	if path := filepath.Join(wd, relPath); util.IsFile(path) {
		log.Printf("Loading test list '%v' from regres\n", relPath)
		return testlist.Load(wd, relPath)
	}

	return nil, errors.New("Couldn't find a test list file")
}

// date is a calendar day, used to decide when the daily pass should run.
type date struct {
	year  int
	month time.Month
	day   int
}

// toDate returns the date of the given time.
func toDate(t time.Time) date {
	d := date{}
	d.year, d.month, d.day = t.Date()
	return d
}