• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1// Copyright 2019 The SwiftShader Authors. All Rights Reserved.
2//
3// Licensed under the Apache License, Version 2.0 (the "License");
4// you may not use this file except in compliance with the License.
5// You may obtain a copy of the License at
6//
7//    http://www.apache.org/licenses/LICENSE-2.0
8//
9// Unless required by applicable law or agreed to in writing, software
10// distributed under the License is distributed on an "AS IS" BASIS,
11// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12// See the License for the specific language governing permissions and
13// limitations under the License.
14
15// regres is a tool that detects test regressions with SwiftShader changes.
16//
17// Regres monitors changes that have been put up for review with Gerrit.
18// Once a new patchset has been found, regres will checkout, build and test the
19// change against the parent changelist. Any differences in results are reported
20// as a review comment on the change.
21//
22// Once a day regres will also test another, larger set of tests, and post the
23// full test results as a Gerrit changelist. The CI test lists can be based from
24// this daily test list, so testing can be limited to tests that were known to
25// pass.
26package main
27
28import (
29	"bytes"
30	"crypto/sha1"
31	"encoding/hex"
32	"encoding/json"
33	"errors"
34	"flag"
35	"fmt"
36	"io/fs"
37	"io/ioutil"
38	"log"
39	"math"
40	"os"
41	"os/exec"
42	"path"
43	"path/filepath"
44	"regexp"
45	"runtime"
46	"sort"
47	"strings"
48	"time"
49
50	"swiftshader.googlesource.com/SwiftShader/tests/regres/consts"
51	"swiftshader.googlesource.com/SwiftShader/tests/regres/cov"
52	"swiftshader.googlesource.com/SwiftShader/tests/regres/deqp"
53	"swiftshader.googlesource.com/SwiftShader/tests/regres/git"
54	"swiftshader.googlesource.com/SwiftShader/tests/regres/llvm"
55	"swiftshader.googlesource.com/SwiftShader/tests/regres/shell"
56	"swiftshader.googlesource.com/SwiftShader/tests/regres/testlist"
57	"swiftshader.googlesource.com/SwiftShader/tests/regres/util"
58
59	gerrit "github.com/andygrunwald/go-gerrit"
60)
61
// Remote locations, repository-relative file paths, and timing parameters
// used by regres.
const (
	gitURL                = "https://swiftshader.googlesource.com/SwiftShader"
	gitDailyBranch        = "HEAD" // ref tested by the daily run when --dailychange is not given
	gerritURL             = "https://swiftshader-review.googlesource.com/"
	coverageURL           = "https://$USERNAME:$PASSWORD@github.com/swiftshader-regres/swiftshader-coverage.git" // NOTE(review): $USERNAME/$PASSWORD presumably substituted before use — not visible in this chunk
	coverageBranch        = "gh-pages"
	coveragePath          = "coverage/coverage.zip"
	reportHeader          = "Regres report:" // message prefix used to detect patchsets that already have a result
	changeUpdateFrequency = time.Minute * 5  // how often each tracked change is refreshed from gerrit
	changeQueryFrequency  = time.Minute * 5  // how often gerrit is queried for new changes
	testTimeout           = time.Minute * 2  // timeout for a single test
	buildTimeout          = time.Minute * 10 // timeout for a build
	fullTestListRelPath   = "tests/regres/full-tests.json"
	ciTestListRelPath     = "tests/regres/ci-tests.json"
	deqpConfigRelPath     = "tests/regres/deqp.json"
	swsTestLists          = "tests/regres/testlists"           // SwiftShader test list dir, relative to a SwiftShader checkout
	deqpTestLists         = "external/vulkancts/mustpass/main" // mustpass dir, relative to a dEQP checkout
)
80
var (
	numParallelTests = runtime.NumCPU()        // defaults to the host CPU count
	llvmVersion      = llvm.Version{Major: 10} // LLVM toolchain version used to build SwiftShader

	// Command-line flags. The '$NAME' defaults for the credential flags are
	// expanded with os.ExpandEnv in main(), so the environment variables act
	// as fallbacks when the flags are not supplied.
	cacheDir        = flag.String("cache", "cache", "path to the output cache directory")
	gerritEmail     = flag.String("email", "$SS_REGRES_EMAIL", "gerrit email address for posting regres results")
	gerritUser      = flag.String("user", "$SS_REGRES_USER", "gerrit username for posting regres results")
	gerritPass      = flag.String("pass", "$SS_REGRES_PASS", "gerrit password for posting regres results")
	githubUser      = flag.String("gh-user", "$SS_GITHUB_USER", "github user for posting coverage results")
	githubPass      = flag.String("gh-pass", "$SS_GITHUB_PASS", "github password for posting coverage results")
	keepCheckouts   = flag.Bool("keep", false, "don't delete checkout directories after use")
	dryRun          = flag.Bool("dry", false, "don't post regres reports to gerrit")
	maxTestsPerProc = flag.Int("max-tests-per-proc", 100, "maximum number of tests running in a single process")
	maxProcMemory   = flag.Uint64("max-proc-mem", shell.MaxProcMemory, "maximum virtual memory per child process")
	dailyNow        = flag.Bool("dailynow", false, "Start by running the daily pass")
	dailyOnly       = flag.Bool("dailyonly", false, "Run only the daily pass")
	dailyChange     = flag.String("dailychange", "", "Change hash to use for daily pass, HEAD if not provided")
	priority        = flag.Int("priority", 0, "Prioritize a single change with the given number")
	limit           = flag.Int("limit", 0, "only run a maximum of this number of tests")
)
101
102func main() {
103	flag.ErrHelp = errors.New("regres is a tool to detect regressions between versions of SwiftShader")
104	flag.Parse()
105
106	shell.MaxProcMemory = *maxProcMemory
107
108	r := regres{
109		cacheRoot:     *cacheDir,
110		gerritEmail:   os.ExpandEnv(*gerritEmail),
111		gerritUser:    os.ExpandEnv(*gerritUser),
112		gerritPass:    os.ExpandEnv(*gerritPass),
113		githubUser:    os.ExpandEnv(*githubUser),
114		githubPass:    os.ExpandEnv(*githubPass),
115		keepCheckouts: *keepCheckouts,
116		dryRun:        *dryRun,
117		dailyNow:      *dailyNow,
118		dailyOnly:     *dailyOnly,
119		dailyChange:   *dailyChange,
120		priority:      *priority,
121	}
122
123	if err := r.run(); err != nil {
124		fmt.Fprintln(os.Stderr, err)
125		os.Exit(-1)
126	}
127}
128
// regres holds the resolved tool paths, credentials, and configuration for a
// single run of the regres tool. It is populated from command-line flags in
// main() and from resolveExes()/resolveDirs()/getToolchain() in run().
type regres struct {
	cmake         string          // path to cmake executable
	make          string          // path to make executable
	python        string          // path to python executable
	tar           string          // path to tar executable
	cacheRoot     string          // path to the regres cache directory
	toolchain     *llvm.Toolchain // the LLVM toolchain used to build SwiftShader
	gerritEmail   string          // gerrit email address used for posting results
	gerritUser    string          // gerrit username used for posting results
	gerritPass    string          // gerrit password used for posting results
	githubUser    string          // github username used for posting results
	githubPass    string          // github password used for posting results
	keepCheckouts bool            // don't delete source & build checkouts after testing
	dryRun        bool            // don't post any reviews
	maxProcMemory uint64          // max virtual memory for child processes. NOTE(review): not set by main(), which writes shell.MaxProcMemory directly — confirm this field is still used
	dailyNow      bool            // start with a daily run
	dailyOnly     bool            // run only the daily run
	dailyChange   string          // Change hash to use for daily pass, HEAD if not provided
	priority      int             // Prioritize a single change with the given number
}
149
150// getToolchain returns the LLVM toolchain, possibly downloading and
151// decompressing it if it wasn't found in the cache directory.
152func getToolchain(tarExe, cacheRoot string) (*llvm.Toolchain, error) {
153	path := filepath.Join(cacheRoot, "llvm")
154
155	if toolchain := llvm.Search(path).Find(llvmVersion); toolchain != nil {
156		return toolchain, nil
157	}
158
159	// LLVM toolchain may have been updated, remove the directory if it exists.
160	os.RemoveAll(path)
161
162	log.Printf("Downloading LLVM %v toolchain...\n", llvmVersion)
163	tar, err := llvmVersion.Download()
164	if err != nil {
165		return nil, fmt.Errorf("failed to download LLVM %v: %w", llvmVersion, err)
166	}
167
168	tarFile := filepath.Join(cacheRoot, "llvm.tar.xz")
169	if err := ioutil.WriteFile(tarFile, tar, 0666); err != nil {
170		return nil, fmt.Errorf("failed to write '%v': %w", tarFile, err)
171	}
172	defer os.Remove(tarFile)
173
174	log.Printf("Decompressing LLVM %v toolchain...\n", llvmVersion)
175	target := filepath.Join(cacheRoot, "llvm-tmp")
176	os.MkdirAll(target, 0755)
177	defer os.RemoveAll(target)
178	if err := exec.Command(tarExe, "-xf", tarFile, "-C", target).Run(); err != nil {
179		return nil, fmt.Errorf("failed to decompress LLVM tar download: %w", err)
180	}
181
182	// The tar, once decompressed, holds a single root directory with a name
183	// starting with 'clang+llvm'. Move this to path.
184	files, err := filepath.Glob(filepath.Join(target, "*"))
185	if err != nil {
186		return nil, fmt.Errorf("failed to glob decompressed files: %w", err)
187	}
188	if len(files) != 1 || !util.IsDir(files[0]) {
189		return nil, fmt.Errorf("Unexpected decompressed files: %+v", files)
190	}
191	if err := os.Rename(files[0], path); err != nil {
192		return nil, fmt.Errorf("failed to move %v to %v: %w", files[0], path, err)
193	}
194
195	// We should now have everything in the right place.
196	toolchain := llvm.Search(path).Find(llvmVersion)
197	if toolchain == nil {
198		return nil, fmt.Errorf("failed to find LLVM toolchain after downloading")
199	}
200
201	return toolchain, nil
202}
203
204// toolchainEnv() returns the environment variables for executing CMake commands.
205func (r *regres) toolchainEnv() []string {
206	return append([]string{
207		"CC=" + r.toolchain.Clang(),
208		"CXX=" + r.toolchain.ClangXX(),
209	}, os.Environ()...)
210}
211
212// resolveDirs ensures that the necessary directories used can be found, and
213// expands them to absolute paths.
214func (r *regres) resolveDirs() error {
215	allDirs := []*string{
216		&r.cacheRoot,
217	}
218
219	for _, path := range allDirs {
220		abs, err := filepath.Abs(*path)
221		if err != nil {
222			return fmt.Errorf("failed to find path '%v': %w", *path, err)
223		}
224		*path = abs
225	}
226
227	if err := os.MkdirAll(r.cacheRoot, 0777); err != nil {
228		return fmt.Errorf("failed to create cache root directory: %w", err)
229	}
230
231	for _, path := range allDirs {
232		if !util.IsDir(*path) {
233			return fmt.Errorf("failed to find path '%v'", *path)
234		}
235	}
236
237	return nil
238}
239
240// resolveExes resolves all external executables used by regres.
241func (r *regres) resolveExes() error {
242	type exe struct {
243		name string
244		path *string
245	}
246	for _, e := range []exe{
247		{"cmake", &r.cmake},
248		{"make", &r.make},
249		{"python", &r.python},
250		{"tar", &r.tar},
251	} {
252		path, err := exec.LookPath(e.name)
253		if err != nil {
254			return fmt.Errorf("failed to find path to %s: %w", e.name, err)
255		}
256		*e.path = path
257	}
258	return nil
259}
260
// run performs the main processing loop for the regress tool. It:
// * Scans for open and recently updated changes in gerrit using queryChanges()
//   and changeInfo.update().
// * Builds the most recent patchset and the commit's parent CL using
//   r.newTest(<hash>).lazyRun().
// * Compares the results of the tests using compare().
// * Posts the results of the compare to gerrit as a review.
// * Repeats the above steps until the process is interrupted.
func (r *regres) run() error {
	if err := r.resolveExes(); err != nil {
		return fmt.Errorf("failed to resolve all exes: %w", err)
	}

	if err := r.resolveDirs(); err != nil {
		return fmt.Errorf("failed to resolve all directories: %w", err)
	}

	toolchain, err := getToolchain(r.tar, r.cacheRoot)
	if err != nil {
		return fmt.Errorf("failed to download LLVM toolchain: %w", err)
	}
	r.toolchain = toolchain

	client, err := gerrit.NewClient(gerritURL, nil)
	if err != nil {
		return fmt.Errorf("failed to create gerrit client: %w", err)
	}
	if r.gerritUser != "" {
		client.Authentication.SetBasicAuth(r.gerritUser, r.gerritPass)
	}

	changes := map[int]*changeInfo{} // Change number -> changeInfo
	lastUpdatedTestLists := toDate(time.Now())
	lastQueriedChanges := time.Time{}

	// --dailynow / --dailyonly force the daily pass to run on the first loop
	// iteration by zeroing the last-run date.
	if r.dailyNow || r.dailyOnly {
		lastUpdatedTestLists = date{}
	}

	for {
		// Run the daily pass once per calendar day, for both reactor backends.
		// Failures are logged but do not stop the loop.
		if now := time.Now(); toDate(now) != lastUpdatedTestLists {
			lastUpdatedTestLists = toDate(now)
			if err := r.runDaily(client, backendLLVM, false); err != nil {
				log.Println(err.Error())
			}
			if err := r.runDaily(client, backendSubzero, true); err != nil {
				log.Println(err.Error())
			}
		}

		if r.dailyOnly {
			log.Println("Daily finished with --dailyonly. Stopping")
			return nil
		}

		// Update list of tracked changes.
		if time.Since(lastQueriedChanges) > changeQueryFrequency {
			lastQueriedChanges = time.Now()
			if err := queryChanges(client, changes); err != nil {
				log.Println(err.Error())
			}
		}

		// Update change info.
		for _, change := range changes {
			if time.Since(change.lastUpdated) > changeUpdateFrequency {
				change.lastUpdated = time.Now()
				err := change.update(client)
				if err != nil {
					log.Println(fmt.Errorf("failed to update info for change '%v': %w", change.number, err))
				}
			}
		}

		// Boost the change named by --priority so it wins the selection below.
		for _, c := range changes {
			if c.pending && r.priority == c.number {
				log.Printf("Prioritizing change '%v'\n", c.number)
				c.priority = 1e6
			}
		}

		// Find the change with the highest priority.
		var change *changeInfo
		numPending := 0
		for _, c := range changes {
			if c.pending {
				numPending++
				if change == nil || c.priority > change.priority {
					change = c
				}
			}
		}

		if change == nil {
			// Everything up to date. Take a break.
			log.Println("Nothing to do. Sleeping")
			time.Sleep(time.Minute)
			continue
		}

		log.Printf("%d changes queued for testing\n", numPending)

		log.Printf("Testing change '%v'\n", change.number)

		// Test the latest patchset in the change, diff against parent change.
		msg, alert, err := r.test(change)
		if err != nil {
			// A failed test is logged and the change marked done, so the loop
			// moves on rather than retrying the same change forever.
			log.Println(fmt.Errorf("failed to test changelist '%s': %w", change.latest, err))
			time.Sleep(time.Minute)
			change.pending = false
			continue
		}

		// Always include the reportHeader in the message.
		// changeInfo.update() uses this header to detect whether a patchset has
		// already got a test result.
		msg = reportHeader + "\n\n" + msg

		// Limit the message length to prevent '400 Bad Request' response.
		maxMsgLength := 16000
		if len(msg) > maxMsgLength {
			trunc := " [truncated]\n"
			msg = msg[0:maxMsgLength-len(trunc)] + trunc
		}

		if r.dryRun {
			log.Printf("DRY RUN: add review to change '%v':\n%v\n", change.number, msg)
		} else {
			log.Printf("Posting review to '%v'\n", change.number)
			// Alerting results notify reviewers as well as the owner.
			notify := "OWNER"
			if alert {
				notify = "OWNER_REVIEWERS"
			}
			_, _, err = client.Changes.SetReview(fmt.Sprintf("%v", change.number), change.latest.String(), &gerrit.ReviewInput{
				Message: msg,
				Tag:     "autogenerated:regress",
				Notify:  notify,
			})
			if err != nil {
				return fmt.Errorf("failed to post comments on change '%v': %w", change.number, err)
			}
		}
		change.pending = false
	}
}
406
// test checks out and tests the latest patchset of change, then tests the
// change's parent with the same test lists, and returns a report message
// comparing the two result sets. The returned bool signals whether reviewers
// should be alerted (it is also true on every error path so that failures are
// surfaced loudly).
func (r *regres) test(change *changeInfo) (string, bool, error) {
	latest := r.newTest(change.latest)
	defer latest.cleanup()

	if err := latest.checkout(); err != nil {
		return "", true, fmt.Errorf("failed to checkout '%s': %w", change.latest, err)
	}

	// The same dEQP build is reused for both the latest patchset and its
	// parent, so the comparison below isolates the SwiftShader change.
	deqpBuild, err := r.getOrBuildDEQP(latest)
	if err != nil {
		return "", true, fmt.Errorf("failed to build dEQP '%v' for change: %w", change.number, err)
	}

	log.Printf("Testing latest patchset for change '%v'\n", change.number)
	latestResults, testlists, err := r.testLatest(change, latest, deqpBuild)
	if err != nil {
		return "", true, fmt.Errorf("failed to test latest change of '%v': %w", change.number, err)
	}

	log.Printf("Testing parent of change '%v'\n", change.number)
	parentResults, err := r.testParent(change, testlists, deqpBuild)
	if err != nil {
		return "", true, fmt.Errorf("failed to test parent change of '%v': %w", change.number, err)
	}

	log.Println("Comparing latest patchset's results with parent")
	msg, alert := compare(parentResults, latestResults)

	return msg, alert, nil
}
437
// deqpBuild identifies a cached checkout-and-build of dEQP, keyed by the hash
// of the configuration that produced it.
type deqpBuild struct {
	path string // path to deqp directory
	hash string // hash of the deqp config
}
442
// DeqpConfig holds the JSON payload of the deqp.json file
type DeqpConfig struct {
	Remote  string   `json:"remote"`  // git remote of the dEQP repository
	Branch  string   `json:"branch"`  // optional branch fetched before checking out SHA
	SHA     string   `json:"sha"`     // commit to check out
	Patches []string `json:"patches"` // patch files to apply, relative to the SwiftShader checkout
}
450
451func loadConfigFromFile(deqpConfigFile string) (DeqpConfig, error) {
452	file, err := os.Open(deqpConfigFile)
453	if err != nil {
454		return DeqpConfig{}, fmt.Errorf("failed to open dEQP config file: %w", err)
455	}
456	defer file.Close()
457
458	cfg := DeqpConfig{}
459	if err := json.NewDecoder(file).Decode(&cfg); err != nil {
460		return DeqpConfig{}, fmt.Errorf("failed to parse %s: %w", deqpConfigRelPath, err)
461	}
462
463	return cfg, nil
464}
465
466func (r *regres) getOrBuildDEQP(test *test) (deqpBuild, error) {
467	checkoutDir := test.checkoutDir
468	if p := path.Join(checkoutDir, deqpConfigRelPath); !util.IsFile(p) {
469		checkoutDir, _ = os.Getwd()
470		log.Printf("failed to open dEQP config file from change (%v), falling back to internal version\n", p)
471	} else {
472		log.Println("Using dEQP config file from change")
473	}
474
475	cfg, err := loadConfigFromFile(path.Join(checkoutDir, deqpConfigRelPath))
476	if err != nil {
477		return deqpBuild{}, fmt.Errorf("failed to load config file: %w", err)
478	}
479
480	return r.getOrBuildDEQPFromConfig(test, cfg, checkoutDir)
481}
482
// getOrBuildDEQPFromConfig returns a dEQP build for the given configuration.
// Builds are cached under <cacheRoot>/deqp/<hash>, where the hash is the
// SHA-1 of the JSON-encoded config — so any change to the config yields a new
// checkout and build. checkoutDir is used to resolve the relative paths of
// the patches listed in cfg. A partially-completed build directory is removed
// on failure via the deferred cleanup below.
func (r *regres) getOrBuildDEQPFromConfig(test *test, cfg DeqpConfig, checkoutDir string) (deqpBuild, error) {
	hasher := sha1.New()
	if err := json.NewEncoder(hasher).Encode(&cfg); err != nil {
		return deqpBuild{}, fmt.Errorf("failed to re-encode %s: %w", deqpConfigRelPath, err)
	}
	hash := hex.EncodeToString(hasher.Sum(nil))
	cacheDir := path.Join(r.cacheRoot, "deqp", hash)
	buildDir := path.Join(cacheDir, "build")
	if !util.IsDir(cacheDir) {
		if err := os.MkdirAll(cacheDir, 0777); err != nil {
			return deqpBuild{}, fmt.Errorf("failed to make deqp cache directory '%s': %w", cacheDir, err)
		}

		// Remove the cache directory on any failure below, so a broken
		// half-built checkout is never mistaken for a valid cached build.
		success := false
		defer func() {
			if !success {
				os.RemoveAll(cacheDir)
			}
		}()

		if cfg.Branch != "" {
			// If a branch is specified, then fetch the branch then checkout the
			// commit by SHA. This is a workaround for git repos that error when
			// attempting to directly checkout a remote commit.
			log.Printf("Checking out deqp %v branch %v into %v\n", cfg.Remote, cfg.Branch, cacheDir)
			if err := git.CheckoutRemoteBranch(cacheDir, cfg.Remote, cfg.Branch); err != nil {
				return deqpBuild{}, fmt.Errorf("failed to checkout deqp branch %v @ %v: %w", cfg.Remote, cfg.Branch, err)
			}
			log.Printf("Checking out deqp %v commit %v \n", cfg.Remote, cfg.SHA)
			if err := git.CheckoutCommit(cacheDir, git.ParseHash(cfg.SHA)); err != nil {
				return deqpBuild{}, fmt.Errorf("failed to checkout deqp commit %v @ %v: %w", cfg.Remote, cfg.SHA, err)
			}
		} else {
			log.Printf("Checking out deqp %v @ %v into %v\n", cfg.Remote, cfg.SHA, cacheDir)
			if err := git.CheckoutRemoteCommit(cacheDir, cfg.Remote, git.ParseHash(cfg.SHA)); err != nil {
				return deqpBuild{}, fmt.Errorf("failed to checkout deqp commit %v @ %v: %w", cfg.Remote, cfg.SHA, err)
			}
		}

		log.Println("Fetching deqp dependencies")
		if err := shell.Shell(buildTimeout, r.python, cacheDir, "external/fetch_sources.py"); err != nil {
			return deqpBuild{}, fmt.Errorf("failed to fetch deqp sources %v @ %v: %w", cfg.Remote, cfg.SHA, err)
		}

		log.Println("Applying deqp patches")
		for _, patch := range cfg.Patches {
			// Patch paths are relative to the SwiftShader checkout, not the
			// dEQP checkout.
			fullPath := path.Join(checkoutDir, patch)
			if err := git.Apply(cacheDir, fullPath); err != nil {
				return deqpBuild{}, fmt.Errorf("failed to apply deqp patch %v for %v @ %v: %w", patch, cfg.Remote, cfg.SHA, err)
			}
		}

		log.Printf("Building deqp into %v\n", buildDir)
		if err := os.MkdirAll(buildDir, 0777); err != nil {
			return deqpBuild{}, fmt.Errorf("failed to make deqp build directory '%v': %w", buildDir, err)
		}

		if err := shell.Shell(buildTimeout, r.cmake, buildDir,
			"-DDEQP_TARGET=default",
			"-DCMAKE_BUILD_TYPE=Release",
			".."); err != nil {
			return deqpBuild{}, fmt.Errorf("failed to generate build rules for deqp %v @ %v: %w", cfg.Remote, cfg.SHA, err)
		}

		if err := shell.Shell(buildTimeout, r.make, buildDir,
			fmt.Sprintf("-j%d", runtime.NumCPU()),
			"deqp-vk"); err != nil {
			return deqpBuild{}, fmt.Errorf("failed to build deqp %v @ %v: %w", cfg.Remote, cfg.SHA, err)
		}

		success = true
	}

	return deqpBuild{
		path: cacheDir,
		hash: hash,
	}, nil
}
561
// additionalTestsRE matches 'Test:' / 'Tests:' lines in a change's commit
// message, capturing the first whitespace-delimited token after the colon as
// a test name pattern (see testLatest).
var additionalTestsRE = regexp.MustCompile(`\n\s*Test[s]?:\s*([^\s]+)[^\n]*`)
563
// testLatest builds and runs the CI test lists against the latest patchset of
// change, returning the results along with the test lists used. If the
// change's commit message contains 'Test:'/'Tests:' lines, matching tests
// from the full test list are added to the run. Results are cached on disk,
// keyed by the test lists and dEQP build, so identical re-runs are served
// from the cache.
func (r *regres) testLatest(change *changeInfo, test *test, d deqpBuild) (*deqp.Results, testlist.Lists, error) {
	// Get the test results for the latest patchset in the change.
	testlists, err := test.loadTestLists(ciTestListRelPath)
	if err != nil {
		return nil, nil, fmt.Errorf("failed to load '%s': %w", change.latest, err)
	}

	if matches := additionalTestsRE.FindAllStringSubmatch(change.commitMessage, -1); len(matches) > 0 {
		log.Println("Change description contains additional test patterns")

		// Change specifies additional tests to try. Load the full test list.
		fullTestLists, err := test.loadTestLists(fullTestListRelPath)
		if err != nil {
			return nil, nil, fmt.Errorf("failed to load '%s': %w", change.latest, err)
		}

		// Add any tests in the full list that match the pattern to the list to test.
		for _, match := range matches {
			if len(match) > 1 {
				pattern := match[1]
				log.Printf("Adding custom tests with pattern '%s'\n", pattern)
				// filepath.Match gives shell-style globbing ('*', '?', etc.).
				filtered := fullTestLists.Filter(func(name string) bool {
					ok, _ := filepath.Match(pattern, name)
					return ok
				})
				testlists = append(testlists, filtered...)
			}
		}
	}

	cachePath := test.resultsCachePath(testlists, d)

	if results, err := deqp.LoadResults(cachePath); err == nil {
		return results, testlists, nil // Use cached results
	}

	// Build the change and test it.
	results := test.buildAndRun(testlists, d)

	// Cache the results for future tests
	if err := results.Save(cachePath); err != nil {
		log.Printf("Warning: Couldn't save results of test to '%v'\n", cachePath)
	}

	return results, testlists, nil
}
610
// testParent builds and runs the given test lists against the parent of
// change, returning the results. Like testLatest, results are cached keyed by
// the test lists and dEQP build; the checkout/build is only performed on a
// cache miss.
func (r *regres) testParent(change *changeInfo, testlists testlist.Lists, d deqpBuild) (*deqp.Results, error) {
	// Get the test results for the changes's parent changelist.
	test := r.newTest(change.parent)
	defer test.cleanup()

	cachePath := test.resultsCachePath(testlists, d)

	if results, err := deqp.LoadResults(cachePath); err == nil {
		return results, nil // Use cached results
	}

	// Couldn't load cached results. Have to build them.
	if err := test.checkout(); err != nil {
		return nil, fmt.Errorf("failed to checkout '%s': %w", change.parent, err)
	}

	// Build the parent change and test it.
	results := test.buildAndRun(testlists, d)

	// Store the results of the parent change to the cache.
	if err := results.Save(cachePath); err != nil {
		log.Printf("Warning: Couldn't save results of test to '%v'\n", cachePath)
	}

	return results, nil
}
637
// runDaily runs a full deqp run on the HEAD change, posting the results to a
// new or existing gerrit change. If genCov is true, then coverage
// information will be generated for the run, and committed to the
// coverageBranch. (Coverage generation is currently force-disabled below.)
func (r *regres) runDaily(client *gerrit.Client, reactorBackend reactorBackend, genCov bool) error {
	// TODO(b/152192800): Generating coverage data is currently broken.
	genCov = false

	log.Printf("Updating test lists (Backend: %v)\n", reactorBackend)

	// Coverage requires github credentials to push results; disable it if
	// either credential is missing rather than failing the whole daily run.
	if genCov {
		if r.githubUser == "" {
			log.Println("--gh-user not specified and SS_GITHUB_USER not set. Disabling code coverage generation")
			genCov = false
		} else if r.githubPass == "" {
			log.Println("--gh-pass not specified and SS_GITHUB_PASS not set. Disabling code coverage generation")
			genCov = false
		}
	}

	// Resolve the commit to test: --dailychange if given, otherwise the
	// remote HEAD of the SwiftShader repository.
	dailyHash := git.Hash{}
	if r.dailyChange == "" {
		headHash, err := git.FetchRefHash(gitDailyBranch, gitURL)
		if err != nil {
			return fmt.Errorf("failed to get hash of master HEAD: %w", err)
		}
		dailyHash = headHash
	} else {
		dailyHash = git.ParseHash(r.dailyChange)
	}

	return r.runDailyTest(dailyHash, reactorBackend, genCov,
		func(test *test, testLists testlist.Lists, results *deqp.Results) error {
			// Collect errors from both posting steps so one failure doesn't
			// prevent the other from being attempted.
			errs := []error{}

			if err := r.postDailyResults(client, test, testLists, results, reactorBackend, dailyHash); err != nil {
				errs = append(errs, err)
			}

			if genCov {
				if err := r.postCoverageResults(results.Coverage, dailyHash); err != nil {
					errs = append(errs, err)
				}
			}

			if len(errs) > 0 {
				msg := strings.Builder{}
				for _, err := range errs {
					msg.WriteString(err.Error() + "\n")
				}
				return fmt.Errorf("%s", msg.String())
			}
			return nil
		})
}
693
// runDailyTest performs the full deqp run on the HEAD change, calling
// withResults with the test results. It checks out dailyHash, builds dEQP and
// SwiftShader (for the given reactor backend), runs the full test list, and
// optionally configures coverage collection before the build.
func (r *regres) runDailyTest(dailyHash git.Hash, reactorBackend reactorBackend, genCov bool, withResults func(*test, testlist.Lists, *deqp.Results) error) error {
	// Get the full test results.
	test := r.newTest(dailyHash).setReactorBackend(reactorBackend)
	defer test.cleanup()

	// Always need to checkout the change.
	if err := test.checkout(); err != nil {
		return fmt.Errorf("failed to checkout '%s': %w", dailyHash, err)
	}

	d, err := r.getOrBuildDEQP(test)
	if err != nil {
		return fmt.Errorf("failed to build deqp for '%s': %w", dailyHash, err)
	}

	// Load the test lists.
	testLists, err := test.loadTestLists(fullTestListRelPath)
	if err != nil {
		return fmt.Errorf("failed to load full test lists for '%s': %w", dailyHash, err)
	}

	// Point the coverage environment at the build outputs so the test run can
	// gather coverage data. Must be set before test.build().
	if genCov {
		test.coverageEnv = &cov.Env{
			LLVM:     *r.toolchain,
			RootDir:  test.checkoutDir,
			ExePath:  filepath.Join(test.buildDir, "libvk_swiftshader.so"),
			TurboCov: filepath.Join(test.buildDir, "turbo-cov"),
		}
	}

	// Build the change.
	if err := test.build(); err != nil {
		return fmt.Errorf("failed to build '%s': %w", dailyHash, err)
	}

	// Run the tests on the change.
	results, err := test.run(testLists, d)
	if err != nil {
		return fmt.Errorf("failed to test '%s': %w", dailyHash, err)
	}

	return withResults(test, testLists, results)
}
739
740// copyFileIfDifferent copies src to dst if src doesn't exist or if there are differences
741// between the files.
742func copyFileIfDifferent(dst, src string) error {
743	srcFileInfo, err := os.Stat(src)
744	if err != nil {
745		return err
746	}
747	srcContents, err := os.ReadFile(src)
748	if err != nil {
749		return err
750	}
751
752	dstContents, err := os.ReadFile(dst)
753	if err != nil && !errors.Is(err, os.ErrNotExist) {
754		return err
755	}
756
757	if !bytes.Equal(srcContents, dstContents) {
758		if err := os.WriteFile(dst, srcContents, srcFileInfo.Mode()); err != nil {
759			return err
760		}
761	}
762	return nil
763}
764
765// deleteFileIfNotPresent deletes a file if the corresponding file doesn't exist
766func deleteFileIfNotPresent(toDeleteFile, checkFile string) error {
767	if _, err := os.Stat(checkFile); errors.Is(err, os.ErrNotExist) {
768		return os.Remove(toDeleteFile)
769	}
770
771	return nil
772}
773
// updateLocalDeqpFiles sets the SHA in deqp.json to the latest dEQP revision,
// then it uses getOrBuildDEQP to checkout that revision and copy over its testlists.
// It returns the list of file paths it created, modified or deleted, so the
// caller can stage them for commit.
func (r *regres) updateLocalDeqpFiles(test *test) ([]string, error) {
	out := []string{}
	// Update deqp.json
	deqpJsonPath := path.Join(test.checkoutDir, deqpConfigRelPath)
	if !util.IsFile(deqpJsonPath) {
		return nil, fmt.Errorf("Failed to locate %s while trying to update the dEQP SHA", deqpConfigRelPath)
	}
	cfg, err := loadConfigFromFile(deqpJsonPath)
	if err != nil {
		return nil, fmt.Errorf("failed to open dEQP config file: %w", err)
	}

	// Resolve the current HEAD of the dEQP remote and write it back into the
	// config as the pinned SHA.
	hash, err := git.FetchRefHash("HEAD", cfg.Remote)
	if err != nil {
		return nil, fmt.Errorf("failed to fetch dEQP ref: %w", err)
	}
	cfg.SHA = hash.String()
	log.Println("New dEQP revision: ", cfg.SHA)

	// os.Create truncates the existing file before the re-encoded config is
	// written.
	newFile, err := os.Create(deqpJsonPath)
	if err != nil {
		return nil, fmt.Errorf("failed to open %s for encoding: %w", deqpConfigRelPath, err)
	}
	defer newFile.Close()

	encoder := json.NewEncoder(newFile)
	// Make the encoder create a new-line and space-based indents for each field
	encoder.SetIndent("", "    ")
	if err := encoder.Encode(&cfg); err != nil {
		return nil, fmt.Errorf("failed to re-encode %s: %w", deqpConfigRelPath, err)
	}
	out = append(out, deqpJsonPath)

	// Use getOrBuildDEQPFromConfig as it'll prevent us from copying data from a revision of dEQP that has build errors.
	deqpBuild, err := r.getOrBuildDEQPFromConfig(test, cfg, test.checkoutDir)

	if err != nil {
		return nil, fmt.Errorf("failed to retrieve dEQP build information: %w", err)
	}

	log.Println("Copying deqp's vulkan testlist to checkout ", test.commit)
	deqpTestlistDir := path.Join(deqpBuild.path, deqpTestLists)
	swsTestlistDir := path.Join(test.checkoutDir, swsTestLists)

	// dEQP's vk-default.txt maps onto SwiftShader's vk-master.txt.
	deqpDefault := path.Join(deqpTestlistDir, "vk-default.txt")
	swsDefault := path.Join(swsTestlistDir, "vk-master.txt")

	if err := copyFileIfDifferent(swsDefault, deqpDefault); err != nil {
		return nil, fmt.Errorf("failed to copy '%s' to '%s': %w", deqpDefault, swsDefault, err)
	}

	out = append(out, swsDefault)

	deqpTestlistVkDefaultDir := path.Join(deqpTestlistDir, "vk-default")
	swsTestlistVkDefaultDir := path.Join(swsTestlistDir, "vk-default")

	// First, copy over any existing dEQP file and add new dEQP files
	// NOTE(review): d.IsDir() is evaluated before err is checked — if WalkDir
	// reports an error for the root itself, d may be nil; consider checking
	// err first.
	err = filepath.WalkDir(deqpTestlistVkDefaultDir,
		func(deqpFile string, d fs.DirEntry, err error) error {
			if d.IsDir() || err != nil {
				return err
			}

			// Mirror the file's path relative to the dEQP testlist root into
			// the SwiftShader testlist tree.
			relPath, err := filepath.Rel(deqpTestlistVkDefaultDir, deqpFile)
			if err != nil {
				return err
			}

			swsFile := path.Join(swsTestlistVkDefaultDir, relPath)

			if err := copyFileIfDifferent(swsFile, deqpFile); err != nil {
				return fmt.Errorf("failed to copy '%s' to '%s': %w", deqpFile, swsFile, err)
			}
			out = append(out, swsFile)

			return nil
		})
	if err != nil {
		return nil, fmt.Errorf("failed to read files from %s: %w", deqpTestlistVkDefaultDir, err)
	}

	// Second, delete files which no longer exist in dEQP
	err = filepath.WalkDir(swsTestlistVkDefaultDir,
		func(swsFile string, d fs.DirEntry, err error) error {
			if d.IsDir() || err != nil {
				return err
			}

			relPath, err := filepath.Rel(swsTestlistVkDefaultDir, swsFile)
			if err != nil {
				return err
			}

			deqpFile := path.Join(deqpTestlistVkDefaultDir, relPath)

			// deleteFileIfNotPresent only removes swsFile when deqpFile no
			// longer exists; unchanged files are still appended to out.
			if err := deleteFileIfNotPresent(swsFile, deqpFile); err != nil {
				return fmt.Errorf("failed to delete '%s': %w", swsFile, err)
			}
			out = append(out, swsFile)

			return nil
		})
	if err != nil {
		return nil, fmt.Errorf("failed to read files from %s: %w", swsTestlistVkDefaultDir, err)
	}

	return out, nil
}
884
885// postDailyResults posts the results of the daily full deqp run to gerrit as
886// a new change, or reusing an old, unsubmitted change.
887// This change contains the updated test lists, an updated deqp.json that
888// points to the latest dEQP commit, and updated dEQP test files, along with a
889// summary of the test results.
890func (r *regres) postDailyResults(
891	client *gerrit.Client,
892	test *test,
893	testLists testlist.Lists,
894	results *deqp.Results,
895	reactorBackend reactorBackend,
896	dailyHash git.Hash) error {
897
898	// Write out the test list status files.
899	filePaths, err := test.writeTestListsByStatus(testLists, results)
900	if err != nil {
901		return fmt.Errorf("failed to write test lists by status: %w", err)
902	}
903
904	newPaths, err := r.updateLocalDeqpFiles(test)
905	if err != nil {
906		return fmt.Errorf("failed to update test lists from dEQP: %w", err)
907	}
908
909	filePaths = append(filePaths, newPaths...)
910
911	// Stage all the updated test files.
912	for _, path := range filePaths {
913		log.Println("Staging", path)
914		if err := git.Add(test.checkoutDir, path); err != nil {
915			return err
916		}
917	}
918
919	log.Println("Checking for existing test list")
920	existingChange, err := r.findTestListChange(client)
921	if err != nil {
922		return err
923	}
924
925	commitMsg := strings.Builder{}
926	commitMsg.WriteString(consts.TestListUpdateCommitSubjectPrefix + dailyHash.String()[:8])
927	commitMsg.WriteString("\n\nReactor backend: " + string(reactorBackend))
928	if existingChange != nil {
929		// Reuse gerrit change ID if there's already a change up for review.
930		commitMsg.WriteString("\n\n")
931		commitMsg.WriteString("Change-Id: " + existingChange.ChangeID + "\n")
932	}
933
934	if err := git.Commit(test.checkoutDir, commitMsg.String(), git.CommitFlags{
935		Name:  "SwiftShader Regression Bot",
936		Email: r.gerritEmail,
937	}); err != nil {
938		return fmt.Errorf("failed to commit test results: %w", err)
939	}
940
941	if r.dryRun {
942		log.Printf("DRY RUN: post results for review")
943	} else {
944		log.Println("Pushing test results for review")
945		if err := git.Push(test.checkoutDir, gitURL, "HEAD", "refs/for/master", git.PushFlags{
946			Username: r.gerritUser,
947			Password: r.gerritPass,
948		}); err != nil {
949			return fmt.Errorf("failed to push test results for review: %w", err)
950		}
951		log.Println("Test results posted for review")
952	}
953
954	// We've just pushed a new commit. Let's reset back to the parent commit
955	// (dailyHash), so that we can run runDaily again for another backend,
956	// and have it update the commit with the same change-id.
957	if err := git.CheckoutCommit(test.checkoutDir, dailyHash); err != nil {
958		return fmt.Errorf("failed to checkout parent commit: %w", err)
959	}
960	log.Println("Checked out parent commit")
961
962	change, err := r.findTestListChange(client)
963	if err != nil {
964		return err
965	}
966
967	if err := r.postMostCommonFailures(client, change, results); err != nil {
968		return err
969	}
970
971	return nil
972}
973
974func (r *regres) postCoverageResults(cov *cov.Tree, revision git.Hash) error {
975	log.Printf("Committing coverage for %v\n", revision.String())
976
977	url := coverageURL
978	url = strings.ReplaceAll(url, "$USERNAME", r.githubUser)
979	url = strings.ReplaceAll(url, "$PASSWORD", r.githubPass)
980
981	dir := filepath.Join(r.cacheRoot, "coverage")
982	defer os.RemoveAll(dir)
983	if err := git.CheckoutRemoteBranch(dir, url, coverageBranch); err != nil {
984		return fmt.Errorf("failed to checkout gh-pages branch: %w", err)
985	}
986
987	filePath := filepath.Join(dir, "coverage.dat")
988	file, err := os.Create(filePath)
989	if err != nil {
990		return fmt.Errorf("failed to create file '%s': %w", filePath, err)
991	}
992	defer file.Close()
993
994	if err := cov.Encode(revision.String(), file); err != nil {
995		return fmt.Errorf("failed to encode coverage: %w", err)
996	}
997	file.Close()
998
999	if err := git.Add(dir, filePath); err != nil {
1000		return fmt.Errorf("failed to git add '%s': %w", filePath, err)
1001	}
1002
1003	shortHash := revision.String()[:8]
1004
1005	err = git.Commit(dir, "Update coverage data @ "+shortHash, git.CommitFlags{
1006		Name:  "SwiftShader Regression Bot",
1007		Email: r.gerritEmail,
1008	})
1009	if err != nil {
1010		return fmt.Errorf("failed to git commit: %w", err)
1011	}
1012
1013	if !r.dryRun {
1014		err = git.Push(dir, url, coverageBranch, coverageBranch, git.PushFlags{})
1015		if err != nil {
1016			return fmt.Errorf("failed to 'git push': %w", err)
1017		}
1018		log.Printf("Coverage for %v pushed to Github\n", shortHash)
1019	}
1020
1021	return nil
1022}
1023
1024// postMostCommonFailures posts the most common failure cases as a review
1025// comment on the given change.
1026func (r *regres) postMostCommonFailures(client *gerrit.Client, change *gerrit.ChangeInfo, results *deqp.Results) error {
1027	const limit = 25
1028
1029	failures := commonFailures(results)
1030	if len(failures) > limit {
1031		failures = failures[:limit]
1032	}
1033	sb := strings.Builder{}
1034	sb.WriteString(fmt.Sprintf("Top %v most common failures:\n", len(failures)))
1035	for _, f := range failures {
1036		lines := strings.Split(f.error, "\n")
1037		if len(lines) == 1 {
1038			line := lines[0]
1039			if line != "" {
1040				sb.WriteString(fmt.Sprintf(" • %d occurrences: %v: %v\n", f.count, f.status, line))
1041			} else {
1042				sb.WriteString(fmt.Sprintf(" • %d occurrences: %v\n", f.count, f.status))
1043			}
1044		} else {
1045			sb.WriteString(fmt.Sprintf(" • %d occurrences: %v:\n", f.count, f.status))
1046			for _, l := range lines {
1047				sb.WriteString("    > ")
1048				sb.WriteString(l)
1049				sb.WriteString("\n")
1050			}
1051		}
1052		sb.WriteString(fmt.Sprintf("    Example test: %v\n", f.exampleTest))
1053
1054	}
1055	msg := sb.String()
1056
1057	if r.dryRun {
1058		log.Printf("DRY RUN: add most common failures: %v\n", msg)
1059	} else {
1060		log.Printf("Posting most common failures to '%v'\n", change.Number)
1061		_, _, err := client.Changes.SetReview(fmt.Sprintf("%v", change.Number), change.CurrentRevision, &gerrit.ReviewInput{
1062			Message: msg,
1063			Tag:     "autogenerated:regress",
1064		})
1065		if err != nil {
1066			return fmt.Errorf("failed to post comments on change '%v': %w", change.Number, err)
1067		}
1068	}
1069	return nil
1070}
1071
1072func (r *regres) findTestListChange(client *gerrit.Client) (*gerrit.ChangeInfo, error) {
1073	log.Println("Checking for existing test list change")
1074	changes, _, err := client.Changes.QueryChanges(&gerrit.QueryChangeOptions{
1075		QueryOptions: gerrit.QueryOptions{
1076			Query: []string{fmt.Sprintf(`status:open+owner:"%v"`, r.gerritEmail)},
1077			Limit: 1,
1078		},
1079		ChangeOptions: gerrit.ChangeOptions{
1080			AdditionalFields: []string{"CURRENT_REVISION"},
1081		},
1082	})
1083	if err != nil {
1084		return nil, fmt.Errorf("failed to checking for existing test list: %w", err)
1085	}
1086	if len(*changes) > 0 {
1087		// TODO: This currently assumes that only change changes from
1088		// gerritEmail are test lists updates. This may not always be true.
1089		return &(*changes)[0], nil
1090	}
1091	return nil, nil
1092}
1093
// changeInfo holds the important information about a single, open change in
// gerrit, as tracked by the regres main loop.
type changeInfo struct {
	pending       bool      // Is this change waiting a test for the latest patchset?
	priority      int       // Calculated priority based on Gerrit labels.
	latest        git.Hash  // Git hash of the latest patchset in the change.
	parent        git.Hash  // Git hash of the changelist this change is based on.
	lastUpdated   time.Time // Time the change was last fetched.
	number        int       // The number gerrit assigned to the change.
	commitMessage string    // Commit message of the latest patchset (set by update()).
}
1105
1106// queryChanges updates the changes map by querying gerrit for the latest open
1107// changes.
1108func queryChanges(client *gerrit.Client, changes map[int]*changeInfo) error {
1109	log.Println("Checking for latest changes")
1110	results, _, err := client.Changes.QueryChanges(&gerrit.QueryChangeOptions{
1111		QueryOptions: gerrit.QueryOptions{
1112			Query: []string{"status:open+-age:3d"},
1113			Limit: 100,
1114		},
1115	})
1116	if err != nil {
1117		return fmt.Errorf("failed to get list of changes: %w", err)
1118	}
1119
1120	ids := map[int]bool{}
1121	for _, r := range *results {
1122		ids[r.Number] = true
1123	}
1124
1125	// Add new changes
1126	for number := range ids {
1127		if _, found := changes[number]; !found {
1128			log.Printf("Tracking new change '%v'\n", number)
1129			changes[number] = &changeInfo{number: number}
1130		}
1131	}
1132
1133	// Remove old changes
1134	for number := range changes {
1135		if _, found := ids[number]; !found {
1136			log.Printf("Untracking change '%v'\n", number)
1137			delete(changes, number)
1138		}
1139	}
1140
1141	return nil
1142}
1143
// update queries gerrit for the latest information about the given change,
// refreshing its scheduling priority, its test-pending state, and the git
// hashes of the latest patchset and its parent.
func (c *changeInfo) update(client *gerrit.Client) error {
	change, _, err := client.Changes.GetChange(fmt.Sprintf("%v", c.number), &gerrit.ChangeOptions{
		AdditionalFields: []string{"CURRENT_REVISION", "CURRENT_COMMIT", "MESSAGES", "LABELS", "DETAILED_ACCOUNTS"},
	})
	if err != nil {
		return fmt.Errorf("failed to get info for change %v: %w", c.number, err)
	}

	current, ok := change.Revisions[change.CurrentRevision]
	if !ok {
		return fmt.Errorf("failed to find current revision for change %v", c.number)
	}

	if len(current.Commit.Parents) == 0 {
		return fmt.Errorf("failed to find current commit for change %v has no parents(?)", c.number)
	}

	// Read the review labels. A label missing from the map yields a zero
	// value (score 0, no approver) — presumably go-gerrit's Labels values
	// are struct values; verify against the client library if changing this.
	kokoroPresubmit := change.Labels["Kokoro-Presubmit"].Approved.AccountID != 0
	codeReviewScore := change.Labels["Code-Review"].Value
	codeReviewApproved := change.Labels["Code-Review"].Approved.AccountID != 0
	presubmitReady := change.Labels["Presubmit-Ready"].Approved.AccountID != 0
	verifiedScore := change.Labels["Verified"].Value

	// Derive a scheduling priority from the labels: Presubmit-Ready weighs
	// most (+10), then Code-Review approval (+2) and score, then a Kokoro
	// presubmit pass (+1).
	c.priority = 0
	if presubmitReady {
		c.priority += 10
	}
	c.priority += codeReviewScore
	if codeReviewApproved {
		c.priority += 2
	}
	if kokoroPresubmit {
		c.priority++
	}

	// Is the change from a Googler or reviewed by a Googler?
	canTest := strings.HasSuffix(current.Commit.Committer.Email, "@google.com") ||
		strings.HasSuffix(change.Labels["Code-Review"].Approved.Email, "@google.com") ||
		strings.HasSuffix(change.Labels["Code-Review"].Recommended.Email, "@google.com") ||
		strings.HasSuffix(change.Labels["Presubmit-Ready"].Approved.Email, "@google.com")

	// Don't test if the change has negative scores.
	if canTest {
		if codeReviewScore < 0 || verifiedScore < 0 {
			canTest = false
		}
	}

	// Has the latest patchset already been tested? A prior regres report
	// (containing reportHeader) on the current patchset means it has.
	if canTest {
		for _, msg := range change.Messages {
			if msg.RevisionNumber == current.Number &&
				strings.Contains(msg.Message, reportHeader) {
				canTest = false
				break
			}
		}
	}

	c.pending = canTest
	c.latest = git.ParseHash(change.CurrentRevision)
	c.parent = git.ParseHash(current.Commit.Parents[0].Commit)
	c.commitMessage = current.Commit.Message

	return nil
}
1211
1212func (r *regres) newTest(commit git.Hash) *test {
1213	checkoutDir := filepath.Join(r.cacheRoot, "checkout", commit.String())
1214	resDir := filepath.Join(r.cacheRoot, "res", commit.String())
1215	return &test{
1216		r:              r,
1217		commit:         commit,
1218		checkoutDir:    checkoutDir,
1219		resDir:         resDir,
1220		buildDir:       filepath.Join(checkoutDir, "build"),
1221		reactorBackend: backendSubzero,
1222	}
1223}
1224
// setReactorBackend sets the Reactor backend used when building this test,
// and returns t to allow call chaining.
func (t *test) setReactorBackend(reactorBackend reactorBackend) *test {
	t.reactorBackend = reactorBackend
	return t
}
1229
// reactorBackend is the name of a Reactor JIT backend used for the
// SwiftShader build (passed to CMake as REACTOR_BACKEND by test.build()).
type reactorBackend string

const (
	backendLLVM    reactorBackend = "LLVM"
	backendSubzero reactorBackend = "Subzero"
)
1236
// test holds the state for checking out, building and testing a single
// SwiftShader commit.
type test struct {
	r              *regres        // the owning regres instance
	commit         git.Hash       // hash of the commit to test
	checkoutDir    string         // directory for the SwiftShader checkout
	resDir         string         // directory for the test results
	buildDir       string         // directory for SwiftShader build
	toolchain      llvm.Toolchain // the toolchain used for building
	reactorBackend reactorBackend // backend for SwiftShader build
	coverageEnv    *cov.Env       // coverage generation environment (optional).
}
1247
1248// cleanup removes any temporary files used by the test.
1249func (t *test) cleanup() {
1250	if t.checkoutDir != "" && !t.r.keepCheckouts {
1251		os.RemoveAll(t.checkoutDir)
1252	}
1253}
1254
1255// checkout clones the test's source commit into t.src.
1256func (t *test) checkout() error {
1257	if util.IsDir(t.checkoutDir) && t.r.keepCheckouts {
1258		log.Printf("Reusing source cache for commit '%s'\n", t.commit)
1259		return nil
1260	}
1261	log.Printf("Checking out '%s'\n", t.commit)
1262	os.RemoveAll(t.checkoutDir)
1263	if err := git.CheckoutRemoteCommit(t.checkoutDir, gitURL, t.commit); err != nil {
1264		return fmt.Errorf("failed to check out commit '%s': %w", t.commit, err)
1265	}
1266	log.Printf("Checked out commit '%s'\n", t.commit)
1267	return nil
1268}
1269
1270// buildAndRun calls t.build() followed by t.run(). Errors are logged and
1271// reported in the returned deqprun.Results.Error field.
1272func (t *test) buildAndRun(testLists testlist.Lists, d deqpBuild) *deqp.Results {
1273	// Build the parent change.
1274	if err := t.build(); err != nil {
1275		msg := fmt.Sprintf("Failed to build '%s'", t.commit)
1276		log.Println(fmt.Errorf("%s: %w", msg, err))
1277		return &deqp.Results{Error: msg}
1278	}
1279
1280	// Run the tests on the parent change.
1281	results, err := t.run(testLists, d)
1282	if err != nil {
1283		msg := fmt.Sprintf("Failed to test change '%s'", t.commit)
1284		log.Println(fmt.Errorf("%s: %w", msg, err))
1285		return &deqp.Results{Error: msg}
1286	}
1287
1288	return results
1289}
1290
1291// build builds the SwiftShader source into t.buildDir.
1292func (t *test) build() error {
1293	log.Printf("Building '%s'\n", t.commit)
1294
1295	if err := os.MkdirAll(t.buildDir, 0777); err != nil {
1296		return fmt.Errorf("failed to create build directory: %w", err)
1297	}
1298
1299	args := []string{
1300		`..`,
1301		`-DCMAKE_BUILD_TYPE=Release`,
1302		`-DSWIFTSHADER_DCHECK_ALWAYS_ON=1`,
1303		`-DREACTOR_VERIFY_LLVM_IR=1`,
1304		`-DREACTOR_BACKEND=` + string(t.reactorBackend),
1305		`-DSWIFTSHADER_LLVM_VERSION=10.0`,
1306		`-DSWIFTSHADER_WARNINGS_AS_ERRORS=0`,
1307	}
1308
1309	if t.coverageEnv != nil {
1310		args = append(args, "-DSWIFTSHADER_EMIT_COVERAGE=1")
1311	}
1312
1313	if err := shell.Env(buildTimeout, t.r.cmake, t.buildDir, t.r.toolchainEnv(), args...); err != nil {
1314		return err
1315	}
1316
1317	if err := shell.Shell(buildTimeout, t.r.make, t.buildDir, fmt.Sprintf("-j%d", runtime.NumCPU())); err != nil {
1318		return err
1319	}
1320
1321	return nil
1322}
1323
1324func (t *test) run(testLists testlist.Lists, d deqpBuild) (*deqp.Results, error) {
1325	log.Printf("Running tests for '%s'\n", t.commit)
1326
1327	swiftshaderICDSo := filepath.Join(t.buildDir, "libvk_swiftshader.so")
1328	if !util.IsFile(swiftshaderICDSo) {
1329		return nil, fmt.Errorf("failed to find '%s'", swiftshaderICDSo)
1330	}
1331
1332	swiftshaderICDJSON := filepath.Join(t.buildDir, "Linux", "vk_swiftshader_icd.json")
1333	if !util.IsFile(swiftshaderICDJSON) {
1334		return nil, fmt.Errorf("failed to find '%s'", swiftshaderICDJSON)
1335	}
1336
1337	if *limit != 0 {
1338		log.Printf("Limiting tests to %d\n", *limit)
1339		testLists = append(testlist.Lists{}, testLists...)
1340		for i := range testLists {
1341			testLists[i] = testLists[i].Limit(*limit)
1342		}
1343	}
1344
1345	// Directory for per-test small transient files, such as log files,
1346	// coverage output, etc.
1347	// TODO(bclayton): consider using tmpfs here.
1348	tempDir := filepath.Join(t.buildDir, "temp")
1349	os.MkdirAll(tempDir, 0777)
1350
1351	// Path to SwiftShader's libvulkan.so.1, which can be loaded directly by
1352	// dEQP without use of the Vulkan Loader.
1353	swiftshaderLibvulkanPath := filepath.Join(t.buildDir, "Linux")
1354
1355	config := deqp.Config{
1356		ExeEgl:    filepath.Join(d.path, "build", "modules", "egl", "deqp-egl"),
1357		ExeGles2:  filepath.Join(d.path, "build", "modules", "gles2", "deqp-gles2"),
1358		ExeGles3:  filepath.Join(d.path, "build", "modules", "gles3", "deqp-gles3"),
1359		ExeVulkan: filepath.Join(d.path, "build", "external", "vulkancts", "modules", "vulkan", "deqp-vk"),
1360		TempDir:   tempDir,
1361		TestLists: testLists,
1362		Env: []string{
1363			"LD_LIBRARY_PATH=" + os.Getenv("LD_LIBRARY_PATH") + ":" + swiftshaderLibvulkanPath,
1364			"VK_ICD_FILENAMES=" + swiftshaderICDJSON,
1365			"DISPLAY=" + os.Getenv("DISPLAY"),
1366			"LIBC_FATAL_STDERR_=1", // Put libc explosions into logs.
1367		},
1368		LogReplacements: map[string]string{
1369			t.checkoutDir: "<SwiftShader>",
1370		},
1371		NumParallelTests: numParallelTests,
1372		MaxTestsPerProc:  *maxTestsPerProc,
1373		TestTimeout:      testTimeout,
1374		CoverageEnv:      t.coverageEnv,
1375	}
1376
1377	return config.Run()
1378}
1379
1380func (t *test) writeTestListsByStatus(testLists testlist.Lists, results *deqp.Results) ([]string, error) {
1381	out := []string{}
1382
1383	for _, list := range testLists {
1384		files := map[testlist.Status]*os.File{}
1385		for _, status := range testlist.Statuses {
1386			path := testlist.FilePathWithStatus(filepath.Join(t.checkoutDir, list.File), status)
1387			dir := filepath.Dir(path)
1388			os.MkdirAll(dir, 0777)
1389			f, err := os.Create(path)
1390			if err != nil {
1391				return nil, fmt.Errorf("failed to create file '%v': %w", path, err)
1392			}
1393			defer f.Close()
1394			files[status] = f
1395
1396			out = append(out, path)
1397		}
1398
1399		for _, testName := range list.Tests {
1400			if r, found := results.Tests[testName]; found {
1401				fmt.Fprintln(files[r.Status], testName)
1402			}
1403		}
1404	}
1405
1406	return out, nil
1407}
1408
1409// resultsCachePath returns the path to the cache results file for the given
1410// test, testlists and deqpBuild.
1411func (t *test) resultsCachePath(testLists testlist.Lists, d deqpBuild) string {
1412	return filepath.Join(t.resDir, testLists.Hash(), d.hash)
1413}
1414
// testStatusAndError is a (status, error message) pair identifying one
// distinct way a test can fail. Used as the grouping key by commonFailures().
type testStatusAndError struct {
	status testlist.Status
	error  string
}

// commonFailure describes a group of tests that failed with the same status
// and error message.
type commonFailure struct {
	count int // number of tests that failed this way
	testStatusAndError
	exampleTest string // name of one test exhibiting this failure
}
1425
1426func commonFailures(results *deqp.Results) []commonFailure {
1427	failures := map[testStatusAndError]int{}
1428	examples := map[testStatusAndError]string{}
1429	for name, test := range results.Tests {
1430		if !test.Status.Failing() {
1431			continue
1432		}
1433		key := testStatusAndError{test.Status, test.Err}
1434		if count, ok := failures[key]; ok {
1435			failures[key] = count + 1
1436		} else {
1437			failures[key] = 1
1438			examples[key] = name
1439		}
1440	}
1441	out := make([]commonFailure, 0, len(failures))
1442	for failure, count := range failures {
1443		out = append(out, commonFailure{count, failure, examples[failure]})
1444	}
1445	sort.Slice(out, func(i, j int) bool { return out[i].count > out[j].count })
1446	return out
1447}
1448
// compare returns a string describing all differences between the old and
// new deqp.Results, along with a boolean indicating whether any of the
// differences are considered important enough to alert on.
// The string is used as the report message posted to the gerrit code review.
func compare(old, new *deqp.Results) (msg string, alert bool) {
	// If either run failed outright, report that error instead of a diff.
	// Only a failure of the new run raises the alert.
	if old.Error != "" {
		return old.Error, false
	}
	if new.Error != "" {
		return new.Error, true
	}

	oldStatusCounts, newStatusCounts := map[testlist.Status]int{}, map[testlist.Status]int{}
	totalTests := 0

	// Buckets of test names, classified by how their status changed.
	broken, fixed, failing, removed, changed := []string{}, []string{}, []string{}, []string{}, []string{}

	// Classify every test present in the new results against the old.
	for test, new := range new.Tests {
		old, found := old.Tests[test]
		if !found {
			log.Printf("Test result for '%s' not found on old change\n", test)
			continue
		}
		switch {
		case !old.Status.Failing() && new.Status.Failing():
			broken = append(broken, test)
			alert = true
		case !old.Status.Passing() && new.Status.Passing():
			fixed = append(fixed, test)
		case old.Status != new.Status:
			changed = append(changed, test)
			alert = true
		case old.Status.Failing() && new.Status.Failing():
			// NOTE(review): 'failing' (still-broken tests) raises the alert
			// but is never printed in the report sections below.
			failing = append(failing, test) // Still broken
			alert = true
		}
		totalTests++
		// NOTE(review): 'found' is always true at this point (the !found
		// case continues above), so this condition is redundant.
		if found {
			oldStatusCounts[old.Status] = oldStatusCounts[old.Status] + 1
		}
		newStatusCounts[new.Status] = newStatusCounts[new.Status] + 1
	}

	// Tests present in the old results but absent from the new.
	for test := range old.Tests {
		if _, found := new.Tests[test]; !found {
			removed = append(removed, test)
		}
	}

	sb := strings.Builder{}

	// list prints the list l to sb, truncating after a limit.
	// Each entry is annotated with its (old ->) new status, followed by any
	// non-empty error message lines from the new results.
	list := func(l []string) {
		const max = 10
		for i, s := range l {
			sb.WriteString("  ")
			if i == max {
				sb.WriteString(fmt.Sprintf("> %d more\n", len(l)-i))
				break
			}
			sb.WriteString(fmt.Sprintf("> %s", s))
			if n, ok := new.Tests[s]; ok {
				if o, ok := old.Tests[s]; ok && n != o {
					sb.WriteString(fmt.Sprintf(" - [%s -> %s]", o.Status, n.Status))
				} else {
					sb.WriteString(fmt.Sprintf(" - [%s]", n.Status))
				}
				sb.WriteString("\n")
				for _, line := range strings.Split(n.Err, "\n") {
					if line != "" {
						sb.WriteString(fmt.Sprintf("     %v\n", line))
					}
				}
			} else {
				sb.WriteString("\n")
			}
		}
	}

	// Emit one report section per non-empty bucket, sorted alphabetically.
	if n := len(broken); n > 0 {
		sort.Strings(broken)
		sb.WriteString(fmt.Sprintf("\n--- This change breaks %d tests: ---\n", n))
		list(broken)
	}
	if n := len(fixed); n > 0 {
		sort.Strings(fixed)
		sb.WriteString(fmt.Sprintf("\n--- This change fixes %d tests: ---\n", n))
		list(fixed)
	}
	if n := len(removed); n > 0 {
		sort.Strings(removed)
		sb.WriteString(fmt.Sprintf("\n--- This change removes %d tests: ---\n", n))
		list(removed)
	}
	if n := len(changed); n > 0 {
		sort.Strings(changed)
		sb.WriteString(fmt.Sprintf("\n--- This change alters %d tests: ---\n", n))
		list(changed)
	}

	if len(broken) == 0 && len(fixed) == 0 && len(removed) == 0 && len(changed) == 0 {
		sb.WriteString(fmt.Sprintf("\n--- No change in test results ---\n"))
	}

	// Per-status count table: old vs new, with absolute and percentage
	// deltas where the counts differ.
	sb.WriteString(fmt.Sprintf("          Total tests: %d\n", totalTests))
	for _, s := range []struct {
		label  string
		status testlist.Status
	}{
		{"                 Pass", testlist.Pass},
		{"                 Fail", testlist.Fail},
		{"              Timeout", testlist.Timeout},
		{"      UNIMPLEMENTED()", testlist.Unimplemented},
		{"        UNSUPPORTED()", testlist.Unsupported},
		{"        UNREACHABLE()", testlist.Unreachable},
		{"             ASSERT()", testlist.Assert},
		{"              ABORT()", testlist.Abort},
		{"                Crash", testlist.Crash},
		{"        Not Supported", testlist.NotSupported},
		{"Compatibility Warning", testlist.CompatibilityWarning},
		{"      Quality Warning", testlist.QualityWarning},
	} {
		old, new := oldStatusCounts[s.status], newStatusCounts[s.status]
		if old == 0 && new == 0 {
			continue
		}
		change := util.Percent64(int64(new-old), int64(old))
		switch {
		case old == new:
			sb.WriteString(fmt.Sprintf("%s: %v\n", s.label, new))
		case change == 0:
			sb.WriteString(fmt.Sprintf("%s: %v -> %v (%+d)\n", s.label, old, new, new-old))
		default:
			sb.WriteString(fmt.Sprintf("%s: %v -> %v (%+d %+d%%)\n", s.label, old, new, new-old, change))
		}
	}

	// Overall run duration change, if both runs recorded one.
	if old, new := old.Duration, new.Duration; old != 0 && new != 0 {
		label := "           Time taken"
		change := util.Percent64(int64(new-old), int64(old))
		switch {
		case old == new:
			sb.WriteString(fmt.Sprintf("%s: %v\n", label, new))
		case change == 0:
			sb.WriteString(fmt.Sprintf("%s: %v -> %v\n", label, old, new))
		default:
			sb.WriteString(fmt.Sprintf("%s: %v -> %v (%+d%%)\n", label, old, new, change))
		}
	}

	type timingDiff struct {
		old      time.Duration
		new      time.Duration
		relDelta float64
		name     string
	}

	// Collect per-test duration changes that are both large in absolute
	// terms and significant relative to the old duration.
	timingDiffs := []timingDiff{}
	for name, new := range new.Tests {
		if old, ok := old.Tests[name]; ok {
			old, new := old.TimeTaken, new.TimeTaken
			delta := new.Seconds() - old.Seconds()
			absDelta := math.Abs(delta)
			relDelta := delta / old.Seconds()
			if absDelta > 2.0 && math.Abs(relDelta) > 0.05 { // If change > ±2s and > than ±5% old time...
				timingDiffs = append(timingDiffs, timingDiff{
					old:      old,
					new:      new,
					name:     name,
					relDelta: relDelta,
				})
			}
		}
	}
	if len(timingDiffs) > 0 {
		sb.WriteString(fmt.Sprintf("\n--- Test duration changes ---\n"))
		const limit = 10
		// Keep only the 'limit' largest relative changes, then present them
		// ordered from biggest speed-up to biggest slow-down.
		if len(timingDiffs) > limit {
			sort.Slice(timingDiffs, func(i, j int) bool { return math.Abs(timingDiffs[i].relDelta) > math.Abs(timingDiffs[j].relDelta) })
			timingDiffs = timingDiffs[:limit]
		}
		sort.Slice(timingDiffs, func(i, j int) bool { return timingDiffs[i].relDelta < timingDiffs[j].relDelta })
		for _, d := range timingDiffs {
			percent := util.Percent64(int64(d.new-d.old), int64(d.old))
			sb.WriteString(fmt.Sprintf("  > %v: %v -> %v (%+d%%)\n", d.name, d.old, d.new, percent))
		}
	}

	return sb.String(), alert
}
1639
1640// loadTestLists loads the full test lists from the json file.
1641// The file is first searched at {t.srcDir}/{relPath}
1642// If this cannot be found, then the file is searched at the fallback path
1643// {CWD}/{relPath}
1644// This allows CLs to alter the list of tests to be run, as well as providing
1645// a default set.
1646func (t *test) loadTestLists(relPath string) (testlist.Lists, error) {
1647	// Seach for the test.json file in the checked out source directory.
1648	if path := filepath.Join(t.checkoutDir, relPath); util.IsFile(path) {
1649		log.Printf("Loading test list '%v' from commit\n", relPath)
1650		return testlist.Load(t.checkoutDir, path)
1651	}
1652
1653	// Not found there. Search locally.
1654	wd, err := os.Getwd()
1655	if err != nil {
1656		return testlist.Lists{}, fmt.Errorf("failed to get current working directory: %w", err)
1657	}
1658	if path := filepath.Join(wd, relPath); util.IsFile(path) {
1659		log.Printf("Loading test list '%v' from regres\n", relPath)
1660		return testlist.Load(wd, relPath)
1661	}
1662
1663	return nil, errors.New("Couldn't find a test list file")
1664}
1665
1666type date struct {
1667	year  int
1668	month time.Month
1669	day   int
1670}
1671
1672func toDate(t time.Time) date {
1673	d := date{}
1674	d.year, d.month, d.day = t.Date()
1675	return d
1676}
1677