1// Copyright 2016 The Chromium Authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5package gen_tasks_logic
6
7/*
8	Generate the tasks.json file.
9*/
10
11import (
12	"encoding/json"
13	"fmt"
14	"io/ioutil"
15	"log"
16	"path"
17	"path/filepath"
18	"regexp"
19	"runtime"
20	"sort"
21	"strconv"
22	"strings"
23	"time"
24
25	"go.skia.org/infra/go/cas/rbe"
26	"go.skia.org/infra/go/cipd"
27	"go.skia.org/infra/task_scheduler/go/specs"
28	"go.skia.org/skia/bazel/device_specific_configs"
29)
30
31const (
32	CAS_BAZEL         = "bazel"
33	CAS_CANVASKIT     = "canvaskit"
34	CAS_COMPILE       = "compile"
35	CAS_EMPTY         = "empty" // TODO(borenet): It'd be nice if this wasn't necessary.
36	CAS_LOTTIE_CI     = "lottie-ci"
37	CAS_LOTTIE_WEB    = "lottie-web"
38	CAS_PATHKIT       = "pathkit"
39	CAS_PERF          = "perf"
40	CAS_PUPPETEER     = "puppeteer"
41	CAS_RUN_RECIPE    = "run-recipe"
42	CAS_RECIPES       = "recipes"
43	CAS_RECREATE_SKPS = "recreate-skps"
44	CAS_SKOTTIE_WASM  = "skottie-wasm"
45	CAS_TASK_DRIVERS  = "task-drivers"
46	CAS_TEST          = "test"
47	CAS_WASM_GM       = "wasm-gm"
48	CAS_WHOLE_REPO    = "whole-repo"
49
50	BUILD_TASK_DRIVERS_PREFIX  = "Housekeeper-PerCommit-BuildTaskDrivers"
51	BUNDLE_RECIPES_NAME        = "Housekeeper-PerCommit-BundleRecipes"
52	ISOLATE_GCLOUD_LINUX_NAME  = "Housekeeper-PerCommit-IsolateGCloudLinux"
53	ISOLATE_SKIMAGE_NAME       = "Housekeeper-PerCommit-IsolateSkImage"
54	ISOLATE_SKP_NAME           = "Housekeeper-PerCommit-IsolateSKP"
55	ISOLATE_MSKP_NAME          = "Housekeeper-PerCommit-IsolateMSKP"
56	ISOLATE_SVG_NAME           = "Housekeeper-PerCommit-IsolateSVG"
57	ISOLATE_NDK_LINUX_NAME     = "Housekeeper-PerCommit-IsolateAndroidNDKLinux"
58	ISOLATE_SDK_LINUX_NAME     = "Housekeeper-PerCommit-IsolateAndroidSDKLinux"
59	ISOLATE_WIN_TOOLCHAIN_NAME = "Housekeeper-PerCommit-IsolateWinToolchain"
60
61	DEBIAN_11_OS                   = "Debian-11.5"
62	DEFAULT_OS_DEBIAN              = "Debian-10.10"
63	DEFAULT_OS_LINUX_GCE           = "Debian-10.3"
64	OLD_OS_LINUX_GCE               = "Debian-9.8"
65	COMPILE_TASK_NAME_OS_LINUX     = "Debian10"
66	COMPILE_TASK_NAME_OS_LINUX_OLD = "Debian9"
67	DEFAULT_OS_MAC                 = "Mac-10.15.7"
68	DEFAULT_OS_WIN                 = "Windows-Server-17763"
69
70	// Small is a 2-core machine.
71	// TODO(dogben): Would n1-standard-1 or n1-standard-2 be sufficient?
72	MACHINE_TYPE_SMALL = "n1-highmem-2"
73	// Medium is a 16-core machine.
74	MACHINE_TYPE_MEDIUM = "n1-standard-16"
75	// Large is a 64-core machine. (We use "highcpu" because we don't need more than 57GB memory for
76	// any of our tasks.)
77	MACHINE_TYPE_LARGE = "n1-highcpu-64"
78
79	// Swarming output dirs.
80	OUTPUT_NONE          = "output_ignored" // This will result in outputs not being isolated.
81	OUTPUT_BUILD         = "build"
82	OUTPUT_BUILD_NOPATCH = "build_nopatch"
83	OUTPUT_TEST          = "test"
84	OUTPUT_PERF          = "perf"
85	OUTPUT_BAZEL         = "bazel_output"
86
87	// Name prefix for upload jobs.
88	PREFIX_UPLOAD = "Upload"
89
90	// This will have to be kept in sync with the kMin_Version in
91	// src/core/SkPicturePriv.h
92	// See the comment in that file on how to find the version to use here.
93	oldestSupportedSkpVersion = 293
94
95	// bazelCacheDirOnGCELinux is the path where Bazel should write its cache on Linux GCE machines.
96	// The Bazel cache can grow large (>10GB), so this should be in a partition with enough free
97	// space. On Linux GCE machines, the partition mounted at /mnt/pd0 is significantly larger than
98	// the partition mounted at /.
99	bazelCacheDirOnGCELinux = "/mnt/pd0/bazel_cache"
100
101	// bazelCacheDirOnSkoloLinux is like bazelCacheDirOnGCELinux for Skolo Linux machines. Unlike GCE
102	// Linux machines, the partition mounted at / on Skolo Linux machines is large enough. While
103	// using the default Bazel cache path would work, our Bazel task drivers demand an explicit path.
104	// We store the Bazel cache at /home/chrome-bot/bazel_cache rather than in the default location
105	// of /home/chrome-bot/cache/.bazel to make it obvious to someone examining a Skolo machine that
106	// we are overriding the default location.
107	bazelCacheDirOnSkoloLinux = "/home/chrome-bot/bazel_cache"
108)
109
110var (
111	// "Constants"
112
113	// Named caches used by tasks.
114	CACHES_GIT = []*specs.Cache{
115		{
116			Name: "git",
117			Path: "cache/git",
118		},
119		{
120			Name: "git_cache",
121			Path: "cache/git_cache",
122		},
123	}
124	CACHES_GO = []*specs.Cache{
125		{
126			Name: "go_cache",
127			Path: "cache/go_cache",
128		},
129		{
130			Name: "gopath",
131			Path: "cache/gopath",
132		},
133	}
134	CACHES_WORKDIR = []*specs.Cache{
135		{
136			Name: "work",
137			Path: "cache/work",
138		},
139	}
140	CACHES_CCACHE = []*specs.Cache{
141		{
142			Name: "ccache",
143			Path: "cache/ccache",
144		},
145	}
146	// The "docker" cache is used as a persistent working directory for
147	// tasks which use Docker. It is not to be confused with Docker's own
148	// cache, which stores images. We do not currently use a named Swarming
149	// cache for the latter.
150	// TODO(borenet): We should ensure that any task which uses Docker does
151	// not also use the normal "work" cache, to prevent issues like
152	// https://bugs.chromium.org/p/skia/issues/detail?id=9749.
153	CACHES_DOCKER = []*specs.Cache{
154		{
155			Name: "docker",
156			Path: "cache/docker",
157		},
158	}
159
160	// CAS_SPEC_LOTTIE_CI is a CasSpec which includes the files needed for
161	// lottie-ci.  This is global so that it can be overridden by other
162	// repositories which import this file.
163	CAS_SPEC_LOTTIE_CI = &specs.CasSpec{
164		Root: "..",
165		Paths: []string{
166			"skia/.vpython3",
167			"skia/infra/bots/run_recipe.py",
168			"skia/infra/lottiecap",
169			"skia/tools/lottie-web-perf",
170			"skia/tools/lottiecap",
171		},
172		Excludes: []string{rbe.ExcludeGitDir},
173	}
174
175	// CAS_SPEC_WHOLE_REPO is a CasSpec which includes the entire repo. This is
176	// global so that it can be overridden by other repositories which import
177	// this file.
178	CAS_SPEC_WHOLE_REPO = &specs.CasSpec{
179		Root:     "..",
180		Paths:    []string{"skia"},
181		Excludes: []string{rbe.ExcludeGitDir},
182	}
183
184	// TODO(borenet): This is hacky and bad.
185	CIPD_PKG_LUCI_AUTH = cipd.MustGetPackage("infra/tools/luci-auth/${platform}")
186
187	CIPD_PKGS_GOLDCTL = cipd.MustGetPackage("skia/tools/goldctl/${platform}")
188
189	CIPD_PKGS_XCODE = []*specs.CipdPackage{
190		// https://chromium.googlesource.com/chromium/tools/build/+/e19b7d9390e2bb438b566515b141ed2b9ed2c7c2/scripts/slave/recipe_modules/ios/api.py#317
191		// This package is really just an installer for XCode.
192		{
193			Name: "infra/tools/mac_toolchain/${platform}",
194			Path: "mac_toolchain",
195			// When this is updated, also update
196			// https://skia.googlesource.com/skcms.git/+/f1e2b45d18facbae2dece3aca673fe1603077846/infra/bots/gen_tasks.go#56
197			Version: "git_revision:796d2b92cff93fc2059623ce0a66284373ceea0a",
198		},
199	}
200
201	// These properties are required by some tasks, e.g. for running
202	// bot_update, but they prevent de-duplication, so they should only be
203	// used where necessary.
204	EXTRA_PROPS = map[string]string{
205		"buildbucket_build_id": specs.PLACEHOLDER_BUILDBUCKET_BUILD_ID,
206		"patch_issue":          specs.PLACEHOLDER_ISSUE_INT,
207		"patch_ref":            specs.PLACEHOLDER_PATCH_REF,
208		"patch_repo":           specs.PLACEHOLDER_PATCH_REPO,
209		"patch_set":            specs.PLACEHOLDER_PATCHSET_INT,
210		"patch_storage":        specs.PLACEHOLDER_PATCH_STORAGE,
211		"repository":           specs.PLACEHOLDER_REPO,
212		"revision":             specs.PLACEHOLDER_REVISION,
213		"task_id":              specs.PLACEHOLDER_TASK_ID,
214	}
215
216	// ISOLATE_ASSET_MAPPING maps the name of an asset to the configuration
217	// for how the CIPD package should be installed for a given task.
218	ISOLATE_ASSET_MAPPING = map[string]uploadAssetCASCfg{
219		"gcloud_linux": {
220			uploadTaskName: ISOLATE_GCLOUD_LINUX_NAME,
221			path:           "gcloud_linux",
222		},
223		"skimage": {
224			uploadTaskName: ISOLATE_SKIMAGE_NAME,
225			path:           "skimage",
226		},
227		"skp": {
228			uploadTaskName: ISOLATE_SKP_NAME,
229			path:           "skp",
230		},
231		"svg": {
232			uploadTaskName: ISOLATE_SVG_NAME,
233			path:           "svg",
234		},
235		"mskp": {
236			uploadTaskName: ISOLATE_MSKP_NAME,
237			path:           "mskp",
238		},
239		"android_ndk_linux": {
240			uploadTaskName: ISOLATE_NDK_LINUX_NAME,
241			path:           "android_ndk_linux",
242		},
243		"android_sdk_linux": {
244			uploadTaskName: ISOLATE_SDK_LINUX_NAME,
245			path:           "android_sdk_linux",
246		},
247		"win_toolchain": {
248			alwaysIsolate:  true,
249			uploadTaskName: ISOLATE_WIN_TOOLCHAIN_NAME,
250			path:           "win_toolchain",
251		},
252	}
253
254	// Set the dontReduceOpsTaskSplitting option on these models.
255	DONT_REDUCE_OPS_TASK_SPLITTING_MODELS = []string{
256		"NUC5PPYH",
257	}
258)
259
260// Config contains general configuration information.
261type Config struct {
262	// Directory containing assets. Assumed to be relative to the directory
263	// which contains the calling gen_tasks.go file. If not specified, uses
264	// the infra/bots/assets directory from this repo.
265	AssetsDir string `json:"assets_dir"`
266
267	// Path to the builder name schema JSON file. Assumed to be relative to
268	// the directory which contains the calling gen_tasks.go file. If not
269	// specified, uses infra/bots/recipe_modules/builder_name_schema/builder_name_schema.json
270	// from this repo.
271	BuilderNameSchemaFile string `json:"builder_name_schema"`
272
273	// URL of the Skia Gold known hashes endpoint.
274	GoldHashesURL string `json:"gold_hashes_url"`
275
276	// GCS bucket used for GM results.
277	GsBucketGm string `json:"gs_bucket_gm"`
278
279	// GCS bucket used for Nanobench results.
280	GsBucketNano string `json:"gs_bucket_nano"`
281
282	// Optional function which returns a bot ID for internal devices.
283	InternalHardwareLabel func(parts map[string]string) *int `json:"-"`
284
285	// List of task names for which we'll never upload results.
286	NoUpload []string `json:"no_upload"`
287
288	// PathToSkia is the relative path from the root of the current checkout to
289	// the root of the Skia checkout.
290	PathToSkia string `json:"path_to_skia"`
291
292	// Swarming pool used for triggering tasks.
293	Pool string `json:"pool"`
294
295	// LUCI project associated with this repo.
296	Project string `json:"project"`
297
298	// Service accounts.
299	ServiceAccountCanary       string `json:"service_account_canary"`
300	ServiceAccountCompile      string `json:"service_account_compile"`
301	ServiceAccountHousekeeper  string `json:"service_account_housekeeper"`
302	ServiceAccountRecreateSKPs string `json:"service_account_recreate_skps"`
303	ServiceAccountUploadBinary string `json:"service_account_upload_binary"`
304	ServiceAccountUploadGM     string `json:"service_account_upload_gm"`
305	ServiceAccountUploadNano   string `json:"service_account_upload_nano"`
306
307	// Optional override function which derives Swarming bot dimensions
308	// from parts of task names.
309	SwarmDimensions func(parts map[string]string) []string `json:"-"`
310}
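// A minimal sketch of a cfg.json file that LoadConfig (below) could parse into this
// struct. The field names come from the JSON tags above; all values are illustrative
// assumptions, not taken from any real repository:
//
//	{
//	  "assets_dir": "infra/bots/assets",
//	  "gold_hashes_url": "https://example-gold.skia.org/json/v1/hashes",
//	  "gs_bucket_gm": "example-bucket-gm",
//	  "gs_bucket_nano": "example-bucket-nano",
//	  "no_upload": ["^Example-.*"],
//	  "path_to_skia": ".",
//	  "pool": "Skia",
//	  "project": "skia",
//	  "service_account_compile": "example-compile@example.iam.gserviceaccount.com"
//	}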
311
312// JobInfo is the type of each entry in the jobs.json file.
313type JobInfo struct {
314	// The name of the job.
315	Name string `json:"name"`
316
317	// The optional CQ config of this job. If the CQ config is missing then the
318	// job will not be added to the CQ of this branch.
319	CQConfig *specs.CommitQueueJobConfig `json:"cq_config,omitempty"`
320}
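// A hypothetical jobs.json snippet shaped like JobInfo. The job names and CQ settings
// here are illustrative only:
//
//	[
//	  {"name": "Build-Debian10-Clang-x86_64-Release"},
//	  {"name": "Housekeeper-PerCommit-ExampleJob", "cq_config": {}}
//	]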
321
322// LoadConfig loads the Config from a cfg.json file which is the sibling of the
323// calling gen_tasks.go file.
324func LoadConfig() *Config {
325	cfgDir := getCallingDirName()
326	var cfg Config
327	LoadJson(filepath.Join(cfgDir, "cfg.json"), &cfg)
328	return &cfg
329}
330
331// CheckoutRoot is a wrapper around specs.GetCheckoutRoot which prevents the
332// caller from needing a dependency on the specs package.
333func CheckoutRoot() string {
334	root, err := specs.GetCheckoutRoot()
335	if err != nil {
336		log.Fatal(err)
337	}
338	return root
339}
340
341// LoadJson loads JSON from the given file and unmarshals it into the given
342// destination.
343func LoadJson(filename string, dest interface{}) {
344	b, err := ioutil.ReadFile(filename)
345	if err != nil {
346		log.Fatalf("Unable to read %q: %s", filename, err)
347	}
348	if err := json.Unmarshal(b, dest); err != nil {
349		log.Fatalf("Unable to parse %q: %s", filename, err)
350	}
351}
352
353// In returns true if |s| is in the slice |a|.
354// TODO(borenet): This is copied from go.skia.org/infra/go/util to avoid the
355// huge set of additional dependencies added by that package.
356func In(s string, a []string) bool {
357	for _, x := range a {
358		if x == s {
359			return true
360		}
361	}
362	return false
363}
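// For example:
//
//	In("Android", []string{"Android", "Debian"}) // true
//	In("Win", []string{"Android", "Debian"})     // false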
364
365// GenTasks regenerates the tasks.json file. Loads the job list from a jobs.json
366// file which is the sibling of the calling gen_tasks.go file. If cfg is nil, it
367// is similarly loaded from a cfg.json file which is the sibling of the calling
368// gen_tasks.go file.
369func GenTasks(cfg *Config) {
370	b := specs.MustNewTasksCfgBuilder()
371
372	// Find the paths to the infra/bots directories in this repo and the
373	// repo of the calling file.
374	relpathTargetDir := getThisDirName()
375	relpathBaseDir := getCallingDirName()
376
377	// Parse jobs.json.
378	var jobsWithInfo []*JobInfo
379	LoadJson(filepath.Join(relpathBaseDir, "jobs.json"), &jobsWithInfo)
380	// Create a slice with only job names.
381	jobs := []string{}
382	for _, j := range jobsWithInfo {
383		jobs = append(jobs, j.Name)
384	}
385
386	if cfg == nil {
387		cfg = new(Config)
388		LoadJson(filepath.Join(relpathBaseDir, "cfg.json"), cfg)
389	}
390
391	// Create the JobNameSchema.
392	builderNameSchemaFile := filepath.Join(relpathTargetDir, "recipe_modules", "builder_name_schema", "builder_name_schema.json")
393	if cfg.BuilderNameSchemaFile != "" {
394		builderNameSchemaFile = filepath.Join(relpathBaseDir, cfg.BuilderNameSchemaFile)
395	}
396	schema, err := NewJobNameSchema(builderNameSchemaFile)
397	if err != nil {
398		log.Fatal(err)
399	}
400
401	// Set the assets dir.
402	assetsDir := filepath.Join(relpathTargetDir, "assets")
403	if cfg.AssetsDir != "" {
404		assetsDir = filepath.Join(relpathBaseDir, cfg.AssetsDir)
405	}
406	b.SetAssetsDir(assetsDir)
407
408	// Create Tasks and Jobs.
409	builder := &builder{
410		TasksCfgBuilder: b,
411		cfg:             cfg,
412		jobNameSchema:   schema,
413		jobs:            jobs,
414	}
415	for _, j := range jobsWithInfo {
416		jb := newJobBuilder(builder, j.Name)
417		jb.genTasksForJob()
418		jb.finish()
419
420		// Add the CQ spec if it is a CQ job.
421		if j.CQConfig != nil {
422			b.MustAddCQJob(j.Name, j.CQConfig)
423		}
424	}
425
426	// Create CasSpecs.
427	b.MustAddCasSpec(CAS_BAZEL, &specs.CasSpec{
428		Root: "..",
429		Paths: []string{
430			// Source code.
431			"skia/example",
432			"skia/experimental/bazel_test",
433			"skia/include",
434			"skia/modules",
435			"skia/src",
436			"skia/tests",
437			"skia/third_party",
438			"skia/tools",
439			// Needed for tests.
440			"skia/bench", // Needed to run benchmark tests with Bazel.
441			"skia/gm",    // Needed to run GMs with Bazel.
442			"skia/gn",    // Some Python scripts still live here.
443			"skia/resources",
444			"skia/package.json",
445			"skia/package-lock.json",
446			"skia/DEPS",   // Needed to check generation.
447			"skia/infra",  // Many Go tests and Bazel tools live here.
448			"skia/go.mod", // Needed by Gazelle.
449			"skia/go.sum", // Needed by Gazelle.
450			// Needed to run Bazel.
451			"skia/.bazelignore",
452			"skia/.bazelrc",
453			"skia/.bazelversion",
454			"skia/BUILD.bazel",
455			"skia/LICENSE", // Referred to by default_applicable_licenses
456			"skia/WORKSPACE.bazel",
457			"skia/bazel",
458			"skia/defines.bzl",
459			"skia/go_repositories.bzl",
460			"skia/requirements.txt",
461			"skia/toolchain",
462		},
463		Excludes: []string{
464			rbe.ExcludeGitDir,
465			"skia/third_party/externals",
466		},
467	})
468	b.MustAddCasSpec(CAS_CANVASKIT, &specs.CasSpec{
469		Root: "..",
470		Paths: []string{
471			"skia/.vpython3",
472			"skia/infra/bots/run_recipe.py",
473			"skia/infra/canvaskit",
474			"skia/modules/canvaskit",
475			"skia/modules/pathkit/perf/perfReporter.js",
476			"skia/modules/pathkit/tests/testReporter.js",
477		},
478		Excludes: []string{rbe.ExcludeGitDir},
479	})
480	b.MustAddCasSpec(CAS_EMPTY, specs.EmptyCasSpec)
481	b.MustAddCasSpec(CAS_LOTTIE_CI, CAS_SPEC_LOTTIE_CI)
482	b.MustAddCasSpec(CAS_LOTTIE_WEB, &specs.CasSpec{
483		Root: "..",
484		Paths: []string{
485			"skia/.vpython3",
486			"skia/infra/bots/run_recipe.py",
487			"skia/tools/lottie-web-perf",
488		},
489		Excludes: []string{rbe.ExcludeGitDir},
490	})
491	b.MustAddCasSpec(CAS_PATHKIT, &specs.CasSpec{
492		Root: "..",
493		Paths: []string{
494			"skia/.vpython3",
495			"skia/infra/bots/run_recipe.py",
496			"skia/infra/pathkit",
497			"skia/modules/pathkit",
498		},
499		Excludes: []string{rbe.ExcludeGitDir},
500	})
501	b.MustAddCasSpec(CAS_PERF, &specs.CasSpec{
502		Root: "..",
503		Paths: []string{
504			"skia/.vpython3",
505			"skia/infra/bots/assets",
506			"skia/infra/bots/run_recipe.py",
507			"skia/platform_tools/ios/bin",
508			"skia/resources",
509			"skia/tools/valgrind.supp",
510		},
511		Excludes: []string{rbe.ExcludeGitDir},
512	})
513	b.MustAddCasSpec(CAS_PUPPETEER, &specs.CasSpec{
514		Root: "../skia", // Needed for other repos.
515		Paths: []string{
516			".vpython3",
517			"tools/perf-canvaskit-puppeteer",
518		},
519		Excludes: []string{rbe.ExcludeGitDir},
520	})
521	b.MustAddCasSpec(CAS_RECIPES, &specs.CasSpec{
522		Root: "..",
523		Paths: []string{
524			"skia/.vpython3",
525			"skia/infra/config/recipes.cfg",
526			"skia/infra/bots/bundle_recipes.sh",
527			"skia/infra/bots/README.recipes.md",
528			"skia/infra/bots/recipe_modules",
529			"skia/infra/bots/recipes",
530			"skia/infra/bots/recipes.py",
531		},
532		Excludes: []string{rbe.ExcludeGitDir},
533	})
534	b.MustAddCasSpec(CAS_RUN_RECIPE, &specs.CasSpec{
535		Root: "..",
536		Paths: []string{
537			"skia/.vpython3",
538			"skia/infra/bots/run_recipe.py",
539		},
540		Excludes: []string{rbe.ExcludeGitDir},
541	})
542	b.MustAddCasSpec(CAS_SKOTTIE_WASM, &specs.CasSpec{
543		Root: "..",
544		Paths: []string{
545			"skia/.vpython3",
546			"skia/infra/bots/run_recipe.py",
547			"skia/tools/skottie-wasm-perf",
548		},
549		Excludes: []string{rbe.ExcludeGitDir},
550	})
551	b.MustAddCasSpec(CAS_TASK_DRIVERS, &specs.CasSpec{
552		Root: "..",
553		Paths: []string{
554			// Deps needed to use Bazel
555			"skia/.bazelrc",
556			"skia/.bazelversion",
557			"skia/BUILD.bazel",
558			"skia/LICENSE",
559			"skia/WORKSPACE.bazel",
560			"skia/bazel",
561			"skia/defines.bzl",
562			"skia/go_repositories.bzl",
563			"skia/include/config", // There's a WORKSPACE.bazel in here
564			"skia/requirements.txt",
565			"skia/toolchain",
566			// Actually needed to build the task drivers
567			"skia/infra/bots/BUILD.bazel",
568			"skia/infra/bots/build_task_drivers.sh",
569			"skia/infra/bots/task_drivers",
570		},
571		Excludes: []string{rbe.ExcludeGitDir},
572	})
573	b.MustAddCasSpec(CAS_TEST, &specs.CasSpec{
574		Root: "..",
575		Paths: []string{
576			"skia/.vpython3",
577			"skia/infra/bots/assets",
578			"skia/infra/bots/run_recipe.py",
579			"skia/platform_tools/ios/bin",
580			"skia/resources",
581			"skia/tools/valgrind.supp",
582		},
583		Excludes: []string{rbe.ExcludeGitDir},
584	})
585	b.MustAddCasSpec(CAS_WASM_GM, &specs.CasSpec{
586		Root: "../skia", // Needed for other repos.
587		Paths: []string{
588			".vpython3",
589			"resources",
590			"tools/run-wasm-gm-tests",
591		},
592		Excludes: []string{rbe.ExcludeGitDir},
593	})
594	b.MustAddCasSpec(CAS_WHOLE_REPO, CAS_SPEC_WHOLE_REPO)
595	b.MustAddCasSpec(CAS_RECREATE_SKPS, &specs.CasSpec{
596		Root: "..",
597		Paths: []string{
598			"skia/.vpython3",
599			"skia/DEPS",
600			"skia/bin/fetch-sk",
601			"skia/infra/bots/assets/skp",
602			"skia/infra/bots/utils.py",
603			"skia/tools/skp",
604		},
605		Excludes: []string{rbe.ExcludeGitDir},
606	})
607	generateCompileCAS(b, cfg)
608
609	builder.MustFinish()
610}
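// A minimal sketch of how a repository's gen_tasks.go entry point might call into this
// package; the import path is assumed from this file's location, and real callers may
// pass a non-nil Config instead of relying on cfg.json:
//
//	package main
//
//	import "go.skia.org/skia/infra/bots/gen_tasks_logic"
//
//	func main() {
//		// With a nil Config, GenTasks loads cfg.json from the caller's directory.
//		gen_tasks_logic.GenTasks(nil)
//	}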
611
612// getThisDirName returns the infra/bots directory which is an ancestor of this
613// file.
614func getThisDirName() string {
615	_, thisFileName, _, ok := runtime.Caller(0)
616	if !ok {
617		log.Fatal("Unable to find path to current file.")
618	}
619	return filepath.Dir(filepath.Dir(thisFileName))
620}
621
622// getCallingDirName returns the infra/bots directory which is an ancestor of
623// the calling gen_tasks.go file. WARNING: assumes that the calling gen_tasks.go
624// file appears two steps up the stack; do not call from a function which is not
625// directly called by gen_tasks.go.
626func getCallingDirName() string {
627	_, callingFileName, _, ok := runtime.Caller(2)
628	if !ok {
629		log.Fatal("Unable to find path to calling file.")
630	}
631	return filepath.Dir(callingFileName)
632}
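// The runtime.Caller(2) call above encodes the expected call stack: skip 0 is
// getCallingDirName itself, skip 1 is the exported function in this package (e.g.
// LoadConfig or GenTasks), and skip 2 is the calling gen_tasks.go, whose directory is
// what we want.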
633
634// builder is a wrapper for specs.TasksCfgBuilder.
635type builder struct {
636	*specs.TasksCfgBuilder
637	cfg           *Config
638	jobNameSchema *JobNameSchema
639	jobs          []string
640}
641
642// marshalJson encodes the given data as JSON and undoes the escaping of '<' that Go
643// performs by default.
644func marshalJson(data interface{}) string {
645	j, err := json.Marshal(data)
646	if err != nil {
647		log.Fatal(err)
648	}
649	return strings.Replace(string(j), "\\u003c", "<", -1)
650}
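// For example, json.Marshal escapes '<' as "\u003c" by default, so:
//
//	marshalJson(map[string]string{"tag": "<br>"})
//	// returns {"tag":"<br>"} rather than {"tag":"\u003cbr\u003e"}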
651
652// kitchenTaskNoBundle sets up the task to run a recipe via Kitchen, without the
653// recipe bundle.
654func (b *taskBuilder) kitchenTaskNoBundle(recipe string, outputDir string) {
655	b.cipd(CIPD_PKG_LUCI_AUTH)
656	b.cipd(cipd.MustGetPackage("infra/tools/luci/kitchen/${platform}"))
657	b.env("RECIPES_USE_PY3", "true")
658	b.envPrefixes("VPYTHON_DEFAULT_SPEC", "skia/.vpython3")
659	b.usesPython()
660	b.recipeProp("swarm_out_dir", outputDir)
661	if outputDir != OUTPUT_NONE {
662		b.output(outputDir)
663	}
664	const python = "cipd_bin_packages/vpython3${EXECUTABLE_SUFFIX}"
665	b.cmd(python, "-u", "skia/infra/bots/run_recipe.py", "${ISOLATED_OUTDIR}", recipe, b.getRecipeProps(), b.cfg.Project)
666	// Most recipes want this isolate; they can override if necessary.
667	b.cas(CAS_RUN_RECIPE)
668	b.timeout(time.Hour)
669	b.addToPATH("cipd_bin_packages", "cipd_bin_packages/bin")
670	b.Spec.ExtraTags = map[string]string{
671		"log_location": fmt.Sprintf("logdog://logs.chromium.org/%s/${SWARMING_TASK_ID}/+/annotations", b.cfg.Project),
672	}
673
674	// Attempts.
675	if !b.role("Build", "Upload") && b.extraConfig("ASAN", "HWASAN", "MSAN", "TSAN", "Valgrind") {
676		// Sanitizers often find non-deterministic issues that retries would hide.
677		b.attempts(1)
678	} else {
679		// Retry by default to hide random bot/hardware failures.
680		b.attempts(2)
681	}
682}
683
684// kitchenTask sets up the task to run a recipe via Kitchen.
685func (b *taskBuilder) kitchenTask(recipe string, outputDir string) {
686	b.kitchenTaskNoBundle(recipe, outputDir)
687	b.dep(b.bundleRecipes())
688}
689
690// internalHardwareLabel returns the internal ID for the bot, if any.
691func (b *taskBuilder) internalHardwareLabel() *int {
692	if b.cfg.InternalHardwareLabel != nil {
693		return b.cfg.InternalHardwareLabel(b.parts)
694	}
695	return nil
696}
697
698// linuxGceDimensions adds the Swarming bot dimensions for Linux GCE instances.
699func (b *taskBuilder) linuxGceDimensions(machineType string) {
700	b.dimension(
701		// Specify CPU to avoid running builds on bots with a rarer CPU.
702		"cpu:x86-64-Haswell_GCE",
703		"gpu:none",
704		// The machine type (and thus the core count) is chosen by the caller.
705		fmt.Sprintf("machine_type:%s", machineType),
706		fmt.Sprintf("os:%s", DEFAULT_OS_LINUX_GCE),
707		fmt.Sprintf("pool:%s", b.cfg.Pool),
708	)
709}
710
711// codesizeTaskNameRegexp captures the "CodeSize-<binary name>-" prefix of a CodeSize task name.
712var codesizeTaskNameRegexp = regexp.MustCompile("^CodeSize-[a-zA-Z0-9_]+-")
713
714// deriveCompileTaskName returns the name of a compile task based on the given
715// job name.
716func (b *jobBuilder) deriveCompileTaskName() string {
717	if b.role("Test", "Perf") {
718		task_os := b.parts["os"]
719		ec := []string{}
720		if val := b.parts["extra_config"]; val != "" {
721			ec = strings.Split(val, "_")
722			ignore := []string{
723				"AbandonGpuContext", "PreAbandonGpuContext", "Valgrind",
724				"FailFlushTimeCallbacks", "ReleaseAndAbandonGpuContext",
725				"NativeFonts", "GDI", "NoGPUThreads", "DDL1", "DDL3",
726				"DDLRecord", "BonusConfigs", "ColorSpaces", "GL",
727				"SkottieTracing", "SkottieWASM", "GpuTess", "DMSAAStats", "Docker", "PDF",
728				"Puppeteer", "SkottieFrames", "RenderSKP", "CanvasPerf", "AllPathsVolatile",
729				"WebGL2", "i5", "OldestSupportedSkpVersion", "FakeWGPU", "Protected"}
730			keep := make([]string, 0, len(ec))
731			for _, part := range ec {
732				if !In(part, ignore) {
733					keep = append(keep, part)
734				}
735			}
736			ec = keep
737		}
738		if b.matchOs("Android") {
739			if !In("Android", ec) {
740				ec = append([]string{"Android"}, ec...)
741			}
742			task_os = COMPILE_TASK_NAME_OS_LINUX
743		} else if b.os("ChromeOS") {
744			ec = append([]string{"Chromebook", "GLES"}, ec...)
745			task_os = COMPILE_TASK_NAME_OS_LINUX
746		} else if b.os("iOS") {
747			ec = append([]string{task_os}, ec...)
748			task_os = "Mac"
749		} else if b.matchOs("Win") {
750			task_os = "Win"
751		} else if b.compiler("GCC") {
752			// GCC compiles now run in a Docker container. We use the same OS and
753			// version to compile as to test.
754			ec = append(ec, "Docker")
755		} else if b.matchOs("Debian11") {
756			// We compile using the Debian11 machines in the skolo.
757			task_os = "Debian11"
758		} else if b.matchOs("Ubuntu", "Debian") {
759			task_os = COMPILE_TASK_NAME_OS_LINUX
760		} else if b.matchOs("Mac") {
761			task_os = "Mac"
762		}
763		jobNameMap := map[string]string{
764			"role":          "Build",
765			"os":            task_os,
766			"compiler":      b.parts["compiler"],
767			"target_arch":   b.parts["arch"],
768			"configuration": b.parts["configuration"],
769		}
770		if b.extraConfig("PathKit") {
771			ec = []string{"PathKit"}
772			// We prefer to compile this in the cloud because we have more resources there
773			jobNameMap["os"] = "Debian10"
774		}
775		if b.extraConfig("CanvasKit", "SkottieWASM", "Puppeteer") {
776			if b.cpu() {
777				ec = []string{"CanvasKit_CPU"}
778			} else {
779				ec = []string{"CanvasKit"}
780			}
781			// We prefer to compile this in the cloud because we have more resources there
782			jobNameMap["os"] = "Debian10"
783		}
784		if len(ec) > 0 {
785			jobNameMap["extra_config"] = strings.Join(ec, "_")
786		}
787		name, err := b.jobNameSchema.MakeJobName(jobNameMap)
788		if err != nil {
789			log.Fatal(err)
790		}
791		return name
792	} else if b.role("BuildStats") {
793		return strings.Replace(b.Name, "BuildStats", "Build", 1)
794	} else if b.role("CodeSize") {
795		return codesizeTaskNameRegexp.ReplaceAllString(b.Name, "Build-")
796	} else {
797		return b.Name
798	}
799}
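// Some illustrative examples of the derivation above. The BuildStats and CodeSize
// rewrites follow directly from the code; the Test example uses a hypothetical job
// name:
//
//	BuildStats-Debian10-Clang-x86_64-Release  -> Build-Debian10-Clang-x86_64-Release
//	CodeSize-dm-Debian10-Clang-x86_64-Release -> Build-Debian10-Clang-x86_64-Release
//	Test-Android-Clang-<model>-...-arm64-Debug-... -> a Build task on Debian10 with
//	    "Android" prepended to its extra_config.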
800
801// swarmDimensions generates swarming bot dimensions for the given task.
802func (b *taskBuilder) swarmDimensions() {
803	if b.cfg.SwarmDimensions != nil {
804		dims := b.cfg.SwarmDimensions(b.parts)
805		if dims != nil {
806			b.dimension(dims...)
807			return
808		}
809	}
810	b.defaultSwarmDimensions()
811}
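// A minimal sketch of a SwarmDimensions override that a calling repository might set on
// its Config (dimension values are illustrative). Returning nil for a given task falls
// through to defaultSwarmDimensions below:
//
//	cfg.SwarmDimensions = func(parts map[string]string) []string {
//		if parts["os"] == "MyCustomOS" {
//			return []string{"pool:Skia", "os:MyCustomOS-1.0", "gpu:none"}
//		}
//		return nil
//	}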
812
813// androidDeviceInfos maps Android models (as in the "model" part of a task) to the device_type and
814// device_os Swarming dimensions.
815var androidDeviceInfos = map[string][]string{
816	"AndroidOne":      {"sprout", "MOB30Q"},
817	"GalaxyS7_G930FD": {"herolte", "R16NW_G930FXXS2ERH6"}, // This is Oreo.
818	"GalaxyS9":        {"starlte", "QP1A.190711.020"},     // This is Android10.
819	"GalaxyS20":       {"exynos990", "QP1A.190711.020"},
820	"JioNext":         {"msm8937", "RKQ1.210602.002"},
821	"Mokey":           {"mokey", "UDC_11161052"},
822	"MokeyGo32":       {"mokey_go32", "UQ1A.240105.003.A1_11159138"},
823	"Nexus5":          {"hammerhead", "M4B30Z_3437181"},
824	"Nexus7":          {"grouper", "LMY47V_1836172"}, // 2012 Nexus 7
825	"P30":             {"HWELE", "HUAWEIELE-L29"},
826	"Pixel2XL":        {"taimen", "PPR1.180610.009"},
827	"Pixel3":          {"blueline", "PQ1A.190105.004"},
828	"Pixel3a":         {"sargo", "QP1A.190711.020"},
829	"Pixel4":          {"flame", "RPB2.200611.009"},       // R Preview
830	"Pixel4a":         {"sunfish", "AOSP.MASTER_7819821"}, // Pixel4a flashed with an Android HWASan build.
831	"Pixel4XL":        {"coral", "QD1A.190821.011.C4"},
832	"Pixel5":          {"redfin", "RD1A.200810.022.A4"},
833	"Pixel6":          {"oriole", "SD1A.210817.037"},
834	"Pixel7":          {"cheetah", "TD1A.221105.002"},
835	"TecnoSpark3Pro":  {"TECNO-KB8", "PPR1.180610.011"},
836	"Wembley":         {"wembley", "SP2A.220505.008"},
837}
838
839// defaultSwarmDimensions generates default swarming bot dimensions for the given task.
840func (b *taskBuilder) defaultSwarmDimensions() {
841	d := map[string]string{
842		"pool": b.cfg.Pool,
843	}
844	if os, ok := b.parts["os"]; ok {
845		d["os"], ok = map[string]string{
846			"Android":    "Android",
847			"Android12":  "Android",
848			"ChromeOS":   "ChromeOS",
849			"Debian9":    DEFAULT_OS_LINUX_GCE, // Runs in Deb9 Docker.
850			"Debian10":   DEFAULT_OS_LINUX_GCE,
851			"Debian11":   DEBIAN_11_OS,
852			"Mac":        DEFAULT_OS_MAC,
853			"Mac10.15.1": "Mac-10.15.1",
854			"Mac10.15.7": "Mac-10.15.7", // Same as 'Mac', but explicit.
855			"Mac11":      "Mac-11.4",
856			"Mac12":      "Mac-12",
857			"Mac13":      "Mac-13",
858			"Mokey":      "Android",
859			"MokeyGo32":  "Android",
860			"Ubuntu18":   "Ubuntu-18.04",
861			"Win":        DEFAULT_OS_WIN,
862			"Win10":      "Windows-10-19045",
863			"Win2019":    DEFAULT_OS_WIN,
864			"iOS":        "iOS-13.3.1",
865		}[os]
866		if !ok {
867			log.Fatalf("Entry %q not found in OS mapping.", os)
868		}
869		if os == "Debian11" && b.extraConfig("Docker") {
870			d["os"] = DEFAULT_OS_LINUX_GCE
871		}
872		if os == "Win10" && b.parts["model"] == "Golo" {
873			// ChOps-owned machines have Windows 10 22H2.
874			d["os"] = "Windows-10-19045"
875		}
876		if b.parts["model"] == "iPhone11" {
877			d["os"] = "iOS-13.6"
878		}
879		if b.parts["model"] == "iPadPro" {
880			d["os"] = "iOS-13.6"
881		}
882	} else {
883		d["os"] = DEFAULT_OS_DEBIAN
884	}
885	if b.role("Test", "Perf") {
886		if b.os("Android") {
887			// For Android, the device type is a better dimension
888			// than CPU or GPU.
889			deviceInfo, ok := androidDeviceInfos[b.parts["model"]]
890			if !ok {
891				log.Fatalf("Entry %q not found in Android mapping.", b.parts["model"])
892			}
893			d["device_type"] = deviceInfo[0]
894			d["device_os"] = deviceInfo[1]
895
896			// Tests using Android's HWAddress Sanitizer require an HWASan build of Android.
897			// See https://developer.android.com/ndk/guides/hwasan.
898			if b.extraConfig("HWASAN") {
899				d["android_hwasan_build"] = "1"
900			}
901		} else if b.os("Android12") {
902			// For Android, the device type is a better dimension
903			// than CPU or GPU.
904			deviceInfo, ok := map[string][]string{
905				"Pixel5": {"redfin", "SP2A.220305.012"},
906			}[b.parts["model"]]
907			if !ok {
908				log.Fatalf("Entry %q not found in Android mapping.", b.parts["model"])
909			}
910			d["device_type"] = deviceInfo[0]
911			d["device_os"] = deviceInfo[1]
912
913			// Tests using Android's HWAddress Sanitizer require an HWASan build of Android.
914			// See https://developer.android.com/ndk/guides/hwasan.
915			if b.extraConfig("HWASAN") {
916				d["android_hwasan_build"] = "1"
917			}
918		} else if b.os("iOS") {
919			device, ok := map[string]string{
920				"iPadMini4": "iPad5,1",
921				"iPhone7":   "iPhone9,1",
922				"iPhone8":   "iPhone10,1",
923				"iPhone11":  "iPhone12,1",
924				"iPadPro":   "iPad6,3",
925			}[b.parts["model"]]
926			if !ok {
927				log.Fatalf("Entry %q not found in iOS mapping.", b.parts["model"])
928			}
929			d["device_type"] = device
930		} else if b.cpu() || b.extraConfig("CanvasKit", "Docker", "SwiftShader") {
931			modelMapping, ok := map[string]map[string]string{
932				"AppleM1": {
933					"MacMini9.1": "arm64-64-Apple_M1",
934				},
935				"AppleIntel": {
936					"MacBookPro16.2": "x86-64",
937				},
938				"AVX": {
939					"VMware7.1": "x86-64",
940				},
941				"AVX2": {
942					"GCE":            "x86-64-Haswell_GCE",
943					"MacBookAir7.2":  "x86-64-i5-5350U",
944					"MacBookPro11.5": "x86-64-i7-4870HQ",
945					"MacMini7.1":     "x86-64-i5-4278U",
946					"NUC5i7RYH":      "x86-64-i7-5557U",
947					"NUC9i7QN":       "x86-64-i7-9750H",
948					"NUC11TZi5":      "x86-64-i5-1135G7",
949				},
950				"AVX512": {
951					"GCE":  "x86-64-Skylake_GCE",
952					"Golo": "Intel64_Family_6_Model_85_Stepping_7__GenuineIntel",
953				},
954				"Rome": {
955					"GCE": "x86-64-AMD_Rome_GCE",
956				},
957				"SwiftShader": {
958					"GCE": "x86-64-Haswell_GCE",
959				},
960			}[b.parts["cpu_or_gpu_value"]]
961			if !ok {
962				log.Fatalf("Entry %q not found in CPU mapping.", b.parts["cpu_or_gpu_value"])
963			}
964			cpu, ok := modelMapping[b.parts["model"]]
965			if !ok {
966				log.Fatalf("Entry %q not found in %q model mapping.", b.parts["model"], b.parts["cpu_or_gpu_value"])
967			}
968			d["cpu"] = cpu
969			if b.model("GCE") && b.matchOs("Debian") {
970				d["os"] = DEFAULT_OS_LINUX_GCE
971			}
972			if b.model("GCE") && d["cpu"] == "x86-64-Haswell_GCE" {
973				d["machine_type"] = MACHINE_TYPE_MEDIUM
974			}
975		} else {
976			// It's a GPU job.
977			if b.matchOs("Win") {
978				gpu, ok := map[string]string{
979					// At some point this might use the device ID, but for now it's like Chromebooks.
980					"GTX660":        "10de:11c0-26.21.14.4120",
981					"GTX960":        "10de:1401-31.0.15.3699",
982					"IntelHD4400":   "8086:0a16-20.19.15.4963",
983					"IntelIris540":  "8086:1926-31.0.101.2115",
984					"IntelIris6100": "8086:162b-20.19.15.4963",
985					"IntelIris655":  "8086:3ea5-26.20.100.7463",
986					"IntelIrisXe":   "8086:9a49-31.0.101.5186",
987					"RadeonHD7770":  "1002:683d-26.20.13031.18002",
988					"RadeonR9M470X": "1002:6646-26.20.13031.18002",
989					"QuadroP400":    "10de:1cb3-31.0.15.5222",
990					"RadeonVega6":   "1002:1636-31.0.14057.5006",
991					"RTX3060":       "10de:2489-31.0.15.3699",
992				}[b.parts["cpu_or_gpu_value"]]
993				if !ok {
994					log.Fatalf("Entry %q not found in Win GPU mapping.", b.parts["cpu_or_gpu_value"])
995				}
996				d["gpu"] = gpu
997			} else if b.isLinux() {
998				gpu, ok := map[string]string{
999					// Intel drivers come from CIPD, so no need to specify the version here.
1000					"IntelHD2000":  "8086:0102",
1001					"IntelHD405":   "8086:22b1",
1002					"IntelIris640": "8086:5926",
1003					"QuadroP400":   "10de:1cb3-510.60.02",
1004					"RTX3060":      "10de:2489-470.182.03",
1005					"IntelIrisXe":  "8086:9a49",
1006					"RadeonVega6":  "1002:1636",
1007				}[b.parts["cpu_or_gpu_value"]]
1008				if !ok {
1009					log.Fatalf("Entry %q not found in Ubuntu GPU mapping.", b.parts["cpu_or_gpu_value"])
1010				}
1011				d["gpu"] = gpu
1012
1013				if b.matchOs("Debian11") {
1014					d["os"] = DEBIAN_11_OS
1015				} else if b.matchOs("Debian") {
1016					// The Debian10 machines in the skolo are 10.10, not 10.3.
1017					d["os"] = DEFAULT_OS_DEBIAN
1018				}
1019				if b.parts["cpu_or_gpu_value"] == "IntelIrisXe" {
1020					// The Intel Iris Xe devices are Debian 11.3.
1021					d["os"] = "Debian-bookworm/sid"
1022				}
1023			} else if b.matchOs("Mac") {
1024				gpu, ok := map[string]string{
1025					"AppleM1":       "AppleM1",
1026					"IntelHD6000":   "8086:1626",
1027					"IntelHD615":    "8086:591e",
1028					"IntelIris5100": "8086:0a2e",
1029					"IntelIrisPlus": "8086:8a53",
1030					"RadeonHD8870M": "1002:6821-4.0.20-3.2.8",
1031				}[b.parts["cpu_or_gpu_value"]]
1032				if !ok {
1033					log.Fatalf("Entry %q not found in Mac GPU mapping.", b.parts["cpu_or_gpu_value"])
1034				}
1035				if gpu == "AppleM1" {
1036					// No GPU dimension yet, but we can constrain by CPU.
1037					d["cpu"] = "arm64-64-Apple_M1"
1038				} else {
1039					d["gpu"] = gpu
1040				}
1041				// We have two different types of MacMini7,1 with the same GPU but different CPUs.
1042				if b.gpu("IntelIris5100") {
1043					if b.extraConfig("i5") {
1044						// If we say "i5", run on our MacMini7,1s in the Skolo:
1045						d["cpu"] = "x86-64-i5-4278U"
1046					} else {
1047						// Otherwise, run on Golo machines, just because that's
1048						// where those jobs have always run. Plus, some of them
1049						// are Perf jobs, which we want to keep consistent.
1050						d["cpu"] = "x86-64-i7-4578U"
1051					}
1052				}
1053			} else if b.os("ChromeOS") {
1054				version, ok := map[string]string{
1055					"IntelUHDGraphics605": "15236.2.0",
1056					"RadeonVega3":         "14233.0.0",
1057					"Adreno618":           "14150.39.0",
1058					"MaliT860":            "14092.77.0",
1059				}[b.parts["cpu_or_gpu_value"]]
1060				if !ok {
1061					log.Fatalf("Entry %q not found in ChromeOS GPU mapping.", b.parts["cpu_or_gpu_value"])
1062				}
1063				d["gpu"] = b.parts["cpu_or_gpu_value"]
1064				d["release_version"] = version
1065			} else {
1066				log.Fatalf("Unknown GPU mapping for OS %q.", b.parts["os"])
1067			}
1068		}
1069	} else {
1070		if d["os"] == DEBIAN_11_OS {
1071			// The Debian11 compile machines in the skolo have
1072			// GPUs, but we also use them for compiles.
1073
1074			// Dodge Raspberry Pis.
1075			d["cpu"] = "x86-64"
1076			// Target the Intel machines with RTX3060 GPUs, as they are beefy, we have
1077			// 20 of them, and they are set up to compile.
1078			d["gpu"] = "10de:2489"
1079		} else {
1080			d["gpu"] = "none"
1081		}
1082		if d["os"] == DEFAULT_OS_LINUX_GCE {
1083			if b.extraConfig("CanvasKit", "CMake", "Docker", "PathKit") || b.role("BuildStats", "CodeSize") {
1084				b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
1085				return
1086			}
1087			// Use many-core machines for Build tasks.
1088			b.linuxGceDimensions(MACHINE_TYPE_LARGE)
1089			return
1090		} else if d["os"] == DEFAULT_OS_WIN {
1091			// Windows CPU bots.
1092			d["cpu"] = "x86-64-Haswell_GCE"
1093			// Use many-core machines for Build tasks.
1094			d["machine_type"] = MACHINE_TYPE_LARGE
1095		} else if d["os"] == DEFAULT_OS_MAC {
1096			// Mac CPU bots are no longer VMs.
1097			d["cpu"] = "x86-64"
1098			d["cores"] = "12"
1099			delete(d, "gpu")
1100		}
1101	}
1102
1103	dims := make([]string, 0, len(d))
1104	for k, v := range d {
1105		dims = append(dims, fmt.Sprintf("%s:%s", k, v))
1106	}
1107	sort.Strings(dims)
1108	b.dimension(dims...)
1109}
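// As a concrete trace through the non-Test/Perf branch above: a Build task whose "os"
// part is "Debian10" (and which has none of the listed extra configs) maps to
// DEFAULT_OS_LINUX_GCE and ends up in linuxGceDimensions(MACHINE_TYPE_LARGE), i.e.
// roughly:
//
//	cpu:x86-64-Haswell_GCE gpu:none machine_type:n1-highcpu-64 os:Debian-10.3 pool:<cfg.Pool>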
1110
1111// bundleRecipes generates the task to bundle and isolate the recipes. Returns
1112// the name of the task, which may be added as a dependency.
1113func (b *jobBuilder) bundleRecipes() string {
1114	b.addTask(BUNDLE_RECIPES_NAME, func(b *taskBuilder) {
1115		b.cipd(specs.CIPD_PKGS_GIT_LINUX_AMD64...)
1116		b.cmd("/bin/bash", "skia/infra/bots/bundle_recipes.sh", specs.PLACEHOLDER_ISOLATED_OUTDIR)
1117		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
1118		b.idempotent()
1119		b.cas(CAS_RECIPES)
1120		b.usesPython()
1121		b.addToPATH("cipd_bin_packages", "cipd_bin_packages/bin")
1122	})
1123	return BUNDLE_RECIPES_NAME
1124}
1125
1126// buildTaskDrivers generates the task to compile the task driver code to run on
1127// all platforms. Returns the name of the task, which may be added as a
1128// dependency.
1129func (b *jobBuilder) buildTaskDrivers(goos, goarch string) string {
1130	name := BUILD_TASK_DRIVERS_PREFIX + "_" + goos + "_" + goarch
1131	b.addTask(name, func(b *taskBuilder) {
1132		b.cmd("/bin/bash", "skia/infra/bots/build_task_drivers.sh",
1133			specs.PLACEHOLDER_ISOLATED_OUTDIR,
1134			goos+"_"+goarch)
1135		b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
1136		b.usesBazel("linux_x64")
1137		b.idempotent()
1138		b.cas(CAS_TASK_DRIVERS)
1139	})
1140	return name
1141}
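// For example, buildTaskDrivers("linux", "amd64") registers and returns the task name
// "Housekeeper-PerCommit-BuildTaskDrivers_linux_amd64".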
1142
1143// createDockerImage creates the specified docker image. Returns the name of the
1144// generated task.
1145func (b *jobBuilder) createDockerImage(wasm bool) string {
1146	// First, derive the name of the task.
1147	imageName := "skia-release"
1148	taskName := "Housekeeper-PerCommit-CreateDockerImage_Skia_Release"
1149	if wasm {
1150		imageName = "skia-wasm-release"
1151		taskName = "Housekeeper-PerCommit-CreateDockerImage_Skia_WASM_Release"
1152	}
1153	imageDir := path.Join("docker", imageName)
1154
1155	// Add the task.
1156	b.addTask(taskName, func(b *taskBuilder) {
1157		// TODO(borenet): Make this task not use Git.
1158		b.usesGit()
1159		b.cmd(
1160			"./build_push_docker_image",
1161			"--image_name", fmt.Sprintf("gcr.io/skia-public/%s", imageName),
1162			"--dockerfile_dir", imageDir,
1163			"--project_id", "skia-swarming-bots",
1164			"--task_id", specs.PLACEHOLDER_TASK_ID,
1165			"--task_name", b.Name,
1166			"--workdir", ".",
1167			"--gerrit_project", "skia",
1168			"--gerrit_url", "https://skia-review.googlesource.com",
1169			"--repo", specs.PLACEHOLDER_REPO,
1170			"--revision", specs.PLACEHOLDER_REVISION,
1171			"--patch_issue", specs.PLACEHOLDER_ISSUE,
1172			"--patch_set", specs.PLACEHOLDER_PATCHSET,
1173			"--patch_server", specs.PLACEHOLDER_CODEREVIEW_SERVER,
1174			"--swarm_out_dir", specs.PLACEHOLDER_ISOLATED_OUTDIR,
1175		)
1176		b.dep(b.buildTaskDrivers("linux", "amd64"))
1177		b.addToPATH("cipd_bin_packages", "cipd_bin_packages/bin")
1178		b.cas(CAS_EMPTY)
1179		b.serviceAccount(b.cfg.ServiceAccountCompile)
1180		b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
1181		b.usesDocker()
1182		b.cache(CACHES_DOCKER...)
1183		b.timeout(time.Hour)
1184	})
1185	return taskName
1186}
1187
1188// createPushAppsFromSkiaDockerImage creates and pushes docker images of some apps
1189// (e.g. fiddler, api) using the skia-release docker image.
1190func (b *jobBuilder) createPushAppsFromSkiaDockerImage() {
1191	b.addTask(b.Name, func(b *taskBuilder) {
1192		// TODO(borenet): Make this task not use Git.
1193		b.usesGit()
1194		b.cmd(
1195			"./push_apps_from_skia_image",
1196			"--project_id", "skia-swarming-bots",
1197			"--task_id", specs.PLACEHOLDER_TASK_ID,
1198			"--task_name", b.Name,
1199			"--workdir", ".",
1200			"--repo", specs.PLACEHOLDER_REPO,
1201			"--revision", specs.PLACEHOLDER_REVISION,
1202			"--patch_issue", specs.PLACEHOLDER_ISSUE,
1203			"--patch_set", specs.PLACEHOLDER_PATCHSET,
1204			"--patch_server", specs.PLACEHOLDER_CODEREVIEW_SERVER,
1205			"--bazel_cache_dir", bazelCacheDirOnGCELinux,
1206		)
1207		b.dep(b.buildTaskDrivers("linux", "amd64"))
1208		b.dep(b.createDockerImage(false))
1209		b.addToPATH("cipd_bin_packages", "cipd_bin_packages/bin")
1210		b.cas(CAS_EMPTY)
1211		b.usesBazel("linux_x64")
1212		b.serviceAccount(b.cfg.ServiceAccountCompile)
1213		b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
1214		b.usesDocker()
1215		b.cache(CACHES_DOCKER...)
1216		b.timeout(2 * time.Hour)
1217	})
1218}
1219
1220// createPushBazelAppsFromWASMDockerImage pushes those infra apps that have been ported to Bazel
1221// and require assets built in the WASM docker image.
1222// TODO(kjlubick) The inputs to this job should not be the docker build, but a Bazel build.
1223func (b *jobBuilder) createPushBazelAppsFromWASMDockerImage() {
1224	b.addTask(b.Name, func(b *taskBuilder) {
1225		// TODO(borenet): Make this task not use Git.
1226		b.usesGit()
1227		b.cmd(
			// Task driver binary; the name here is assumed, following the naming
			// pattern of the other task drivers built in this file.
			"./push_bazel_apps_from_wasm_image",
1228			"--project_id", "skia-swarming-bots",
1229			"--task_id", specs.PLACEHOLDER_TASK_ID,
1230			"--task_name", b.Name,
1231			"--workdir", ".",
1232			"--skia_revision", specs.PLACEHOLDER_REVISION,
1233			"--bazel_cache_dir", bazelCacheDirOnGCELinux,
1234		)
1235		b.dep(b.buildTaskDrivers("linux", "amd64"))
1236		b.dep(b.createDockerImage(true))
1237		b.addToPATH("cipd_bin_packages", "cipd_bin_packages/bin")
1238		b.cas(CAS_EMPTY)
1239		b.usesBazel("linux_x64")
1240		b.serviceAccount(b.cfg.ServiceAccountCompile)
1241		b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
1242		b.usesDocker()
1243		b.cache(CACHES_DOCKER...)
1244	})
1245}
1246
1247var iosRegex = regexp.MustCompile(`os:iOS-(.*)`)
1248
1249func (b *taskBuilder) maybeAddIosDevImage() {
1250	for _, dim := range b.Spec.Dimensions {
1251		if m := iosRegex.FindStringSubmatch(dim); len(m) >= 2 {
1252			var asset string
1253			switch m[1] {
1254			// Other patch versions can be added to the same case.
1255			case "11.4.1":
1256				asset = "ios-dev-image-11.4"
1257			case "13.3.1":
1258				asset = "ios-dev-image-13.3"
1259			case "13.4.1":
1260				asset = "ios-dev-image-13.4"
1261			case "13.5.1":
1262				asset = "ios-dev-image-13.5"
1263			case "13.6":
1264				asset = "ios-dev-image-13.6"
1265			default:
1266				log.Fatalf("Unable to determine correct ios-dev-image asset for %s. If %s is a new iOS release, you must add a CIPD package containing the corresponding iOS dev image; see ios-dev-image-11.4 for an example.", b.Name, m[1])
1267			}
1268			b.asset(asset)
1269			break
1270		} else if strings.Contains(dim, "iOS") {
1271			log.Fatalf("Must specify iOS version for %s to obtain correct dev image; os dimension is missing version: %s", b.Name, dim)
1272		}
1273	}
1274}
1275
1276// compile generates a compile task. Returns the name of the compile task.
1277func (b *jobBuilder) compile() string {
1278	name := b.deriveCompileTaskName()
1279	if b.extraConfig("WasmGMTests") {
1280		b.compileWasmGMTests(name)
1281	} else {
1282		b.addTask(name, func(b *taskBuilder) {
1283			recipe := "compile"
1284			casSpec := CAS_COMPILE
1285			if b.extraConfig("NoDEPS", "CMake", "Flutter", "NoPatch", "Vello", "Fontations") {
1286				recipe = "sync_and_compile"
1287				casSpec = CAS_RUN_RECIPE
1288				b.recipeProps(EXTRA_PROPS)
1289				b.usesGit()
1290				if !b.extraConfig("NoDEPS") {
1291					b.cache(CACHES_WORKDIR...)
1292				}
1293			} else {
1294				b.idempotent()
1295			}
1296			if b.extraConfig("NoPatch") {
1297				b.kitchenTask(recipe, OUTPUT_BUILD_NOPATCH)
1298			} else {
1299				b.kitchenTask(recipe, OUTPUT_BUILD)
1300			}
1301			b.cas(casSpec)
1302			b.serviceAccount(b.cfg.ServiceAccountCompile)
1303			b.swarmDimensions()
1304			if b.extraConfig("Docker", "LottieWeb", "CMake") || b.compiler("EMCC") {
1305				b.usesDocker()
1306				b.cache(CACHES_DOCKER...)
1307			}
1308			if b.extraConfig("Dawn") {
1309				// https://dawn.googlesource.com/dawn/+/516701da8184655a47c92a573cc84da7db5e69d4/generator/dawn_version_generator.py#21
1310				b.usesGit()
1311			}
1312
1313			// Android bots require a toolchain.
1314			if b.extraConfig("Android") {
1315				if b.matchOs("Mac") {
1316					b.asset("android_ndk_darwin")
1317				} else if b.matchOs("Win") {
1318					pkg := b.MustGetCipdPackageFromAsset("android_ndk_windows")
1319					pkg.Path = "n"
1320					b.cipd(pkg)
1321				} else {
1322					b.asset("android_ndk_linux")
1323				}
1324			} else if b.extraConfig("Chromebook") {
1325				b.asset("clang_linux")
1326				if b.arch("x86_64") {
1327					b.asset("chromebook_x86_64_gles")
1328				} else if b.arch("arm") {
1329					b.asset("armhf_sysroot")
1330					b.asset("chromebook_arm_gles")
1331				}
1332			} else if b.isLinux() {
1333				if b.compiler("Clang") {
1334					b.asset("clang_linux")
1335				}
1336				if b.extraConfig("SwiftShader") {
1337					b.asset("cmake_linux")
1338				}
1339				b.asset("ccache_linux")
1340				b.usesCCache()
1341				if b.extraConfig("Vello") || b.extraConfig("Fontations") {
1342					b.usesBazel("linux_x64")
1343					b.attempts(1)
1344				}
1345			} else if b.matchOs("Win") {
1346				b.asset("win_toolchain")
1347				if b.compiler("Clang") {
1348					b.asset("clang_win")
1349				}
1350				if b.extraConfig("DWriteCore") {
1351					b.asset("dwritecore")
1352				}
1353			} else if b.matchOs("Mac") {
1354				b.cipd(CIPD_PKGS_XCODE...)
1355				b.Spec.Caches = append(b.Spec.Caches, &specs.Cache{
1356					Name: "xcode",
1357					Path: "cache/Xcode.app",
1358				})
1359				b.asset("ccache_mac")
1360				b.usesCCache()
1361				if b.extraConfig("iOS") {
1362					b.asset("provisioning_profile_ios")
1363				}
1364				if b.extraConfig("Vello") || b.extraConfig("Fontations") {
1365					// All of our current Mac compile machines are x64 Mac only.
1366					b.usesBazel("mac_x64")
1367					b.attempts(1)
1368				}
1369			}
1370		})
1371	}
1372
1373	// All compile tasks are runnable as their own Job. Assert that the Job
1374	// is listed in jobs.
1375	if !In(name, b.jobs) {
1376		log.Fatalf("Job %q is missing from the jobs list! Derived from: %q", name, b.Name)
1377	}
1378
1379	return name
1380}
1381
1382// recreateSKPs generates a RecreateSKPs task.
1383func (b *jobBuilder) recreateSKPs() {
1384	cmd := []string{
1385		"./recreate_skps",
1386		"--local=false",
1387		"--project_id", "skia-swarming-bots",
1388		"--task_id", specs.PLACEHOLDER_TASK_ID,
1389		"--task_name", b.Name,
1390		"--skia_revision", specs.PLACEHOLDER_REVISION,
1391		"--patch_ref", specs.PLACEHOLDER_PATCH_REF,
1392		"--git_cache", "cache/git",
1393		"--checkout_root", "cache/work",
1394		"--dm_path", "build/dm",
1395	}
1396	if b.matchExtraConfig("DryRun") {
1397		cmd = append(cmd, "--dry_run")
1398	}
1399	b.addTask(b.Name, func(b *taskBuilder) {
1400		b.cas(CAS_RECREATE_SKPS)
1401		b.dep(b.buildTaskDrivers("linux", "amd64"))
1402		b.dep("Build-Debian10-Clang-x86_64-Release") // To get DM.
1403		b.cmd(cmd...)
1404		b.cipd(CIPD_PKG_LUCI_AUTH)
1405		b.serviceAccount(b.cfg.ServiceAccountRecreateSKPs)
1406		b.dimension(
1407			"pool:SkiaCT",
1408			fmt.Sprintf("os:%s", DEFAULT_OS_LINUX_GCE),
1409		)
1410		b.usesGo()
1411		b.cache(CACHES_WORKDIR...)
1412		b.timeout(6 * time.Hour)
1413		b.usesPython()
1414		b.addToPATH("cipd_bin_packages", "cipd_bin_packages/bin")
1415		b.attempts(2)
1416	})
1417}
1418
1419// checkGeneratedFiles verifies that no generated SKSL files have been edited by hand, and that
1420// we do not get any diffs after regenerating all files (go generate, Gazelle, etc.).
1421func (b *jobBuilder) checkGeneratedFiles() {
1422	b.addTask(b.Name, func(b *taskBuilder) {
1423		b.cas(CAS_BAZEL)
1424		b.dep(b.buildTaskDrivers("linux", "amd64"))
1425		b.cmd("./check_generated_files",
1426			"--local=false",
1427			"--git_path=cipd_bin_packages/git",
1428			"--project_id", "skia-swarming-bots",
1429			"--task_id", specs.PLACEHOLDER_TASK_ID,
1430			"--task_name", b.Name,
1431			"--bazel_cache_dir", bazelCacheDirOnGCELinux,
1432			"--bazel_arg=--config=for_linux_x64_with_rbe",
1433			"--bazel_arg=--jobs=100",
1434		)
1435		b.cipd(specs.CIPD_PKGS_GIT_LINUX_AMD64...)
1436		b.usesBazel("linux_x64")
1437		b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
1438		b.serviceAccount(b.cfg.ServiceAccountHousekeeper)
1439	})
1440}
1441
1442// goLinters runs various Go linters (gofmt, errcheck, etc.) and fails if there are any errors or
1443// diffs.
1444func (b *jobBuilder) goLinters() {
1445	b.addTask(b.Name, func(b *taskBuilder) {
1446		b.cas(CAS_BAZEL)
1447		b.dep(b.buildTaskDrivers("linux", "amd64"))
1448		b.cmd("./go_linters",
1449			"--local=false",
1450			"--git_path=cipd_bin_packages/git",
1451			"--project_id", "skia-swarming-bots",
1452			"--task_id", specs.PLACEHOLDER_TASK_ID,
1453			"--task_name", b.Name,
1454			"--bazel_cache_dir", bazelCacheDirOnGCELinux,
1455			"--bazel_arg=--config=for_linux_x64_with_rbe",
1456			"--bazel_arg=--jobs=100",
1457		)
1458		b.cipd(specs.CIPD_PKGS_GIT_LINUX_AMD64...)
1459		b.usesBazel("linux_x64")
1460		b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
1461		b.serviceAccount(b.cfg.ServiceAccountHousekeeper)
1462	})
1463}
1464
1465// checkGnToBp verifies that the gn_to_bp.py script continues to work.
1466func (b *jobBuilder) checkGnToBp() {
1467	b.addTask(b.Name, func(b *taskBuilder) {
1468		b.cas(CAS_COMPILE)
1469		b.dep(b.buildTaskDrivers("linux", "amd64"))
1470		b.cmd("./run_gn_to_bp",
1471			"--local=false",
1472			"--project_id", "skia-swarming-bots",
1473			"--task_id", specs.PLACEHOLDER_TASK_ID,
1474			"--task_name", b.Name,
1475		)
1476		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
1477		b.usesPython()
1478		b.serviceAccount(b.cfg.ServiceAccountHousekeeper)
1479	})
1480}
1481
1482// housekeeper generates a Housekeeper task.
1483func (b *jobBuilder) housekeeper() {
1484	b.addTask(b.Name, func(b *taskBuilder) {
1485		b.recipeProps(EXTRA_PROPS)
1486		b.kitchenTask("housekeeper", OUTPUT_NONE)
1487		b.serviceAccount(b.cfg.ServiceAccountHousekeeper)
1488		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
1489		b.usesGit()
1490		b.cache(CACHES_WORKDIR...)
1491	})
1492}
1493
1494// g3FrameworkCanary generates a G3 Framework Canary task.
1497func (b *jobBuilder) g3FrameworkCanary() {
1498	b.addTask(b.Name, func(b *taskBuilder) {
1499		b.cas(CAS_EMPTY)
1500		b.dep(b.buildTaskDrivers("linux", "amd64"))
1501		b.cmd("./g3_canary",
1502			"--local=false",
1503			"--project_id", "skia-swarming-bots",
1504			"--task_id", specs.PLACEHOLDER_TASK_ID,
1505			"--task_name", b.Name,
1506			"--repo", specs.PLACEHOLDER_REPO,
1507			"--revision", specs.PLACEHOLDER_REVISION,
1508			"--patch_issue", specs.PLACEHOLDER_ISSUE,
1509			"--patch_set", specs.PLACEHOLDER_PATCHSET,
1510			"--patch_server", specs.PLACEHOLDER_CODEREVIEW_SERVER,
1511		)
1512		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
1513		b.cipd(CIPD_PKG_LUCI_AUTH)
1514		b.serviceAccount("skia-g3-framework-compile@skia-swarming-bots.iam.gserviceaccount.com")
1515		b.timeout(3 * time.Hour)
1516		b.attempts(1)
1517	})
1518}
1519
1520// infra generates an infra_tests task.
1521func (b *jobBuilder) infra() {
1522	b.addTask(b.Name, func(b *taskBuilder) {
1523		if b.matchOs("Win") || b.matchExtraConfig("Win") {
1524			b.dimension(
1525				// Specify CPU to avoid running builds on bots with a rarer CPU.
1526				"cpu:x86-64-Haswell_GCE",
1527				"gpu:none",
1528				fmt.Sprintf("machine_type:%s", MACHINE_TYPE_MEDIUM), // We don't have any small Windows instances.
1529				fmt.Sprintf("os:%s", DEFAULT_OS_WIN),
1530				fmt.Sprintf("pool:%s", b.cfg.Pool),
1531			)
1532		} else {
1533			b.linuxGceDimensions(MACHINE_TYPE_SMALL)
1534		}
1535		b.recipeProp("repository", specs.PLACEHOLDER_REPO)
1536		b.kitchenTask("infra", OUTPUT_NONE)
1537		b.cas(CAS_WHOLE_REPO)
1538		b.serviceAccount(b.cfg.ServiceAccountCompile)
1539		b.usesGSUtil()
1540		b.idempotent()
1541		b.usesGo()
1542	})
1543}
1544
1545// buildstats generates a buildstats task, which compiles code and generates
1546// statistics about the build.
1547func (b *jobBuilder) buildstats() {
1548	compileTaskName := b.compile()
1549	b.addTask(b.Name, func(b *taskBuilder) {
1550		b.recipeProps(EXTRA_PROPS)
1551		b.kitchenTask("compute_buildstats", OUTPUT_PERF)
1552		b.dep(compileTaskName)
1553		b.asset("bloaty")
1554		b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
1555		b.usesDocker()
1556		b.usesGit()
1557		b.cache(CACHES_WORKDIR...)
1558	})
1559	// Upload release results (for tracking in perf)
1560	// We have some jobs that are FYI (e.g. Debug-CanvasKit, tree-map generator)
1561	if b.release() && !b.arch("x86_64") {
1562		uploadName := fmt.Sprintf("%s%s%s", PREFIX_UPLOAD, b.jobNameSchema.Sep, b.Name)
1563		depName := b.Name
1564		b.addTask(uploadName, func(b *taskBuilder) {
1565			b.recipeProp("gs_bucket", b.cfg.GsBucketNano)
1566			b.recipeProps(EXTRA_PROPS)
1567			// TODO(borenet): I'm not sure why the upload task is
1568			// using the BuildStats task name, but I've done this
1569			// to maintain existing behavior.
1570			b.Name = depName
1571			b.kitchenTask("upload_buildstats_results", OUTPUT_NONE)
1572			b.Name = uploadName
1573			b.serviceAccount(b.cfg.ServiceAccountUploadNano)
1574			b.linuxGceDimensions(MACHINE_TYPE_SMALL)
1575			b.usesGSUtil()
1576			b.dep(depName)
1577		})
1578	}
1579}
1580
1581// codesize generates a codesize task, which takes the binary produced by a
1582// compile task, runs Bloaty against it, and uploads the resulting code size
1583// statistics to the GCS bucket belonging to the codesize.skia.org service.
1584func (b *jobBuilder) codesize() {
1585	compileTaskName := b.compile()
1586	compileTaskNameNoPatch := compileTaskName
1587	if b.extraConfig("Android") {
1588		compileTaskNameNoPatch += "_NoPatch" // add a second "extra config"
1589	} else {
1590		compileTaskNameNoPatch += "-NoPatch" // add the only "extra config"
1591	}
1592
1593	bloatyCipdPkg := b.MustGetCipdPackageFromAsset("bloaty")
1594
1595	b.addTask(b.Name, func(b *taskBuilder) {
1596		b.cas(CAS_EMPTY)
1597		b.dep(b.buildTaskDrivers("linux", "amd64"), compileTaskName,
1598			compileTaskNameNoPatch)
1599		cmd := []string{
1600			"./codesize",
1601			"--local=false",
1602			"--project_id", "skia-swarming-bots",
1603			"--task_id", specs.PLACEHOLDER_TASK_ID,
1604			"--task_name", b.Name,
1605			"--compile_task_name", compileTaskName,
1606			"--compile_task_name_no_patch", compileTaskNameNoPatch,
1607			// Note: the binary name cannot contain dashes, otherwise the naming
1608			// schema logic will partition it into multiple parts.
1609			//
1610			// If we ever need to define a CodeSize-* task for a binary with
1611			// dashes in its name (e.g. "my-binary"), a potential workaround is to
1612			// create a mapping from a new, non-dashed binary name (e.g. "my_binary")
1613			// to the actual binary name with dashes. This mapping can be hardcoded
1614			// in this function; no changes to the task driver would be necessary.
1615			"--binary_name", b.parts["binary_name"],
1616			"--bloaty_cipd_version", bloatyCipdPkg.Version,
1617			"--bloaty_binary", "bloaty/bloaty",
1618
1619			"--repo", specs.PLACEHOLDER_REPO,
1620			"--revision", specs.PLACEHOLDER_REVISION,
1621			"--patch_issue", specs.PLACEHOLDER_ISSUE,
1622			"--patch_set", specs.PLACEHOLDER_PATCHSET,
1623			"--patch_server", specs.PLACEHOLDER_CODEREVIEW_SERVER,
1624		}
1625		if strings.Contains(compileTaskName, "Android") {
1626			b.asset("android_ndk_linux")
1627			cmd = append(cmd, "--strip_binary",
1628				"android_ndk_linux/toolchains/llvm/prebuilt/linux-x86_64/bin/llvm-strip")
1629		} else {
1630			b.asset("binutils_linux_x64")
1631			cmd = append(cmd, "--strip_binary", "binutils_linux_x64/strip")
1632		}
1633		b.cmd(cmd...)
1634		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
1635		b.cache(CACHES_WORKDIR...)
1636		b.cipd(CIPD_PKG_LUCI_AUTH)
1637		b.asset("bloaty")
1638		b.serviceAccount("skia-external-codesize@skia-swarming-bots.iam.gserviceaccount.com")
1639		b.timeout(20 * time.Minute)
1640		b.attempts(1)
1641	})
1642}
1643
1644// doUpload indicates whether the given Job should upload its results.
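// For example (assumed NoUpload patterns, not taken from any real config): with
// cfg.NoUpload = []string{"ASAN", "^Perf-.*-Valgrind"}, a job named
// "Test-Debian10-Clang-GCE-CPU-AVX2-x86_64-Release-All-ASAN" would not upload, because the
// unanchored "ASAN" pattern matches anywhere in the name, while
// "Test-Debian10-Clang-GCE-CPU-AVX2-x86_64-Release-All" would upload.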
1645func (b *jobBuilder) doUpload() bool {
1646	for _, s := range b.cfg.NoUpload {
1647		m, err := regexp.MatchString(s, b.Name)
1648		if err != nil {
1649			log.Fatal(err)
1650		}
1651		if m {
1652			return false
1653		}
1654	}
1655	return true
1656}
1657
1658// commonTestPerfAssets adds the assets needed by Test and Perf tasks.
1659func (b *taskBuilder) commonTestPerfAssets() {
1660	// Docker-based tests don't need the standard CIPD assets
1661	if b.extraConfig("CanvasKit", "PathKit") || (b.role("Test") && b.extraConfig("LottieWeb")) {
1662		return
1663	}
1664	if b.os("Android", "ChromeOS", "iOS") {
1665		b.asset("skp", "svg", "skimage")
1666	} else if b.extraConfig("OldestSupportedSkpVersion") {
1667		b.assetWithVersion("skp", oldestSupportedSkpVersion)
1668	} else {
1669		// for desktop machines
1670		b.asset("skimage", "skp", "svg")
1671	}
1672
1673	if b.isLinux() && b.matchExtraConfig("SAN") {
1674		b.asset("clang_linux")
1675	}
1676
1677	if b.isLinux() {
1678		if b.extraConfig("Vulkan") {
1679			b.asset("linux_vulkan_sdk")
1680		}
1681		if b.matchGpu("Intel") {
1682			if b.matchGpu("IrisXe") {
1683				b.asset("mesa_intel_driver_linux_22")
1684			} else {
1685				// Use this for legacy drivers that were culled in v22 of Mesa.
1686				// https://www.phoronix.com/scan.php?page=news_item&px=Mesa-22.0-Drops-OpenSWR
1687				b.asset("mesa_intel_driver_linux")
1688			}
1689		}
1690	}
1691
1692	if b.matchOs("Win") && b.extraConfig("DWriteCore") {
1693		b.asset("dwritecore")
1694	}
1695}
1696
1697// directUpload adds prerequisites for uploading to GCS.
1698func (b *taskBuilder) directUpload(gsBucket, serviceAccount string) {
1699	b.recipeProp("gs_bucket", gsBucket)
1700	b.serviceAccount(serviceAccount)
1701	b.usesGSUtil()
1702}
1703
1704// dm generates a Test task using dm.
1705func (b *jobBuilder) dm() {
1706	compileTaskName := ""
1707	// LottieWeb doesn't require anything in Skia to be compiled.
1708	if !b.extraConfig("LottieWeb") {
1709		compileTaskName = b.compile()
1710	}
1711	directUpload := false
1712	b.addTask(b.Name, func(b *taskBuilder) {
1713		cas := CAS_TEST
1714		recipe := "test"
1715		if b.extraConfig("PathKit") {
1716			cas = CAS_PATHKIT
1717			recipe = "test_pathkit"
1718			if b.doUpload() {
1719				b.directUpload(b.cfg.GsBucketGm, b.cfg.ServiceAccountUploadGM)
1720				directUpload = true
1721			}
1722		} else if b.extraConfig("CanvasKit") {
1723			cas = CAS_CANVASKIT
1724			recipe = "test_canvaskit"
1725			if b.doUpload() {
1726				b.directUpload(b.cfg.GsBucketGm, b.cfg.ServiceAccountUploadGM)
1727				directUpload = true
1728			}
1729		} else if b.extraConfig("LottieWeb") {
1730			// CAS_LOTTIE_CI differs from CAS_LOTTIE_WEB in that it includes
1731			// more of the files, especially those brought in via DEPS in the
1732			// lottie-ci repo. The main difference between Perf.+LottieWeb and
1733			// Test.+LottieWeb is that the former pulls in the lottie build via
1734			// npm and the latter always tests at lottie's
1735			// ToT.
1736			cas = CAS_LOTTIE_CI
1737			recipe = "test_lottie_web"
1738			if b.doUpload() {
1739				b.directUpload(b.cfg.GsBucketGm, b.cfg.ServiceAccountUploadGM)
1740				directUpload = true
1741			}
1742		} else {
1743			// Default recipe supports direct upload.
1744			// TODO(http://skbug.com/11785): Windows jobs are unable to extract gsutil.
1745			// https://bugs.chromium.org/p/chromium/issues/detail?id=1192611
1746			if b.doUpload() && !b.matchOs("Win") {
1747				b.directUpload(b.cfg.GsBucketGm, b.cfg.ServiceAccountUploadGM)
1748				directUpload = true
1749			}
1750		}
1751		b.recipeProp("gold_hashes_url", b.cfg.GoldHashesURL)
1752		b.recipeProps(EXTRA_PROPS)
1753		iid := b.internalHardwareLabel()
1754		iidStr := ""
1755		if iid != nil {
1756			iidStr = strconv.Itoa(*iid)
1757		}
1758		if recipe == "test" {
1759			b.dmFlags(iidStr)
1760		}
1761		b.kitchenTask(recipe, OUTPUT_TEST)
1762		b.cas(cas)
1763		b.swarmDimensions()
1764		if b.extraConfig("CanvasKit", "Docker", "LottieWeb", "PathKit") {
1765			b.usesDocker()
1766		}
1767		if compileTaskName != "" {
1768			b.dep(compileTaskName)
1769		}
1770		if b.matchOs("Android") && b.extraConfig("ASAN") {
1771			b.asset("android_ndk_linux")
1772		}
1773		if b.extraConfig("NativeFonts") && !b.matchOs("Android") {
1774			b.needsFontsForParagraphTests()
1775		}
1776		if b.extraConfig("Fontations") {
1777			b.cipd(&specs.CipdPackage{
1778				Name:    "chromium/third_party/googlefonts_testdata",
1779				Path:    "googlefonts_testdata",
1780				Version: "version:20230913",
1781			})
1782		}
1783		b.commonTestPerfAssets()
1784		if b.matchExtraConfig("Lottie") {
1785			b.asset("lottie-samples")
1786		}
1787		b.expiration(20 * time.Hour)
1788
1789		b.timeout(4 * time.Hour)
1790		if b.extraConfig("Valgrind") {
1791			b.timeout(9 * time.Hour)
1792			b.expiration(48 * time.Hour)
1793			b.asset("valgrind")
1794			// Since Valgrind runs on the same bots as the CQ, we restrict Valgrind to a subset of the bots
1795			// to ensure there are always bots free for CQ tasks.
1796			b.dimension("valgrind:1")
1797		} else if b.extraConfig("MSAN") {
1798			b.timeout(9 * time.Hour)
1799		} else if b.arch("x86") && b.debug() {
1800			// skia:6737
1801			b.timeout(6 * time.Hour)
1802		} else if b.matchOs("Mac11") {
1803			b.timeout(30 * time.Minute)
1804		}
1805		b.maybeAddIosDevImage()
1806	})
1807
1808	// Upload results if necessary. TODO(kjlubick): If we do coverage analysis at the same
1809	// time as normal tests (which would be nice), cfg.json needs to have Coverage removed.
1810	if b.doUpload() && !directUpload {
1811		uploadName := fmt.Sprintf("%s%s%s", PREFIX_UPLOAD, b.jobNameSchema.Sep, b.Name)
1812		depName := b.Name
1813		b.addTask(uploadName, func(b *taskBuilder) {
1814			b.recipeProp("gs_bucket", b.cfg.GsBucketGm)
1815			b.recipeProps(EXTRA_PROPS)
1816			b.kitchenTask("upload_dm_results", OUTPUT_NONE)
1817			b.serviceAccount(b.cfg.ServiceAccountUploadGM)
1818			b.linuxGceDimensions(MACHINE_TYPE_SMALL)
1819			b.usesGSUtil()
1820			b.dep(depName)
1821		})
1822	}
1823}
1824
1825// canary generates a task that uses TaskDrivers to trigger canary manual rolls on autorollers.
1826// Canary-G3 does not use this path because it is very different from other autorollers.
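// A minimal sketch of a call site (the arguments are hypothetical, not taken from this file):
//
//	b.canary("flutter-autoroll", "Canary-Flutter", "https://flutter-review.googlesource.com/c/")
//
// The three arguments are passed through to the task driver as --roller_name, --cq_keyword, and
// --target_project_base_url.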
1827func (b *jobBuilder) canary(rollerName, canaryCQKeyword, targetProjectBaseURL string) {
1828	b.addTask(b.Name, func(b *taskBuilder) {
1829		b.cas(CAS_EMPTY)
1830		b.dep(b.buildTaskDrivers("linux", "amd64"))
1831		b.cmd("./canary",
1832			"--local=false",
1833			"--project_id", "skia-swarming-bots",
1834			"--task_id", specs.PLACEHOLDER_TASK_ID,
1835			"--task_name", b.Name,
1836			"--roller_name", rollerName,
1837			"--cq_keyword", canaryCQKeyword,
1838			"--target_project_base_url", targetProjectBaseURL,
1839			"--repo", specs.PLACEHOLDER_REPO,
1840			"--revision", specs.PLACEHOLDER_REVISION,
1841			"--patch_issue", specs.PLACEHOLDER_ISSUE,
1842			"--patch_set", specs.PLACEHOLDER_PATCHSET,
1843			"--patch_server", specs.PLACEHOLDER_CODEREVIEW_SERVER,
1844		)
1845		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
1846		b.cipd(CIPD_PKG_LUCI_AUTH)
1847		b.serviceAccount(b.cfg.ServiceAccountCanary)
1848		b.timeout(3 * time.Hour)
1849		b.attempts(1)
1850	})
1851}
1852
1853// puppeteer generates a task that uses TaskDrivers combined with a node script and puppeteer to
1854// benchmark something using Chromium (e.g. CanvasKit, LottieWeb).
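// The benchmark is selected by extra config (SkottieFrames, RenderSKP, or CanvasPerf); all three
// variants below run against the CanvasKit build via a node/puppeteer harness.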
1855func (b *jobBuilder) puppeteer() {
1856	compileTaskName := b.compile()
1857	b.addTask(b.Name, func(b *taskBuilder) {
1858		b.defaultSwarmDimensions()
1859		b.usesNode()
1860		b.cipd(CIPD_PKG_LUCI_AUTH)
1861		b.dep(b.buildTaskDrivers("linux", "amd64"), compileTaskName)
1862		b.output(OUTPUT_PERF)
1863		b.timeout(60 * time.Minute)
1864		b.cas(CAS_PUPPETEER)
1865		b.serviceAccount(b.cfg.ServiceAccountCompile)
1866
1867		webglversion := "2"
1868		if b.extraConfig("WebGL1") {
1869			webglversion = "1"
1870		}
1871
1872		if b.extraConfig("SkottieFrames") {
1873			b.cmd(
1874				"./perf_puppeteer_skottie_frames",
1875				"--project_id", "skia-swarming-bots",
1876				"--git_hash", specs.PLACEHOLDER_REVISION,
1877				"--task_id", specs.PLACEHOLDER_TASK_ID,
1878				"--task_name", b.Name,
1879				"--canvaskit_bin_path", "./build",
1880				"--lotties_path", "./lotties_with_assets",
1881				"--node_bin_path", "./node/node/bin",
1882				"--benchmark_path", "./tools/perf-canvaskit-puppeteer",
1883				"--output_path", OUTPUT_PERF,
1884				"--os_trace", b.parts["os"],
1885				"--model_trace", b.parts["model"],
1886				"--cpu_or_gpu_trace", b.parts["cpu_or_gpu"],
1887				"--cpu_or_gpu_value_trace", b.parts["cpu_or_gpu_value"],
1888				"--webgl_version", webglversion, // ignored when running with the CPU backend
1889			)
1890			b.needsLottiesWithAssets()
1891		} else if b.extraConfig("RenderSKP") {
1892			b.cmd(
1893				"./perf_puppeteer_render_skps",
1894				"--project_id", "skia-swarming-bots",
1895				"--git_hash", specs.PLACEHOLDER_REVISION,
1896				"--task_id", specs.PLACEHOLDER_TASK_ID,
1897				"--task_name", b.Name,
1898				"--canvaskit_bin_path", "./build",
1899				"--skps_path", "./skp",
1900				"--node_bin_path", "./node/node/bin",
1901				"--benchmark_path", "./tools/perf-canvaskit-puppeteer",
1902				"--output_path", OUTPUT_PERF,
1903				"--os_trace", b.parts["os"],
1904				"--model_trace", b.parts["model"],
1905				"--cpu_or_gpu_trace", b.parts["cpu_or_gpu"],
1906				"--cpu_or_gpu_value_trace", b.parts["cpu_or_gpu_value"],
1907				"--webgl_version", webglversion,
1908			)
1909			b.asset("skp")
1910		} else if b.extraConfig("CanvasPerf") { // refers to the canvas_perf.js test suite
1911			b.cmd(
1912				"./perf_puppeteer_canvas",
1913				"--project_id", "skia-swarming-bots",
1914				"--git_hash", specs.PLACEHOLDER_REVISION,
1915				"--task_id", specs.PLACEHOLDER_TASK_ID,
1916				"--task_name", b.Name,
1917				"--canvaskit_bin_path", "./build",
1918				"--node_bin_path", "./node/node/bin",
1919				"--benchmark_path", "./tools/perf-canvaskit-puppeteer",
1920				"--output_path", OUTPUT_PERF,
1921				"--os_trace", b.parts["os"],
1922				"--model_trace", b.parts["model"],
1923				"--cpu_or_gpu_trace", b.parts["cpu_or_gpu"],
1924				"--cpu_or_gpu_value_trace", b.parts["cpu_or_gpu_value"],
1925				"--webgl_version", webglversion,
1926			)
1927			b.asset("skp")
1928		}
1929
1930	})
1931
1932	// Upload results to Perf after.
1933	// TODO(kjlubick,borenet) deduplicate this with the logic in perf().
1934	uploadName := fmt.Sprintf("%s%s%s", PREFIX_UPLOAD, b.jobNameSchema.Sep, b.Name)
1935	depName := b.Name
1936	b.addTask(uploadName, func(b *taskBuilder) {
1937		b.recipeProp("gs_bucket", b.cfg.GsBucketNano)
1938		b.recipeProps(EXTRA_PROPS)
1939		// TODO(borenet): I'm not sure why the upload task is
1940		// using the Perf task name, but I've done this to
1941		// maintain existing behavior.
1942		b.Name = depName
1943		b.kitchenTask("upload_nano_results", OUTPUT_NONE)
1944		b.Name = uploadName
1945		b.serviceAccount(b.cfg.ServiceAccountUploadNano)
1946		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
1947		b.usesGSUtil()
1948		b.dep(depName)
1949	})
1950}
1951
1952// perf generates a Perf task.
1953func (b *jobBuilder) perf() {
1954	compileTaskName := ""
1955	// LottieWeb doesn't require anything in Skia to be compiled.
1956	if !b.extraConfig("LottieWeb") {
1957		compileTaskName = b.compile()
1958	}
1959	doUpload := !b.debug() && b.doUpload()
1960	b.addTask(b.Name, func(b *taskBuilder) {
1961		recipe := "perf"
1962		cas := CAS_PERF
1963		if b.extraConfig("PathKit") {
1964			cas = CAS_PATHKIT
1965			recipe = "perf_pathkit"
1966		} else if b.extraConfig("CanvasKit") {
1967			cas = CAS_CANVASKIT
1968			recipe = "perf_canvaskit"
1969		} else if b.extraConfig("SkottieTracing") {
1970			recipe = "perf_skottietrace"
1971		} else if b.extraConfig("SkottieWASM") {
1972			recipe = "perf_skottiewasm_lottieweb"
1973			cas = CAS_SKOTTIE_WASM
1974		} else if b.extraConfig("LottieWeb") {
1975			recipe = "perf_skottiewasm_lottieweb"
1976			cas = CAS_LOTTIE_WEB
1977		}
1978		b.recipeProps(EXTRA_PROPS)
1979		if recipe == "perf" {
1980			b.nanobenchFlags(doUpload)
1981		}
1982		b.kitchenTask(recipe, OUTPUT_PERF)
1983		b.cas(cas)
1984		b.swarmDimensions()
1985		if b.extraConfig("Docker") {
1986			b.usesDocker()
1987		}
1988		if compileTaskName != "" {
1989			b.dep(compileTaskName)
1990		}
1991		b.commonTestPerfAssets()
1992		b.expiration(20 * time.Hour)
1993		b.timeout(4 * time.Hour)
1994
1995		if b.extraConfig("Valgrind") {
1996			b.timeout(9 * time.Hour)
1997			b.expiration(48 * time.Hour)
1998			b.asset("valgrind")
1999			// Since Valgrind runs on the same bots as the CQ, we restrict Valgrind to a subset of the bots
2000			// to ensure there are always bots free for CQ tasks.
2001			b.dimension("valgrind:1")
2002		} else if b.extraConfig("MSAN") {
2003			b.timeout(9 * time.Hour)
2004		} else if b.parts["arch"] == "x86" && b.parts["configuration"] == "Debug" {
2005			// skia:6737
2006			b.timeout(6 * time.Hour)
2007		} else if b.matchOs("Mac11") {
2008			b.timeout(30 * time.Minute)
2009		}
2010
2011		if b.extraConfig("LottieWeb", "SkottieWASM") {
2012			b.asset("node", "lottie-samples")
2013		} else if b.matchExtraConfig("SkottieTracing") {
2014			b.needsLottiesWithAssets()
2015		} else if b.matchExtraConfig("Skottie") {
2016			b.asset("lottie-samples")
2017		}
2018
2019		if b.matchOs("Android") && b.cpu() {
2020			b.asset("text_blob_traces")
2021		}
2022		b.maybeAddIosDevImage()
2023
2024		iid := b.internalHardwareLabel()
2025		if iid != nil {
2026			b.Spec.Command = append(b.Spec.Command, fmt.Sprintf("internal_hardware_label=%d", *iid))
2027		}
2028	})
2029
2030	// Upload results if necessary.
2031	if doUpload {
2032		uploadName := fmt.Sprintf("%s%s%s", PREFIX_UPLOAD, b.jobNameSchema.Sep, b.Name)
2033		depName := b.Name
2034		b.addTask(uploadName, func(b *taskBuilder) {
2035			b.recipeProp("gs_bucket", b.cfg.GsBucketNano)
2036			b.recipeProps(EXTRA_PROPS)
2037			// TODO(borenet): I'm not sure why the upload task is
2038			// using the Perf task name, but I've done this to
2039			// maintain existing behavior.
2040			b.Name = depName
2041			b.kitchenTask("upload_nano_results", OUTPUT_NONE)
2042			b.Name = uploadName
2043			b.serviceAccount(b.cfg.ServiceAccountUploadNano)
2044			b.linuxGceDimensions(MACHINE_TYPE_SMALL)
2045			b.usesGSUtil()
2046			b.dep(depName)
2047		})
2048	}
2049}
2050
2051// presubmit generates a task which runs the presubmit for this repo.
2052func (b *jobBuilder) presubmit() {
2053	b.addTask(b.Name, func(b *taskBuilder) {
2054		b.recipeProps(map[string]string{
2055			"category":         "cq",
2056			"patch_gerrit_url": "https://skia-review.googlesource.com",
2057			"patch_project":    "skia",
2058			"patch_ref":        specs.PLACEHOLDER_PATCH_REF,
2059			"reason":           "CQ",
2060			"repo_name":        "skia",
2061		})
2062		b.recipeProps(EXTRA_PROPS)
2063		b.kitchenTaskNoBundle("run_presubmit", OUTPUT_NONE)
2064		b.cas(CAS_RUN_RECIPE)
2065		b.serviceAccount(b.cfg.ServiceAccountCompile)
2066		// Use MACHINE_TYPE_LARGE because it seems to save time versus
2067		// MEDIUM and we want presubmit to be fast.
2068		b.linuxGceDimensions(MACHINE_TYPE_LARGE)
2069		b.usesGit()
2070		b.cipd(&specs.CipdPackage{
2071			Name:    "infra/recipe_bundles/chromium.googlesource.com/chromium/tools/build",
2072			Path:    "recipe_bundle",
2073			Version: "git_revision:bb122cd16700ab80bfcbd494b605dd11d4f5902d",
2074		})
2075	})
2076}
2077
2078// compileWasmGMTests uses a task driver to compile the GMs and unit tests for Web Assembly (WASM).
2079// We can use the same build for both CPU and GPU tests since the latter requires the code for the
2080// former anyway.
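// The compiled output is written to ./wasm_out (see b.output("wasm_out") below); the
// corresponding run task consumes that directory via its --built_path flag.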
2081func (b *jobBuilder) compileWasmGMTests(compileName string) {
2082	b.addTask(compileName, func(b *taskBuilder) {
2083		b.attempts(1)
2084		b.usesDocker()
2085		b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
2086		b.cipd(CIPD_PKG_LUCI_AUTH)
2087		b.dep(b.buildTaskDrivers("linux", "amd64"))
2088		b.output("wasm_out")
2089		b.timeout(60 * time.Minute)
2090		b.cas(CAS_COMPILE)
2091		b.serviceAccount(b.cfg.ServiceAccountCompile)
2092		b.cache(CACHES_DOCKER...)
2093		// For now, we only have one compile mode - a GPU release mode. This should be sufficient to
2094		// run CPU, WebGL1, and WebGL2 tests. Debug mode is not needed for the waterfall because,
2095		// when using puppeteer, stacktraces from exceptions are hard to access, so we do not
2096		// even bother.
2097		b.cmd(
2098			"./compile_wasm_gm_tests",
2099			"--project_id", "skia-swarming-bots",
2100			"--task_id", specs.PLACEHOLDER_TASK_ID,
2101			"--task_name", compileName,
2102			"--out_path", "./wasm_out",
2103			"--skia_path", "./skia",
2104			"--work_path", "./cache/docker/wasm_gm",
2105		)
2106	})
2107}
2108
2109// runWasmGMTests uses a task driver to run the precompiled WASM GMs and unit tests in a
2110// browser via the run-wasm-gm-tests harness, and uploads the GM results to Gold using
2111// goldctl.
2112func (b *jobBuilder) runWasmGMTests() {
2113	compileTaskName := b.compile()
2114
2115	b.addTask(b.Name, func(b *taskBuilder) {
2116		b.attempts(1)
2117		b.usesNode()
2118		b.swarmDimensions()
2119		b.cipd(CIPD_PKG_LUCI_AUTH)
2120		b.cipd(CIPD_PKGS_GOLDCTL)
2121		b.dep(b.buildTaskDrivers("linux", "amd64"))
2122		b.dep(compileTaskName)
2123		b.timeout(60 * time.Minute)
2124		b.cas(CAS_WASM_GM)
2125		b.serviceAccount(b.cfg.ServiceAccountUploadGM)
2126		b.cmd(
2127			"./run_wasm_gm_tests",
2128			"--project_id", "skia-swarming-bots",
2129			"--task_id", specs.PLACEHOLDER_TASK_ID,
2130			"--task_name", b.Name,
2131			"--test_harness_path", "./tools/run-wasm-gm-tests",
2132			"--built_path", "./wasm_out",
2133			"--node_bin_path", "./node/node/bin",
2134			"--resource_path", "./resources",
2135			"--work_path", "./wasm_gm/work",
2136			"--gold_ctl_path", "./cipd_bin_packages/goldctl",
2137			"--gold_hashes_url", b.cfg.GoldHashesURL,
2138			"--git_commit", specs.PLACEHOLDER_REVISION,
2139			"--changelist_id", specs.PLACEHOLDER_ISSUE,
2140			"--patchset_order", specs.PLACEHOLDER_PATCHSET,
2141			"--tryjob_id", specs.PLACEHOLDER_BUILDBUCKET_BUILD_ID,
2142			// TODO(kjlubick, nifong) Make these not hard coded if we change the configs we test on.
2143			"--webgl_version", "2", // 0 means CPU; this flag controls cpu_or_gpu and extra_config
2144			"--gold_key", "alpha_type:Premul",
2145			"--gold_key", "arch:wasm",
2146			"--gold_key", "browser:Chrome",
2147			"--gold_key", "color_depth:8888",
2148			"--gold_key", "config:gles",
2149			"--gold_key", "configuration:Release",
2150			"--gold_key", "cpu_or_gpu_value:QuadroP400",
2151			"--gold_key", "model:Golo",
2152			"--gold_key", "os:Ubuntu18",
2153		)
2154	})
2155}
2156
2157// labelAndSavedOutputDir contains a Bazel label (e.g. //tests:some_test) and a //bazel-bin
2158// subdirectory that should be stored into CAS.
2159type labelAndSavedOutputDir struct {
2160	label          string
2161	savedOutputDir string
2162}
2163
2164// shorthandToLabel maps a shorthand version of a label (which can be an arbitrary string) to an
2165// absolute Bazel label or "target pattern": https://bazel.build/docs/build#specifying-build-targets
2166// We need this mapping because Buildbucket build names cannot contain / or : in them.
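// For example, a hypothetical job named "BazelBuild-hello_bazel_world_test-release-linux_x64"
// uses the shorthand "hello_bazel_world_test", which resolves to the Bazel label
// "//gm:hello_bazel_world_test" below.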
2167var shorthandToLabel = map[string]labelAndSavedOutputDir{
2168	"all_tests":                  {"//tests:linux_rbe_tests", ""},
2169	"core":                       {"//:core", ""},
2170	"cpu_8888_benchmark_test":    {"//bench:cpu_8888_test", ""},
2171	"cpu_gms":                    {"//gm:cpu_gm_tests", ""},
2172	"full_library":               {"//tools:full_build", ""},
2173	"ganesh_gl":                  {"//:ganesh_gl", ""},
2174	"hello_bazel_world_test":     {"//gm:hello_bazel_world_test", ""},
2175	"modules_canvaskit":          {"//modules/canvaskit:canvaskit", ""},
2176	"modules_canvaskit_js_tests": {"//modules/canvaskit:canvaskit_js_tests", ""},
2177	"skottie_tool_gpu":           {"//modules/skottie:skottie_tool_gpu", ""},
2178
2179	// Note: these paths are relative to the WORKSPACE in //example/external_client
2180	"decode_everything":  {"//:decode_everything", ""},
2181	"path_combiner":      {"//:path_combiner", ""},
2182	"png_decoder":        {"//:png_decoder", ""},
2183	"shape_text":         {"//:shape_text", ""},
2184	"svg_with_harfbuzz":  {"//:svg_with_harfbuzz", ""},
2185	"svg_with_primitive": {"//:svg_with_primitive", ""},
2186	"use_ganesh_gl":      {"//:use_ganesh_gl", ""},
2187	"use_ganesh_vulkan":  {"//:use_ganesh_vulkan", ""},
2188	"use_skresources":    {"//:use_skresources", ""},
2189	"write_text_to_png":  {"//:write_text_to_png", ""},
2190	"write_to_pdf":       {"//:write_to_pdf", ""},
2191
2192	// Currently there is no way to tell Bazel "only test go_test targets", so we must group them
2193	// under a test_suite.
2194	//
2195	// Alternatives:
2196	//
2197	// - Use --test_lang_filters, which currently does not work for non-native rules. See
2198	//   https://github.com/bazelbuild/bazel/issues/12618.
2199	//
2200	// - As suggested in the same GitHub issue, "bazel query 'kind(go_test, //...)'" would normally
2201	//   return the list of labels. However, this fails due to BUILD.bazel files in
2202	//   //third_party/externals and //bazel/external/vello. We could try either fixing those files
2203	//   when possible, or adding them to //.bazelignore (either permanently or temporarily inside a
2204	//   specialized task driver just for Go tests).
2205	//
2206	// - Have Gazelle add a tag to all Go tests: go_test(name = "foo_test", tag = "go", ... ). Then,
2207	//   we can use a wildcard label such as //... and tell Bazel to only test those targets with
2208	//   said tag, e.g. "bazel test //... --test_tag_filters=go"
2209	//   (https://bazel.build/reference/command-line-reference#flag--test_tag_filters). Today this
2210	//   does not work due to the third party and external BUILD.bazel files mentioned in the
2211	//   previous bullet point.
2212	"all_go_tests": {"//:all_go_tests", ""},
2213
2214	// Android tests that run on a device. We store the //bazel-bin/tests directory into CAS for use
2215	// by subsequent CI tasks.
2216	"android_math_test":               {"//tests:android_math_test", "tests"},
2217	"hello_bazel_world_android_test":  {"//gm:hello_bazel_world_android_test", "gm"},
2218	"cpu_8888_benchmark_android_test": {"//bench:cpu_8888_android_test", "bench"},
2219}
2220
2221// bazelBuild adds a task which builds the specified single-target label (//foo:bar) or
2222// multi-target label (//foo/...) using Bazel. Depending on the host we run this on, we may
2223// specify additional Bazel args to build faster. Optionally, a subset of the //bazel-bin directory
2224// will be stored into CAS for use by subsequent tasks.
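// For example (hypothetical job): the shorthand "android_math_test" resolves to the label
// "//tests:android_math_test" with savedOutputDir "tests", so the task driver is invoked with
// --bazel_label=//tests:android_math_test, --out_path=bazel_output, and --saved_output_dir=tests,
// and the //bazel-bin/tests subtree is uploaded to CAS for downstream tasks.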
2225func (b *jobBuilder) bazelBuild() {
2226	shorthand, config, host := b.parts.bazelBuildParts()
2227	labelAndSavedOutputDir, ok := shorthandToLabel[shorthand]
2228	if !ok {
2229		panic("unsupported Bazel label shorthand " + shorthand)
2230	}
2231
2232	b.addTask(b.Name, func(b *taskBuilder) {
2233		cmd := []string{
2234			"bazel_build_task_driver/bazel_build",
2235			"--project_id=skia-swarming-bots",
2236			"--task_id=" + specs.PLACEHOLDER_TASK_ID,
2237			"--task_name=" + b.Name,
2238			"--bazel_label=" + labelAndSavedOutputDir.label,
2239			"--bazel_config=" + config,
2240			"--bazel_cache_dir=" + bazelCacheDirOnGCELinux,
2241			"--workdir=.",
2242		}
2243
2244		if labelAndSavedOutputDir.savedOutputDir != "" {
2245			cmd = append(cmd,
2246				"--out_path="+OUTPUT_BAZEL,
2247				// Which //bazel-bin subdirectory to copy into the output dir (flag --out_path).
2248				"--saved_output_dir="+labelAndSavedOutputDir.savedOutputDir,
2249			)
2250		}
2251
2252		if host == "linux_x64" {
2253			b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
2254			b.usesBazel("linux_x64")
2255			// Use a built task_driver from CIPD instead of building it from scratch. The
2256			// task_driver should not need to change often, so using a CIPD version should reduce
2257			// build latency.
2258			// TODO(kjlubick) For now, this only has the linux version. We could build the task
2259			//   driver for all hosts that we support running Bazel from in this CIPD package
2260			//   if/when needed.
2261			// TODO(kjlubick,lovisolo) Could we get our task drivers built automatically
2262			// into CIPD instead of this being a manual process?
2263			b.cipd(b.MustGetCipdPackageFromAsset("bazel_build_task_driver"))
2264
2265			if labelAndSavedOutputDir.savedOutputDir != "" {
2266				// We assume that builds which require storing a subset of //bazel-bin to CAS are Android
2267				// builds. We want such builds to use RBE, and we want to download the built top-level
2268				// artifacts. Also, we need the adb_test runner to be cross-compiled to run on a Raspberry
2269				// Pi.
2270				cmd = append(cmd, "--bazel_arg=--config=linux_rbe")
2271				cmd = append(cmd, "--bazel_arg=--jobs=100")
2272				cmd = append(cmd, "--bazel_arg=--remote_download_toplevel")
2273				cmd = append(cmd, "--bazel_arg=--adb_platform=linux_arm64")
2274			} else {
2275				// We want all Linux Bazel Builds to use RBE
2276				cmd = append(cmd, "--bazel_arg=--config=for_linux_x64_with_rbe")
2277				cmd = append(cmd, "--bazel_arg=--jobs=100")
2278				cmd = append(cmd, "--bazel_arg=--remote_download_minimal")
2279			}
2280		} else {
2281			panic("unsupported Bazel host " + host)
2282		}
2283		b.cmd(cmd...)
2284
2285		b.idempotent()
2286		b.cas(CAS_BAZEL)
2287		b.attempts(1)
2288		b.serviceAccount(b.cfg.ServiceAccountCompile)
2289		if labelAndSavedOutputDir.savedOutputDir != "" {
2290			b.output(OUTPUT_BAZEL)
2291		}
2292	})
2293}
2294
2295type precompiledBazelTestKind int
2296
2297const (
2298	precompiledBazelTestNone precompiledBazelTestKind = iota
2299	precompiledBenchmarkTest
2300	precompiledGMTest
2301	precompiledUnitTest
2302)
2303
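// bazelTest generates a task which runs a Bazel-based test task driver (e.g. cpu_tests,
// canvaskit_gold, bazel_test_gm, bazel_test_benchmark, or a precompiled variant) against the
// Bazel label indicated by the shorthand in the job name.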
2304func (b *jobBuilder) bazelTest() {
2305	taskdriverName, shorthand, buildConfig, host, testConfig := b.parts.bazelTestParts()
2306	labelAndSavedOutputDir, ok := shorthandToLabel[shorthand]
2307	if !ok {
2308		panic("unsupported Bazel label shorthand " + shorthand)
2309	}
2310
2311	// Expand task driver name to keep task names short.
2312	precompiledKind := precompiledBazelTestNone
2313	if taskdriverName == "precompiled_benchmark" {
2314		taskdriverName = "bazel_test_precompiled"
2315		precompiledKind = precompiledBenchmarkTest
2316	}
2317	if taskdriverName == "precompiled_gm" {
2318		taskdriverName = "bazel_test_precompiled"
2319		precompiledKind = precompiledGMTest
2320	}
2321	if taskdriverName == "precompiled_test" {
2322		taskdriverName = "bazel_test_precompiled"
2323		precompiledKind = precompiledUnitTest
2324	}
2325	if taskdriverName == "gm" {
2326		taskdriverName = "bazel_test_gm"
2327	}
2328	if taskdriverName == "benchmark" {
2329		taskdriverName = "bazel_test_benchmark"
2330	}
2331
2332	var deviceSpecificBazelConfig *device_specific_configs.Config
2333	if testConfig != "" {
2334		if config, ok := device_specific_configs.Configs[testConfig]; ok {
2335			deviceSpecificBazelConfig = &config
2336		} else {
2337			panic(fmt.Sprintf("Unknown device-specific Bazel config: %q", testConfig))
2338		}
2339	}
2340
2341	bazelCacheDir := bazelCacheDirOnGCELinux
2342	if deviceSpecificBazelConfig != nil && deviceSpecificBazelConfig.Keys["model"] != "GCE" {
2343		bazelCacheDir = bazelCacheDirOnSkoloLinux
2344	}
2345
2346	b.addTask(b.Name, func(b *taskBuilder) {
2347		cmd := []string{"./" + taskdriverName,
2348			"--project_id=skia-swarming-bots",
2349			"--task_id=" + specs.PLACEHOLDER_TASK_ID,
2350			"--task_name=" + b.Name,
2351			"--workdir=.",
2352		}
2353
2354		switch taskdriverName {
2355		case "canvaskit_gold":
2356			cmd = append(cmd,
2357				"--bazel_label="+labelAndSavedOutputDir.label,
2358				"--bazel_config="+buildConfig,
2359				"--bazel_cache_dir="+bazelCacheDir,
2360				"--goldctl_path=./cipd_bin_packages/goldctl",
2361				"--git_commit="+specs.PLACEHOLDER_REVISION,
2362				"--changelist_id="+specs.PLACEHOLDER_ISSUE,
2363				"--patchset_order="+specs.PLACEHOLDER_PATCHSET,
2364				"--tryjob_id="+specs.PLACEHOLDER_BUILDBUCKET_BUILD_ID)
2365			b.cipd(CIPD_PKGS_GOLDCTL)
2366			switch buildConfig {
2367			case "ck_full_cpu_release_chrome":
2368				cmd = append(cmd, "--cpu_or_gpu=CPU", "--cpu_or_gpu_value=CPU",
2369					"--compilation_mode=Release", "--browser=Chrome")
2370			case "ck_full_webgl2_release_chrome":
2371				cmd = append(cmd, "--cpu_or_gpu=GPU", "--cpu_or_gpu_value=WebGL2",
2372					"--compilation_mode=Release", "--browser=Chrome")
2373			default:
2374				panic("Gold keys not specified for config " + buildConfig)
2375			}
2376
2377		case "cpu_tests":
2378			cmd = append(cmd,
2379				"--bazel_label="+labelAndSavedOutputDir.label,
2380				"--bazel_config="+buildConfig,
2381				"--bazel_cache_dir="+bazelCacheDir)
2382
2383		case "toolchain_layering_check":
2384			cmd = append(cmd,
2385				"--bazel_label="+labelAndSavedOutputDir.label,
2386				"--bazel_config="+buildConfig,
2387				"--bazel_cache_dir="+bazelCacheDir)
2388
2389		case "bazel_test_precompiled":
2390			// Compute the file name of the test based on its Bazel label. The file name will be relative to
2391			// the bazel-bin directory, which we receive a subset of as a CAS input.
2392			command := strings.ReplaceAll(labelAndSavedOutputDir.label, "//", "")
2393			command = strings.ReplaceAll(command, ":", "/")
2394			command = path.Join(OUTPUT_BAZEL, command)
2395
2396			// The test's working directory will be its runfiles directory, which simulates the behavior of
2397			// the "bazel run" command.
2398			commandWorkDir := path.Join(command+".runfiles", "skia")
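			// For example, using the "hello_bazel_world_android_test" entry above, the label
			// "//gm:hello_bazel_world_android_test" yields
			// command = "bazel_output/gm/hello_bazel_world_android_test" and
			// commandWorkDir = "bazel_output/gm/hello_bazel_world_android_test.runfiles/skia".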
2399
2400			cmd = append(cmd,
2401				"--command="+command,
2402				"--command_workdir="+commandWorkDir)
2403
2404			switch precompiledKind {
2405			case precompiledBenchmarkTest:
2406				cmd = append(cmd,
2407					"--kind=benchmark",
2408					"--git_commit="+specs.PLACEHOLDER_REVISION,
2409					"--changelist_id="+specs.PLACEHOLDER_ISSUE,
2410					"--patchset_order="+specs.PLACEHOLDER_PATCHSET)
2411
2412			case precompiledGMTest:
2413				cmd = append(cmd,
2414					"--kind=gm",
2415					"--bazel_label="+labelAndSavedOutputDir.label,
2416					"--goldctl_path=./cipd_bin_packages/goldctl",
2417					"--git_commit="+specs.PLACEHOLDER_REVISION,
2418					"--changelist_id="+specs.PLACEHOLDER_ISSUE,
2419					"--patchset_order="+specs.PLACEHOLDER_PATCHSET,
2420					"--tryjob_id="+specs.PLACEHOLDER_BUILDBUCKET_BUILD_ID)
2421				b.cipd(CIPD_PKGS_GOLDCTL)
2422
2423			case precompiledUnitTest:
2424				cmd = append(cmd, "--kind=unit")
2425
2426			default:
2427				panic(fmt.Sprintf("Unknown precompiled test kind: %v", precompiledKind))
2428			}
2429
2430		case "bazel_test_gm":
2431			cmd = append(cmd,
2432				"--bazel_label="+labelAndSavedOutputDir.label,
2433				"--bazel_config="+buildConfig,
2434				"--bazel_cache_dir="+bazelCacheDir,
2435				"--goldctl_path=./cipd_bin_packages/goldctl",
2436				"--git_commit="+specs.PLACEHOLDER_REVISION,
2437				"--changelist_id="+specs.PLACEHOLDER_ISSUE,
2438				"--patchset_order="+specs.PLACEHOLDER_PATCHSET,
2439				"--tryjob_id="+specs.PLACEHOLDER_BUILDBUCKET_BUILD_ID)
2440			b.cipd(CIPD_PKGS_GOLDCTL)
2441
2442		case "bazel_test_benchmark":
2443			// Note that these tasks run on Skolo machines.
2444			cmd = append(cmd,
2445				"--bazel_label="+labelAndSavedOutputDir.label,
2446				"--bazel_config="+buildConfig,
2447				"--bazel_cache_dir="+bazelCacheDirOnSkoloLinux,
2448				"--git_commit="+specs.PLACEHOLDER_REVISION,
2449				"--changelist_id="+specs.PLACEHOLDER_ISSUE,
2450				"--patchset_order="+specs.PLACEHOLDER_PATCHSET)
2451
2452		case "external_client":
2453			cmd = append(cmd,
2454				"--bazel_label="+labelAndSavedOutputDir.label,
2455				"--path_in_skia=example/external_client",
2456				"--bazel_cache_dir="+bazelCacheDir)
2457			b.usesDocker()
2458
2459		default:
2460			panic("Unsupported Bazel taskdriver " + taskdriverName)
2461		}
2462
2463		if deviceSpecificBazelConfig != nil {
2464			cmd = append(cmd, "--device_specific_bazel_config="+deviceSpecificBazelConfig.Name)
2465		}
2466
2467		if host == "linux_x64" {
2468			b.dep(b.buildTaskDrivers("linux", "amd64"))
2469			b.usesBazel("linux_x64")
2470		} else if host == "linux_arm64" || host == "on_rpi" {
2471			b.dep(b.buildTaskDrivers("linux", "arm64"))
2472			// The RPIs do not run Bazel directly; they run a precompiled
2473			// binary instead.
2474		} else {
2475			panic("unsupported Bazel host " + host)
2476		}
2477
2478		if taskdriverName == "bazel_test_gm" ||
2479			taskdriverName == "bazel_test_benchmark" ||
2480			taskdriverName == "bazel_test_precompiled" {
2481			if taskdriverName == "bazel_test_precompiled" {
2482				// This task precompiles the test and stores it to CAS.
2483				b.dep(fmt.Sprintf("BazelBuild-%s-%s-linux_x64", shorthand, buildConfig))
2484			}
2485
2486			// Set dimensions.
2487			if deviceSpecificBazelConfig == nil {
2488				log.Fatalf("While processing job %q: task driver %q requires a device-specific Bazel config.", b.Name, taskdriverName)
2489			}
2490			if len(deviceSpecificBazelConfig.SwarmingDimensions) == 0 {
2491				log.Fatalf("While processing job %q: device-specific Bazel config %q does not provide Swarming dimensions.", b.Name, deviceSpecificBazelConfig.Name)
2492			}
2493			var dimensions []string
2494			for name, value := range deviceSpecificBazelConfig.SwarmingDimensions {
2495				dimensions = append(dimensions, fmt.Sprintf("%s:%s", name, value))
2496			}
2497			dimensions = append(dimensions, fmt.Sprintf("pool:%s", b.cfg.Pool))
2498			sort.Strings(dimensions)
2499			b.dimension(dimensions...)
2500		} else {
2501			b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
2502		}
2503
2504		b.cmd(cmd...)
2505		b.idempotent()
2506		b.cas(CAS_BAZEL)
2507		b.attempts(1)
2508		b.serviceAccount(b.cfg.ServiceAccountCompile)
2509	})
2510}
2511