• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1// Copyright 2016 The Chromium Authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5package gen_tasks_logic
6
7/*
8	Generate the tasks.json file.
9*/
10
11import (
12	"encoding/json"
13	"fmt"
14	"io/ioutil"
15	"log"
16	"path"
17	"path/filepath"
18	"regexp"
19	"runtime"
20	"sort"
21	"strconv"
22	"strings"
23	"time"
24
25	"go.skia.org/infra/go/cas/rbe"
26	"go.skia.org/infra/go/cipd"
27	"go.skia.org/infra/task_scheduler/go/specs"
28	"go.skia.org/skia/bazel/device_specific_configs"
29)
30
const (
	// Names of CAS (RBE Content-Addressed Storage) input specs. Each value is
	// registered as a CasSpec in GenTasks and selects which subset of the
	// repository is uploaded as the input tree for a task.
	CAS_BAZEL         = "bazel"
	CAS_CANVASKIT     = "canvaskit"
	CAS_COMPILE       = "compile"
	CAS_EMPTY         = "empty" // TODO(borenet): It'd be nice if this wasn't necessary.
	CAS_LOTTIE_CI     = "lottie-ci"
	CAS_LOTTIE_WEB    = "lottie-web"
	CAS_PATHKIT       = "pathkit"
	CAS_PERF          = "perf"
	CAS_PUPPETEER     = "puppeteer"
	CAS_RUN_RECIPE    = "run-recipe"
	CAS_RECIPES       = "recipes"
	CAS_RECREATE_SKPS = "recreate-skps"
	CAS_SKOTTIE_WASM  = "skottie-wasm"
	CAS_TASK_DRIVERS  = "task-drivers"
	CAS_TEST          = "test"
	CAS_WASM_GM       = "wasm-gm"
	CAS_WHOLE_REPO    = "whole-repo"

	// Names (or name prefixes) of per-commit Housekeeper tasks which build
	// task drivers, bundle recipes, and upload ("isolate") assets to CAS for
	// consumption by downstream tasks (see ISOLATE_ASSET_MAPPING below).
	BUILD_TASK_DRIVERS_PREFIX  = "Housekeeper-PerCommit-BuildTaskDrivers"
	BUNDLE_RECIPES_NAME        = "Housekeeper-PerCommit-BundleRecipes"
	ISOLATE_GCLOUD_LINUX_NAME  = "Housekeeper-PerCommit-IsolateGCloudLinux"
	ISOLATE_SKIMAGE_NAME       = "Housekeeper-PerCommit-IsolateSkImage"
	ISOLATE_SKP_NAME           = "Housekeeper-PerCommit-IsolateSKP"
	ISOLATE_MSKP_NAME          = "Housekeeper-PerCommit-IsolateMSKP"
	ISOLATE_SVG_NAME           = "Housekeeper-PerCommit-IsolateSVG"
	ISOLATE_NDK_LINUX_NAME     = "Housekeeper-PerCommit-IsolateAndroidNDKLinux"
	ISOLATE_SDK_LINUX_NAME     = "Housekeeper-PerCommit-IsolateAndroidSDKLinux"
	ISOLATE_WIN_TOOLCHAIN_NAME = "Housekeeper-PerCommit-IsolateWinToolchain"

	// OS names/versions as used in Swarming "os" dimensions. The
	// COMPILE_TASK_NAME_* values are the OS components used when deriving
	// compile task names (see deriveCompileTaskName).
	DEBIAN_11_OS                   = "Debian-11.5"
	DEFAULT_OS_DEBIAN              = "Debian-10.10"
	DEFAULT_OS_LINUX_GCE           = "Debian-10.3"
	OLD_OS_LINUX_GCE               = "Debian-9.8"
	COMPILE_TASK_NAME_OS_LINUX     = "Debian10"
	COMPILE_TASK_NAME_OS_LINUX_OLD = "Debian9"
	DEFAULT_OS_MAC                 = "Mac-14.5"
	DEFAULT_OS_WIN_GCE             = "Windows-Server-17763"
	UBUNTU_20_04_OS                = "Ubuntu-20.04"
	UBUNTU_22_04_OS                = "Ubuntu-22.04"
	UBUNTU_24_04_OS                = "Ubuntu-24.04"

	// GCE machine types used via the "machine_type" Swarming dimension.

	// Small is a 2-core machine.
	// TODO(dogben): Would n1-standard-1 or n1-standard-2 be sufficient?
	MACHINE_TYPE_SMALL = "n1-highmem-2"
	// Medium is a 16-core machine.
	MACHINE_TYPE_MEDIUM = "n1-standard-16"
	// Large is a 64-core machine. (We use "highcpu" because we don't need more than 57GB memory for
	// any of our tasks.)
	MACHINE_TYPE_LARGE = "n1-highcpu-64"

	// Swarming output dirs.
	OUTPUT_NONE          = "output_ignored" // This will result in outputs not being isolated.
	OUTPUT_BUILD         = "build"
	OUTPUT_BUILD_NOPATCH = "build_nopatch"
	OUTPUT_TEST          = "test"
	OUTPUT_PERF          = "perf"
	OUTPUT_BAZEL         = "bazel_output"

	// Name prefix for upload jobs.
	PREFIX_UPLOAD = "Upload"

	// This will have to be kept in sync with the kMin_Version in
	// src/core/SkPicturePriv.h
	// See the comment in that file on how to find the version to use here.
	oldestSupportedSkpVersion = 293

	// bazelCacheDirOnGCELinux is the path where Bazel should write its cache on Linux GCE machines.
	// The Bazel cache can grow large (>10GB), so this should be in a partition with enough free
	// space. On Linux GCE machines, the partition mounted at /mnt/pd0 is significantly larger than
	// the partition mounted at /.
	bazelCacheDirOnGCELinux = "/mnt/pd0/bazel_cache"

	// bazelCacheDirOnSkoloLinux is like bazelCacheDirOnGCELinux for Skolo Linux machines. Unlike GCE
	// Linux machines, the partition mounted at / on Skolo Linux machines is large enough. While
	// using the default Bazel cache path would work, our Bazel task drivers demand an explicit path.
	// We store the Bazel cache at /home/chrome-bot/bazel_cache rather than on the default location
	// of /home/chrome-bot/cache/.bazel to make it obvious to someone examining a Skolo machine that
	// we are overriding the default location.
	bazelCacheDirOnSkoloLinux = "/home/chrome-bot/bazel_cache"

	// bazelCacheDirOnWindows is like bazelCacheDirOnSkoloLinux. Unlike GCE Linux machines, we only
	// have a single partition. While using the default cache path would work, our Bazel task
	// drivers demand an explicit path. We store the Bazel cache at /home/chrome-bot/bazel_cache
	// rather than on the default location of %APPDATA% to make it obvious to someone examining a
	// Skolo machine that we are overriding the default location. Note that double-escaping the
	// path separator is necessary because this string is passed to Bazel via multiple levels of
	// subprocesses.
	bazelCacheDirOnWindows = `C:\\Users\\chrome-bot\\bazel_cache`
)
121
var (
	// "Constants"

	// Named caches used by tasks.

	// CACHES_GIT holds the named Swarming caches for git checkouts.
	CACHES_GIT = []*specs.Cache{
		{
			Name: "git",
			Path: "cache/git",
		},
		{
			Name: "git_cache",
			Path: "cache/git_cache",
		},
	}
	// CACHES_GO holds the named Swarming caches for the Go build cache and GOPATH.
	CACHES_GO = []*specs.Cache{
		{
			Name: "go_cache",
			Path: "cache/go_cache",
		},
		{
			Name: "gopath",
			Path: "cache/gopath",
		},
	}
	// CACHES_WORKDIR is the persistent working-directory cache shared by
	// non-Docker tasks.
	CACHES_WORKDIR = []*specs.Cache{
		{
			Name: "work",
			Path: "cache/work",
		},
	}
	// CACHES_CCACHE is the compiler cache used by compile tasks.
	CACHES_CCACHE = []*specs.Cache{
		{
			Name: "ccache",
			Path: "cache/ccache",
		},
	}
	// The "docker" cache is used as a persistent working directory for
	// tasks which use Docker. It is not to be confused with Docker's own
	// cache, which stores images. We do not currently use a named Swarming
	// cache for the latter.
	// TODO(borenet): We should ensure that any task which uses Docker does
	// not also use the normal "work" cache, to prevent issues like
	// https://bugs.chromium.org/p/skia/issues/detail?id=9749.
	CACHES_DOCKER = []*specs.Cache{
		{
			Name: "docker",
			Path: "cache/docker",
		},
	}

	// CAS_SPEC_LOTTIE_CI is a CasSpec which includes the files needed for
	// lottie-ci.  This is global so that it can be overridden by other
	// repositories which import this file.
	CAS_SPEC_LOTTIE_CI = &specs.CasSpec{
		Root: "..",
		Paths: []string{
			"skia/.vpython3",
			"skia/infra/bots/run_recipe.py",
			"skia/infra/lottiecap",
			"skia/tools/lottie-web-perf",
			"skia/tools/lottiecap",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	}

	// CAS_SPEC_WHOLE_REPO is a CasSpec which includes the entire repo. This is
	// global so that it can be overridden by other repositories which import
	// this file.
	CAS_SPEC_WHOLE_REPO = &specs.CasSpec{
		Root:     "..",
		Paths:    []string{"skia"},
		Excludes: []string{rbe.ExcludeGitDir},
	}

	// TODO(borenet): This hacky and bad.
	CIPD_PKG_LUCI_AUTH = cipd.MustGetPackage("infra/tools/luci-auth/${platform}")

	// CIPD_PKGS_GOLDCTL is the CIPD package providing the goldctl binary.
	CIPD_PKGS_GOLDCTL = cipd.MustGetPackage("skia/tools/goldctl/${platform}")

	// CIPD_PKGS_XCODE holds the CIPD packages needed to install XCode on Mac bots.
	CIPD_PKGS_XCODE = []*specs.CipdPackage{
		// https://chromium.googlesource.com/chromium/tools/build/+/e19b7d9390e2bb438b566515b141ed2b9ed2c7c2/scripts/slave/recipe_modules/ios/api.py#317
		// This package is really just an installer for XCode.
		{
			Name: "infra/tools/mac_toolchain/${platform}",
			Path: "mac_toolchain",
			// When this is updated, also update
			// https://skia.googlesource.com/skcms.git/+/f1e2b45d18facbae2dece3aca673fe1603077846/infra/bots/gen_tasks.go#56
			// and
			// https://skia.googlesource.com/skia.git/+/main/infra/bots/recipe_modules/xcode/api.py#38
			Version: "git_revision:0cb1e51344de158f72524c384f324465aebbcef2",
		},
	}

	// These properties are required by some tasks, eg. for running
	// bot_update, but they prevent de-duplication, so they should only be
	// used where necessary.
	EXTRA_PROPS = map[string]string{
		"buildbucket_build_id": specs.PLACEHOLDER_BUILDBUCKET_BUILD_ID,
		"patch_issue":          specs.PLACEHOLDER_ISSUE_INT,
		"patch_ref":            specs.PLACEHOLDER_PATCH_REF,
		"patch_repo":           specs.PLACEHOLDER_PATCH_REPO,
		"patch_set":            specs.PLACEHOLDER_PATCHSET_INT,
		"patch_storage":        specs.PLACEHOLDER_PATCH_STORAGE,
		"repository":           specs.PLACEHOLDER_REPO,
		"revision":             specs.PLACEHOLDER_REVISION,
		"task_id":              specs.PLACEHOLDER_TASK_ID,
	}

	// ISOLATE_ASSET_MAPPING maps the name of an asset to the configuration
	// for how the CIPD package should be installed for a given task.
	ISOLATE_ASSET_MAPPING = map[string]uploadAssetCASCfg{
		"gcloud_linux": {
			uploadTaskName: ISOLATE_GCLOUD_LINUX_NAME,
			path:           "gcloud_linux",
		},
		"skimage": {
			uploadTaskName: ISOLATE_SKIMAGE_NAME,
			path:           "skimage",
		},
		"skp": {
			uploadTaskName: ISOLATE_SKP_NAME,
			path:           "skp",
		},
		"svg": {
			uploadTaskName: ISOLATE_SVG_NAME,
			path:           "svg",
		},
		"mskp": {
			uploadTaskName: ISOLATE_MSKP_NAME,
			path:           "mskp",
		},
		"android_ndk_linux": {
			uploadTaskName: ISOLATE_NDK_LINUX_NAME,
			path:           "android_ndk_linux",
		},
		"android_sdk_linux": {
			uploadTaskName: ISOLATE_SDK_LINUX_NAME,
			path:           "android_sdk_linux",
		},
		"win_toolchain": {
			alwaysIsolate:  true,
			uploadTaskName: ISOLATE_WIN_TOOLCHAIN_NAME,
			path:           "win_toolchain",
		},
	}

	// Set dontReduceOpsTaskSplitting option on these models.
	DONT_REDUCE_OPS_TASK_SPLITTING_MODELS = []string{
		"NUC5PPYH",
	}
)
273
// Config contains general configuration information common to all jobs
// generated for a repository. It is typically loaded from a cfg.json file
// which is a sibling of the calling gen_tasks.go file (see LoadConfig).
type Config struct {
	// Directory containing assets. Assumed to be relative to the directory
	// which contains the calling gen_tasks.go file. If not specified, uses
	// the infra/bots/assets from this repo.
	AssetsDir string `json:"assets_dir"`

	// Path to the builder name schema JSON file. Assumed to be relative to
	// the directory which contains the calling gen_tasks.go file. If not
	// specified, uses infra/bots/recipe_modules/builder_name_schema/builder_name_schema.json
	// from this repo.
	BuilderNameSchemaFile string `json:"builder_name_schema"`

	// URL of the Skia Gold known hashes endpoint.
	GoldHashesURL string `json:"gold_hashes_url"`

	// GCS bucket used for GM results.
	GsBucketGm string `json:"gs_bucket_gm"`

	// GCS bucket used for Nanobench results.
	GsBucketNano string `json:"gs_bucket_nano"`

	// Optional function which returns a bot ID for internal devices.
	// Excluded from JSON because functions cannot be (de)serialized.
	InternalHardwareLabel func(parts map[string]string) *int `json:"-"`

	// List of task names for which we'll never upload results.
	NoUpload []string `json:"no_upload"`

	// PathToSkia is the relative path from the root of the current checkout to
	// the root of the Skia checkout.
	PathToSkia string `json:"path_to_skia"`

	// Swarming pool used for triggering tasks.
	Pool string `json:"pool"`

	// LUCI project associated with this repo.
	Project string `json:"project"`

	// Service accounts used by the various job types.
	ServiceAccountCanary       string `json:"service_account_canary"`
	ServiceAccountCompile      string `json:"service_account_compile"`
	ServiceAccountHousekeeper  string `json:"service_account_housekeeper"`
	ServiceAccountRecreateSKPs string `json:"service_account_recreate_skps"`
	ServiceAccountUploadBinary string `json:"service_account_upload_binary"`
	ServiceAccountUploadGM     string `json:"service_account_upload_gm"`
	ServiceAccountUploadNano   string `json:"service_account_upload_nano"`

	// Optional override function which derives Swarming bot dimensions
	// from parts of task names. Returning nil falls back to the defaults
	// (see taskBuilder.swarmDimensions).
	SwarmDimensions func(parts map[string]string) []string `json:"-"`
}
325
// JobInfo is the type of each entry in the jobs.json file.
type JobInfo struct {
	// The name of the job.
	Name string `json:"name"`

	// The optional CQ config of this job. If the CQ config is missing then the
	// job will not be added to the CQ of this branch.
	CQConfig *specs.CommitQueueJobConfig `json:"cq_config,omitempty"`
}
335
// LoadConfig loads the Config from a cfg.json file which is the sibling of the
// calling gen_tasks.go file. Any read or parse failure is fatal (see LoadJson).
//
// NOTE: getCallingDirName inspects the call stack at a fixed depth, so
// LoadConfig must be called directly from the gen_tasks.go file; wrapping it
// in another helper would resolve the wrong directory.
func LoadConfig() *Config {
	cfgDir := getCallingDirName()
	var cfg Config
	LoadJson(filepath.Join(cfgDir, "cfg.json"), &cfg)
	return &cfg
}
344
345// CheckoutRoot is a wrapper around specs.GetCheckoutRoot which prevents the
346// caller from needing a dependency on the specs package.
347func CheckoutRoot() string {
348	root, err := specs.GetCheckoutRoot()
349	if err != nil {
350		log.Fatal(err)
351	}
352	return root
353}
354
355// LoadJson loads JSON from the given file and unmarshals it into the given
356// destination.
357func LoadJson(filename string, dest interface{}) {
358	b, err := ioutil.ReadFile(filename)
359	if err != nil {
360		log.Fatalf("Unable to read %q: %s", filename, err)
361	}
362	if err := json.Unmarshal(b, dest); err != nil {
363		log.Fatalf("Unable to parse %q: %s", filename, err)
364	}
365}
366
// In returns true if |s| is *in* |a| slice.
// TODO(borenet): This is copied from go.skia.org/infra/go/util to avoid the
// huge set of additional dependencies added by that package.
func In(s string, a []string) bool {
	for i := range a {
		if a[i] == s {
			return true
		}
	}
	return false
}
378
// GenTasks regenerates the tasks.json file. Loads the job list from a jobs.json
// file which is the sibling of the calling gen_tasks.go file. If cfg is nil, it
// is similarly loaded from a cfg.json file which is the sibling of the calling
// gen_tasks.go file.
//
// NOTE: getCallingDirName inspects the call stack at a fixed depth, so GenTasks
// must be called directly from the gen_tasks.go file.
func GenTasks(cfg *Config) {
	b := specs.MustNewTasksCfgBuilder()

	// Find the paths to the infra/bots directories in this repo and the
	// repo of the calling file.
	relpathTargetDir := getThisDirName()
	relpathBaseDir := getCallingDirName()

	// Parse jobs.json.
	var jobsWithInfo []*JobInfo
	LoadJson(filepath.Join(relpathBaseDir, "jobs.json"), &jobsWithInfo)
	// Create a slice with only job names.
	jobs := []string{}
	for _, j := range jobsWithInfo {
		jobs = append(jobs, j.Name)
	}

	// Fall back to the sibling cfg.json when the caller did not supply a
	// Config explicitly.
	if cfg == nil {
		cfg = new(Config)
		LoadJson(filepath.Join(relpathBaseDir, "cfg.json"), cfg)
	}

	// Create the JobNameSchema. The calling repo's schema file (if configured)
	// takes precedence over the one in this repo.
	builderNameSchemaFile := filepath.Join(relpathTargetDir, "recipe_modules", "builder_name_schema", "builder_name_schema.json")
	if cfg.BuilderNameSchemaFile != "" {
		builderNameSchemaFile = filepath.Join(relpathBaseDir, cfg.BuilderNameSchemaFile)
	}
	schema, err := NewJobNameSchema(builderNameSchemaFile)
	if err != nil {
		log.Fatal(err)
	}

	// Set the assets dir. As above, the calling repo's assets dir (if
	// configured) takes precedence over the one in this repo.
	assetsDir := filepath.Join(relpathTargetDir, "assets")
	if cfg.AssetsDir != "" {
		assetsDir = filepath.Join(relpathBaseDir, cfg.AssetsDir)
	}
	b.SetAssetsDir(assetsDir)

	// Create Tasks and Jobs.
	builder := &builder{
		TasksCfgBuilder: b,
		cfg:             cfg,
		jobNameSchema:   schema,
		jobs:            jobs,
	}
	for _, j := range jobsWithInfo {
		jb := newJobBuilder(builder, j.Name)
		jb.genTasksForJob()
		jb.finish()

		// Add the CQ spec if it is a CQ job.
		if j.CQConfig != nil {
			b.MustAddCQJob(j.Name, j.CQConfig)
		}
	}

	// Create CasSpecs. These define the input trees (uploaded to RBE-CAS)
	// referenced by the CAS_* constants at the top of this file.
	b.MustAddCasSpec(CAS_BAZEL, &specs.CasSpec{
		Root: "..",
		Paths: []string{
			// Source code.
			"skia/example",
			"skia/experimental/rust_png",
			"skia/include",
			"skia/modules",
			"skia/src",
			"skia/tests",
			"skia/third_party",
			"skia/tools",
			// Needed for tests.
			"skia/bench", // Needed to run benchmark tests with Bazel.
			"skia/dm",    // Needed to run tests with Bazel.
			"skia/gm",    // Needed to run GMs with Bazel.
			"skia/gn",    // Some Python scripts still live here.
			"skia/resources",
			"skia/package.json",
			"skia/package-lock.json",
			"skia/DEPS",   // Needed to check generation.
			"skia/infra",  // Many Go tests and Bazel tools live here.
			"skia/go.mod", // Needed by Gazelle.
			"skia/go.sum", // Needed by Gazelle.
			// Needed to run Bazel.
			"skia/.bazelignore",
			"skia/.bazelrc",
			"skia/.bazelversion",
			"skia/BUILD.bazel",
			"skia/LICENSE", // Referred to by default_applicable_licenses
			"skia/WORKSPACE.bazel",
			"skia/bazel",
			"skia/go_repositories.bzl",
			"skia/requirements.txt",
			"skia/toolchain",
		},
		Excludes: []string{
			rbe.ExcludeGitDir,
			"skia/third_party/externals",
		},
	})
	b.MustAddCasSpec(CAS_CANVASKIT, &specs.CasSpec{
		Root: "..",
		Paths: []string{
			"skia/.vpython3",
			"skia/infra/bots/run_recipe.py",
			"skia/infra/canvaskit",
			"skia/modules/canvaskit",
			"skia/modules/pathkit/perf/perfReporter.js",
			"skia/modules/pathkit/tests/testReporter.js",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	})
	b.MustAddCasSpec(CAS_EMPTY, specs.EmptyCasSpec)
	b.MustAddCasSpec(CAS_LOTTIE_CI, CAS_SPEC_LOTTIE_CI)
	b.MustAddCasSpec(CAS_LOTTIE_WEB, &specs.CasSpec{
		Root: "..",
		Paths: []string{
			"skia/.vpython3",
			"skia/infra/bots/run_recipe.py",
			"skia/tools/lottie-web-perf",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	})
	b.MustAddCasSpec(CAS_PATHKIT, &specs.CasSpec{
		Root: "..",
		Paths: []string{
			"skia/.vpython3",
			"skia/infra/bots/run_recipe.py",
			"skia/infra/pathkit",
			"skia/modules/pathkit",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	})
	b.MustAddCasSpec(CAS_PERF, &specs.CasSpec{
		Root: "..",
		Paths: []string{
			"skia/.vpython3",
			"skia/infra/bots/assets",
			"skia/infra/bots/run_recipe.py",
			"skia/platform_tools/ios/bin",
			"skia/resources",
			"skia/tools/valgrind.supp",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	})
	b.MustAddCasSpec(CAS_PUPPETEER, &specs.CasSpec{
		Root: "../skia", // Needed for other repos.
		Paths: []string{
			".vpython3",
			"tools/perf-canvaskit-puppeteer",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	})
	b.MustAddCasSpec(CAS_RECIPES, &specs.CasSpec{
		Root: "..",
		Paths: []string{
			"skia/.vpython3",
			"skia/infra/config/recipes.cfg",
			"skia/infra/bots/bundle_recipes.sh",
			"skia/infra/bots/README.recipes.md",
			"skia/infra/bots/recipe_modules",
			"skia/infra/bots/recipes",
			"skia/infra/bots/recipes.py",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	})
	b.MustAddCasSpec(CAS_RUN_RECIPE, &specs.CasSpec{
		Root: "..",
		Paths: []string{
			"skia/.vpython3",
			"skia/infra/bots/run_recipe.py",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	})
	b.MustAddCasSpec(CAS_SKOTTIE_WASM, &specs.CasSpec{
		Root: "..",
		Paths: []string{
			"skia/.vpython3",
			"skia/infra/bots/run_recipe.py",
			"skia/tools/skottie-wasm-perf",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	})
	b.MustAddCasSpec(CAS_TASK_DRIVERS, &specs.CasSpec{
		Root: "..",
		Paths: []string{
			// Deps needed to use Bazel
			"skia/.bazelrc",
			"skia/.bazelversion",
			"skia/BUILD.bazel",
			"skia/LICENSE",
			"skia/WORKSPACE.bazel",
			"skia/bazel",
			"skia/go_repositories.bzl",
			"skia/include/config", // There's a WORKSPACE.bazel in here
			"skia/requirements.txt",
			"skia/toolchain",
			// TODO(kjlubick, lukasza) remove after rust's png crate is updated
			// and we don't need the patches anymore
			"skia/experimental/rust_png",
			// Actually needed to build the task drivers
			"skia/infra/bots/BUILD.bazel",
			"skia/infra/bots/build_task_drivers.sh",
			"skia/infra/bots/task_drivers",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	})
	b.MustAddCasSpec(CAS_TEST, &specs.CasSpec{
		Root: "..",
		Paths: []string{
			"skia/.vpython3",
			"skia/infra/bots/assets",
			"skia/infra/bots/run_recipe.py",
			"skia/platform_tools/ios/bin",
			"skia/resources",
			"skia/tools/valgrind.supp",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	})
	b.MustAddCasSpec(CAS_WASM_GM, &specs.CasSpec{
		Root: "../skia", // Needed for other repos.
		Paths: []string{
			".vpython3",
			"resources",
			"tools/run-wasm-gm-tests",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	})
	b.MustAddCasSpec(CAS_WHOLE_REPO, CAS_SPEC_WHOLE_REPO)
	b.MustAddCasSpec(CAS_RECREATE_SKPS, &specs.CasSpec{
		Root: "..",
		Paths: []string{
			"skia/.vpython3",
			"skia/DEPS",
			"skia/bin/fetch-sk",
			"skia/infra/bots/assets/skp",
			"skia/infra/bots/utils.py",
			"skia/tools/skp",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	})
	// The compile CasSpecs are derived programmatically from the config.
	generateCompileCAS(b, cfg)

	// Validate everything and write out tasks.json.
	builder.MustFinish()
}
627
// getThisDirName returns the infra/bots directory which is an ancestor of this
// file.
func getThisDirName() string {
	// runtime.Caller(0) reports the source path of this very file.
	_, file, _, ok := runtime.Caller(0)
	if !ok {
		log.Fatal("Unable to find path to current file.")
	}
	// This file lives one directory below infra/bots, so strip two path
	// components (the file name and its immediate parent directory).
	return filepath.Dir(filepath.Dir(file))
}
637
// getCallingDirName returns the infra/bots directory which is an ancestor of
// the calling gen_tasks.go file. WARNING: assumes that the calling gen_tasks.go
// file appears two steps up the stack; do not call from a function which is not
// directly called by gen_tasks.go.
func getCallingDirName() string {
	// Skip two frames: this function and its direct caller, landing on the
	// gen_tasks.go file which invoked that caller.
	_, caller, _, ok := runtime.Caller(2)
	if !ok {
		log.Fatal("Unable to find path to calling file.")
	}
	return filepath.Dir(caller)
}
649
// builder is a wrapper for specs.TasksCfgBuilder which carries the
// repo-specific configuration, the job-name schema, and the full job list
// alongside the underlying builder.
type builder struct {
	*specs.TasksCfgBuilder
	cfg           *Config        // Repo-specific configuration (from cfg.json).
	jobNameSchema *JobNameSchema // Schema for parsing/assembling job names.
	jobs          []string       // Names of all jobs (from jobs.json).
}
657
// marshalJson encodes the given data as JSON and fixes escaping of '<' which Go
// does by default. Encoding failures are fatal.
func marshalJson(data interface{}) string {
	encoded, err := json.Marshal(data)
	if err != nil {
		log.Fatal(err)
	}
	// json.Marshal escapes '<' to \u003c for HTML safety; undo that since this
	// output is not embedded in HTML.
	return strings.ReplaceAll(string(encoded), "\\u003c", "<")
}
667
668// kitchenTaskNoBundle sets up the task to run a recipe via Kitchen, without the
669// recipe bundle.
670func (b *taskBuilder) kitchenTaskNoBundle(recipe string, outputDir string) {
671	b.usesLUCIAuth()
672	b.cipd(cipd.MustGetPackage("infra/tools/luci/kitchen/${platform}"))
673	b.env("RECIPES_USE_PY3", "true")
674	b.envPrefixes("VPYTHON_DEFAULT_SPEC", "skia/.vpython3")
675	b.usesPython()
676	b.recipeProp("swarm_out_dir", outputDir)
677	if outputDir != OUTPUT_NONE {
678		b.output(outputDir)
679	}
680	const python = "cipd_bin_packages/vpython3${EXECUTABLE_SUFFIX}"
681	b.cmd(python, "-u", "skia/infra/bots/run_recipe.py", "${ISOLATED_OUTDIR}", recipe, b.getRecipeProps(), b.cfg.Project)
682	// Most recipes want this isolate; they can override if necessary.
683	b.cas(CAS_RUN_RECIPE)
684	b.timeout(time.Hour)
685	b.Spec.ExtraTags = map[string]string{
686		"log_location": fmt.Sprintf("logdog://logs.chromium.org/%s/${SWARMING_TASK_ID}/+/annotations", b.cfg.Project),
687	}
688
689	// Attempts.
690	if !b.role("Build", "Upload") && b.extraConfig("ASAN", "HWASAN", "MSAN", "TSAN", "Valgrind") {
691		// Sanitizers often find non-deterministic issues that retries would hide.
692		b.attempts(1)
693	} else {
694		// Retry by default to hide random bot/hardware failures.
695		b.attempts(2)
696	}
697}
698
// kitchenTask sets up the task to run a recipe via Kitchen. Unlike
// kitchenTaskNoBundle, it also adds a dependency on the bundled recipes so
// the recipe code is available to the task.
func (b *taskBuilder) kitchenTask(recipe string, outputDir string) {
	b.kitchenTaskNoBundle(recipe, outputDir)
	b.dep(b.bundleRecipes())
}
704
705// internalHardwareLabel returns the internal ID for the bot, if any.
706func (b *taskBuilder) internalHardwareLabel() *int {
707	if b.cfg.InternalHardwareLabel != nil {
708		return b.cfg.InternalHardwareLabel(b.parts)
709	}
710	return nil
711}
712
713// linuxGceDimensions adds the Swarming bot dimensions for Linux GCE instances.
714func (b *taskBuilder) linuxGceDimensions(machineType string) {
715	b.dimension(
716		// Specify CPU to avoid running builds on bots with a more unique CPU.
717		"cpu:x86-64-Haswell_GCE",
718		"gpu:none",
719		// Currently all Linux GCE tasks run on 16-CPU machines.
720		fmt.Sprintf("machine_type:%s", machineType),
721		fmt.Sprintf("os:%s", DEFAULT_OS_LINUX_GCE),
722		fmt.Sprintf("pool:%s", b.cfg.Pool),
723	)
724}
725
// codesizeTaskNameRegexp captures the "CodeSize-<binary name>-" prefix of a CodeSize task name.
// It is used by deriveCompileTaskName to map a CodeSize task to its Build task.
var codesizeTaskNameRegexp = regexp.MustCompile("^CodeSize-[a-zA-Z0-9_]+-")
728
// deriveCompileTaskName returns the name of a compile task based on the given
// job name. Test/Perf jobs map to a Build job with runtime-only extra_configs
// stripped and the OS rewritten to the OS on which the compile actually runs;
// BuildStats and CodeSize jobs map to their corresponding Build job; all other
// jobs map to themselves.
func (b *jobBuilder) deriveCompileTaskName() string {
	if b.role("Test", "Perf") {
		task_os := b.parts["os"]
		ec := []string{}
		if val := b.parts["extra_config"]; val != "" {
			ec = strings.Split(val, "_")
			// These extra_config parts affect how the task runs, but not what
			// needs to be compiled, so they are removed from the derived
			// compile task name (enabling de-duplication of compile tasks).
			ignore := []string{
				"AbandonGpuContext", "PreAbandonGpuContext", "Valgrind",
				"FailFlushTimeCallbacks", "ReleaseAndAbandonGpuContext",
				"NativeFonts", "GDI", "NoGPUThreads", "DDL1", "DDL3",
				"DDLRecord", "BonusConfigs", "ColorSpaces", "GL",
				"SkottieTracing", "SkottieWASM", "GpuTess", "DMSAAStats", "Docker", "PDF",
				"Puppeteer", "SkottieFrames", "RenderSKP", "CanvasPerf", "AllPathsVolatile",
				"WebGL2", "i5", "OldestSupportedSkpVersion", "FakeWGPU", "TintIR", "Protected",
				"AndroidNDKFonts", "Upload", "TestPrecompile"}
			keep := make([]string, 0, len(ec))
			for _, part := range ec {
				if !In(part, ignore) {
					keep = append(keep, part)
				}
			}
			ec = keep
		}
		// Map the test/perf OS to the OS used to compile for it. NOTE: the
		// order of these branches matters; e.g. the generic Ubuntu/Debian
		// match must come after the more specific Debian11 match.
		if b.matchOs("Android") {
			if !In("Android", ec) {
				ec = append([]string{"Android"}, ec...)
			}
			task_os = COMPILE_TASK_NAME_OS_LINUX
		} else if b.os("ChromeOS") {
			ec = append([]string{"Chromebook", "GLES"}, ec...)
			task_os = COMPILE_TASK_NAME_OS_LINUX
		} else if b.matchOs("iOS") {
			// iOS builds are compiled on Mac; keep the iOS version as an
			// extra_config component.
			ec = append([]string{task_os}, ec...)
			if b.parts["compiler"] == "Xcode11.4.1" {
				task_os = "Mac10.15.7"
			} else {
				task_os = "Mac"
			}
		} else if b.matchOs("Win") {
			task_os = "Win"
		} else if b.compiler("GCC") {
			// GCC compiles are now on a Docker container. We use the same OS and
			// version to compile as to test.
			ec = append(ec, "Docker")
		} else if b.matchOs("Debian11") {
			// We compile using the Debian11 machines in the skolo.
			task_os = "Debian11"
		} else if b.matchOs("Ubuntu", "Debian") {
			task_os = COMPILE_TASK_NAME_OS_LINUX
		} else if b.matchOs("Mac") {
			task_os = "Mac"
		}
		jobNameMap := map[string]string{
			"role":          "Build",
			"os":            task_os,
			"compiler":      b.parts["compiler"],
			"target_arch":   b.parts["arch"],
			"configuration": b.parts["configuration"],
		}
		if b.extraConfig("PathKit") {
			ec = []string{"PathKit"}
			// We prefer to compile this in the cloud because we have more resources there
			jobNameMap["os"] = "Debian10"
		}
		if b.extraConfig("CanvasKit", "SkottieWASM", "Puppeteer") {
			if b.cpu() {
				ec = []string{"CanvasKit_CPU"}
			} else {
				ec = []string{"CanvasKit"}
			}
			// We prefer to compile this in the cloud because we have more resources there
			jobNameMap["os"] = "Debian10"
		}
		if len(ec) > 0 {
			jobNameMap["extra_config"] = strings.Join(ec, "_")
		}
		name, err := b.jobNameSchema.MakeJobName(jobNameMap)
		if err != nil {
			log.Fatal(err)
		}
		return name
	} else if b.role("BuildStats") {
		return strings.Replace(b.Name, "BuildStats", "Build", 1)
	} else if b.role("CodeSize") {
		// E.g. "CodeSize-dm-..." -> "Build-...".
		return codesizeTaskNameRegexp.ReplaceAllString(b.Name, "Build-")
	} else {
		return b.Name
	}
}
820
821// swarmDimensions generates swarming bot dimensions for the given task.
822func (b *taskBuilder) swarmDimensions() {
823	if b.cfg.SwarmDimensions != nil {
824		dims := b.cfg.SwarmDimensions(b.parts)
825		if dims != nil {
826			b.dimension(dims...)
827			return
828		}
829	}
830	b.defaultSwarmDimensions()
831}
832
// androidDeviceInfos maps Android models (as in the "model" part of a task) to
// the device_type and device_os Swarming dimensions, in that order.
var androidDeviceInfos = map[string][]string{
	"AndroidOne":      {"sprout", "MOB30Q"},
	"GalaxyS7_G930FD": {"herolte", "R16NW_G930FXXS2ERH6"}, // This is Oreo.
	"GalaxyS20":       {"exynos990", "QP1A.190711.020"},
	"GalaxyS24":       {"pineapple", "UP1A.231005.007"},
	"JioNext":         {"msm8937", "RKQ1.210602.002"},
	"Mokey":           {"mokey", "UDC_11161052"},
	"MokeyGo32":       {"mokey_go32", "UQ1A.240105.003.A1_11159138"},
	"MotoG73":         {"devonf", "U1TNS34.82-12-7-6"},
	"Nexus5":          {"hammerhead", "M4B30Z_3437181"},
	"Nexus7":          {"grouper", "LMY47V_1836172"}, // 2012 Nexus 7
	"P30":             {"HWELE", "HUAWEIELE-L29"},
	"Pixel3a":         {"sargo", "QP1A.190711.020"},
	"Pixel4":          {"flame", "RPB2.200611.009"},       // R Preview
	"Pixel4a":         {"sunfish", "AOSP.MASTER_7819821"}, // Pixel4a flashed with an Android HWASan build.
	"Pixel4XL":        {"coral", "QD1A.190821.011.C4"},
	"Pixel5":          {"redfin", "RD1A.200810.022.A4"},
	"Pixel6":          {"oriole", "SD1A.210817.037"},
	"Pixel7":          {"panther", "AP4A.241205.013"},
	"Pixel7Pro":       {"cheetah", "TD1A.221105.002"},
	"Pixel9":          {"tokay", "AP4A.241205.013"},
	"TecnoSpark3Pro":  {"TECNO-KB8", "PPR1.180610.011"},
	"Wembley":         {"wembley", "SP2A.220505.008"},
}
859
// defaultSwarmDimensions generates default swarming bot dimensions for the given task.
func (b *taskBuilder) defaultSwarmDimensions() {
	// Dimensions are accumulated in this map and emitted in sorted
	// "key:value" form via b.dimension() at the bottom of the function.
	d := map[string]string{
		"pool": b.cfg.Pool,
	}
	if os, ok := b.parts["os"]; ok {
		// Translate the "os" part of the task name into a Swarming "os"
		// dimension value.
		d["os"], ok = map[string]string{
			"Android":     "Android",
			"Android12":   "Android",
			"ChromeOS":    "ChromeOS",
			"Debian9":     DEFAULT_OS_LINUX_GCE, // Runs in Deb9 Docker.
			"Debian10":    DEFAULT_OS_LINUX_GCE,
			"Debian11":    DEBIAN_11_OS,
			"Mac":         DEFAULT_OS_MAC,
			"Mac10.15.1":  "Mac-10.15.1",
			"Mac10.15.7":  "Mac-10.15.7",
			"Mac12":       "Mac-12",
			"Mac13":       "Mac-13",
			"Mac14":       "Mac-14.7", // Builds run on 14.5, tests on 14.7.
			"Mac15":       "Mac-15.3",
			"Mokey":       "Android",
			"MokeyGo32":   "Android",
			"Ubuntu18":    "Ubuntu-18.04",
			"Ubuntu20.04": UBUNTU_20_04_OS,
			"Ubuntu22.04": UBUNTU_22_04_OS,
			"Ubuntu24.04": UBUNTU_24_04_OS,
			"Win":         DEFAULT_OS_WIN_GCE,
			"Win10":       "Windows-10-19045",
			"Win11":       "Windows-11-26100.1742",
			"Win2019":     DEFAULT_OS_WIN_GCE,
			"iOS":         "iOS-13.3.1",
			"iOS18":       "iOS-18.2.1",
		}[os]
		if !ok {
			log.Fatalf("Entry %q not found in OS mapping.", os)
		}
		// Special cases which override the generic mapping above.
		if os == "Debian11" && b.extraConfig("Docker") {
			d["os"] = DEFAULT_OS_LINUX_GCE
		}
		if os == "Win10" && b.parts["model"] == "Golo" {
			// ChOps-owned machines have Windows 10 22H2.
			d["os"] = "Windows-10-19045"
		}
		if strings.Contains(os, "iOS") {
			d["pool"] = "SkiaIOS"
		}
		if b.parts["model"] == "iPadPro" {
			d["os"] = "iOS-13.6"
		}
	} else {
		// Tasks with no "os" part run on the default Linux machines.
		d["os"] = DEFAULT_OS_DEBIAN
	}
	if b.role("Test", "Perf") {
		if b.os("Android") {
			// For Android, the device type is a better dimension
			// than CPU or GPU.
			deviceInfo, ok := androidDeviceInfos[b.parts["model"]]
			if !ok {
				log.Fatalf("Entry %q not found in Android mapping.", b.parts["model"])
			}
			d["device_type"] = deviceInfo[0]
			d["device_os"] = deviceInfo[1]

			// Tests using Android's HWAddress Sanitizer require an HWASan build of Android.
			// See https://developer.android.com/ndk/guides/hwasan.
			if b.extraConfig("HWASAN") {
				d["android_hwasan_build"] = "1"
			}
		} else if b.os("Android12") {
			// For Android, the device type is a better dimension
			// than CPU or GPU.
			deviceInfo, ok := map[string][]string{
				"Pixel5": {"redfin", "SP2A.220305.012"},
			}[b.parts["model"]]
			if !ok {
				log.Fatalf("Entry %q not found in Android mapping.", b.parts["model"])
			}
			d["device_type"] = deviceInfo[0]
			d["device_os"] = deviceInfo[1]

			// Tests using Android's HWAddress Sanitizer require an HWASan build of Android.
			// See https://developer.android.com/ndk/guides/hwasan.
			if b.extraConfig("HWASAN") {
				d["android_hwasan_build"] = "1"
			}
		} else if b.os("ChromeOS") {
			// TODO(borenet): Make this mapping non-optional after removing the
			// old devices in the Skia lab.
			deviceOS, ok := map[string]string{
				"Cherry":   "16002.30.0",
				"Guybrush": "16002.27.0",
				"Octopus":  "16002.21.0",
				"Trogdor":  "16002.26.0",
			}[b.parts["model"]]
			if ok {
				d["device_os"] = deviceOS
				d["device_type"] = strings.ToLower(b.parts["model"])
			}
		} else if b.matchOs("iOS") {
			// Map the model name to the Apple hardware identifier used as the
			// Swarming "device" dimension.
			device, ok := map[string]string{
				"iPadMini4":   "iPad5,1",
				"iPhone15Pro": "iPhone16,1",
				"iPhone7":     "iPhone9,1",
				"iPhone8":     "iPhone10,1",
				"iPadPro":     "iPad6,3",
			}[b.parts["model"]]
			if !ok {
				log.Fatalf("Entry %q not found in iOS mapping.", b.parts["model"])
			}
			d["device"] = device
		} else if b.cpu() || b.extraConfig("CanvasKit", "Docker", "SwiftShader") {
			// CPU-bound jobs: map cpu_or_gpu_value, then model, to a Swarming
			// "cpu" dimension.
			modelMapping, ok := map[string]map[string]string{
				"AppleM1": {
					"MacMini9.1": "arm64-64-Apple_M1",
				},
				"AppleM3": {
					"MacBookPro15.3": "arm64-64-Apple_M3",
				},
				"AppleIntel": {
					"MacBookPro15.1": "x86-64",
					"MacBookPro16.2": "x86-64",
				},
				"AVX": {
					"VMware7.1": "x86-64",
				},
				"AVX2": {
					"GCE":            "x86-64-Haswell_GCE",
					"Golo":           "x86-64-E3-1230_v5",
					"MacBookAir7.2":  "x86-64-i5-5350U",
					"MacBookPro11.5": "x86-64-i7-4870HQ",
					"MacMini7.1":     "x86-64-i5-4278U",
					"MacMini8.1":     "x86-64-i7-8700B",
					"NUC5i7RYH":      "x86-64-i7-5557U",
					"NUC9i7QN":       "x86-64-i7-9750H",
					"NUC11TZi5":      "x86-64-i5-1135G7",
				},
				"AVX512": {
					"GCE":  "x86-64-Skylake_GCE",
					"Golo": "Intel64_Family_6_Model_85_Stepping_7__GenuineIntel",
				},
				"Rome": {
					"GCE": "x86-64-AMD_Rome_GCE",
				},
				"SwiftShader": {
					"GCE": "x86-64-Haswell_GCE",
				},
			}[b.parts["cpu_or_gpu_value"]]
			if !ok {
				log.Fatalf("Entry %q not found in CPU mapping.", b.parts["cpu_or_gpu_value"])
			}
			cpu, ok := modelMapping[b.parts["model"]]
			if !ok {
				log.Fatalf("Entry %q not found in %q model mapping.", b.parts["model"], b.parts["cpu_or_gpu_value"])
			}
			d["cpu"] = cpu
			if b.model("GCE") && b.matchOs("Debian") {
				d["os"] = DEFAULT_OS_LINUX_GCE
			}
			if b.model("GCE") && d["cpu"] == "x86-64-Haswell_GCE" {
				d["machine_type"] = MACHINE_TYPE_MEDIUM
			}
		} else {
			// It's a GPU job.
			if b.matchOs("Win") {
				// Windows GPU values encode "vendor:device-driver_version".
				gpu, ok := map[string]string{
					"GTX1660":       "10de:2184-31.0.15.4601",
					"IntelHD4400":   "8086:0a16-20.19.15.4963",
					"IntelIris540":  "8086:1926-31.0.101.2115",
					"IntelIris6100": "8086:162b-20.19.15.4963",
					"IntelIris655":  "8086:3ea5-26.20.100.7463",
					"IntelIrisXe":   "8086:9a49-32.0.101.5972",
					"RadeonHD7770":  "1002:683d-26.20.13031.18002",
					"RadeonR9M470X": "1002:6646-26.20.13031.18002",
					"QuadroP400":    "10de:1cb3-31.0.15.5222",
					"RadeonVega6":   "1002:1636-31.0.14057.5006",
					"RadeonVega8":   "1002:1638-31.0.21916.2",
					"RTX3060":       "10de:2489-32.0.15.7270",
				}[b.parts["cpu_or_gpu_value"]]
				if !ok {
					log.Fatalf("Entry %q not found in Win GPU mapping.", b.parts["cpu_or_gpu_value"])
				}
				// TODO(borenet): Remove this block once these machines are all
				// migrated.
				if b.os("Win10") && b.parts["cpu_or_gpu_value"] == "RTX3060" {
					gpu = "10de:2489-32.0.15.6094"
				}
				d["gpu"] = gpu
			} else if b.isLinux() {
				gpu, ok := map[string]string{
					// Intel drivers come from CIPD, so no need to specify the version here.
					"IntelHD2000":  "8086:0102",
					"IntelHD405":   "8086:22b1",
					"IntelIris640": "8086:5926",
					"QuadroP400":   "10de:1cb3-510.60.02",
					"RTX3060":      "10de:2489-470.182.03",
					"IntelIrisXe":  "8086:9a49",
					"RadeonVega6":  "1002:1636",
					"RadeonVega8":  "1002:1638-23.2.1",
				}[b.parts["cpu_or_gpu_value"]]
				if !ok {
					log.Fatalf("Entry %q not found in Linux GPU mapping.", b.parts["cpu_or_gpu_value"])
				}
				d["gpu"] = gpu

				if b.matchOs("Debian11") {
					d["os"] = DEBIAN_11_OS
				} else if b.matchOs("Debian") {
					// The Debian10 machines in the skolo are 10.10, not 10.3.
					d["os"] = DEFAULT_OS_DEBIAN
				}
				if b.parts["cpu_or_gpu_value"] == "IntelIrisXe" {
					// The Intel Iris Xe devices are Debian 11.3.
					d["os"] = "Debian-bookworm/sid"
				}
			} else if b.matchOs("Mac") {
				gpu, ok := map[string]string{
					"AppleM1":             "AppleM1",
					"AppleM3":             "apple:m3",
					"IntelHD6000":         "8086:1626",
					"IntelHD615":          "8086:591e",
					"IntelIris5100":       "8086:0a2e",
					"IntelIrisPlus":       "8086:8a53",
					"IntelUHDGraphics630": "8086:3e9b",
					"RadeonHD8870M":       "1002:6821-4.0.20-3.2.8",
				}[b.parts["cpu_or_gpu_value"]]
				if !ok {
					log.Fatalf("Entry %q not found in Mac GPU mapping.", b.parts["cpu_or_gpu_value"])
				}
				if gpu == "AppleM1" {
					// No GPU dimension yet, but we can constrain by CPU.
					d["cpu"] = "arm64-64-Apple_M1"
				} else {
					d["gpu"] = gpu
				}
				// We have two different types of MacMini7,1 with the same GPU but different CPUs.
				if b.gpu("IntelIris5100") {
					if b.extraConfig("i5") {
						// If we say "i5", run on our MacMini7,1s in the Skolo:
						d["cpu"] = "x86-64-i5-4278U"
					} else {
						// Otherwise, run on Golo machines, just because that's
						// where those jobs have always run. Plus, some of them
						// are Perf jobs, which we want to keep consistent.
						d["cpu"] = "x86-64-i7-4578U"
					}
				}
			} else if b.os("ChromeOS") {
				// On ChromeOS the GPU name itself is the dimension; the OS
				// release version is pinned per GPU.
				version, ok := map[string]string{
					"IntelUHDGraphics605": "15236.2.0",
					"RadeonVega3":         "14233.0.0",
					"Adreno618":           "14150.39.0",
					"MaliT860":            "14092.77.0",
				}[b.parts["cpu_or_gpu_value"]]
				if !ok {
					log.Fatalf("Entry %q not found in ChromeOS GPU mapping.", b.parts["cpu_or_gpu_value"])
				}
				d["gpu"] = b.parts["cpu_or_gpu_value"]
				d["release_version"] = version
			} else {
				log.Fatalf("Unknown GPU mapping for OS %q.", b.parts["os"])
			}
		}
		if b.matchOs("Mac") {
			// TODO(borenet): Remove empty and nested entries after all Macs
			// are migrated to the new lab.
			// Values are either a plain mac_model string, or a nested
			// map keyed by the "os" part when the model differs per OS.
			if macModel, ok := map[string]interface{}{
				"MacBookAir7.2":  "",
				"MacBookPro11.5": "MacBookPro11,5",
				"MacBookPro15.1": "MacBookPro15,1",
				"MacBookPro15.3": "Mac15,3",
				"MacBookPro16.2": "",
				"MacMini7.1":     "",
				"MacMini8.1":     "Macmini8,1",
				"MacMini9.1": map[string]string{
					"Mac12": "",
					"Mac13": "",
					"Mac14": "Macmini9,1",
				},
				// TODO(borenet): This is currently resolving to multiple
				// different actual device types.
				"VMware7.1": "",
			}[b.parts["model"]]; ok {
				if macModel != "" {
					macModelDim, ok := macModel.(string)
					if !ok {
						// Nested map: pick the entry for this task's OS.
						macModelDim = macModel.(map[string]string)[b.parts["os"]]
					}
					if macModelDim != "" {
						d["mac_model"] = macModelDim
					}
				}
			} else {
				log.Fatalf("No mac_model found for %q", b.parts["model"])
			}
		}
	} else {
		// Not a Test/Perf task: these are CPU-only (eg. Build) tasks.
		d["gpu"] = "none"
		if d["os"] == DEFAULT_OS_LINUX_GCE {
			if b.extraConfig("CanvasKit", "CMake", "Docker", "PathKit") || b.role("BuildStats", "CodeSize") {
				b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
				return
			}
			// Use many-core machines for Build tasks.
			b.linuxGceDimensions(MACHINE_TYPE_LARGE)
			return
		} else if d["os"] == DEFAULT_OS_WIN_GCE {
			// Windows CPU bots.
			d["cpu"] = "x86-64-Haswell_GCE"
			// Use many-core machines for Build tasks.
			d["machine_type"] = MACHINE_TYPE_LARGE
		} else if d["os"] == DEFAULT_OS_MAC || d["os"] == "Mac-10.15.7" {
			// Mac CPU bots are no longer VMs.
			d["cpu"] = "x86-64"
			d["cores"] = "12"
			delete(d, "gpu")
		}
	}

	// Emit the accumulated dimensions in sorted "key:value" form.
	dims := make([]string, 0, len(d))
	for k, v := range d {
		dims = append(dims, fmt.Sprintf("%s:%s", k, v))
	}
	sort.Strings(dims)
	b.dimension(dims...)
}
1185
1186// bundleRecipes generates the task to bundle and isolate the recipes. Returns
1187// the name of the task, which may be added as a dependency.
1188func (b *jobBuilder) bundleRecipes() string {
1189	b.addTask(BUNDLE_RECIPES_NAME, func(b *taskBuilder) {
1190		b.usesGit()
1191		b.cmd("/bin/bash", "skia/infra/bots/bundle_recipes.sh", specs.PLACEHOLDER_ISOLATED_OUTDIR)
1192		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
1193		b.idempotent()
1194		b.cas(CAS_RECIPES)
1195		b.usesPython()
1196	})
1197	return BUNDLE_RECIPES_NAME
1198}
1199
1200// buildTaskDrivers generates the task to compile the task driver code to run on
1201// all platforms. Returns the name of the task, which may be added as a
1202// dependency.
1203func (b *jobBuilder) buildTaskDrivers(goos, goarch string) string {
1204	name := BUILD_TASK_DRIVERS_PREFIX + "_" + goos + "_" + goarch
1205	b.addTask(name, func(b *taskBuilder) {
1206		b.cmd("/bin/bash", "skia/infra/bots/build_task_drivers.sh",
1207			specs.PLACEHOLDER_ISOLATED_OUTDIR,
1208			goos+"_"+goarch)
1209		b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
1210		b.usesBazel("linux_x64")
1211		b.idempotent()
1212		b.cas(CAS_TASK_DRIVERS)
1213	})
1214	return name
1215}
1216
1217// createDockerImage creates the specified docker image. Returns the name of the
1218// generated task.
1219func (b *jobBuilder) createDockerImage(wasm bool) string {
1220	// First, derive the name of the task.
1221	imageName := "skia-release"
1222	taskName := "Housekeeper-PerCommit-CreateDockerImage_Skia_Release"
1223	if wasm {
1224		imageName = "skia-wasm-release"
1225		taskName = "Housekeeper-PerCommit-CreateDockerImage_Skia_WASM_Release"
1226	}
1227	imageDir := path.Join("docker", imageName)
1228
1229	// Add the task.
1230	b.addTask(taskName, func(b *taskBuilder) {
1231		// TODO(borenet): Make this task not use Git.
1232		b.usesGit()
1233		b.cmd(
1234			b.taskDriver("build_push_docker_image", false),
1235			"--image_name", fmt.Sprintf("gcr.io/skia-public/%s", imageName),
1236			"--dockerfile_dir", imageDir,
1237			"--project_id", "skia-swarming-bots",
1238			"--task_id", specs.PLACEHOLDER_TASK_ID,
1239			"--task_name", b.Name,
1240			"--workdir", ".",
1241			"--gerrit_project", "skia",
1242			"--gerrit_url", "https://skia-review.googlesource.com",
1243			"--repo", specs.PLACEHOLDER_REPO,
1244			"--revision", specs.PLACEHOLDER_REVISION,
1245			"--patch_issue", specs.PLACEHOLDER_ISSUE,
1246			"--patch_set", specs.PLACEHOLDER_PATCHSET,
1247			"--patch_server", specs.PLACEHOLDER_CODEREVIEW_SERVER,
1248			"--swarm_out_dir", specs.PLACEHOLDER_ISOLATED_OUTDIR,
1249		)
1250		b.cas(CAS_EMPTY)
1251		b.serviceAccount(b.cfg.ServiceAccountCompile)
1252		b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
1253		b.usesDocker()
1254		b.cache(CACHES_DOCKER...)
1255		b.timeout(time.Hour)
1256	})
1257	return taskName
1258}
1259
1260// createPushAppsFromSkiaDockerImage creates and pushes docker images of some apps
1261// (eg: fiddler, api) using the skia-release docker image.
1262func (b *jobBuilder) createPushAppsFromSkiaDockerImage() {
1263	b.addTask(b.Name, func(b *taskBuilder) {
1264		// TODO(borenet): Make this task not use Git.
1265		b.usesGit()
1266		b.cmd(
1267			b.taskDriver("push_apps_from_skia_image", false),
1268			"--project_id", "skia-swarming-bots",
1269			"--task_id", specs.PLACEHOLDER_TASK_ID,
1270			"--task_name", b.Name,
1271			"--workdir", ".",
1272			"--repo", specs.PLACEHOLDER_REPO,
1273			"--revision", specs.PLACEHOLDER_REVISION,
1274			"--patch_issue", specs.PLACEHOLDER_ISSUE,
1275			"--patch_set", specs.PLACEHOLDER_PATCHSET,
1276			"--patch_server", specs.PLACEHOLDER_CODEREVIEW_SERVER,
1277			"--bazel_cache_dir", bazelCacheDirOnGCELinux,
1278		)
1279		b.dep(b.createDockerImage(false))
1280		b.cas(CAS_EMPTY)
1281		b.usesBazel("linux_x64")
1282		b.serviceAccount(b.cfg.ServiceAccountCompile)
1283		b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
1284		b.usesDocker()
1285		b.cache(CACHES_DOCKER...)
1286		b.timeout(2 * time.Hour)
1287	})
1288}
1289
1290var iosRegex = regexp.MustCompile(`os:iOS-(.*)`)
1291
1292func (b *taskBuilder) maybeAddIosDevImage() {
1293	for _, dim := range b.Spec.Dimensions {
1294		if m := iosRegex.FindStringSubmatch(dim); len(m) >= 2 {
1295			var asset string
1296			switch m[1] {
1297			// Other patch versions can be added to the same case.
1298			case "11.4.1":
1299				asset = "ios-dev-image-11.4"
1300			case "13.3.1":
1301				asset = "ios-dev-image-13.3"
1302			case "13.4.1":
1303				asset = "ios-dev-image-13.4"
1304			case "13.5.1":
1305				asset = "ios-dev-image-13.5"
1306			case "13.6":
1307				asset = "ios-dev-image-13.6"
1308			case "18.2.1":
1309				// Newer iOS versions don't use a pre-packaged dev image.
1310			default:
1311				log.Fatalf("Unable to determine correct ios-dev-image asset for %s. If %s is a new iOS release, you must add a CIPD package containing the corresponding iOS dev image; see ios-dev-image-11.4 for an example.", b.Name, m[1])
1312			}
1313			if asset != "" {
1314				b.asset(asset)
1315			}
1316			break
1317		} else if strings.Contains(dim, "iOS") {
1318			log.Fatalf("Must specify iOS version for %s to obtain correct dev image; os dimension is missing version: %s", b.Name, dim)
1319		}
1320	}
1321}
1322
// compile generates a compile task. Returns the name of the compile task.
func (b *jobBuilder) compile() string {
	name := b.deriveCompileTaskName()
	if b.extraConfig("WasmGMTests") {
		// WASM GM test binaries are built by a dedicated task; skip the
		// generic compile recipe entirely.
		b.compileWasmGMTests(name)
	} else {
		b.addTask(name, func(b *taskBuilder) {
			recipe := "compile"
			casSpec := CAS_COMPILE
			// Configs which need a full checkout sync (rather than the
			// pre-uploaded compile CAS snapshot) use sync_and_compile.
			if b.extraConfig("NoDEPS", "CMake", "Flutter", "NoPatch") || b.shellsOutToBazel() {
				recipe = "sync_and_compile"
				casSpec = CAS_RUN_RECIPE
				b.recipeProps(EXTRA_PROPS)
				b.usesGit()
				if !b.extraConfig("NoDEPS") {
					b.cache(CACHES_WORKDIR...)
				}
			} else {
				b.idempotent()
			}
			// NoPatch builds write their output to a separate location so the
			// patched and unpatched artifacts can coexist.
			if b.extraConfig("NoPatch") {
				b.kitchenTask(recipe, OUTPUT_BUILD_NOPATCH)
			} else {
				b.kitchenTask(recipe, OUTPUT_BUILD)
			}
			b.cas(casSpec)
			b.serviceAccount(b.cfg.ServiceAccountCompile)
			b.swarmDimensions()
			if b.extraConfig("Docker", "LottieWeb", "CMake") || b.compiler("EMCC") {
				b.usesDocker()
				b.cache(CACHES_DOCKER...)
			}
			if b.extraConfig("Dawn") {
				// https://dawn.googlesource.com/dawn/+/516701da8184655a47c92a573cc84da7db5e69d4/generator/dawn_version_generator.py#21
				b.usesGit()
			}

			// Android bots require a toolchain.
			if b.extraConfig("Android") {
				// Pick the NDK asset matching the host OS.
				if b.matchOs("Mac") {
					b.asset("android_ndk_darwin")
				} else if b.matchOs("Win") {
					pkg := b.MustGetCipdPackageFromAsset("android_ndk_windows")
					// Short install path; presumably to keep Windows paths
					// under the length limit — TODO confirm.
					pkg.Path = "n"
					b.cipd(pkg)
				} else {
					b.asset("android_ndk_linux")
				}
			} else if b.extraConfig("Chromebook") {
				b.asset("clang_linux")
				// Each Chromebook arch needs its own GLES libs (and sysroot
				// for the ARM variants).
				if b.arch("x86_64") {
					b.asset("chromebook_x86_64_gles")
				} else if b.arch("arm") {
					b.asset("armhf_sysroot")
					b.asset("chromebook_arm_gles")
				} else if b.arch("arm64") {
					b.asset("arm64_sysroot")
					b.asset("chromebook_arm64_gles")
				} else {
					panic(fmt.Sprintf("Unknown arch %q for Chromebook", b.parts["arch"]))
				}
			} else if b.isLinux() {
				if b.compiler("Clang") {
					b.asset("clang_linux")
				}
				if b.extraConfig("SwiftShader") {
					b.asset("cmake_linux")
				}
				b.asset("ccache_linux")
				b.usesCCache()
				if b.shellsOutToBazel() {
					b.usesBazel("linux_x64")
					// Bazel-backed builds are not retried automatically.
					b.attempts(1)
				}
			} else if b.matchOs("Win") {
				b.asset("win_toolchain")
				if b.compiler("Clang") {
					b.asset("clang_win")
				}
				if b.extraConfig("DWriteCore") {
					b.asset("dwritecore")
				}
			} else if b.matchOs("Mac") {
				b.cipd(CIPD_PKGS_XCODE...)
				b.Spec.Caches = append(b.Spec.Caches, &specs.Cache{
					Name: "xcode",
					Path: "cache/Xcode.app",
				})
				b.asset("ccache_mac")
				b.usesCCache()
				if b.matchExtraConfig("iOS.*") {
					b.asset("provisioning_profile_ios")
				}
				if b.shellsOutToBazel() {
					// All of our current Mac compile machines are x64 Mac only.
					b.usesBazel("mac_x64")
					b.attempts(1)
				}
			}
		})
	}

	// All compile tasks are runnable as their own Job. Assert that the Job
	// is listed in jobs.
	if !In(name, b.jobs) {
		log.Fatalf("Job %q is missing from the jobs list! Derived from: %q", name, b.Name)
	}

	return name
}
1433
1434// recreateSKPs generates a RecreateSKPs task.
1435func (b *jobBuilder) recreateSKPs() {
1436	b.addTask(b.Name, func(b *taskBuilder) {
1437		cmd := []string{
1438			b.taskDriver("recreate_skps", false),
1439			"--local=false",
1440			"--project_id", "skia-swarming-bots",
1441			"--task_id", specs.PLACEHOLDER_TASK_ID,
1442			"--task_name", b.Name,
1443			"--skia_revision", specs.PLACEHOLDER_REVISION,
1444			"--patch_ref", specs.PLACEHOLDER_PATCH_REF,
1445			"--git_cache", "cache/git",
1446			"--checkout_root", "cache/work",
1447			"--dm_path", "build/dm",
1448		}
1449		if b.matchExtraConfig("DryRun") {
1450			cmd = append(cmd, "--dry_run")
1451		}
1452
1453		b.cas(CAS_RECREATE_SKPS)
1454		b.dep("Build-Debian10-Clang-x86_64-Release") // To get DM.
1455		b.cmd(cmd...)
1456		b.usesLUCIAuth()
1457		b.serviceAccount(b.cfg.ServiceAccountRecreateSKPs)
1458		b.dimension(
1459			"pool:SkiaCT",
1460			fmt.Sprintf("os:%s", DEFAULT_OS_LINUX_GCE),
1461		)
1462		b.usesGo()
1463		b.cache(CACHES_WORKDIR...)
1464		b.timeout(8 * time.Hour)
1465		b.usesPython()
1466		b.attempts(2)
1467	})
1468}
1469
1470// checkGeneratedFiles verifies that no generated SKSL files have been edited by hand, and that
1471// we do not get any diffs after regenerating all files (go generate, Gazelle, etc.).
1472func (b *jobBuilder) checkGeneratedFiles() {
1473	b.addTask(b.Name, func(b *taskBuilder) {
1474		b.cas(CAS_BAZEL)
1475		b.cmd(
1476			b.taskDriver("check_generated_files", false),
1477			"--local=false",
1478			"--git_path=cipd_bin_packages/git",
1479			"--project_id", "skia-swarming-bots",
1480			"--task_id", specs.PLACEHOLDER_TASK_ID,
1481			"--task_name", b.Name,
1482			"--bazel_cache_dir", bazelCacheDirOnGCELinux,
1483			"--bazel_arg=--config=for_linux_x64_with_rbe",
1484			"--bazel_arg=--jobs=100",
1485		)
1486		b.usesBazel("linux_x64")
1487		b.usesGit()
1488		b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
1489		b.serviceAccount(b.cfg.ServiceAccountHousekeeper)
1490	})
1491}
1492
1493// goLinters runs various Go linters (gofmt, errcheck, etc.) and fails if there are any errors or
1494// diffs.
1495func (b *jobBuilder) goLinters() {
1496	b.addTask(b.Name, func(b *taskBuilder) {
1497		b.cas(CAS_BAZEL)
1498		b.cmd(
1499			b.taskDriver("go_linters", false),
1500			"--local=false",
1501			"--git_path=cipd_bin_packages/git",
1502			"--project_id", "skia-swarming-bots",
1503			"--task_id", specs.PLACEHOLDER_TASK_ID,
1504			"--task_name", b.Name,
1505			"--bazel_cache_dir", bazelCacheDirOnGCELinux,
1506			"--bazel_arg=--config=for_linux_x64_with_rbe",
1507			"--bazel_arg=--jobs=100",
1508		)
1509		b.usesBazel("linux_x64")
1510		b.usesGit()
1511		b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
1512		b.serviceAccount(b.cfg.ServiceAccountHousekeeper)
1513	})
1514}
1515
1516// checkGnToBp verifies that the gn_to_bp.py script continues to work.
1517func (b *jobBuilder) checkGnToBp() {
1518	b.addTask(b.Name, func(b *taskBuilder) {
1519		b.cas(CAS_COMPILE)
1520		b.cmd(
1521			b.taskDriver("run_gn_to_bp", false),
1522			"--local=false",
1523			"--project_id", "skia-swarming-bots",
1524			"--task_id", specs.PLACEHOLDER_TASK_ID,
1525			"--task_name", b.Name,
1526		)
1527		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
1528		b.usesPython()
1529		b.serviceAccount(b.cfg.ServiceAccountHousekeeper)
1530	})
1531}
1532
1533// housekeeper generates a Housekeeper task.
1534func (b *jobBuilder) housekeeper() {
1535	b.addTask(b.Name, func(b *taskBuilder) {
1536		b.recipeProps(EXTRA_PROPS)
1537		b.kitchenTask("housekeeper", OUTPUT_NONE)
1538		b.serviceAccount(b.cfg.ServiceAccountHousekeeper)
1539		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
1540		b.usesGit()
1541		b.cache(CACHES_WORKDIR...)
1542	})
1543}
1544
1545// g3FrameworkCanary generates a G3 Framework Canary task. Returns
1546// the name of the last task in the generated chain of tasks, which the Job
1547// should add as a dependency.
1548func (b *jobBuilder) g3FrameworkCanary() {
1549	b.addTask(b.Name, func(b *taskBuilder) {
1550		b.cas(CAS_EMPTY)
1551		b.cmd(
1552			b.taskDriver("g3_canary", false),
1553			"--local=false",
1554			"--project_id", "skia-swarming-bots",
1555			"--task_id", specs.PLACEHOLDER_TASK_ID,
1556			"--task_name", b.Name,
1557			"--repo", specs.PLACEHOLDER_REPO,
1558			"--revision", specs.PLACEHOLDER_REVISION,
1559			"--patch_issue", specs.PLACEHOLDER_ISSUE,
1560			"--patch_set", specs.PLACEHOLDER_PATCHSET,
1561			"--patch_server", specs.PLACEHOLDER_CODEREVIEW_SERVER,
1562		)
1563		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
1564		b.usesLUCIAuth()
1565		b.serviceAccount("skia-g3-framework-compile@skia-swarming-bots.iam.gserviceaccount.com")
1566		b.timeout(3 * time.Hour)
1567		b.attempts(1)
1568	})
1569}
1570
1571// infra generates an infra_tests task.
1572func (b *jobBuilder) infra() {
1573	b.addTask(b.Name, func(b *taskBuilder) {
1574		if b.matchOs("Win") || b.matchExtraConfig("Win") {
1575			b.dimension(
1576				// Specify CPU to avoid running builds on bots with a more unique CPU.
1577				"cpu:x86-64-Haswell_GCE",
1578				"gpu:none",
1579				fmt.Sprintf("machine_type:%s", MACHINE_TYPE_MEDIUM), // We don't have any small Windows instances.
1580				fmt.Sprintf("os:%s", DEFAULT_OS_WIN_GCE),
1581				fmt.Sprintf("pool:%s", b.cfg.Pool),
1582			)
1583		} else {
1584			b.linuxGceDimensions(MACHINE_TYPE_SMALL)
1585		}
1586		b.recipeProp("repository", specs.PLACEHOLDER_REPO)
1587		b.kitchenTask("infra", OUTPUT_NONE)
1588		b.cas(CAS_WHOLE_REPO)
1589		b.serviceAccount(b.cfg.ServiceAccountCompile)
1590		b.usesGSUtil()
1591		b.idempotent()
1592		b.usesGo()
1593	})
1594}
1595
// buildstats generates a buildstats task, which compiles code and generates
// statistics about the build.
func (b *jobBuilder) buildstats() {
	compileTaskName := b.compile()
	b.addTask(b.Name, func(b *taskBuilder) {
		b.recipeProps(EXTRA_PROPS)
		b.kitchenTask("compute_buildstats", OUTPUT_PERF)
		b.dep(compileTaskName)
		b.asset("bloaty")
		b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
		b.usesDocker()
		b.usesGit()
		b.cache(CACHES_WORKDIR...)
	})
	// Upload release results (for tracking in perf)
	// We have some jobs that are FYI (e.g. Debug-CanvasKit, tree-map generator)
	if b.release() && !b.arch("x86_64") {
		uploadName := fmt.Sprintf("%s%s%s", PREFIX_UPLOAD, b.jobNameSchema.Sep, b.Name)
		depName := b.Name
		b.addTask(uploadName, func(b *taskBuilder) {
			b.recipeProp("gs_bucket", b.cfg.GsBucketNano)
			b.recipeProps(EXTRA_PROPS)
			// TODO(borenet): I'm not sure why the upload task is
			// using the BuildStats task name, but I've done this
			// to maintain existing behavior.
			// Temporarily swap in the BuildStats name so kitchenTask derives
			// its properties from it, then restore the upload task's name.
			b.Name = depName
			b.kitchenTask("upload_buildstats_results", OUTPUT_NONE)
			b.Name = uploadName
			b.serviceAccount(b.cfg.ServiceAccountUploadNano)
			b.linuxGceDimensions(MACHINE_TYPE_SMALL)
			b.usesGSUtil()
			b.dep(depName)
		})
	}
}
1631
// codesize generates a codesize task, which takes binary produced by a
// compile task, runs Bloaty against it, and uploads the resulting code size
// statistics to the GCS bucket belonging to the codesize.skia.org service.
func (b *jobBuilder) codesize() {
	compileTaskName := b.compile()
	// Also depend on a patch-free build of the same binary so the driver can
	// compare the two. The NoPatch suffix joins with "_" when the compile
	// task already has an extra config, "-" when it does not.
	compileTaskNameNoPatch := compileTaskName
	if b.extraConfig("Android") {
		compileTaskNameNoPatch += "_NoPatch" // add a second "extra config"
	} else {
		compileTaskNameNoPatch += "-NoPatch" // add the only "extra config"
	}

	// Resolve the pinned Bloaty CIPD package so its version can be passed to
	// the task driver.
	bloatyCipdPkg := b.MustGetCipdPackageFromAsset("bloaty")

	b.addTask(b.Name, func(b *taskBuilder) {
		b.cas(CAS_EMPTY)
		b.dep(compileTaskName)
		b.dep(compileTaskNameNoPatch)
		cmd := []string{
			b.taskDriver("codesize", false),
			"--local=false",
			"--project_id", "skia-swarming-bots",
			"--task_id", specs.PLACEHOLDER_TASK_ID,
			"--task_name", b.Name,
			"--compile_task_name", compileTaskName,
			"--compile_task_name_no_patch", compileTaskNameNoPatch,
			// Note: the binary name cannot contain dashes, otherwise the naming
			// schema logic will partition it into multiple parts.
			//
			// If we ever need to define a CodeSize-* task for a binary with
			// dashes in its name (e.g. "my-binary"), a potential workaround is to
			// create a mapping from a new, non-dashed binary name (e.g. "my_binary")
			// to the actual binary name with dashes. This mapping can be hardcoded
			// in this function; no changes to the task driver would be necessary.
			"--binary_name", b.parts["binary_name"],
			"--bloaty_cipd_version", bloatyCipdPkg.Version,
			"--bloaty_binary", "bloaty/bloaty",

			"--repo", specs.PLACEHOLDER_REPO,
			"--revision", specs.PLACEHOLDER_REVISION,
			"--patch_issue", specs.PLACEHOLDER_ISSUE,
			"--patch_set", specs.PLACEHOLDER_PATCHSET,
			"--patch_server", specs.PLACEHOLDER_CODEREVIEW_SERVER,
		}
		// Pick a strip tool matching the target platform of the compile task.
		if strings.Contains(compileTaskName, "Android") {
			b.asset("android_ndk_linux")
			cmd = append(cmd, "--strip_binary",
				"android_ndk_linux/toolchains/llvm/prebuilt/linux-x86_64/bin/llvm-strip")
		} else {
			b.asset("binutils_linux_x64")
			cmd = append(cmd, "--strip_binary", "binutils_linux_x64/strip")
		}
		b.cmd(cmd...)
		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
		b.cache(CACHES_WORKDIR...)
		b.usesLUCIAuth()
		b.asset("bloaty")
		b.serviceAccount("skia-external-codesize@skia-swarming-bots.iam.gserviceaccount.com")
		b.timeout(20 * time.Minute)
		b.attempts(1)
	})
}
1694
1695// doUpload indicates whether the given Job should upload its results.
1696func (b *jobBuilder) doUpload() bool {
1697	if b.extraConfig("Upload") {
1698		return true
1699	}
1700	for _, s := range b.cfg.NoUpload {
1701		m, err := regexp.MatchString(s, b.Name)
1702		if err != nil {
1703			log.Fatal(err)
1704		}
1705		if m {
1706			return false
1707		}
1708	}
1709	return true
1710}
1711
1712// commonTestPerfAssets adds the assets needed by Test and Perf tasks.
1713func (b *taskBuilder) commonTestPerfAssets() {
1714	// Docker-based tests don't need the standard CIPD assets
1715	if b.extraConfig("CanvasKit", "PathKit") || (b.role("Test") && b.extraConfig("LottieWeb")) {
1716		return
1717	}
1718	if b.os("Android", "ChromeOS", "iOS") {
1719		b.asset("skp", "svg", "skimage")
1720	} else if b.extraConfig("OldestSupportedSkpVersion") {
1721		b.assetWithVersion("skp", oldestSupportedSkpVersion)
1722	} else {
1723		// for desktop machines
1724		b.asset("skimage", "skp", "svg")
1725	}
1726
1727	if b.isLinux() && b.matchExtraConfig("SAN") {
1728		b.asset("clang_linux")
1729	}
1730
1731	if b.isLinux() {
1732		if b.extraConfig("Vulkan") {
1733			b.asset("linux_vulkan_sdk")
1734		}
1735		if b.matchGpu("Intel") {
1736			if b.matchGpu("IrisXe") {
1737				b.asset("mesa_intel_driver_linux_22")
1738			} else {
1739				// Use this for legacy drivers that were culled in v22 of Mesa.
1740				// https://www.phoronix.com/scan.php?page=news_item&px=Mesa-22.0-Drops-OpenSWR
1741				b.asset("mesa_intel_driver_linux")
1742			}
1743		}
1744	}
1745
1746	if b.matchOs("Win") && b.extraConfig("DWriteCore") {
1747		b.asset("dwritecore")
1748	}
1749}
1750
// directUpload adds prerequisites for uploading to GCS: it points the recipe
// at the destination bucket, runs the task as the given service account, and
// makes gsutil available for the upload itself.
func (b *taskBuilder) directUpload(gsBucket, serviceAccount string) {
	b.recipeProp("gs_bucket", gsBucket)
	b.serviceAccount(serviceAccount)
	b.usesGSUtil()
}
1757
// dm generates a Test task using dm. It picks a recipe and CAS input set based
// on the job's extra configs, configures dimensions/assets/timeouts, and — when
// results are uploaded via a separate task rather than directly — appends an
// upload task depending on the test task.
func (b *jobBuilder) dm() {
	compileTaskName := ""
	// LottieWeb doesn't require anything in Skia to be compiled.
	if !b.extraConfig("LottieWeb") {
		compileTaskName = b.compile()
	}
	// directUpload is set inside the task-configuration closure below and read
	// after the addTask call; this relies on addTask invoking the closure
	// synchronously. NOTE(review): confirm addTask runs the closure before
	// returning.
	directUpload := false
	b.addTask(b.Name, func(b *taskBuilder) {
		// Select recipe and CAS inputs; web targets (PathKit/CanvasKit/LottieWeb)
		// use their own recipes and upload directly to GCS when possible.
		cas := CAS_TEST
		recipe := "test"
		if b.extraConfig("PathKit") {
			cas = CAS_PATHKIT
			recipe = "test_pathkit"
			if b.doUpload() {
				b.directUpload(b.cfg.GsBucketGm, b.cfg.ServiceAccountUploadGM)
				directUpload = true
			}
		} else if b.extraConfig("CanvasKit") {
			cas = CAS_CANVASKIT
			recipe = "test_canvaskit"
			if b.doUpload() {
				b.directUpload(b.cfg.GsBucketGm, b.cfg.ServiceAccountUploadGM)
				directUpload = true
			}
		} else if b.extraConfig("LottieWeb") {
			// CAS_LOTTIE_CI differs from CAS_LOTTIE_WEB in that it includes
			// more of the files, especially those brought in via DEPS in the
			// lottie-ci repo. The main difference between Perf.+LottieWeb and
			// Test.+LottieWeb is that the former pulls in the lottie build via
			// npm and the latter always tests at lottie's
			// ToT.
			cas = CAS_LOTTIE_CI
			recipe = "test_lottie_web"
			if b.doUpload() {
				b.directUpload(b.cfg.GsBucketGm, b.cfg.ServiceAccountUploadGM)
				directUpload = true
			}
		} else {
			// Default recipe supports direct upload.
			// TODO(http://skbug.com/11785): Windows jobs are unable to extract gsutil.
			// https://bugs.chromium.org/p/chromium/issues/detail?id=1192611
			if b.doUpload() && !b.matchOs("Win") {
				b.directUpload(b.cfg.GsBucketGm, b.cfg.ServiceAccountUploadGM)
				directUpload = true
			}
			// iOS tasks need the Xcode named cache.
			if b.matchOs("iOS") {
				b.Spec.Caches = append(b.Spec.Caches, &specs.Cache{
					Name: "xcode",
					Path: "cache/Xcode.app",
				})
			}
		}
		b.recipeProp("gold_hashes_url", b.cfg.GoldHashesURL)
		b.recipeProps(EXTRA_PROPS)
		// Pass the internal hardware label (if any) through to dm's flags.
		iid := b.internalHardwareLabel()
		iidStr := ""
		if iid != nil {
			iidStr = strconv.Itoa(*iid)
		}
		// Only the default "test" recipe consumes dm flags.
		if recipe == "test" {
			b.dmFlags(iidStr)
		}
		b.kitchenTask(recipe, OUTPUT_TEST)
		b.cas(cas)
		b.swarmDimensions()
		if b.extraConfig("CanvasKit", "Docker", "LottieWeb", "PathKit") {
			b.usesDocker()
		}
		if compileTaskName != "" {
			b.dep(compileTaskName)
		}
		// ASAN on Android needs the NDK for symbolization tooling.
		if b.matchOs("Android") && b.extraConfig("ASAN") {
			b.asset("android_ndk_linux")
		}
		if b.extraConfig("NativeFonts") && !b.matchOs("Android") {
			b.needsFontsForParagraphTests()
		}
		if b.extraConfig("Fontations") {
			b.cipd(&specs.CipdPackage{
				Name:    "chromium/third_party/googlefonts_testdata",
				Path:    "googlefonts_testdata",
				Version: "version:20230913",
			})
		}
		b.commonTestPerfAssets()
		if b.matchExtraConfig("Lottie") {
			b.asset("lottie-samples")
		}
		b.expiration(20 * time.Hour)

		// Baseline timeout; the branches below override it for slow (Valgrind,
		// MSAN, x86 Debug) or fast (Mac14) configurations.
		b.timeout(4 * time.Hour)
		if b.extraConfig("Valgrind") {
			b.timeout(9 * time.Hour)
			b.expiration(48 * time.Hour)
			b.asset("valgrind")
			// Since Valgrind runs on the same bots as the CQ, we restrict Valgrind to a subset of the bots
			// to ensure there are always bots free for CQ tasks.
			b.dimension("valgrind:1")
		} else if b.extraConfig("MSAN") {
			b.timeout(9 * time.Hour)
		} else if b.arch("x86") && b.debug() {
			// skia:6737
			b.timeout(6 * time.Hour)
		} else if b.matchOs("Mac14") {
			b.timeout(30 * time.Minute)
		}
		b.maybeAddIosDevImage()
	})

	// Upload results if necessary. TODO(kjlubick): If we do coverage analysis at the same
	// time as normal tests (which would be nice), cfg.json needs to have Coverage removed.
	if b.doUpload() && !directUpload {
		uploadName := fmt.Sprintf("%s%s%s", PREFIX_UPLOAD, b.jobNameSchema.Sep, b.Name)
		depName := b.Name
		b.addTask(uploadName, func(b *taskBuilder) {
			b.recipeProp("gs_bucket", b.cfg.GsBucketGm)
			b.recipeProps(EXTRA_PROPS)
			b.kitchenTask("upload_dm_results", OUTPUT_NONE)
			b.serviceAccount(b.cfg.ServiceAccountUploadGM)
			b.linuxGceDimensions(MACHINE_TYPE_SMALL)
			b.usesGSUtil()
			b.dep(depName)
		})
	}
}
1884
1885// canary generates a task that uses TaskDrivers to trigger canary manual rolls on autorollers.
1886// Canary-G3 does not use this path because it is very different from other autorollers.
1887func (b *jobBuilder) canary(rollerName, canaryCQKeyword, targetProjectBaseURL string) {
1888	b.addTask(b.Name, func(b *taskBuilder) {
1889		b.cas(CAS_EMPTY)
1890		b.cmd(
1891			b.taskDriver("canary", false),
1892			"--local=false",
1893			"--project_id", "skia-swarming-bots",
1894			"--task_id", specs.PLACEHOLDER_TASK_ID,
1895			"--task_name", b.Name,
1896			"--roller_name", rollerName,
1897			"--cq_keyword", canaryCQKeyword,
1898			"--target_project_base_url", targetProjectBaseURL,
1899			"--repo", specs.PLACEHOLDER_REPO,
1900			"--revision", specs.PLACEHOLDER_REVISION,
1901			"--patch_issue", specs.PLACEHOLDER_ISSUE,
1902			"--patch_set", specs.PLACEHOLDER_PATCHSET,
1903			"--patch_server", specs.PLACEHOLDER_CODEREVIEW_SERVER,
1904		)
1905		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
1906		b.usesLUCIAuth()
1907		b.serviceAccount(b.cfg.ServiceAccountCanary)
1908		b.timeout(3 * time.Hour)
1909		b.attempts(1)
1910	})
1911}
1912
1913// puppeteer generates a task that uses TaskDrivers combined with a node script and puppeteer to
1914// benchmark something using Chromium (e.g. CanvasKit, LottieWeb).
1915func (b *jobBuilder) puppeteer() {
1916	compileTaskName := b.compile()
1917	b.addTask(b.Name, func(b *taskBuilder) {
1918		b.defaultSwarmDimensions()
1919		b.usesNode()
1920		b.usesLUCIAuth()
1921		b.dep(compileTaskName)
1922		b.output(OUTPUT_PERF)
1923		b.timeout(60 * time.Minute)
1924		b.cas(CAS_PUPPETEER)
1925		b.serviceAccount(b.cfg.ServiceAccountCompile)
1926
1927		webglversion := "2"
1928		if b.extraConfig("WebGL1") {
1929			webglversion = "1"
1930		}
1931
1932		if b.extraConfig("SkottieFrames") {
1933			b.cmd(
1934				b.taskDriver("perf_puppeteer_skottie_frames", false),
1935				"--project_id", "skia-swarming-bots",
1936				"--git_hash", specs.PLACEHOLDER_REVISION,
1937				"--task_id", specs.PLACEHOLDER_TASK_ID,
1938				"--task_name", b.Name,
1939				"--canvaskit_bin_path", "./build",
1940				"--lotties_path", "./lotties_with_assets",
1941				"--node_bin_path", "./node/node/bin",
1942				"--benchmark_path", "./tools/perf-canvaskit-puppeteer",
1943				"--output_path", OUTPUT_PERF,
1944				"--os_trace", b.parts["os"],
1945				"--model_trace", b.parts["model"],
1946				"--cpu_or_gpu_trace", b.parts["cpu_or_gpu"],
1947				"--cpu_or_gpu_value_trace", b.parts["cpu_or_gpu_value"],
1948				"--webgl_version", webglversion, // ignore when running with cpu backend
1949			)
1950			b.needsLottiesWithAssets()
1951		} else if b.extraConfig("RenderSKP") {
1952			b.cmd(
1953				b.taskDriver("perf_puppeteer_render_skps", false),
1954				"--project_id", "skia-swarming-bots",
1955				"--git_hash", specs.PLACEHOLDER_REVISION,
1956				"--task_id", specs.PLACEHOLDER_TASK_ID,
1957				"--task_name", b.Name,
1958				"--canvaskit_bin_path", "./build",
1959				"--skps_path", "./skp",
1960				"--node_bin_path", "./node/node/bin",
1961				"--benchmark_path", "./tools/perf-canvaskit-puppeteer",
1962				"--output_path", OUTPUT_PERF,
1963				"--os_trace", b.parts["os"],
1964				"--model_trace", b.parts["model"],
1965				"--cpu_or_gpu_trace", b.parts["cpu_or_gpu"],
1966				"--cpu_or_gpu_value_trace", b.parts["cpu_or_gpu_value"],
1967				"--webgl_version", webglversion,
1968			)
1969			b.asset("skp")
1970		} else if b.extraConfig("CanvasPerf") { // refers to the canvas_perf.js test suite
1971			b.cmd(
1972				b.taskDriver("perf_puppeteer_canvas", false),
1973				"--project_id", "skia-swarming-bots",
1974				"--git_hash", specs.PLACEHOLDER_REVISION,
1975				"--task_id", specs.PLACEHOLDER_TASK_ID,
1976				"--task_name", b.Name,
1977				"--canvaskit_bin_path", "./build",
1978				"--node_bin_path", "./node/node/bin",
1979				"--benchmark_path", "./tools/perf-canvaskit-puppeteer",
1980				"--output_path", OUTPUT_PERF,
1981				"--os_trace", b.parts["os"],
1982				"--model_trace", b.parts["model"],
1983				"--cpu_or_gpu_trace", b.parts["cpu_or_gpu"],
1984				"--cpu_or_gpu_value_trace", b.parts["cpu_or_gpu_value"],
1985				"--webgl_version", webglversion,
1986			)
1987			b.asset("skp")
1988		}
1989
1990	})
1991
1992	// Upload results to Perf after.
1993	// TODO(kjlubick,borenet) deduplicate this with the logic in perf().
1994	uploadName := fmt.Sprintf("%s%s%s", PREFIX_UPLOAD, b.jobNameSchema.Sep, b.Name)
1995	depName := b.Name
1996	b.addTask(uploadName, func(b *taskBuilder) {
1997		b.recipeProp("gs_bucket", b.cfg.GsBucketNano)
1998		b.recipeProps(EXTRA_PROPS)
1999		// TODO(borenet): I'm not sure why the upload task is
2000		// using the Perf task name, but I've done this to
2001		// maintain existing behavior.
2002		b.Name = depName
2003		b.kitchenTask("upload_nano_results", OUTPUT_NONE)
2004		b.Name = uploadName
2005		b.serviceAccount(b.cfg.ServiceAccountUploadNano)
2006		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
2007		b.usesGSUtil()
2008		b.dep(depName)
2009	})
2010}
2011
// perf generates a Perf task. It selects a recipe and CAS input set based on
// the job's extra configs, configures dimensions/assets/timeouts, and — for
// non-Debug uploading jobs — appends an upload task depending on the perf task.
func (b *jobBuilder) perf() {
	compileTaskName := ""
	// LottieWeb doesn't require anything in Skia to be compiled.
	if !b.extraConfig("LottieWeb") {
		compileTaskName = b.compile()
	}
	// Debug jobs never upload perf results.
	doUpload := !b.debug() && b.doUpload()
	b.addTask(b.Name, func(b *taskBuilder) {
		recipe := "perf"
		cas := CAS_PERF
		if b.extraConfig("PathKit") {
			cas = CAS_PATHKIT
			recipe = "perf_pathkit"
		} else if b.extraConfig("CanvasKit") {
			cas = CAS_CANVASKIT
			recipe = "perf_canvaskit"
		} else if b.extraConfig("SkottieTracing") {
			recipe = "perf_skottietrace"
		} else if b.extraConfig("SkottieWASM") {
			recipe = "perf_skottiewasm_lottieweb"
			cas = CAS_SKOTTIE_WASM
		} else if b.extraConfig("LottieWeb") {
			recipe = "perf_skottiewasm_lottieweb"
			cas = CAS_LOTTIE_WEB
		} else if b.matchOs("iOS") {
			// iOS keeps the default recipe/CAS but needs extra setup:
			// We need a service account in order to download the xcode CIPD
			// packages.
			b.serviceAccount(b.cfg.ServiceAccountUploadNano)
			b.Spec.Caches = append(b.Spec.Caches, &specs.Cache{
				Name: "xcode",
				Path: "cache/Xcode.app",
			})
		}
		b.recipeProps(EXTRA_PROPS)
		// Only the default "perf" recipe consumes nanobench flags.
		if recipe == "perf" {
			b.nanobenchFlags(doUpload)
		}
		b.kitchenTask(recipe, OUTPUT_PERF)
		b.cas(cas)
		b.swarmDimensions()
		if b.extraConfig("Docker") {
			b.usesDocker()
		}
		if compileTaskName != "" {
			b.dep(compileTaskName)
		}
		b.commonTestPerfAssets()
		b.expiration(20 * time.Hour)
		// Baseline timeout; overridden below for slow or fast configurations.
		b.timeout(4 * time.Hour)

		if b.extraConfig("Valgrind") {
			b.timeout(9 * time.Hour)
			b.expiration(48 * time.Hour)
			b.asset("valgrind")
			// Since Valgrind runs on the same bots as the CQ, we restrict Valgrind to a subset of the bots
			// to ensure there are always bots free for CQ tasks.
			b.dimension("valgrind:1")
		} else if b.extraConfig("MSAN") {
			b.timeout(9 * time.Hour)
		} else if b.parts["arch"] == "x86" && b.parts["configuration"] == "Debug" {
			// skia:6737
			// NOTE(review): dm() expresses this same condition as
			// b.arch("x86") && b.debug(); confirm whether the helpers are
			// equivalent and, if so, consider using them here for consistency.
			b.timeout(6 * time.Hour)
		} else if b.matchOs("Mac14") {
			b.timeout(30 * time.Minute)
		}

		if b.extraConfig("LottieWeb", "SkottieWASM") {
			b.asset("node", "lottie-samples")
		} else if b.matchExtraConfig("SkottieTracing") {
			b.needsLottiesWithAssets()
		} else if b.matchExtraConfig("Skottie") {
			b.asset("lottie-samples")
		}

		if b.matchOs("Android") && b.cpu() {
			b.asset("text_blob_traces")
		}
		b.maybeAddIosDevImage()

		// Append the internal hardware label (if any) to the command line.
		iid := b.internalHardwareLabel()
		if iid != nil {
			b.Spec.Command = append(b.Spec.Command, fmt.Sprintf("internal_hardware_label=%d", *iid))
		}
	})

	// Upload results if necessary.
	if doUpload {
		uploadName := fmt.Sprintf("%s%s%s", PREFIX_UPLOAD, b.jobNameSchema.Sep, b.Name)
		depName := b.Name
		b.addTask(uploadName, func(b *taskBuilder) {
			b.recipeProp("gs_bucket", b.cfg.GsBucketNano)
			b.recipeProps(EXTRA_PROPS)
			// TODO(borenet): I'm not sure why the upload task is
			// using the Perf task name, but I've done this to
			// maintain existing behavior.
			b.Name = depName
			b.kitchenTask("upload_nano_results", OUTPUT_NONE)
			b.Name = uploadName
			b.serviceAccount(b.cfg.ServiceAccountUploadNano)
			b.linuxGceDimensions(MACHINE_TYPE_SMALL)
			b.usesGSUtil()
			b.dep(depName)
		})
	}
}
2118
2119// presubmit generates a task which runs the presubmit for this repo.
2120func (b *jobBuilder) presubmit() {
2121	b.addTask(b.Name, func(b *taskBuilder) {
2122		b.recipeProps(map[string]string{
2123			"category":         "cq",
2124			"patch_gerrit_url": "https://skia-review.googlesource.com",
2125			"patch_project":    "skia",
2126			"patch_ref":        specs.PLACEHOLDER_PATCH_REF,
2127			"reason":           "CQ",
2128			"repo_name":        "skia",
2129		})
2130		b.recipeProps(EXTRA_PROPS)
2131		b.kitchenTaskNoBundle("run_presubmit", OUTPUT_NONE)
2132		b.cas(CAS_RUN_RECIPE)
2133		b.serviceAccount(b.cfg.ServiceAccountCompile)
2134		// Use MACHINE_TYPE_LARGE because it seems to save time versus
2135		// MEDIUM and we want presubmit to be fast.
2136		b.linuxGceDimensions(MACHINE_TYPE_LARGE)
2137		b.usesGit()
2138		b.cipd(&specs.CipdPackage{
2139			Name:    "infra/recipe_bundles/chromium.googlesource.com/chromium/tools/build",
2140			Path:    "recipe_bundle",
2141			Version: "git_revision:bb122cd16700ab80bfcbd494b605dd11d4f5902d",
2142		})
2143	})
2144}
2145
2146// compileWasmGMTests uses a task driver to compile the GMs and unit tests for Web Assembly (WASM).
2147// We can use the same build for both CPU and GPU tests since the latter requires the code for the
2148// former anyway.
2149func (b *jobBuilder) compileWasmGMTests(compileName string) {
2150	b.addTask(compileName, func(b *taskBuilder) {
2151		b.attempts(1)
2152		b.usesDocker()
2153		b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
2154		b.usesLUCIAuth()
2155		b.output("wasm_out")
2156		b.timeout(60 * time.Minute)
2157		b.cas(CAS_COMPILE)
2158		b.serviceAccount(b.cfg.ServiceAccountCompile)
2159		b.cache(CACHES_DOCKER...)
2160		// For now, we only have one compile mode - a GPU release mode. This should be sufficient to
2161		// run CPU, WebGL1, and WebGL2 tests. Debug mode is not needed for the waterfall because
2162		// when using puppeteer, stacktraces from exceptions are hard to get access to, so we do not
2163		// even bother.
2164		b.cmd(
2165			b.taskDriver("compile_wasm_gm_tests", false),
2166			"--project_id", "skia-swarming-bots",
2167			"--task_id", specs.PLACEHOLDER_TASK_ID,
2168			"--task_name", compileName,
2169			"--out_path", "./wasm_out",
2170			"--skia_path", "./skia",
2171			"--work_path", "./cache/docker/wasm_gm",
2172		)
2173	})
2174}
2175
2176// compileWasmGMTests uses a task driver to compile the GMs and unit tests for Web Assembly (WASM).
2177// We can use the same build for both CPU and GPU tests since the latter requires the code for the
2178// former anyway.
2179func (b *jobBuilder) runWasmGMTests() {
2180	compileTaskName := b.compile()
2181
2182	b.addTask(b.Name, func(b *taskBuilder) {
2183		b.attempts(1)
2184		b.usesNode()
2185		b.swarmDimensions()
2186		b.usesLUCIAuth()
2187		b.cipd(CIPD_PKGS_GOLDCTL)
2188		b.dep(compileTaskName)
2189		b.timeout(60 * time.Minute)
2190		b.cas(CAS_WASM_GM)
2191		b.serviceAccount(b.cfg.ServiceAccountUploadGM)
2192		b.cmd(
2193			b.taskDriver("run_wasm_gm_tests", false),
2194			"--project_id", "skia-swarming-bots",
2195			"--task_id", specs.PLACEHOLDER_TASK_ID,
2196			"--task_name", b.Name,
2197			"--test_harness_path", "./tools/run-wasm-gm-tests",
2198			"--built_path", "./wasm_out",
2199			"--node_bin_path", "./node/node/bin",
2200			"--resource_path", "./resources",
2201			"--work_path", "./wasm_gm/work",
2202			"--gold_ctl_path", "./cipd_bin_packages/goldctl",
2203			"--gold_hashes_url", b.cfg.GoldHashesURL,
2204			"--git_commit", specs.PLACEHOLDER_REVISION,
2205			"--changelist_id", specs.PLACEHOLDER_ISSUE,
2206			"--patchset_order", specs.PLACEHOLDER_PATCHSET,
2207			"--tryjob_id", specs.PLACEHOLDER_BUILDBUCKET_BUILD_ID,
2208			// TODO(kjlubick, nifong) Make these not hard coded if we change the configs we test on.
2209			"--webgl_version", "2", // 0 means CPU ; this flag controls cpu_or_gpu and extra_config
2210			"--gold_key", "alpha_type:Premul",
2211			"--gold_key", "arch:wasm",
2212			"--gold_key", "browser:Chrome",
2213			"--gold_key", "color_depth:8888",
2214			"--gold_key", "config:gles",
2215			"--gold_key", "configuration:Release",
2216			"--gold_key", "cpu_or_gpu_value:QuadroP400",
2217			"--gold_key", "model:Golo",
2218			"--gold_key", "os:Ubuntu18",
2219		)
2220	})
2221}
2222
// labelAndSavedOutputDir contains a Bazel label (e.g. //tests:some_test) and a //bazel-bin
// subdirectory that should be stored into CAS.
type labelAndSavedOutputDir struct {
	// label is the Bazel label or target pattern to build or test.
	label          string
	// savedOutputDir is the //bazel-bin subdirectory to store into CAS after
	// building, or the empty string when no outputs need to be saved.
	savedOutputDir string
}
2229
// Maps a shorthand version of a label (which can be an arbitrary string) to an absolute Bazel
// label or "target pattern" https://bazel.build/docs/build#specifying-build-targets
// The reason we need this mapping is because Buildbucket build names cannot have / or : in them.
// TODO(borenet/kjlubick): Is there a way to generate a mapping using `bazel query`?
var shorthandToLabel = map[string]labelAndSavedOutputDir{
	"all_tests":                  {"//tests:linux_rbe_tests", ""},
	"core":                       {"//:core", ""},
	"cpu_8888_benchmark_test":    {"//bench:cpu_8888_test", ""},
	"cpu_gms":                    {"//gm:cpu_gm_tests", ""},
	"dm":                         {"//dm", ""},
	"full_library":               {"//tools:full_build", ""},
	"ganesh_gl":                  {"//:ganesh_gl", ""},
	"hello_bazel_world_test":     {"//gm:hello_bazel_world_test", ""},
	"modules_canvaskit":          {"//modules/canvaskit:canvaskit", ""},
	"modules_canvaskit_js_tests": {"//modules/canvaskit:canvaskit_js_tests", ""},
	"skottie_tool_gpu":           {"//modules/skottie:skottie_tool_gpu", ""},
	"viewer":                     {"//tools/viewer:viewer", ""},
	// Targets under //example/external_client exercise building against Skia
	// the way an external user would (see the "external_client" task driver).
	"decode_everything":          {"//example/external_client:decode_everything", ""},
	"path_combiner":              {"//example/external_client:path_combiner", ""},
	"png_decoder":                {"//example/external_client:png_decoder", ""},
	"shape_text":                 {"//example/external_client:shape_text", ""},
	"svg_with_harfbuzz":          {"//example/external_client:svg_with_harfbuzz", ""},
	"svg_with_primitive":         {"//example/external_client:svg_with_primitive", ""},
	"use_ganesh_gl":              {"//example/external_client:use_ganesh_gl", ""},
	"use_ganesh_vulkan":          {"//example/external_client:use_ganesh_vulkan", ""},
	"use_graphite_native_vulkan": {"//example/external_client:use_graphite_native_vulkan", ""},
	"use_skresources":            {"//example/external_client:use_skresources", ""},
	"write_text_to_png":          {"//example/external_client:write_text_to_png", ""},
	"write_to_pdf":               {"//example/external_client:write_to_pdf", ""},
	"play_skottie":               {"//example/external_client:play_skottie", ""},

	// Currently there is no way to tell Bazel "only test go_test targets", so we must group them
	// under a test_suite.
	//
	// Alternatives:
	//
	// - Use --test_lang_filters, which currently does not work for non-native rules. See
	//   https://github.com/bazelbuild/bazel/issues/12618.
	//
	// - As suggested in the same GitHub issue, "bazel query 'kind(go_test, //...)'" would normally
	//   return the list of labels. However, this fails due to BUILD.bazel files in
	//   //third_party/externals and //bazel/external/vello. We could try either fixing those files
	//   when possible, or adding them to //.bazelignore (either permanently or temporarily inside a
	//   specialized task driver just for Go tests).
	//
	// - Have Gazelle add a tag to all Go tests: go_test(name = "foo_test", tag = "go", ... ). Then,
	//   we can use a wildcard label such as //... and tell Bazel to only test those targets with
	//   said tag, e.g. "bazel test //... --test_tag_filters=go"
	//   (https://bazel.build/reference/command-line-reference#flag--test_tag_filters). Today this
	//   does not work due to the third party and external BUILD.bazel files mentioned in the
	//   previous bullet point.
	"all_go_tests": {"//:all_go_tests", ""},

	// Android tests that run on a device. We store the //bazel-bin/tests directory into CAS for use
	// by subsequent CI tasks.
	"android_math_test":               {"//tests:android_math_test", "tests"},
	"hello_bazel_world_android_test":  {"//gm:hello_bazel_world_android_test", "gm"},
	"cpu_8888_benchmark_android_test": {"//bench:cpu_8888_android_test", "bench"},
}
2289
// bazelBuild adds a task which builds the specified single-target label (//foo:bar) or
// multi-target label (//foo/...) using Bazel. Depending on the host we run this on, we may
// specify additional Bazel args to build faster. Optionally, a subset of the //bazel-bin directory
// will be stored into CAS for use by subsequent tasks.
func (b *jobBuilder) bazelBuild() {
	shorthand, config, host := b.parts.bazelBuildParts()
	labelAndSavedOutputDir, ok := shorthandToLabel[shorthand]
	if !ok {
		panic("unsupported Bazel label shorthand " + shorthand)
	}

	b.addTask(b.Name, func(b *taskBuilder) {
		// Pick the Bazel cache directory for the host OS.
		bazelCacheDir, ok := map[string]string{
			// We only run builds in GCE.
			"linux_x64":   bazelCacheDirOnGCELinux,
			"windows_x64": bazelCacheDirOnWindows,
		}[host]
		if !ok {
			panic("unknown Bazel cache dir for Bazel host " + host)
		}

		// Bazel git_repository rules shell out to git. Use the version from
		// CIPD to ensure that we're not using an old locally-installed version.
		b.usesGit()
		b.addToPATH("cipd_bin_packages", "cipd_bin_packages/bin")

		// Base bazel_build task driver invocation; host-specific flags are
		// appended below.
		cmd := []string{
			b.taskDriver("bazel_build", host != "windows_x64"),
			"--project_id=skia-swarming-bots",
			"--task_id=" + specs.PLACEHOLDER_TASK_ID,
			"--task_name=" + b.Name,
			"--bazel_label=" + labelAndSavedOutputDir.label,
			"--bazel_config=" + config,
			"--bazel_cache_dir=" + bazelCacheDir,
			"--workdir=./skia",
		}

		if labelAndSavedOutputDir.savedOutputDir != "" {
			cmd = append(cmd,
				"--out_path="+OUTPUT_BAZEL,
				// Which //bazel-bin subdirectory to copy into the output dir (flag --out_path).
				"--saved_output_dir="+labelAndSavedOutputDir.savedOutputDir,
			)
		}

		// Host-specific dimensions and Bazel arguments.
		if host == "linux_x64" {
			b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
			b.usesBazel("linux_x64")
			if labelAndSavedOutputDir.savedOutputDir != "" {
				// We assume that builds which require storing a subset of //bazel-bin to CAS are Android
				// builds. We want such builds to use RBE, and we want to download the built top-level
				// artifacts. Also, we need the adb_test runner to be cross-compiled to run on a Raspberry
				// Pi.
				cmd = append(cmd, "--bazel_arg=--config=linux_rbe")
				cmd = append(cmd, "--bazel_arg=--jobs=100")
				cmd = append(cmd, "--bazel_arg=--remote_download_toplevel")
				cmd = append(cmd, "--bazel_arg=--adb_platform=linux_arm64")
			} else {
				// We want all Linux Bazel Builds to use RBE
				cmd = append(cmd, "--bazel_arg=--config=for_linux_x64_with_rbe")
				cmd = append(cmd, "--bazel_arg=--jobs=100")
				cmd = append(cmd, "--bazel_arg=--remote_download_minimal")
			}
		} else if host == "windows_x64" {
			b.dimension(
				"cpu:x86-64-Haswell_GCE",
				"gpu:none",
				fmt.Sprintf("machine_type:%s", MACHINE_TYPE_LARGE),
				fmt.Sprintf("os:%s", DEFAULT_OS_WIN_GCE),
				"pool:Skia",
			)
			b.usesBazel("windows_x64")
			cmd = append(cmd, "--bazel_arg=--experimental_scale_timeouts=2.0")
		} else {
			panic("unsupported Bazel host " + host)
		}
		b.cmd(cmd...)

		b.idempotent()
		b.cas(CAS_BAZEL)
		b.attempts(1)
		b.serviceAccount(b.cfg.ServiceAccountCompile)
		if labelAndSavedOutputDir.savedOutputDir != "" {
			b.output(OUTPUT_BAZEL)
		}
	})
}
2377
// precompiledBazelTestKind identifies which kind of precompiled Bazel test (if any) a task
// runs; bazelTest uses it to pick the flags passed to the bazel_test_precompiled task driver.
type precompiledBazelTestKind int

const (
	// precompiledBazelTestNone: the task does not run a precompiled test.
	precompiledBazelTestNone precompiledBazelTestKind = iota
	// precompiledBenchmarkTest: a precompiled benchmark binary.
	precompiledBenchmarkTest
	// precompiledGMTest: a precompiled GM whose results are reported via goldctl.
	precompiledGMTest
	// precompiledUnitTest: a precompiled unit test binary.
	precompiledUnitTest
)
2386
// bazelTest generates a task which runs tests (GMs, benchmarks, unit tests, CanvasKit Gold
// tests, etc.) via Bazel — either by invoking Bazel directly or by executing a binary that a
// BazelBuild task precompiled and stored into CAS.
func (b *jobBuilder) bazelTest() {
	taskdriverName, shorthand, buildConfig, host, testConfig := b.parts.bazelTestParts()
	labelAndSavedOutputDir, ok := shorthandToLabel[shorthand]
	if !ok {
		panic("unsupported Bazel label shorthand " + shorthand)
	}

	// Expand task driver name to keep task names short.
	precompiledKind := precompiledBazelTestNone
	if taskdriverName == "precompiled_benchmark" {
		taskdriverName = "bazel_test_precompiled"
		precompiledKind = precompiledBenchmarkTest
	}
	if taskdriverName == "precompiled_gm" {
		taskdriverName = "bazel_test_precompiled"
		precompiledKind = precompiledGMTest
	}
	if taskdriverName == "precompiled_test" {
		taskdriverName = "bazel_test_precompiled"
		precompiledKind = precompiledUnitTest
	}
	if taskdriverName == "gm" {
		taskdriverName = "bazel_test_gm"
	}
	if taskdriverName == "benchmark" {
		taskdriverName = "bazel_test_benchmark"
	}

	// Resolve the device-specific Bazel config named in the task, if any.
	var deviceSpecificBazelConfig *device_specific_configs.Config
	if testConfig != "" {
		if config, ok := device_specific_configs.Configs[testConfig]; ok {
			deviceSpecificBazelConfig = &config
		} else {
			panic(fmt.Sprintf("Unknown device-specific Bazel config: %q", testConfig))
		}
	}

	// Non-GCE (Skolo) devices use a different Bazel cache location.
	bazelCacheDir := bazelCacheDirOnGCELinux
	if deviceSpecificBazelConfig != nil && deviceSpecificBazelConfig.Keys["model"] != "GCE" {
		bazelCacheDir = bazelCacheDirOnSkoloLinux
	}

	b.addTask(b.Name, func(b *taskBuilder) {
		// Flags common to every task driver; driver-specific flags are appended
		// in the switch below.
		cmd := []string{
			b.taskDriver(taskdriverName, false),
			"--project_id=skia-swarming-bots",
			"--task_id=" + specs.PLACEHOLDER_TASK_ID,
			"--task_name=" + b.Name,
			"--workdir=.",
		}

		switch taskdriverName {
		case "canvaskit_gold":
			cmd = append(cmd,
				"--bazel_label="+labelAndSavedOutputDir.label,
				"--bazel_config="+buildConfig,
				"--bazel_cache_dir="+bazelCacheDir,
				"--goldctl_path=./cipd_bin_packages/goldctl",
				"--git_commit="+specs.PLACEHOLDER_REVISION,
				"--changelist_id="+specs.PLACEHOLDER_ISSUE,
				"--patchset_order="+specs.PLACEHOLDER_PATCHSET,
				"--tryjob_id="+specs.PLACEHOLDER_BUILDBUCKET_BUILD_ID)
			b.cipd(CIPD_PKGS_GOLDCTL)
			// The Gold keys depend on the build config; unknown configs must be
			// added here explicitly.
			switch buildConfig {
			case "ck_full_cpu_release_chrome":
				cmd = append(cmd, "--cpu_or_gpu=CPU", "--cpu_or_gpu_value=CPU",
					"--compilation_mode=Release", "--browser=Chrome")
			case "ck_full_webgl2_release_chrome":
				cmd = append(cmd, "--cpu_or_gpu=GPU", "--cpu_or_gpu_value=WebGL2",
					"--compilation_mode=Release", "--browser=Chrome")
			default:
				panic("Gold keys not specified for config " + buildConfig)
			}

		case "cpu_tests":
			cmd = append(cmd,
				"--bazel_label="+labelAndSavedOutputDir.label,
				"--bazel_config="+buildConfig,
				"--bazel_cache_dir="+bazelCacheDir)

		case "toolchain_layering_check":
			cmd = append(cmd,
				"--bazel_label="+labelAndSavedOutputDir.label,
				"--bazel_config="+buildConfig,
				"--bazel_cache_dir="+bazelCacheDir)

		case "bazel_test_precompiled":
			// Compute the file name of the test based on its Bazel label. The file name will be relative to
			// the bazel-bin directory, which we receive a subset of as a CAS input.
			command := strings.ReplaceAll(labelAndSavedOutputDir.label, "//", "")
			command = strings.ReplaceAll(command, ":", "/")
			command = path.Join(OUTPUT_BAZEL, command)

			// The test's working directory will be its runfiles directory, which simulates the behavior of
			// the "bazel run" command.
			commandWorkDir := path.Join(command+".runfiles", "skia")

			cmd = append(cmd,
				"--command="+command,
				"--command_workdir="+commandWorkDir)

			// Flags specific to the kind of precompiled test being run.
			switch precompiledKind {
			case precompiledBenchmarkTest:
				cmd = append(cmd,
					"--kind=benchmark",
					"--git_commit="+specs.PLACEHOLDER_REVISION,
					"--changelist_id="+specs.PLACEHOLDER_ISSUE,
					"--patchset_order="+specs.PLACEHOLDER_PATCHSET)

			case precompiledGMTest:
				cmd = append(cmd,
					"--kind=gm",
					"--bazel_label="+labelAndSavedOutputDir.label,
					"--goldctl_path=./cipd_bin_packages/goldctl",
					"--git_commit="+specs.PLACEHOLDER_REVISION,
					"--changelist_id="+specs.PLACEHOLDER_ISSUE,
					"--patchset_order="+specs.PLACEHOLDER_PATCHSET,
					"--tryjob_id="+specs.PLACEHOLDER_BUILDBUCKET_BUILD_ID)
				b.cipd(CIPD_PKGS_GOLDCTL)

			case precompiledUnitTest:
				cmd = append(cmd, "--kind=unit")

			default:
				panic(fmt.Sprintf("Unknown precompiled test kind: %v", precompiledKind))
			}

		case "bazel_test_gm":
			cmd = append(cmd,
				"--bazel_label="+labelAndSavedOutputDir.label,
				"--bazel_config="+buildConfig,
				"--bazel_cache_dir="+bazelCacheDir,
				"--goldctl_path=./cipd_bin_packages/goldctl",
				"--git_commit="+specs.PLACEHOLDER_REVISION,
				"--changelist_id="+specs.PLACEHOLDER_ISSUE,
				"--patchset_order="+specs.PLACEHOLDER_PATCHSET,
				"--tryjob_id="+specs.PLACEHOLDER_BUILDBUCKET_BUILD_ID)
			b.cipd(CIPD_PKGS_GOLDCTL)

		case "bazel_test_benchmark":
			// Note that these tasks run on Skolo machines.
			cmd = append(cmd,
				"--bazel_label="+labelAndSavedOutputDir.label,
				"--bazel_config="+buildConfig,
				"--bazel_cache_dir="+bazelCacheDirOnSkoloLinux,
				"--git_commit="+specs.PLACEHOLDER_REVISION,
				"--changelist_id="+specs.PLACEHOLDER_ISSUE,
				"--patchset_order="+specs.PLACEHOLDER_PATCHSET)

		case "external_client":
			// For external_client, we want to test how an external user would
			// build using Skia. Therefore, we change to the workspace in that
			// directory and use labels relative to it.
			pathInSkia := "example/external_client"
			label := strings.Replace(labelAndSavedOutputDir.label, pathInSkia, "", -1)
			cmd = append(cmd,
				"--bazel_label="+label,
				"--path_in_skia="+pathInSkia,
				"--bazel_cache_dir="+bazelCacheDir)
			b.usesDocker()

		default:
			panic("Unsupported Bazel taskdriver " + taskdriverName)
		}

		if deviceSpecificBazelConfig != nil {
			cmd = append(cmd, "--device_specific_bazel_config="+deviceSpecificBazelConfig.Name)
		}

		if host == "linux_x64" {
			b.usesBazel("linux_x64")
		} else if host == "linux_arm64" || host == "on_rpi" {
			// The RPIs do not run Bazel directly, they have precompiled binary
			// to run instead.
		} else {
			panic("unsupported Bazel host " + host)
		}

		// Device-run task drivers take their Swarming dimensions from the
		// device-specific config; everything else runs on GCE.
		if taskdriverName == "bazel_test_gm" ||
			taskdriverName == "bazel_test_benchmark" ||
			taskdriverName == "bazel_test_precompiled" {
			if taskdriverName == "bazel_test_precompiled" {
				// This task precompiles the test and stores it to CAS.
				b.dep(fmt.Sprintf("BazelBuild-%s-%s-linux_x64", shorthand, buildConfig))
			}

			// Set dimensions.
			if deviceSpecificBazelConfig == nil {
				log.Fatalf("While processing job %q: task driver %q requires a device-specific Bazel config.", b.Name, taskdriverName)
			}
			if len(deviceSpecificBazelConfig.SwarmingDimensions) == 0 {
				log.Fatalf("While processing job %q: device-specific Bazel config %q does not provide Swarming dimensions.", b.Name, deviceSpecificBazelConfig.Name)
			}
			var dimensions []string
			for name, value := range deviceSpecificBazelConfig.SwarmingDimensions {
				dimensions = append(dimensions, fmt.Sprintf("%s:%s", name, value))
			}
			dimensions = append(dimensions, fmt.Sprintf("pool:%s", b.cfg.Pool))
			// Map iteration order is random; sort for deterministic tasks.json output.
			sort.Strings(dimensions)
			b.dimension(dimensions...)
		} else {
			b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
		}

		b.cmd(cmd...)
		b.idempotent()
		b.cas(CAS_BAZEL)
		b.attempts(1)
		b.serviceAccount(b.cfg.ServiceAccountCompile)
	})
}
2598