• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1// Copyright 2016 The Chromium Authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5package gen_tasks_logic
6
7/*
8	Generate the tasks.json file.
9*/
10
11import (
12	"encoding/json"
13	"fmt"
14	"io/ioutil"
15	"log"
16	"path"
17	"path/filepath"
18	"regexp"
19	"runtime"
20	"sort"
21	"strconv"
22	"strings"
23	"time"
24
25	"go.skia.org/infra/go/cas/rbe"
26	"go.skia.org/infra/go/cipd"
27	"go.skia.org/infra/task_scheduler/go/specs"
28)
29
const (
	// Names of CAS (content-addressed storage) input specs. Each name is the
	// key under which a CasSpec is registered (most via b.MustAddCasSpec in
	// GenTasks; CAS_COMPILE is presumably registered by generateCompileCAS —
	// its definition is not visible here).
	CAS_CANVASKIT     = "canvaskit"
	CAS_COMPILE       = "compile"
	CAS_EMPTY         = "empty" // TODO(borenet): It'd be nice if this wasn't necessary.
	CAS_LOTTIE_CI     = "lottie-ci"
	CAS_LOTTIE_WEB    = "lottie-web"
	CAS_PATHKIT       = "pathkit"
	CAS_PERF          = "perf"
	CAS_PUPPETEER     = "puppeteer"
	CAS_RUN_RECIPE    = "run-recipe"
	CAS_RECIPES       = "recipes"
	CAS_RECREATE_SKPS = "recreate-skps"
	CAS_SKOTTIE_WASM  = "skottie-wasm"
	CAS_SKPBENCH      = "skpbench"
	CAS_TASK_DRIVERS  = "task-drivers"
	CAS_TEST          = "test"
	CAS_WASM_GM       = "wasm-gm"
	CAS_WHOLE_REPO    = "whole-repo"

	// Names (or name prefixes) of per-commit housekeeping tasks which other
	// tasks depend on (e.g. recipe bundling and asset isolation).
	BUILD_TASK_DRIVERS_PREFIX  = "Housekeeper-PerCommit-BuildTaskDrivers"
	BUNDLE_RECIPES_NAME        = "Housekeeper-PerCommit-BundleRecipes"
	ISOLATE_GCLOUD_LINUX_NAME  = "Housekeeper-PerCommit-IsolateGCloudLinux"
	ISOLATE_SKIMAGE_NAME       = "Housekeeper-PerCommit-IsolateSkImage"
	ISOLATE_SKP_NAME           = "Housekeeper-PerCommit-IsolateSKP"
	ISOLATE_MSKP_NAME          = "Housekeeper-PerCommit-IsolateMSKP"
	ISOLATE_SVG_NAME           = "Housekeeper-PerCommit-IsolateSVG"
	ISOLATE_NDK_LINUX_NAME     = "Housekeeper-PerCommit-IsolateAndroidNDKLinux"
	ISOLATE_SDK_LINUX_NAME     = "Housekeeper-PerCommit-IsolateAndroidSDKLinux"
	ISOLATE_WIN_TOOLCHAIN_NAME = "Housekeeper-PerCommit-IsolateWinToolchain"

	// Swarming "os" dimension values, and the OS name components used in
	// compile task names (see deriveCompileTaskName).
	DEFAULT_OS_DEBIAN              = "Debian-10.10"
	DEFAULT_OS_LINUX_GCE           = "Debian-10.3"
	OLD_OS_LINUX_GCE               = "Debian-9.8"
	COMPILE_TASK_NAME_OS_LINUX     = "Debian10"
	COMPILE_TASK_NAME_OS_LINUX_OLD = "Debian9"
	DEFAULT_OS_MAC                 = "Mac-10.15.7"
	DEFAULT_OS_WIN                 = "Windows-Server-17763"

	// GCE machine types, used as the "machine_type" Swarming dimension.
	// Small is a 2-core machine.
	// TODO(dogben): Would n1-standard-1 or n1-standard-2 be sufficient?
	MACHINE_TYPE_SMALL = "n1-highmem-2"
	// Medium is a 16-core machine
	MACHINE_TYPE_MEDIUM = "n1-standard-16"
	// Large is a 64-core machine. (We use "highcpu" because we don't need more than 57GB memory for
	// any of our tasks.)
	MACHINE_TYPE_LARGE = "n1-highcpu-64"

	// Swarming output dirs.
	OUTPUT_NONE  = "output_ignored" // This will result in outputs not being isolated.
	OUTPUT_BUILD = "build"
	OUTPUT_TEST  = "test"
	OUTPUT_PERF  = "perf"

	// Name prefix for upload jobs.
	PREFIX_UPLOAD = "Upload"
)
86
var (
	// "Constants"

	// Named caches used by tasks.

	// CACHES_GIT holds the named Swarming caches for git checkouts.
	CACHES_GIT = []*specs.Cache{
		{
			Name: "git",
			Path: "cache/git",
		},
		{
			Name: "git_cache",
			Path: "cache/git_cache",
		},
	}
	// CACHES_GO holds the named Swarming caches for the Go build cache and
	// GOPATH.
	CACHES_GO = []*specs.Cache{
		{
			Name: "go_cache",
			Path: "cache/go_cache",
		},
		{
			Name: "gopath",
			Path: "cache/gopath",
		},
	}
	// CACHES_WORKDIR is the persistent working-directory cache for tasks.
	CACHES_WORKDIR = []*specs.Cache{
		{
			Name: "work",
			Path: "cache/work",
		},
	}
	// CACHES_CCACHE is the cache used for ccache-enabled compiles.
	CACHES_CCACHE = []*specs.Cache{
		{
			Name: "ccache",
			Path: "cache/ccache",
		},
	}
	// The "docker" cache is used as a persistent working directory for
	// tasks which use Docker. It is not to be confused with Docker's own
	// cache, which stores images. We do not currently use a named Swarming
	// cache for the latter.
	// TODO(borenet): We should ensure that any task which uses Docker does
	// not also use the normal "work" cache, to prevent issues like
	// https://bugs.chromium.org/p/skia/issues/detail?id=9749.
	CACHES_DOCKER = []*specs.Cache{
		{
			Name: "docker",
			Path: "cache/docker",
		},
	}

	// CAS_SPEC_LOTTIE_CI is a CasSpec which includes the files needed for
	// lottie-ci.  This is global so that it can be overridden by other
	// repositories which import this file.
	CAS_SPEC_LOTTIE_CI = &specs.CasSpec{
		Root: "..",
		Paths: []string{
			"skia/.vpython",
			"skia/infra/bots/run_recipe.py",
			"skia/infra/lottiecap",
			"skia/tools/lottie-web-perf",
			"skia/tools/lottiecap",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	}

	// CAS_SPEC_WHOLE_REPO is a CasSpec which includes the entire repo. This is
	// global so that it can be overridden by other repositories which import
	// this file.
	CAS_SPEC_WHOLE_REPO = &specs.CasSpec{
		Root:     "..",
		Paths:    []string{"skia"},
		Excludes: []string{rbe.ExcludeGitDir},
	}

	// TODO(borenet): This hacky and bad.
	// CIPD_PKG_LUCI_AUTH is the CIPD package for the luci-auth tool.
	CIPD_PKG_LUCI_AUTH = cipd.MustGetPackage("infra/tools/luci-auth/${platform}")

	// CIPD_PKGS_GOLDCTL holds the CIPD package for the goldctl tool.
	CIPD_PKGS_GOLDCTL = []*specs.CipdPackage{cipd.MustGetPackage("skia/tools/goldctl/${platform}")}

	// CIPD_PKGS_XCODE holds the CIPD packages needed to install XCode.
	CIPD_PKGS_XCODE = []*specs.CipdPackage{
		// https://chromium.googlesource.com/chromium/tools/build/+/e19b7d9390e2bb438b566515b141ed2b9ed2c7c2/scripts/slave/recipe_modules/ios/api.py#317
		// This package is really just an installer for XCode.
		{
			Name: "infra/tools/mac_toolchain/${platform}",
			Path: "mac_toolchain",
			// When this is updated, also update
			// https://skia.googlesource.com/skcms.git/+/f1e2b45d18facbae2dece3aca673fe1603077846/infra/bots/gen_tasks.go#56
			Version: "git_revision:796d2b92cff93fc2059623ce0a66284373ceea0a",
		},
	}

	// These properties are required by some tasks, eg. for running
	// bot_update, but they prevent de-duplication, so they should only be
	// used where necessary.
	EXTRA_PROPS = map[string]string{
		"buildbucket_build_id": specs.PLACEHOLDER_BUILDBUCKET_BUILD_ID,
		"patch_issue":          specs.PLACEHOLDER_ISSUE_INT,
		"patch_ref":            specs.PLACEHOLDER_PATCH_REF,
		"patch_repo":           specs.PLACEHOLDER_PATCH_REPO,
		"patch_set":            specs.PLACEHOLDER_PATCHSET_INT,
		"patch_storage":        specs.PLACEHOLDER_PATCH_STORAGE,
		"repository":           specs.PLACEHOLDER_REPO,
		"revision":             specs.PLACEHOLDER_REVISION,
		"task_id":              specs.PLACEHOLDER_TASK_ID,
	}

	// ISOLATE_ASSET_MAPPING maps the name of an asset to the configuration
	// for how the CIPD package should be installed for a given task.
	ISOLATE_ASSET_MAPPING = map[string]uploadAssetCASCfg{
		"gcloud_linux": {
			uploadTaskName: ISOLATE_GCLOUD_LINUX_NAME,
			path:           "gcloud_linux",
		},
		"skimage": {
			uploadTaskName: ISOLATE_SKIMAGE_NAME,
			path:           "skimage",
		},
		"skp": {
			uploadTaskName: ISOLATE_SKP_NAME,
			path:           "skp",
		},
		"svg": {
			uploadTaskName: ISOLATE_SVG_NAME,
			path:           "svg",
		},
		"mskp": {
			uploadTaskName: ISOLATE_MSKP_NAME,
			path:           "mskp",
		},
		"android_ndk_linux": {
			uploadTaskName: ISOLATE_NDK_LINUX_NAME,
			path:           "android_ndk_linux",
		},
		"android_sdk_linux": {
			uploadTaskName: ISOLATE_SDK_LINUX_NAME,
			path:           "android_sdk_linux",
		},
		"win_toolchain": {
			alwaysIsolate:  true,
			uploadTaskName: ISOLATE_WIN_TOOLCHAIN_NAME,
			path:           "win_toolchain",
		},
	}

	// Set dontReduceOpsTaskSplitting option on these models
	DONT_REDUCE_OPS_TASK_SPLITTING_MODELS = []string{
		"NUC5PPYH",
	}
)
236
// Config contains general configuration information.
//
// It is typically loaded from a cfg.json file which lives next to the calling
// gen_tasks.go file (see LoadConfig). Fields tagged `json:"-"` are function
// hooks which can only be set programmatically.
type Config struct {
	// Directory containing assets. Assumed to be relative to the directory
	// which contains the calling gen_tasks.go file. If not specified, uses
	// the infra/bots/assets from this repo.
	AssetsDir string `json:"assets_dir"`

	// Path to the builder name schema JSON file. Assumed to be relative to
	// the directory which contains the calling gen_tasks.go file. If not
	// specified, uses infra/bots/recipe_modules/builder_name_schema/builder_name_schema.json
	// from this repo.
	BuilderNameSchemaFile string `json:"builder_name_schema"`

	// URL of the Skia Gold known hashes endpoint.
	GoldHashesURL string `json:"gold_hashes_url"`

	// GCS bucket used for GM results.
	GsBucketGm string `json:"gs_bucket_gm"`

	// GCS bucket used for Nanobench results.
	GsBucketNano string `json:"gs_bucket_nano"`

	// Optional function which returns a bot ID for internal devices.
	// Not serializable; must be set in code by the caller.
	InternalHardwareLabel func(parts map[string]string) *int `json:"-"`

	// List of task names for which we'll never upload results.
	NoUpload []string `json:"no_upload"`

	// PathToSkia is the relative path from the root of the current checkout to
	// the root of the Skia checkout.
	PathToSkia string `json:"path_to_skia"`

	// Swarming pool used for triggering tasks.
	Pool string `json:"pool"`

	// LUCI project associated with this repo.
	Project string `json:"project"`

	// Service accounts used to run the various categories of tasks.
	ServiceAccountCanary       string `json:"service_account_canary"`
	ServiceAccountCompile      string `json:"service_account_compile"`
	ServiceAccountHousekeeper  string `json:"service_account_housekeeper"`
	ServiceAccountRecreateSKPs string `json:"service_account_recreate_skps"`
	ServiceAccountUploadBinary string `json:"service_account_upload_binary"`
	ServiceAccountUploadGM     string `json:"service_account_upload_gm"`
	ServiceAccountUploadNano   string `json:"service_account_upload_nano"`

	// Optional override function which derives Swarming bot dimensions
	// from parts of task names. If it returns nil for a given set of parts,
	// the default dimensions are used (see swarmDimensions).
	SwarmDimensions func(parts map[string]string) []string `json:"-"`
}
288
// JobInfo is the type of each entry in the jobs.json file.
type JobInfo struct {
	// The name of the job.
	Name string `json:"name"`

	// The optional CQ config of this job. If the CQ config is missing then the
	// job will not be added to the CQ of this branch. See GenTasks, which
	// registers the CQ job via MustAddCQJob when this is non-nil.
	CQConfig *specs.CommitQueueJobConfig `json:"cq_config,omitempty"`
}
298
299// LoadConfig loads the Config from a cfg.json file which is the sibling of the
300// calling gen_tasks.go file.
301func LoadConfig() *Config {
302	cfgDir := getCallingDirName()
303	var cfg Config
304	LoadJson(filepath.Join(cfgDir, "cfg.json"), &cfg)
305	return &cfg
306}
307
308// CheckoutRoot is a wrapper around specs.GetCheckoutRoot which prevents the
309// caller from needing a dependency on the specs package.
310func CheckoutRoot() string {
311	root, err := specs.GetCheckoutRoot()
312	if err != nil {
313		log.Fatal(err)
314	}
315	return root
316}
317
// LoadJson loads JSON from the given file and unmarshals it into the given
// destination. Exits the program if the file cannot be read or its contents
// cannot be parsed.
func LoadJson(filename string, dest interface{}) {
	contents, err := ioutil.ReadFile(filename)
	if err != nil {
		log.Fatalf("Unable to read %q: %s", filename, err)
	}
	if err = json.Unmarshal(contents, dest); err != nil {
		log.Fatalf("Unable to parse %q: %s", filename, err)
	}
}
329
// In returns true if |s| is *in* |a| slice.
// TODO(borenet): This is copied from go.skia.org/infra/go/util to avoid the
// huge set of additional dependencies added by that package.
func In(s string, a []string) bool {
	found := false
	for _, candidate := range a {
		if candidate == s {
			found = true
			break
		}
	}
	return found
}
341
// GenTasks regenerates the tasks.json file. Loads the job list from a jobs.json
// file which is the sibling of the calling gen_tasks.go file. If cfg is nil, it
// is similarly loaded from a cfg.json file which is the sibling of the calling
// gen_tasks.go file.
//
// NOTE: must be called directly from the gen_tasks.go file, since
// getCallingDirName inspects the call stack at a fixed depth.
func GenTasks(cfg *Config) {
	b := specs.MustNewTasksCfgBuilder()

	// Find the paths to the infra/bots directories in this repo and the
	// repo of the calling file.
	relpathTargetDir := getThisDirName()
	relpathBaseDir := getCallingDirName()

	// Parse jobs.json.
	var jobsWithInfo []*JobInfo
	LoadJson(filepath.Join(relpathBaseDir, "jobs.json"), &jobsWithInfo)
	// Create a slice with only job names.
	jobs := []string{}
	for _, j := range jobsWithInfo {
		jobs = append(jobs, j.Name)
	}

	// Load the config from the caller's cfg.json if it was not supplied.
	if cfg == nil {
		cfg = new(Config)
		LoadJson(filepath.Join(relpathBaseDir, "cfg.json"), cfg)
	}

	// Create the JobNameSchema. The calling repo's schema file, if configured,
	// overrides the one from this repo.
	builderNameSchemaFile := filepath.Join(relpathTargetDir, "recipe_modules", "builder_name_schema", "builder_name_schema.json")
	if cfg.BuilderNameSchemaFile != "" {
		builderNameSchemaFile = filepath.Join(relpathBaseDir, cfg.BuilderNameSchemaFile)
	}
	schema, err := NewJobNameSchema(builderNameSchemaFile)
	if err != nil {
		log.Fatal(err)
	}

	// Set the assets dir. The calling repo's assets dir, if configured,
	// overrides the one from this repo.
	assetsDir := filepath.Join(relpathTargetDir, "assets")
	if cfg.AssetsDir != "" {
		assetsDir = filepath.Join(relpathBaseDir, cfg.AssetsDir)
	}
	b.SetAssetsDir(assetsDir)

	// Create Tasks and Jobs.
	builder := &builder{
		TasksCfgBuilder: b,
		cfg:             cfg,
		jobNameSchema:   schema,
		jobs:            jobs,
	}
	for _, j := range jobsWithInfo {
		jb := newJobBuilder(builder, j.Name)
		jb.genTasksForJob()
		jb.finish()

		// Add the CQ spec if it is a CQ job.
		if j.CQConfig != nil {
			b.MustAddCQJob(j.Name, j.CQConfig)
		}
	}

	// Create CasSpecs. Most are rooted at "..", the parent of the checkout,
	// which is why their paths are prefixed with "skia/".
	b.MustAddCasSpec(CAS_CANVASKIT, &specs.CasSpec{
		Root: "..",
		Paths: []string{
			"skia/.vpython",
			"skia/infra/bots/run_recipe.py",
			"skia/infra/canvaskit",
			"skia/modules/canvaskit",
			"skia/modules/pathkit/perf/perfReporter.js",
			"skia/modules/pathkit/tests/testReporter.js",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	})
	b.MustAddCasSpec(CAS_EMPTY, specs.EmptyCasSpec)
	b.MustAddCasSpec(CAS_LOTTIE_CI, CAS_SPEC_LOTTIE_CI)
	b.MustAddCasSpec(CAS_LOTTIE_WEB, &specs.CasSpec{
		Root: "..",
		Paths: []string{
			"skia/.vpython",
			"skia/infra/bots/run_recipe.py",
			"skia/tools/lottie-web-perf",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	})
	b.MustAddCasSpec(CAS_PATHKIT, &specs.CasSpec{
		Root: "..",
		Paths: []string{
			"skia/.vpython",
			"skia/infra/bots/run_recipe.py",
			"skia/infra/pathkit",
			"skia/modules/pathkit",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	})
	b.MustAddCasSpec(CAS_PERF, &specs.CasSpec{
		Root: "..",
		Paths: []string{
			"skia/.vpython",
			"skia/infra/bots/assets",
			"skia/infra/bots/run_recipe.py",
			"skia/platform_tools/ios/bin",
			"skia/resources",
			"skia/tools/valgrind.supp",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	})
	b.MustAddCasSpec(CAS_PUPPETEER, &specs.CasSpec{
		Root: "../skia", // Needed for other repos.
		Paths: []string{
			".vpython",
			"tools/perf-canvaskit-puppeteer",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	})
	b.MustAddCasSpec(CAS_RECIPES, &specs.CasSpec{
		Root: "..",
		Paths: []string{
			"skia/infra/config/recipes.cfg",
			"skia/infra/bots/bundle_recipes.sh",
			"skia/infra/bots/README.recipes.md",
			"skia/infra/bots/recipe_modules",
			"skia/infra/bots/recipes",
			"skia/infra/bots/recipes.py",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	})
	b.MustAddCasSpec(CAS_RUN_RECIPE, &specs.CasSpec{
		Root: "..",
		Paths: []string{
			"skia/.vpython",
			"skia/infra/bots/run_recipe.py",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	})
	b.MustAddCasSpec(CAS_SKOTTIE_WASM, &specs.CasSpec{
		Root: "..",
		Paths: []string{
			"skia/.vpython",
			"skia/infra/bots/run_recipe.py",
			"skia/tools/skottie-wasm-perf",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	})
	b.MustAddCasSpec(CAS_SKPBENCH, &specs.CasSpec{
		Root: "..",
		Paths: []string{
			"skia/.vpython",
			"skia/infra/bots/assets",
			"skia/infra/bots/run_recipe.py",
			"skia/tools/skpbench",
			"skia/tools/valgrind.supp",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	})
	b.MustAddCasSpec(CAS_TASK_DRIVERS, &specs.CasSpec{
		Root: "..",
		Paths: []string{
			"skia/.vpython",
			"skia/go.mod",
			"skia/go.sum",
			"skia/infra/bots/build_task_drivers.sh",
			"skia/infra/bots/run_recipe.py",
			"skia/infra/bots/task_drivers",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	})
	b.MustAddCasSpec(CAS_TEST, &specs.CasSpec{
		Root: "..",
		Paths: []string{
			"skia/.vpython",
			"skia/infra/bots/assets",
			"skia/infra/bots/run_recipe.py",
			"skia/platform_tools/ios/bin",
			"skia/resources",
			"skia/tools/valgrind.supp",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	})
	b.MustAddCasSpec(CAS_WASM_GM, &specs.CasSpec{
		Root: "../skia", // Needed for other repos.
		Paths: []string{
			".vpython",
			"resources",
			"tools/run-wasm-gm-tests",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	})
	b.MustAddCasSpec(CAS_WHOLE_REPO, CAS_SPEC_WHOLE_REPO)
	b.MustAddCasSpec(CAS_RECREATE_SKPS, &specs.CasSpec{
		Root: "..",
		Paths: []string{
			"skia/.vpython",
			"skia/DEPS",
			"skia/bin/fetch-sk",
			"skia/infra/bots/assets/skp",
			"skia/infra/bots/utils.py",
			"skia/infra/config/recipes.cfg",
			"skia/tools/skp",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	})
	generateCompileCAS(b, cfg)

	builder.MustFinish()
}
548
// getThisDirName returns the infra/bots directory which is an ancestor of this
// file. Exits the program if the path of the current file cannot be
// determined.
func getThisDirName() string {
	_, file, _, ok := runtime.Caller(0)
	if !ok {
		log.Fatal("Unable to find path to current file.")
	}
	// This file lives one directory below infra/bots, so go up two levels.
	return filepath.Dir(filepath.Dir(file))
}
558
// getCallingDirName returns the infra/bots directory which is an ancestor of
// the calling gen_tasks.go file. WARNING: assumes that the calling gen_tasks.go
// file appears two steps up the stack; do not call from a function which is not
// directly called by gen_tasks.go.
func getCallingDirName() string {
	// Caller(2) skips this function (0) and our direct caller (1), landing on
	// the gen_tasks.go frame.
	_, callingFileName, _, ok := runtime.Caller(2)
	if !ok {
		log.Fatal("Unable to find path to calling file.")
	}
	return filepath.Dir(callingFileName)
}
570
// builder is a wrapper for specs.TasksCfgBuilder which also carries the
// repo-specific configuration, the job name schema, and the full list of job
// names.
type builder struct {
	*specs.TasksCfgBuilder
	// cfg is the repo-specific configuration (see Config).
	cfg *Config
	// jobNameSchema is used to assemble job names (see MakeJobName usage in
	// deriveCompileTaskName).
	jobNameSchema *JobNameSchema
	// jobs holds the names of all jobs, as read from jobs.json.
	jobs []string
}
578
579// marshalJson encodes the given data as JSON and fixes escaping of '<' which Go
580// does by default.
581func marshalJson(data interface{}) string {
582	j, err := json.Marshal(data)
583	if err != nil {
584		log.Fatal(err)
585	}
586	return strings.Replace(string(j), "\\u003c", "<", -1)
587}
588
// kitchenTaskNoBundle sets up the task to run a recipe via Kitchen, without the
// recipe bundle.
//
// recipe is the name of the recipe to run. outputDir is the Swarming output
// directory (one of the OUTPUT_* constants); pass OUTPUT_NONE to skip
// isolating outputs.
func (b *taskBuilder) kitchenTaskNoBundle(recipe string, outputDir string) {
	// luci-auth and kitchen itself are installed from CIPD.
	b.cipd(CIPD_PKG_LUCI_AUTH)
	b.cipd(cipd.MustGetPackage("infra/tools/luci/kitchen/${platform}"))
	b.env("RECIPES_USE_PY3", "true")
	b.envPrefixes("VPYTHON_DEFAULT_SPEC", "skia/.vpython")
	b.usesPython()
	b.recipeProp("swarm_out_dir", outputDir)
	if outputDir != OUTPUT_NONE {
		b.output(outputDir)
	}
	// The recipe is launched through run_recipe.py under vpython3.
	python := "cipd_bin_packages/vpython3${EXECUTABLE_SUFFIX}"
	b.cmd(python, "-u", "skia/infra/bots/run_recipe.py", "${ISOLATED_OUTDIR}", recipe, b.getRecipeProps(), b.cfg.Project)
	// Most recipes want this isolate; they can override if necessary.
	b.cas(CAS_RUN_RECIPE)
	b.timeout(time.Hour)
	b.addToPATH("cipd_bin_packages", "cipd_bin_packages/bin")
	b.Spec.ExtraTags = map[string]string{
		"log_location": fmt.Sprintf("logdog://logs.chromium.org/%s/${SWARMING_TASK_ID}/+/annotations", b.cfg.Project),
	}

	// Attempts.
	if !b.role("Build", "Upload") && b.extraConfig("ASAN", "HWASAN", "MSAN", "TSAN", "Valgrind") {
		// Sanitizers often find non-deterministic issues that retries would hide.
		b.attempts(1)
	} else {
		// Retry by default to hide random bot/hardware failures.
		b.attempts(2)
	}
}
620
// kitchenTask sets up the task to run a recipe via Kitchen, adding a
// dependency on the task which bundles the recipes.
func (b *taskBuilder) kitchenTask(recipe string, outputDir string) {
	b.kitchenTaskNoBundle(recipe, outputDir)
	// The bundled recipes are an input to this task.
	b.dep(b.bundleRecipes())
}
626
627// internalHardwareLabel returns the internal ID for the bot, if any.
628func (b *taskBuilder) internalHardwareLabel() *int {
629	if b.cfg.InternalHardwareLabel != nil {
630		return b.cfg.InternalHardwareLabel(b.parts)
631	}
632	return nil
633}
634
635// linuxGceDimensions adds the Swarming bot dimensions for Linux GCE instances.
636func (b *taskBuilder) linuxGceDimensions(machineType string) {
637	b.dimension(
638		// Specify CPU to avoid running builds on bots with a more unique CPU.
639		"cpu:x86-64-Haswell_GCE",
640		"gpu:none",
641		// Currently all Linux GCE tasks run on 16-CPU machines.
642		fmt.Sprintf("machine_type:%s", machineType),
643		fmt.Sprintf("os:%s", DEFAULT_OS_LINUX_GCE),
644		fmt.Sprintf("pool:%s", b.cfg.Pool),
645	)
646}
647
// codesizeTaskNameRegexp captures the "CodeSize-<binary name>-" prefix of a
// CodeSize task name. Used by deriveCompileTaskName to map a CodeSize job to
// its corresponding Build task.
var codesizeTaskNameRegexp = regexp.MustCompile("^CodeSize-[a-zA-Z0-9_]+-")
650
// deriveCompileTaskName returns the name of a compile task based on the given
// job name. Test/Perf/FM jobs are mapped to a "Build" job name built from
// their parts; BuildStats and CodeSize jobs are mapped by name substitution;
// any other job is assumed to be its own compile task.
func (b *jobBuilder) deriveCompileTaskName() string {
	if b.role("Test", "Perf", "FM") {
		task_os := b.parts["os"]
		ec := []string{}
		if val := b.parts["extra_config"]; val != "" {
			ec = strings.Split(val, "_")
			// These extra_config parts are runtime-only variations, so they
			// are dropped when deriving the Build task name.
			ignore := []string{
				"Skpbench", "AbandonGpuContext", "PreAbandonGpuContext", "Valgrind",
				"ReleaseAndAbandonGpuContext", "FSAA", "FAAA", "FDAA", "NativeFonts", "GDI",
				"NoGPUThreads", "ProcDump", "DDL1", "DDL3", "OOPRDDL", "T8888",
				"DDLTotal", "DDLRecord", "9x9", "BonusConfigs", "SkottieTracing", "SkottieWASM",
				"GpuTess", "DMSAAStats", "Mskp", "Docker", "PDF", "SkVM", "Puppeteer",
				"SkottieFrames", "RenderSKP", "CanvasPerf", "AllPathsVolatile", "WebGL2", "i5"}
			keep := make([]string, 0, len(ec))
			for _, part := range ec {
				if !In(part, ignore) {
					keep = append(keep, part)
				}
			}
			ec = keep
		}
		// Map the test OS onto the OS (and extra_config) under which the
		// binaries are compiled.
		if b.os("Android") {
			if !In("Android", ec) {
				ec = append([]string{"Android"}, ec...)
			}
			task_os = COMPILE_TASK_NAME_OS_LINUX
		} else if b.os("ChromeOS") {
			ec = append([]string{"Chromebook", "GLES"}, ec...)
			task_os = COMPILE_TASK_NAME_OS_LINUX
		} else if b.os("iOS") {
			// iOS binaries are compiled on a Mac; keep the iOS version in
			// extra_config.
			ec = append([]string{task_os}, ec...)
			task_os = "Mac"
		} else if b.matchOs("Win") {
			task_os = "Win"
		} else if b.compiler("GCC") {
			// GCC compiles are now on a Docker container. We use the same OS and
			// version to compile as to test.
			ec = append(ec, "Docker")
		} else if b.matchOs("Ubuntu", "Debian") {
			task_os = COMPILE_TASK_NAME_OS_LINUX
		} else if b.matchOs("Mac") {
			task_os = "Mac"
		}
		jobNameMap := map[string]string{
			"role":          "Build",
			"os":            task_os,
			"compiler":      b.parts["compiler"],
			"target_arch":   b.parts["arch"],
			"configuration": b.parts["configuration"],
		}
		// WASM-related jobs share a small set of build configurations,
		// replacing whatever extra_config was accumulated above.
		if b.extraConfig("PathKit") {
			ec = []string{"PathKit"}
		}
		if b.extraConfig("CanvasKit", "SkottieWASM", "Puppeteer") {
			if b.cpu() {
				ec = []string{"CanvasKit_CPU"}
			} else {
				ec = []string{"CanvasKit"}
			}

		}
		if len(ec) > 0 {
			jobNameMap["extra_config"] = strings.Join(ec, "_")
		}
		name, err := b.jobNameSchema.MakeJobName(jobNameMap)
		if err != nil {
			log.Fatal(err)
		}
		return name
	} else if b.role("BuildStats") {
		return strings.Replace(b.Name, "BuildStats", "Build", 1)
	} else if b.role("CodeSize") {
		// e.g. "CodeSize-dm-<rest>" -> "Build-<rest>".
		return codesizeTaskNameRegexp.ReplaceAllString(b.Name, "Build-")
	} else {
		return b.Name
	}
}
730
731// swarmDimensions generates swarming bot dimensions for the given task.
732func (b *taskBuilder) swarmDimensions() {
733	if b.cfg.SwarmDimensions != nil {
734		dims := b.cfg.SwarmDimensions(b.parts)
735		if dims != nil {
736			b.dimension(dims...)
737			return
738		}
739	}
740	b.defaultSwarmDimensions()
741}
742
743// defaultSwarmDimensions generates default swarming bot dimensions for the given task.
744func (b *taskBuilder) defaultSwarmDimensions() {
745	d := map[string]string{
746		"pool": b.cfg.Pool,
747	}
748	if os, ok := b.parts["os"]; ok {
749		d["os"], ok = map[string]string{
750			"Android":    "Android",
751			"ChromeOS":   "ChromeOS",
752			"Debian9":    DEFAULT_OS_LINUX_GCE, // Runs in Deb9 Docker.
753			"Debian10":   DEFAULT_OS_LINUX_GCE,
754			"Mac":        DEFAULT_OS_MAC,
755			"Mac10.12":   "Mac-10.12",
756			"Mac10.13":   "Mac-10.13.6",
757			"Mac10.14":   "Mac-10.14",
758			"Mac10.15.1": "Mac-10.15.1",
759			"Mac10.15.7": "Mac-10.15.7", // Same as 'Mac', but explicit.
760			"Mac11":      "Mac-11.4",
761			"Mac12":      "Mac-12",
762			"Ubuntu18":   "Ubuntu-18.04",
763			"Win":        DEFAULT_OS_WIN,
764			"Win10":      "Windows-10-19044",
765			"Win2019":    DEFAULT_OS_WIN,
766			"Win8":       "Windows-8.1-SP0",
767			"iOS":        "iOS-13.3.1",
768		}[os]
769		if !ok {
770			log.Fatalf("Entry %q not found in OS mapping.", os)
771		}
772		if os == "Win10" && b.parts["model"] == "Golo" {
773			// ChOps-owned machines have Windows 10 21h1.
774			d["os"] = "Windows-10-19043"
775		}
776		if b.parts["model"] == "iPhone11" {
777			d["os"] = "iOS-13.6"
778		}
779		if b.parts["model"] == "iPadPro" {
780			d["os"] = "iOS-13.6"
781		}
782	} else {
783		d["os"] = DEFAULT_OS_DEBIAN
784	}
785	if b.role("Test", "Perf") {
786		if b.os("Android") {
787			// For Android, the device type is a better dimension
788			// than CPU or GPU.
789			deviceInfo, ok := map[string][]string{
790				"AndroidOne":      {"sprout", "MOB30Q"},
791				"GalaxyS7_G930FD": {"herolte", "R16NW_G930FXXS2ERH6"}, // This is Oreo.
792				"GalaxyS9":        {"starlte", "QP1A.190711.020"},     // This is Android10.
793				"GalaxyS20":       {"exynos990", "QP1A.190711.020"},
794				"JioNext":         {"msm8937", "RKQ1.210602.002"},
795				"Nexus5":          {"hammerhead", "M4B30Z_3437181"},
796				"Nexus7":          {"grouper", "LMY47V_1836172"}, // 2012 Nexus 7
797				"P30":             {"HWELE", "HUAWEIELE-L29"},
798				"Pixel2XL":        {"taimen", "PPR1.180610.009"},
799				"Pixel3":          {"blueline", "PQ1A.190105.004"},
800				"Pixel3a":         {"sargo", "QP1A.190711.020"},
801				"Pixel4":          {"flame", "RPB2.200611.009"},       // R Preview
802				"Pixel4a":         {"sunfish", "AOSP.MASTER_7819821"}, // Pixel4a flashed with an Android HWASan build.
803				"Pixel4XL":        {"coral", "QD1A.190821.011.C4"},
804				"Pixel5":          {"redfin", "RD1A.200810.022.A4"},
805				"Pixel6":          {"oriole", "SD1A.210817.037"},
806				"TecnoSpark3Pro":  {"TECNO-KB8", "PPR1.180610.011"},
807				"Wembley":         {"wembley", "SP2A.211004.001"},
808			}[b.parts["model"]]
809			if !ok {
810				log.Fatalf("Entry %q not found in Android mapping.", b.parts["model"])
811			}
812			d["device_type"] = deviceInfo[0]
813			d["device_os"] = deviceInfo[1]
814
815			// Tests using Android's HWAddress Sanitizer require an HWASan build of Android.
816			// See https://developer.android.com/ndk/guides/hwasan.
817			if b.extraConfig("HWASAN") {
818				d["android_hwasan_build"] = "1"
819			}
820		} else if b.os("iOS") {
821			device, ok := map[string]string{
822				"iPadMini4": "iPad5,1",
823				"iPhone7":   "iPhone9,1",
824				"iPhone8":   "iPhone10,1",
825				"iPhone11":  "iPhone12,1",
826				"iPadPro":   "iPad6,3",
827			}[b.parts["model"]]
828			if !ok {
829				log.Fatalf("Entry %q not found in iOS mapping.", b.parts["model"])
830			}
831			d["device_type"] = device
832		} else if b.cpu() || b.extraConfig("CanvasKit", "Docker", "SwiftShader") {
833			modelMapping, ok := map[string]map[string]string{
834				"AppleM1": {
835					"MacMini9.1": "arm64-64-Apple_M1",
836				},
837				"AVX": {
838					"VMware7.1": "x86-64",
839				},
840				"AVX2": {
841					"GCE":            "x86-64-Haswell_GCE",
842					"MacBookAir7.2":  "x86-64-i5-5350U",
843					"MacBookPro11.5": "x86-64-i7-4870HQ",
844					"MacMini7.1":     "x86-64-i5-4278U",
845					"NUC5i7RYH":      "x86-64-i7-5557U",
846				},
847				"AVX512": {
848					"GCE":  "x86-64-Skylake_GCE",
849					"Golo": "Intel64_Family_6_Model_85_Stepping_7__GenuineIntel",
850				},
851				"Rome": {
852					"GCE": "x86-64-AMD_Rome_GCE",
853				},
854				"SwiftShader": {
855					"GCE": "x86-64-Haswell_GCE",
856				},
857			}[b.parts["cpu_or_gpu_value"]]
858			if !ok {
859				log.Fatalf("Entry %q not found in CPU mapping.", b.parts["cpu_or_gpu_value"])
860			}
861			cpu, ok := modelMapping[b.parts["model"]]
862			if !ok {
863				log.Fatalf("Entry %q not found in %q model mapping.", b.parts["model"], b.parts["cpu_or_gpu_value"])
864			}
865			d["cpu"] = cpu
866			if b.model("GCE") && b.matchOs("Debian") {
867				d["os"] = DEFAULT_OS_LINUX_GCE
868			}
869			if b.model("GCE") && d["cpu"] == "x86-64-Haswell_GCE" {
870				d["machine_type"] = MACHINE_TYPE_MEDIUM
871			}
872		} else {
873			// It's a GPU job.
874			if b.matchOs("Win") {
875				gpu, ok := map[string]string{
876					// At some point this might use the device ID, but for now it's like Chromebooks.
877					"Adreno630":     "Adreno630",
878					"GT610":         "10de:104a-23.21.13.9101",
879					"GTX660":        "10de:11c0-26.21.14.4120",
880					"GTX960":        "10de:1401-27.21.14.5671",
881					"IntelHD4400":   "8086:0a16-20.19.15.4963",
882					"IntelIris540":  "8086:1926-26.20.100.7463",
883					"IntelIris6100": "8086:162b-20.19.15.4963",
884					"IntelIris655":  "8086:3ea5-26.20.100.7463",
885					"RadeonHD7770":  "1002:683d-26.20.13031.18002",
886					"RadeonR9M470X": "1002:6646-26.20.13031.18002",
887					"QuadroP400":    "10de:1cb3-30.0.15.1179",
888				}[b.parts["cpu_or_gpu_value"]]
889				if !ok {
890					log.Fatalf("Entry %q not found in Win GPU mapping.", b.parts["cpu_or_gpu_value"])
891				}
892				d["gpu"] = gpu
893			} else if b.isLinux() {
894				gpu, ok := map[string]string{
895					// Intel drivers come from CIPD, so no need to specify the version here.
896					"IntelBayTrail": "8086:0f31",
897					"IntelHD2000":   "8086:0102",
898					"IntelHD405":    "8086:22b1",
899					"IntelIris640":  "8086:5926",
900					"QuadroP400":    "10de:1cb3-510.60.02",
901				}[b.parts["cpu_or_gpu_value"]]
902				if !ok {
903					log.Fatalf("Entry %q not found in Ubuntu GPU mapping.", b.parts["cpu_or_gpu_value"])
904				}
905				d["gpu"] = gpu
906
907				// The Debian10 machines in the skolo are 10.10, not 10.3.
908				if b.matchOs("Debian") {
909					d["os"] = DEFAULT_OS_DEBIAN
910				}
911
912			} else if b.matchOs("Mac") {
913				gpu, ok := map[string]string{
914					"AppleM1":       "AppleM1",
915					"IntelHD6000":   "8086:1626",
916					"IntelHD615":    "8086:591e",
917					"IntelIris5100": "8086:0a2e",
918					"IntelIrisPlus": "8086:8a53",
919					"RadeonHD8870M": "1002:6821-4.0.20-3.2.8",
920				}[b.parts["cpu_or_gpu_value"]]
921				if !ok {
922					log.Fatalf("Entry %q not found in Mac GPU mapping.", b.parts["cpu_or_gpu_value"])
923				}
924				if gpu == "AppleM1" {
925					// No GPU dimension yet, but we can constrain by CPU.
926					d["cpu"] = "arm64-64-Apple_M1"
927				} else {
928					d["gpu"] = gpu
929				}
930				// We have two different types of MacMini7,1 with the same GPU but different CPUs.
931				if b.gpu("IntelIris5100") {
932					if b.extraConfig("i5") {
933						// If we say "i5", run on our MacMini7,1s in the Skolo:
934						d["cpu"] = "x86-64-i5-4278U"
935					} else {
936						// Otherwise, run on Golo machines, just because that's
937						// where those jobs have always run. Plus, some of them
938						// are Perf jobs, which we want to keep consistent.
939						d["cpu"] = "x86-64-i7-4578U"
940					}
941				}
942			} else if b.os("ChromeOS") {
943				version, ok := map[string]string{
944					"IntelUHDGraphics605": "14233.0.0",
945					"RadeonVega3":         "14233.0.0",
946					"Adreno618":           "14150.39.0",
947					"MaliT860":            "14092.77.0",
948				}[b.parts["cpu_or_gpu_value"]]
949				if !ok {
950					log.Fatalf("Entry %q not found in ChromeOS GPU mapping.", b.parts["cpu_or_gpu_value"])
951				}
952				d["gpu"] = b.parts["cpu_or_gpu_value"]
953				d["release_version"] = version
954			} else {
955				log.Fatalf("Unknown GPU mapping for OS %q.", b.parts["os"])
956			}
957		}
958	} else {
959		d["gpu"] = "none"
960		if d["os"] == DEFAULT_OS_LINUX_GCE {
961			if b.extraConfig("CanvasKit", "CMake", "Docker", "PathKit") || b.role("BuildStats", "CodeSize") {
962				b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
963				return
964			}
965			// Use many-core machines for Build tasks.
966			b.linuxGceDimensions(MACHINE_TYPE_LARGE)
967			return
968		} else if d["os"] == DEFAULT_OS_WIN {
969			// Windows CPU bots.
970			d["cpu"] = "x86-64-Haswell_GCE"
971			// Use many-core machines for Build tasks.
972			d["machine_type"] = MACHINE_TYPE_LARGE
973		} else if d["os"] == DEFAULT_OS_MAC {
974			// Mac CPU bots are no longer VMs.
975			d["cpu"] = "x86-64"
976			d["cores"] = "12"
977			delete(d, "gpu")
978		}
979	}
980
981	dims := make([]string, 0, len(d))
982	for k, v := range d {
983		dims = append(dims, fmt.Sprintf("%s:%s", k, v))
984	}
985	sort.Strings(dims)
986	b.dimension(dims...)
987}
988
// bundleRecipes generates the task to bundle and isolate the recipes. Returns
// the name of the task, which may be added as a dependency.
func (b *jobBuilder) bundleRecipes() string {
	b.addTask(BUNDLE_RECIPES_NAME, func(b *taskBuilder) {
		// Git is needed by the bundling script.
		b.cipd(specs.CIPD_PKGS_GIT_LINUX_AMD64...)
		b.cmd("/bin/bash", "skia/infra/bots/bundle_recipes.sh", specs.PLACEHOLDER_ISOLATED_OUTDIR)
		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
		// The bundle depends only on the recipes input, so identical runs may
		// be deduplicated.
		b.idempotent()
		b.cas(CAS_RECIPES)
		b.usesPython()
		b.addToPATH("cipd_bin_packages", "cipd_bin_packages/bin")
	})
	return BUNDLE_RECIPES_NAME
}
1003
1004// buildTaskDrivers generates the task to compile the task driver code to run on
1005// all platforms. Returns the name of the task, which may be added as a
1006// dependency.
1007func (b *jobBuilder) buildTaskDrivers(goos, goarch string) string {
1008	name := BUILD_TASK_DRIVERS_PREFIX + "_" + goos + "_" + goarch
1009	b.addTask(name, func(b *taskBuilder) {
1010		b.usesGo()
1011		b.cmd("/bin/bash", "skia/infra/bots/build_task_drivers.sh",
1012			specs.PLACEHOLDER_ISOLATED_OUTDIR,
1013			goos,
1014			goarch)
1015		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
1016		b.addToPATH("cipd_bin_packages", "cipd_bin_packages/bin")
1017		b.idempotent()
1018		b.cas(CAS_TASK_DRIVERS)
1019	})
1020	return name
1021}
1022
// updateGoDeps generates the task to update Go dependencies.
func (b *jobBuilder) updateGoDeps() {
	b.addTask(b.Name, func(b *taskBuilder) {
		b.usesGo()
		// protoc is made available to the task driver.
		b.asset("protoc")
		// Run the update_go_deps task driver. The PLACEHOLDER_* values are
		// substituted by the task scheduler at runtime; the --gerrit_* flags
		// point the driver at the Skia Gerrit instance.
		b.cmd(
			"./update_go_deps",
			"--project_id", "skia-swarming-bots",
			"--task_id", specs.PLACEHOLDER_TASK_ID,
			"--task_name", b.Name,
			"--workdir", ".",
			"--gerrit_project", "skia",
			"--gerrit_url", "https://skia-review.googlesource.com",
			"--repo", specs.PLACEHOLDER_REPO,
			"--revision", specs.PLACEHOLDER_REVISION,
			"--patch_issue", specs.PLACEHOLDER_ISSUE,
			"--patch_set", specs.PLACEHOLDER_PATCHSET,
			"--patch_server", specs.PLACEHOLDER_CODEREVIEW_SERVER,
		)
		// The task driver binary is produced by a separate build task.
		b.dep(b.buildTaskDrivers("linux", "amd64"))
		b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
		b.addToPATH("cipd_bin_packages", "cipd_bin_packages/bin")
		b.cas(CAS_EMPTY)
		b.serviceAccount(b.cfg.ServiceAccountRecreateSKPs)
	})
}
1049
1050// createDockerImage creates the specified docker image. Returns the name of the
1051// generated task.
1052func (b *jobBuilder) createDockerImage(wasm bool) string {
1053	// First, derive the name of the task.
1054	imageName := "skia-release"
1055	taskName := "Housekeeper-PerCommit-CreateDockerImage_Skia_Release"
1056	if wasm {
1057		imageName = "skia-wasm-release"
1058		taskName = "Housekeeper-PerCommit-CreateDockerImage_Skia_WASM_Release"
1059	}
1060	imageDir := path.Join("docker", imageName)
1061
1062	// Add the task.
1063	b.addTask(taskName, func(b *taskBuilder) {
1064		// TODO(borenet): Make this task not use Git.
1065		b.usesGit()
1066		b.cmd(
1067			"./build_push_docker_image",
1068			"--image_name", fmt.Sprintf("gcr.io/skia-public/%s", imageName),
1069			"--dockerfile_dir", imageDir,
1070			"--project_id", "skia-swarming-bots",
1071			"--task_id", specs.PLACEHOLDER_TASK_ID,
1072			"--task_name", b.Name,
1073			"--workdir", ".",
1074			"--gerrit_project", "skia",
1075			"--gerrit_url", "https://skia-review.googlesource.com",
1076			"--repo", specs.PLACEHOLDER_REPO,
1077			"--revision", specs.PLACEHOLDER_REVISION,
1078			"--patch_issue", specs.PLACEHOLDER_ISSUE,
1079			"--patch_set", specs.PLACEHOLDER_PATCHSET,
1080			"--patch_server", specs.PLACEHOLDER_CODEREVIEW_SERVER,
1081			"--swarm_out_dir", specs.PLACEHOLDER_ISOLATED_OUTDIR,
1082		)
1083		b.dep(b.buildTaskDrivers("linux", "amd64"))
1084		b.addToPATH("cipd_bin_packages", "cipd_bin_packages/bin")
1085		b.cas(CAS_EMPTY)
1086		b.serviceAccount(b.cfg.ServiceAccountCompile)
1087		b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
1088		b.usesDocker()
1089		b.cache(CACHES_DOCKER...)
1090	})
1091	return taskName
1092}
1093
// createPushAppsFromSkiaDockerImage creates and pushes docker images of some apps
// (eg: fiddler, api) using the skia-release docker image.
func (b *jobBuilder) createPushAppsFromSkiaDockerImage() {
	b.addTask(b.Name, func(b *taskBuilder) {
		// TODO(borenet): Make this task not use Git.
		b.usesGit()
		// Run the push_apps_from_skia_image task driver; PLACEHOLDER_* values
		// are substituted by the scheduler at runtime.
		b.cmd(
			"./push_apps_from_skia_image",
			"--project_id", "skia-swarming-bots",
			"--task_id", specs.PLACEHOLDER_TASK_ID,
			"--task_name", b.Name,
			"--workdir", ".",
			"--repo", specs.PLACEHOLDER_REPO,
			"--revision", specs.PLACEHOLDER_REVISION,
			"--patch_issue", specs.PLACEHOLDER_ISSUE,
			"--patch_set", specs.PLACEHOLDER_PATCHSET,
			"--patch_server", specs.PLACEHOLDER_CODEREVIEW_SERVER,
		)
		// Needs both the task driver binaries and the skia-release image.
		b.dep(b.buildTaskDrivers("linux", "amd64"))
		b.dep(b.createDockerImage(false))
		b.addToPATH("cipd_bin_packages", "cipd_bin_packages/bin", "bazelisk")
		b.cas(CAS_EMPTY)
		// bazelisk is installed via CIPD and added to PATH above.
		b.cipd(b.MustGetCipdPackageFromAsset("bazelisk"))
		b.serviceAccount(b.cfg.ServiceAccountCompile)
		b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
		b.usesDocker()
		b.cache(CACHES_DOCKER...)
		b.timeout(2 * time.Hour)
	})
}
1124
// createPushBazelAppsFromWASMDockerImage pushes those infra apps that have been ported to Bazel
// and require assets built in the WASM docker image.
// TODO(kjlubick) The inputs to this job should not be the docker build, but a Bazel build.
func (b *jobBuilder) createPushBazelAppsFromWASMDockerImage() {
	b.addTask(b.Name, func(b *taskBuilder) {
		// TODO(borenet): Make this task not use Git.
		b.usesGit()
		// Run the push_bazel_apps_from_wasm_image task driver; PLACEHOLDER_*
		// values are substituted by the scheduler at runtime.
		b.cmd(
			"./push_bazel_apps_from_wasm_image",
			"--project_id", "skia-swarming-bots",
			"--task_id", specs.PLACEHOLDER_TASK_ID,
			"--task_name", b.Name,
			"--workdir", ".",
			"--skia_revision", specs.PLACEHOLDER_REVISION,
		)
		// Needs both the task driver binaries and the skia-wasm-release image.
		b.dep(b.buildTaskDrivers("linux", "amd64"))
		b.dep(b.createDockerImage(true))
		b.addToPATH("cipd_bin_packages", "cipd_bin_packages/bin", "bazelisk")
		b.cas(CAS_EMPTY)
		// bazelisk is installed via CIPD and added to PATH above.
		b.cipd(b.MustGetCipdPackageFromAsset("bazelisk"))
		b.serviceAccount(b.cfg.ServiceAccountCompile)
		b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
		b.usesDocker()
		b.cache(CACHES_DOCKER...)
	})
}
1151
1152var iosRegex = regexp.MustCompile(`os:iOS-(.*)`)
1153
1154func (b *taskBuilder) maybeAddIosDevImage() {
1155	for _, dim := range b.Spec.Dimensions {
1156		if m := iosRegex.FindStringSubmatch(dim); len(m) >= 2 {
1157			var asset string
1158			switch m[1] {
1159			// Other patch versions can be added to the same case.
1160			case "11.4.1":
1161				asset = "ios-dev-image-11.4"
1162			case "13.3.1":
1163				asset = "ios-dev-image-13.3"
1164			case "13.4.1":
1165				asset = "ios-dev-image-13.4"
1166			case "13.5.1":
1167				asset = "ios-dev-image-13.5"
1168			case "13.6":
1169				asset = "ios-dev-image-13.6"
1170			default:
1171				log.Fatalf("Unable to determine correct ios-dev-image asset for %s. If %s is a new iOS release, you must add a CIPD package containing the corresponding iOS dev image; see ios-dev-image-11.4 for an example.", b.Name, m[1])
1172			}
1173			b.asset(asset)
1174			break
1175		} else if strings.Contains(dim, "iOS") {
1176			log.Fatalf("Must specify iOS version for %s to obtain correct dev image; os dimension is missing version: %s", b.Name, dim)
1177		}
1178	}
1179}
1180
// compile generates a compile task. Returns the name of the compile task.
func (b *jobBuilder) compile() string {
	name := b.deriveCompileTaskName()
	if b.extraConfig("WasmGMTests") {
		// WASM GM tests use a dedicated compile path.
		b.compileWasmGMTests(name)
	} else {
		b.addTask(name, func(b *taskBuilder) {
			recipe := "compile"
			casSpec := CAS_COMPILE
			// These configurations sync their own checkout before compiling.
			if b.extraConfig("NoDEPS", "CMake", "CommandBuffer", "Flutter") {
				recipe = "sync_and_compile"
				casSpec = CAS_RUN_RECIPE
				b.recipeProps(EXTRA_PROPS)
				b.usesGit()
				if !b.extraConfig("NoDEPS") {
					b.cache(CACHES_WORKDIR...)
				}
			} else {
				// Plain compiles may be deduplicated across identical runs.
				b.idempotent()
			}
			b.kitchenTask(recipe, OUTPUT_BUILD)
			b.cas(casSpec)
			b.serviceAccount(b.cfg.ServiceAccountCompile)
			b.swarmDimensions()
			if b.extraConfig("Docker", "LottieWeb", "CMake") || b.compiler("EMCC") {
				b.usesDocker()
				b.cache(CACHES_DOCKER...)
			}

			// Android bots require a toolchain.
			if b.extraConfig("Android") {
				if b.matchOs("Mac") {
					b.asset("android_ndk_darwin")
				} else if b.matchOs("Win") {
					pkg := b.MustGetCipdPackageFromAsset("android_ndk_windows")
					// Install to a short path ("n") — presumably to keep
					// Windows paths short; confirm before changing.
					pkg.Path = "n"
					b.cipd(pkg)
				} else {
					b.asset("android_ndk_linux")
				}
			} else if b.extraConfig("Chromebook") {
				b.asset("clang_linux")
				if b.arch("x86_64") {
					b.asset("chromebook_x86_64_gles")
				} else if b.arch("arm") {
					b.asset("armhf_sysroot")
					b.asset("chromebook_arm_gles")
				}
			} else if b.isLinux() {
				if b.compiler("Clang") {
					b.asset("clang_linux")
				}
				if b.extraConfig("SwiftShader") {
					b.asset("cmake_linux")
				}
				b.asset("ccache_linux")
				b.usesCCache()
			} else if b.matchOs("Win") {
				b.asset("win_toolchain")
				if b.compiler("Clang") {
					b.asset("clang_win")
				}
			} else if b.matchOs("Mac") {
				b.cipd(CIPD_PKGS_XCODE...)
				// Cache the Xcode install between runs.
				b.Spec.Caches = append(b.Spec.Caches, &specs.Cache{
					Name: "xcode",
					Path: "cache/Xcode.app",
				})
				b.asset("ccache_mac")
				b.usesCCache()
				if b.extraConfig("CommandBuffer") {
					b.timeout(2 * time.Hour)
				}
				if b.extraConfig("iOS") {
					b.asset("provisioning_profile_ios")
				}
			}
		})
	}

	// All compile tasks are runnable as their own Job. Assert that the Job
	// is listed in jobs.
	if !In(name, b.jobs) {
		log.Fatalf("Job %q is missing from the jobs list! Derived from: %q", name, b.Name)
	}

	return name
}
1269
// recreateSKPs generates a RecreateSKPs task.
func (b *jobBuilder) recreateSKPs() {
	// Arguments for the recreate_skps task driver; PLACEHOLDER_* values are
	// substituted by the scheduler at runtime.
	cmd := []string{
		"./recreate_skps",
		"--local=false",
		"--project_id", "skia-swarming-bots",
		"--task_id", specs.PLACEHOLDER_TASK_ID,
		"--task_name", b.Name,
		"--skia_revision", specs.PLACEHOLDER_REVISION,
		"--patch_ref", specs.PLACEHOLDER_PATCH_REF,
		"--git_cache", "cache/git",
		"--checkout_root", "cache/work",
		"--dm_path", "build/dm",
	}
	// DryRun job variants pass --dry_run through to the task driver.
	if b.matchExtraConfig("DryRun") {
		cmd = append(cmd, "--dry_run")
	}
	b.addTask(b.Name, func(b *taskBuilder) {
		b.cas(CAS_RECREATE_SKPS)
		b.dep(b.buildTaskDrivers("linux", "amd64"))
		b.dep("Build-Debian10-Clang-x86_64-Release") // To get DM.
		b.cmd(cmd...)
		b.cipd(CIPD_PKG_LUCI_AUTH)
		b.serviceAccount(b.cfg.ServiceAccountRecreateSKPs)
		// Runs in the dedicated SkiaCT pool rather than the configured pool.
		b.dimension(
			"pool:SkiaCT",
			fmt.Sprintf("os:%s", DEFAULT_OS_LINUX_GCE),
		)
		b.usesGo()
		b.cache(CACHES_WORKDIR...)
		b.timeout(6 * time.Hour)
		b.usesPython()
		b.addToPATH("cipd_bin_packages", "cipd_bin_packages/bin")
		// Allow one automatic retry.
		b.attempts(2)
	})
}
1306
// checkGeneratedFiles verifies that no generated SKSL files have been edited
// by hand.
func (b *jobBuilder) checkGeneratedFiles() {
	b.addTask(b.Name, func(b *taskBuilder) {
		b.recipeProps(EXTRA_PROPS)
		// Runs via the check_generated_files recipe; produces no outputs.
		b.kitchenTask("check_generated_files", OUTPUT_NONE)
		b.serviceAccount(b.cfg.ServiceAccountCompile)
		b.linuxGceDimensions(MACHINE_TYPE_LARGE)
		b.usesGo()
		// Regenerating the files requires our Clang toolchain; ccache speeds
		// up the rebuild.
		b.asset("clang_linux")
		b.asset("ccache_linux")
		b.usesCCache()
		b.cache(CACHES_WORKDIR...)
	})
}
1322
// checkGnToBp verifies that the gn_to_bp.py script continues to work.
func (b *jobBuilder) checkGnToBp() {
	b.addTask(b.Name, func(b *taskBuilder) {
		b.cas(CAS_COMPILE)
		// The run_gn_to_bp task driver is built by a separate task.
		b.dep(b.buildTaskDrivers("linux", "amd64"))
		b.cmd("./run_gn_to_bp",
			"--local=false",
			"--project_id", "skia-swarming-bots",
			"--task_id", specs.PLACEHOLDER_TASK_ID,
			"--task_name", b.Name,
		)
		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
		b.usesPython()
		b.serviceAccount(b.cfg.ServiceAccountHousekeeper)
	})
}
1339
// housekeeper generates a Housekeeper task.
func (b *jobBuilder) housekeeper() {
	b.addTask(b.Name, func(b *taskBuilder) {
		b.recipeProps(EXTRA_PROPS)
		// Runs via the housekeeper recipe; produces no outputs.
		b.kitchenTask("housekeeper", OUTPUT_NONE)
		b.serviceAccount(b.cfg.ServiceAccountHousekeeper)
		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
		b.usesGit()
		b.cache(CACHES_WORKDIR...)
	})
}
1351
// g3FrameworkCanary generates a G3 Framework Canary task, which runs the
// g3_canary task driver against the current revision/patchset.
// (Note: despite running a task driver chain, this function returns nothing;
// the task name is simply b.Name.)
func (b *jobBuilder) g3FrameworkCanary() {
	b.addTask(b.Name, func(b *taskBuilder) {
		b.cas(CAS_EMPTY)
		b.dep(b.buildTaskDrivers("linux", "amd64"))
		// PLACEHOLDER_* values are substituted by the scheduler at runtime.
		b.cmd("./g3_canary",
			"--local=false",
			"--project_id", "skia-swarming-bots",
			"--task_id", specs.PLACEHOLDER_TASK_ID,
			"--task_name", b.Name,
			"--repo", specs.PLACEHOLDER_REPO,
			"--revision", specs.PLACEHOLDER_REVISION,
			"--patch_issue", specs.PLACEHOLDER_ISSUE,
			"--patch_set", specs.PLACEHOLDER_PATCHSET,
			"--patch_server", specs.PLACEHOLDER_CODEREVIEW_SERVER,
		)
		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
		b.cipd(CIPD_PKG_LUCI_AUTH)
		// Uses a dedicated service account rather than one from the config.
		b.serviceAccount("skia-g3-framework-compile@skia-swarming-bots.iam.gserviceaccount.com")
		b.timeout(3 * time.Hour)
		// Never retried automatically.
		b.attempts(1)
	})
}
1377
// infra generates an infra_tests task.
func (b *jobBuilder) infra() {
	b.addTask(b.Name, func(b *taskBuilder) {
		if b.matchOs("Win") || b.matchExtraConfig("Win") {
			b.dimension(
				// Specify CPU to avoid running builds on bots with a more unique CPU.
				"cpu:x86-64-Haswell_GCE",
				"gpu:none",
				fmt.Sprintf("machine_type:%s", MACHINE_TYPE_MEDIUM), // We don't have any small Windows instances.
				fmt.Sprintf("os:%s", DEFAULT_OS_WIN),
				fmt.Sprintf("pool:%s", b.cfg.Pool),
			)
		} else {
			b.linuxGceDimensions(MACHINE_TYPE_SMALL)
		}
		b.recipeProp("repository", specs.PLACEHOLDER_REPO)
		// Runs via the infra recipe; produces no outputs.
		b.kitchenTask("infra", OUTPUT_NONE)
		// The infra tests need the entire repo as input.
		b.cas(CAS_WHOLE_REPO)
		b.serviceAccount(b.cfg.ServiceAccountCompile)
		// gsutil is made available via CIPD; presumably used by the tests.
		b.cipd(specs.CIPD_PKGS_GSUTIL...)
		b.idempotent()
		b.usesGo()
	})
}
1402
// buildstats generates a buildstats task, which compiles code and generates
// statistics about the build.
func (b *jobBuilder) buildstats() {
	compileTaskName := b.compile()
	b.addTask(b.Name, func(b *taskBuilder) {
		b.recipeProps(EXTRA_PROPS)
		b.kitchenTask("compute_buildstats", OUTPUT_PERF)
		b.dep(compileTaskName)
		// Bloaty is used to analyze the compiled binaries.
		b.asset("bloaty")
		b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
		b.usesDocker()
		b.usesGit()
		b.cache(CACHES_WORKDIR...)
	})
	// Upload release results (for tracking in perf)
	// We have some jobs that are FYI (e.g. Debug-CanvasKit, tree-map generator)
	if b.release() && !b.arch("x86_64") {
		uploadName := fmt.Sprintf("%s%s%s", PREFIX_UPLOAD, b.jobNameSchema.Sep, b.Name)
		depName := b.Name
		b.addTask(uploadName, func(b *taskBuilder) {
			b.recipeProp("gs_bucket", b.cfg.GsBucketNano)
			b.recipeProps(EXTRA_PROPS)
			// TODO(borenet): I'm not sure why the upload task is
			// using the BuildStats task name, but I've done this
			// to maintain existing behavior.
			b.Name = depName
			b.kitchenTask("upload_buildstats_results", OUTPUT_NONE)
			b.Name = uploadName
			b.serviceAccount(b.cfg.ServiceAccountUploadNano)
			b.linuxGceDimensions(MACHINE_TYPE_SMALL)
			b.cipd(specs.CIPD_PKGS_GSUTIL...)
			b.dep(depName)
		})
	}
}
1438
// codesize generates a codesize task, which takes binary produced by a
// compile task, runs Bloaty against it, and uploads the resulting code size
// statistics to the GCS bucket belonging to the codesize.skia.org service.
func (b *jobBuilder) codesize() {
	compileTaskName := b.compile()
	// The Bloaty CIPD package version is forwarded to the task driver below.
	bloatyCipdPkg := b.MustGetCipdPackageFromAsset("bloaty")

	b.addTask(b.Name, func(b *taskBuilder) {
		b.cas(CAS_EMPTY)
		b.dep(b.buildTaskDrivers("linux", "amd64"), compileTaskName)
		// PLACEHOLDER_* values are substituted by the scheduler at runtime.
		b.cmd("./codesize",
			"--local=false",
			"--project_id", "skia-swarming-bots",
			"--task_id", specs.PLACEHOLDER_TASK_ID,
			"--task_name", b.Name,
			"--compile_task_name", compileTaskName,
			// Note: the binary name cannot contain dashes, otherwise the naming
			// schema logic will partition it into multiple parts.
			//
			// If we ever need to define a CodeSize-* task for a binary with
			// dashes in its name (e.g. "my-binary"), a potential workaround is to
			// create a mapping from a new, non-dashed binary name (e.g. "my_binary")
			// to the actual binary name with dashes. This mapping can be hardcoded
			// in this function; no changes to the task driver would be necessary.
			"--binary_name", b.parts["binary_name"],
			"--bloaty_cipd_version", bloatyCipdPkg.Version,
			"--repo", specs.PLACEHOLDER_REPO,
			"--revision", specs.PLACEHOLDER_REVISION,
			"--patch_issue", specs.PLACEHOLDER_ISSUE,
			"--patch_set", specs.PLACEHOLDER_PATCHSET,
			"--patch_server", specs.PLACEHOLDER_CODEREVIEW_SERVER,
		)
		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
		b.cache(CACHES_WORKDIR...)
		b.cipd(CIPD_PKG_LUCI_AUTH)
		b.asset("bloaty")
		// Uses a dedicated service account rather than one from the config.
		b.serviceAccount("skia-external-codesize@skia-swarming-bots.iam.gserviceaccount.com")
		b.timeout(20 * time.Minute)
		// Never retried automatically.
		b.attempts(1)
	})
}
1480
1481// doUpload indicates whether the given Job should upload its results.
1482func (b *jobBuilder) doUpload() bool {
1483	for _, s := range b.cfg.NoUpload {
1484		m, err := regexp.MatchString(s, b.Name)
1485		if err != nil {
1486			log.Fatal(err)
1487		}
1488		if m {
1489			return false
1490		}
1491	}
1492	return true
1493}
1494
1495// commonTestPerfAssets adds the assets needed by Test and Perf tasks.
1496func (b *taskBuilder) commonTestPerfAssets() {
1497	// Docker-based tests don't need the standard CIPD assets
1498	if b.extraConfig("CanvasKit", "PathKit") || (b.role("Test") && b.extraConfig("LottieWeb")) {
1499		return
1500	}
1501	if b.extraConfig("Skpbench") {
1502		// Skpbench only needs skps
1503		b.asset("skp", "mskp")
1504	} else if b.os("Android", "ChromeOS", "iOS") {
1505		b.asset("skp", "svg", "skimage")
1506	} else {
1507		// for desktop machines
1508		b.asset("skimage", "skp", "svg")
1509	}
1510
1511	if b.isLinux() && b.matchExtraConfig("SAN") {
1512		b.asset("clang_linux")
1513	}
1514
1515	if b.isLinux() {
1516		if b.extraConfig("Vulkan") {
1517			b.asset("linux_vulkan_sdk")
1518		}
1519		if b.matchGpu("Intel") {
1520			b.asset("mesa_intel_driver_linux")
1521		}
1522	}
1523	if b.matchOs("Win") && b.extraConfig("ProcDump") {
1524		b.asset("procdump_win")
1525	}
1526}
1527
// directUpload adds prerequisites for uploading to GCS: the destination
// bucket is passed to the recipe, the task runs as the given service account,
// and gsutil is made available via CIPD.
func (b *taskBuilder) directUpload(gsBucket, serviceAccount string) {
	b.recipeProp("gs_bucket", gsBucket)
	b.serviceAccount(serviceAccount)
	b.cipd(specs.CIPD_PKGS_GSUTIL...)
}
1534
// dm generates a Test task using dm.
func (b *jobBuilder) dm() {
	compileTaskName := ""
	// LottieWeb doesn't require anything in Skia to be compiled.
	if !b.extraConfig("LottieWeb") {
		compileTaskName = b.compile()
	}
	// Set to true when the task uploads its own results, in which case no
	// separate upload task is generated at the bottom of this function.
	directUpload := false
	b.addTask(b.Name, func(b *taskBuilder) {
		cas := CAS_TEST
		recipe := "test"
		if b.extraConfig("PathKit") {
			cas = CAS_PATHKIT
			recipe = "test_pathkit"
			if b.doUpload() {
				b.directUpload(b.cfg.GsBucketGm, b.cfg.ServiceAccountUploadGM)
				directUpload = true
			}
		} else if b.extraConfig("CanvasKit") {
			cas = CAS_CANVASKIT
			recipe = "test_canvaskit"
			if b.doUpload() {
				b.directUpload(b.cfg.GsBucketGm, b.cfg.ServiceAccountUploadGM)
				directUpload = true
			}
		} else if b.extraConfig("LottieWeb") {
			// CAS_LOTTIE_CI differs from CAS_LOTTIE_WEB in that it includes
			// more of the files, especially those brought in via DEPS in the
			// lottie-ci repo. The main difference between Perf.+LottieWeb and
			// Test.+LottieWeb is that the former pulls in the lottie build via
			// npm and the latter always tests at lottie's
			// ToT.
			cas = CAS_LOTTIE_CI
			recipe = "test_lottie_web"
			if b.doUpload() {
				b.directUpload(b.cfg.GsBucketGm, b.cfg.ServiceAccountUploadGM)
				directUpload = true
			}
		} else {
			// Default recipe supports direct upload.
			// TODO(http://skbug.com/11785): Windows jobs are unable to extract gsutil.
			// https://bugs.chromium.org/p/chromium/issues/detail?id=1192611
			if b.doUpload() && !b.matchOs("Win") {
				b.directUpload(b.cfg.GsBucketGm, b.cfg.ServiceAccountUploadGM)
				directUpload = true
			}
		}
		b.recipeProp("gold_hashes_url", b.cfg.GoldHashesURL)
		b.recipeProps(EXTRA_PROPS)
		// The internal hardware label, if any, is forwarded to dm via flags.
		iid := b.internalHardwareLabel()
		iidStr := ""
		if iid != nil {
			iidStr = strconv.Itoa(*iid)
		}
		// Only the plain "test" recipe takes dm flags.
		if recipe == "test" {
			b.dmFlags(iidStr)
		}
		b.kitchenTask(recipe, OUTPUT_TEST)
		b.cas(cas)
		b.swarmDimensions()
		if b.extraConfig("CanvasKit", "Docker", "LottieWeb", "PathKit") {
			b.usesDocker()
		}
		if compileTaskName != "" {
			b.dep(compileTaskName)
		}
		if b.os("Android") && b.extraConfig("ASAN") {
			b.asset("android_ndk_linux")
		}
		b.commonTestPerfAssets()
		if b.matchExtraConfig("Lottie") {
			b.asset("lottie-samples")
		}
		b.expiration(20 * time.Hour)

		// Default timeout; extended below for slow configurations.
		b.timeout(4 * time.Hour)
		if b.extraConfig("Valgrind") {
			b.timeout(9 * time.Hour)
			b.expiration(48 * time.Hour)
			b.asset("valgrind")
			// Since Valgrind runs on the same bots as the CQ, we restrict Valgrind to a subset of the bots
			// to ensure there are always bots free for CQ tasks.
			b.dimension("valgrind:1")
		} else if b.extraConfig("MSAN") {
			b.timeout(9 * time.Hour)
		} else if b.arch("x86") && b.debug() {
			// skia:6737
			b.timeout(6 * time.Hour)
		}
		b.maybeAddIosDevImage()
	})

	// Upload results if necessary. TODO(kjlubick): If we do coverage analysis at the same
	// time as normal tests (which would be nice), cfg.json needs to have Coverage removed.
	if b.doUpload() && !directUpload {
		uploadName := fmt.Sprintf("%s%s%s", PREFIX_UPLOAD, b.jobNameSchema.Sep, b.Name)
		depName := b.Name
		b.addTask(uploadName, func(b *taskBuilder) {
			b.recipeProp("gs_bucket", b.cfg.GsBucketGm)
			b.recipeProps(EXTRA_PROPS)
			b.kitchenTask("upload_dm_results", OUTPUT_NONE)
			b.serviceAccount(b.cfg.ServiceAccountUploadGM)
			b.linuxGceDimensions(MACHINE_TYPE_SMALL)
			b.cipd(specs.CIPD_PKGS_GSUTIL...)
			b.dep(depName)
		})
	}
}
1643
1644func (b *jobBuilder) fm() {
1645	goos := "linux"
1646	if strings.Contains(b.parts["os"], "Win") {
1647		goos = "windows"
1648	}
1649	if strings.Contains(b.parts["os"], "Mac") {
1650		goos = "darwin"
1651	}
1652
1653	b.addTask(b.Name, func(b *taskBuilder) {
1654		b.asset("skimage", "skp", "svg")
1655		b.cas(CAS_TEST)
1656		b.dep(b.buildTaskDrivers(goos, "amd64"), b.compile())
1657		b.cmd("./fm_driver${EXECUTABLE_SUFFIX}",
1658			"--local=false",
1659			"--resources=skia/resources",
1660			"--imgs=skimage",
1661			"--skps=skp",
1662			"--svgs=svg",
1663			"--project_id", "skia-swarming-bots",
1664			"--task_id", specs.PLACEHOLDER_TASK_ID,
1665			"--bot", b.Name,
1666			"--gold="+strconv.FormatBool(!b.matchExtraConfig("SAN")),
1667			"--gold_hashes_url", b.cfg.GoldHashesURL,
1668			"build/fm${EXECUTABLE_SUFFIX}")
1669		b.serviceAccount(b.cfg.ServiceAccountUploadGM)
1670		b.swarmDimensions()
1671		b.attempts(1)
1672
1673		if b.isLinux() && b.matchExtraConfig("SAN") {
1674			b.asset("clang_linux")
1675			// Sanitizers may want to run llvm-symbolizer for readable stack traces.
1676			b.addToPATH("clang_linux/bin")
1677
1678			// Point sanitizer builds at our prebuilt libc++ for this sanitizer.
1679			if b.extraConfig("MSAN") {
1680				// We'd see false positives in std::basic_string<char> if this weren't set.
1681				b.envPrefixes("LD_LIBRARY_PATH", "clang_linux/msan")
1682			} else if b.extraConfig("TSAN") {
1683				// Occasional false positives may crop up in the standard library without this.
1684				b.envPrefixes("LD_LIBRARY_PATH", "clang_linux/tsan")
1685			} else {
1686				// This isn't strictly required, but we usually get better sanitizer
1687				// diagnostics from libc++ than the default OS-provided libstdc++.
1688				b.envPrefixes("LD_LIBRARY_PATH", "clang_linux/lib")
1689			}
1690		}
1691	})
1692}
1693
// canary generates a task that uses TaskDrivers to trigger canary manual rolls on autorollers.
// Canary-G3 does not use this path because it is very different from other autorollers.
//
// rollerName identifies the autoroller to trigger; canaryCQKeyword and
// targetProjectBaseURL are forwarded verbatim to the canary task driver.
func (b *jobBuilder) canary(rollerName, canaryCQKeyword, targetProjectBaseURL string) {
	b.addTask(b.Name, func(b *taskBuilder) {
		b.cas(CAS_EMPTY)
		b.dep(b.buildTaskDrivers("linux", "amd64"))
		// PLACEHOLDER_* values are substituted by the scheduler at runtime.
		b.cmd("./canary",
			"--local=false",
			"--project_id", "skia-swarming-bots",
			"--task_id", specs.PLACEHOLDER_TASK_ID,
			"--task_name", b.Name,
			"--roller_name", rollerName,
			"--cq_keyword", canaryCQKeyword,
			"--target_project_base_url", targetProjectBaseURL,
			"--repo", specs.PLACEHOLDER_REPO,
			"--revision", specs.PLACEHOLDER_REVISION,
			"--patch_issue", specs.PLACEHOLDER_ISSUE,
			"--patch_set", specs.PLACEHOLDER_PATCHSET,
			"--patch_server", specs.PLACEHOLDER_CODEREVIEW_SERVER,
		)
		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
		b.cipd(CIPD_PKG_LUCI_AUTH)
		b.serviceAccount(b.cfg.ServiceAccountCanary)
		b.timeout(3 * time.Hour)
		// Never retried automatically.
		b.attempts(1)
	})
}
1721
// puppeteer generates a task that uses TaskDrivers combined with a node script and puppeteer to
// benchmark something using Chromium (e.g. CanvasKit, LottieWeb).
func (b *jobBuilder) puppeteer() {
	compileTaskName := b.compile()
	b.addTask(b.Name, func(b *taskBuilder) {
		b.defaultSwarmDimensions()
		b.usesNode()
		b.cipd(CIPD_PKG_LUCI_AUTH)
		b.dep(b.buildTaskDrivers("linux", "amd64"), compileTaskName)
		b.output(OUTPUT_PERF)
		b.timeout(60 * time.Minute)
		b.cas(CAS_PUPPETEER)
		b.serviceAccount(b.cfg.ServiceAccountCompile)

		// WebGL 2 unless the job name explicitly asks for WebGL 1.
		webglversion := "2"
		if b.extraConfig("WebGL1") {
			webglversion = "1"
		}

		// Each extra-config selects a different benchmark task driver; all of
		// them share the same trace-identification flags below.
		if b.extraConfig("SkottieFrames") {
			b.cmd(
				"./perf_puppeteer_skottie_frames",
				"--project_id", "skia-swarming-bots",
				"--git_hash", specs.PLACEHOLDER_REVISION,
				"--task_id", specs.PLACEHOLDER_TASK_ID,
				"--task_name", b.Name,
				"--canvaskit_bin_path", "./build",
				"--lotties_path", "./lotties_with_assets",
				"--node_bin_path", "./node/node/bin",
				"--benchmark_path", "./tools/perf-canvaskit-puppeteer",
				"--output_path", OUTPUT_PERF,
				"--os_trace", b.parts["os"],
				"--model_trace", b.parts["model"],
				"--cpu_or_gpu_trace", b.parts["cpu_or_gpu"],
				"--cpu_or_gpu_value_trace", b.parts["cpu_or_gpu_value"],
				"--webgl_version", webglversion, // ignore when running with cpu backend
			)
			// This CIPD package was made by hand with the following invocation:
			//   cipd create -name skia/internal/lotties_with_assets -in ./lotties/ -tag version:0
			//   cipd acl-edit skia/internal/lotties_with_assets -reader group:project-skia-external-task-accounts
			//   cipd acl-edit skia/internal/lotties_with_assets -reader user:pool-skia@chromium-swarm.iam.gserviceaccount.com
			// Where lotties is a hand-selected set of lottie animations and (optionally) assets used in
			// them (e.g. fonts, images).
			b.cipd(&specs.CipdPackage{
				Name:    "skia/internal/lotties_with_assets",
				Path:    "lotties_with_assets",
				Version: "version:1",
			})
		} else if b.extraConfig("RenderSKP") {
			b.cmd(
				"./perf_puppeteer_render_skps",
				"--project_id", "skia-swarming-bots",
				"--git_hash", specs.PLACEHOLDER_REVISION,
				"--task_id", specs.PLACEHOLDER_TASK_ID,
				"--task_name", b.Name,
				"--canvaskit_bin_path", "./build",
				"--skps_path", "./skp",
				"--node_bin_path", "./node/node/bin",
				"--benchmark_path", "./tools/perf-canvaskit-puppeteer",
				"--output_path", OUTPUT_PERF,
				"--os_trace", b.parts["os"],
				"--model_trace", b.parts["model"],
				"--cpu_or_gpu_trace", b.parts["cpu_or_gpu"],
				"--cpu_or_gpu_value_trace", b.parts["cpu_or_gpu_value"],
				"--webgl_version", webglversion,
			)
			b.asset("skp")
		} else if b.extraConfig("CanvasPerf") { // refers to the canvas_perf.js test suite
			b.cmd(
				"./perf_puppeteer_canvas",
				"--project_id", "skia-swarming-bots",
				"--git_hash", specs.PLACEHOLDER_REVISION,
				"--task_id", specs.PLACEHOLDER_TASK_ID,
				"--task_name", b.Name,
				"--canvaskit_bin_path", "./build",
				"--node_bin_path", "./node/node/bin",
				"--benchmark_path", "./tools/perf-canvaskit-puppeteer",
				"--output_path", OUTPUT_PERF,
				"--os_trace", b.parts["os"],
				"--model_trace", b.parts["model"],
				"--cpu_or_gpu_trace", b.parts["cpu_or_gpu"],
				"--cpu_or_gpu_value_trace", b.parts["cpu_or_gpu_value"],
				"--webgl_version", webglversion,
			)
			b.asset("skp")
		}

	})

	// Upload results to Perf after.
	// TODO(kjlubick,borenet) deduplicate this with the logic in perf().
	uploadName := fmt.Sprintf("%s%s%s", PREFIX_UPLOAD, b.jobNameSchema.Sep, b.Name)
	depName := b.Name
	b.addTask(uploadName, func(b *taskBuilder) {
		b.recipeProp("gs_bucket", b.cfg.GsBucketNano)
		b.recipeProps(EXTRA_PROPS)
		// TODO(borenet): I'm not sure why the upload task is
		// using the Perf task name, but I've done this to
		// maintain existing behavior.
		b.Name = depName
		b.kitchenTask("upload_nano_results", OUTPUT_NONE)
		b.Name = uploadName
		b.serviceAccount(b.cfg.ServiceAccountUploadNano)
		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
		b.cipd(specs.CIPD_PKGS_GSUTIL...)
		b.dep(depName)
	})
}
1830
1831// perf generates a Perf task.
1832func (b *jobBuilder) perf() {
1833	compileTaskName := ""
1834	// LottieWeb doesn't require anything in Skia to be compiled.
1835	if !b.extraConfig("LottieWeb") {
1836		compileTaskName = b.compile()
1837	}
1838	doUpload := b.release() && b.doUpload()
1839	b.addTask(b.Name, func(b *taskBuilder) {
1840		recipe := "perf"
1841		cas := CAS_PERF
1842		if b.extraConfig("Skpbench") {
1843			recipe = "skpbench"
1844			cas = CAS_SKPBENCH
1845		} else if b.extraConfig("PathKit") {
1846			cas = CAS_PATHKIT
1847			recipe = "perf_pathkit"
1848		} else if b.extraConfig("CanvasKit") {
1849			cas = CAS_CANVASKIT
1850			recipe = "perf_canvaskit"
1851		} else if b.extraConfig("SkottieTracing") {
1852			recipe = "perf_skottietrace"
1853		} else if b.extraConfig("SkottieWASM") {
1854			recipe = "perf_skottiewasm_lottieweb"
1855			cas = CAS_SKOTTIE_WASM
1856		} else if b.extraConfig("LottieWeb") {
1857			recipe = "perf_skottiewasm_lottieweb"
1858			cas = CAS_LOTTIE_WEB
1859		}
1860		b.recipeProps(EXTRA_PROPS)
1861		if recipe == "perf" {
1862			b.nanobenchFlags(doUpload)
1863		} else if recipe == "skpbench" {
1864			b.skpbenchFlags()
1865		}
1866		b.kitchenTask(recipe, OUTPUT_PERF)
1867		b.cas(cas)
1868		b.swarmDimensions()
1869		if b.extraConfig("CanvasKit", "Docker", "PathKit") {
1870			b.usesDocker()
1871		}
1872		if compileTaskName != "" {
1873			b.dep(compileTaskName)
1874		}
1875		b.commonTestPerfAssets()
1876		b.expiration(20 * time.Hour)
1877		b.timeout(4 * time.Hour)
1878
1879		if b.extraConfig("Valgrind") {
1880			b.timeout(9 * time.Hour)
1881			b.expiration(48 * time.Hour)
1882			b.asset("valgrind")
1883			// Since Valgrind runs on the same bots as the CQ, we restrict Valgrind to a subset of the bots
1884			// to ensure there are always bots free for CQ tasks.
1885			b.dimension("valgrind:1")
1886		} else if b.extraConfig("MSAN") {
1887			b.timeout(9 * time.Hour)
1888		} else if b.parts["arch"] == "x86" && b.parts["configuration"] == "Debug" {
1889			// skia:6737
1890			b.timeout(6 * time.Hour)
1891		} else if b.extraConfig("LottieWeb", "SkottieWASM") {
1892			b.asset("node", "lottie-samples")
1893		} else if b.matchExtraConfig("Skottie") {
1894			b.asset("lottie-samples")
1895		}
1896
1897		if b.os("Android") && b.cpu() {
1898			b.asset("text_blob_traces")
1899		}
1900		b.maybeAddIosDevImage()
1901
1902		iid := b.internalHardwareLabel()
1903		if iid != nil {
1904			b.Spec.Command = append(b.Spec.Command, fmt.Sprintf("internal_hardware_label=%d", *iid))
1905		}
1906	})
1907
1908	// Upload results if necessary.
1909	if doUpload {
1910		uploadName := fmt.Sprintf("%s%s%s", PREFIX_UPLOAD, b.jobNameSchema.Sep, b.Name)
1911		depName := b.Name
1912		b.addTask(uploadName, func(b *taskBuilder) {
1913			b.recipeProp("gs_bucket", b.cfg.GsBucketNano)
1914			b.recipeProps(EXTRA_PROPS)
1915			// TODO(borenet): I'm not sure why the upload task is
1916			// using the Perf task name, but I've done this to
1917			// maintain existing behavior.
1918			b.Name = depName
1919			b.kitchenTask("upload_nano_results", OUTPUT_NONE)
1920			b.Name = uploadName
1921			b.serviceAccount(b.cfg.ServiceAccountUploadNano)
1922			b.linuxGceDimensions(MACHINE_TYPE_SMALL)
1923			b.cipd(specs.CIPD_PKGS_GSUTIL...)
1924			b.dep(depName)
1925		})
1926	}
1927}
1928
1929// presubmit generates a task which runs the presubmit for this repo.
1930func (b *jobBuilder) presubmit() {
1931	b.addTask(b.Name, func(b *taskBuilder) {
1932		b.recipeProps(map[string]string{
1933			"category":         "cq",
1934			"patch_gerrit_url": "https://skia-review.googlesource.com",
1935			"patch_project":    "skia",
1936			"patch_ref":        specs.PLACEHOLDER_PATCH_REF,
1937			"reason":           "CQ",
1938			"repo_name":        "skia",
1939		})
1940		b.recipeProps(EXTRA_PROPS)
1941		b.kitchenTaskNoBundle("run_presubmit", OUTPUT_NONE)
1942		b.cas(CAS_RUN_RECIPE)
1943		b.serviceAccount(b.cfg.ServiceAccountCompile)
1944		// Use MACHINE_TYPE_LARGE because it seems to save time versus
1945		// MEDIUM and we want presubmit to be fast.
1946		b.linuxGceDimensions(MACHINE_TYPE_LARGE)
1947		b.usesGit()
1948		b.cipd(&specs.CipdPackage{
1949			Name:    "infra/recipe_bundles/chromium.googlesource.com/chromium/tools/build",
1950			Path:    "recipe_bundle",
1951			Version: "git_revision:1a28cb094add070f4beefd052725223930d8c27a",
1952		})
1953	})
1954}
1955
1956// compileWasmGMTests uses a task driver to compile the GMs and unit tests for Web Assembly (WASM).
1957// We can use the same build for both CPU and GPU tests since the latter requires the code for the
1958// former anyway.
1959func (b *jobBuilder) compileWasmGMTests(compileName string) {
1960	b.addTask(compileName, func(b *taskBuilder) {
1961		b.attempts(1)
1962		b.usesDocker()
1963		b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
1964		b.cipd(CIPD_PKG_LUCI_AUTH)
1965		b.dep(b.buildTaskDrivers("linux", "amd64"))
1966		b.output("wasm_out")
1967		b.timeout(60 * time.Minute)
1968		b.cas(CAS_COMPILE)
1969		b.serviceAccount(b.cfg.ServiceAccountCompile)
1970		b.cache(CACHES_DOCKER...)
1971		// For now, we only have one compile mode - a GPU release mode. This should be sufficient to
1972		// run CPU, WebGL1, and WebGL2 tests. Debug mode is not needed for the waterfall because
1973		// when using puppeteer, stacktraces from exceptions are hard to get access to, so we do not
1974		// even bother.
1975		b.cmd(
1976			"./compile_wasm_gm_tests",
1977			"--project_id", "skia-swarming-bots",
1978			"--task_id", specs.PLACEHOLDER_TASK_ID,
1979			"--task_name", compileName,
1980			"--out_path", "./wasm_out",
1981			"--skia_path", "./skia",
1982			"--work_path", "./cache/docker/wasm_gm",
1983		)
1984	})
1985}
1986
1987// compileWasmGMTests uses a task driver to compile the GMs and unit tests for Web Assembly (WASM).
1988// We can use the same build for both CPU and GPU tests since the latter requires the code for the
1989// former anyway.
1990func (b *jobBuilder) runWasmGMTests() {
1991	compileTaskName := b.compile()
1992
1993	b.addTask(b.Name, func(b *taskBuilder) {
1994		b.attempts(1)
1995		b.usesNode()
1996		b.swarmDimensions()
1997		b.cipd(CIPD_PKG_LUCI_AUTH)
1998		b.cipd(CIPD_PKGS_GOLDCTL...)
1999		b.dep(b.buildTaskDrivers("linux", "amd64"))
2000		b.dep(compileTaskName)
2001		b.timeout(60 * time.Minute)
2002		b.cas(CAS_WASM_GM)
2003		b.serviceAccount(b.cfg.ServiceAccountUploadGM)
2004		b.cmd(
2005			"./run_wasm_gm_tests",
2006			"--project_id", "skia-swarming-bots",
2007			"--task_id", specs.PLACEHOLDER_TASK_ID,
2008			"--task_name", b.Name,
2009			"--test_harness_path", "./tools/run-wasm-gm-tests",
2010			"--built_path", "./wasm_out",
2011			"--node_bin_path", "./node/node/bin",
2012			"--resource_path", "./resources",
2013			"--work_path", "./wasm_gm/work",
2014			"--gold_ctl_path", "./cipd_bin_packages/goldctl",
2015			"--gold_hashes_url", b.cfg.GoldHashesURL,
2016			"--git_commit", specs.PLACEHOLDER_REVISION,
2017			"--changelist_id", specs.PLACEHOLDER_ISSUE,
2018			"--patchset_order", specs.PLACEHOLDER_PATCHSET,
2019			"--tryjob_id", specs.PLACEHOLDER_BUILDBUCKET_BUILD_ID,
2020			// TODO(kjlubick, nifong) Make these not hard coded if we change the configs we test on.
2021			"--webgl_version", "2", // 0 means CPU ; this flag controls cpu_or_gpu and extra_config
2022			"--gold_key", "alpha_type:Premul",
2023			"--gold_key", "arch:wasm",
2024			"--gold_key", "browser:Chrome",
2025			"--gold_key", "color_depth:8888",
2026			"--gold_key", "config:gles",
2027			"--gold_key", "configuration:Release",
2028			"--gold_key", "cpu_or_gpu_value:QuadroP400",
2029			"--gold_key", "model:Golo",
2030			"--gold_key", "os:Ubuntu18",
2031		)
2032	})
2033}
2034