• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1// Copyright 2016 The Chromium Authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5package gen_tasks_logic
6
7/*
8	Generate the tasks.json file.
9*/
10
11import (
12	"encoding/json"
13	"fmt"
14	"io/ioutil"
15	"log"
16	"path"
17	"path/filepath"
18	"regexp"
19	"runtime"
20	"sort"
21	"strconv"
22	"strings"
23	"time"
24
25	"go.skia.org/infra/go/cas/rbe"
26	"go.skia.org/infra/go/cipd"
27	"go.skia.org/infra/task_scheduler/go/specs"
28)
29
const (
	// Names of the CAS input specs registered via MustAddCasSpec in
	// GenTasks; tasks refer to these by name to declare their inputs.
	CAS_CANVASKIT     = "canvaskit"
	CAS_COMPILE       = "compile"
	CAS_EMPTY         = "empty" // TODO(borenet): It'd be nice if this wasn't necessary.
	CAS_LOTTIE_CI     = "lottie-ci"
	CAS_LOTTIE_WEB    = "lottie-web"
	CAS_PATHKIT       = "pathkit"
	CAS_PERF          = "perf"
	CAS_PUPPETEER     = "puppeteer"
	CAS_RUN_RECIPE    = "run-recipe"
	CAS_RECIPES       = "recipes"
	CAS_RECREATE_SKPS = "recreate-skps"
	CAS_SKOTTIE_WASM  = "skottie-wasm"
	CAS_SKPBENCH      = "skpbench"
	CAS_TASK_DRIVERS  = "task-drivers"
	CAS_TEST          = "test"
	CAS_WASM_GM       = "wasm-gm"
	CAS_WHOLE_REPO    = "whole-repo"

	// Names of per-commit housekeeping tasks which build or isolate
	// inputs consumed by other tasks.
	BUILD_TASK_DRIVERS_PREFIX  = "Housekeeper-PerCommit-BuildTaskDrivers"
	BUNDLE_RECIPES_NAME        = "Housekeeper-PerCommit-BundleRecipes"
	ISOLATE_GCLOUD_LINUX_NAME  = "Housekeeper-PerCommit-IsolateGCloudLinux"
	ISOLATE_SKIMAGE_NAME       = "Housekeeper-PerCommit-IsolateSkImage"
	ISOLATE_SKP_NAME           = "Housekeeper-PerCommit-IsolateSKP"
	ISOLATE_MSKP_NAME          = "Housekeeper-PerCommit-IsolateMSKP"
	ISOLATE_SVG_NAME           = "Housekeeper-PerCommit-IsolateSVG"
	ISOLATE_NDK_LINUX_NAME     = "Housekeeper-PerCommit-IsolateAndroidNDKLinux"
	ISOLATE_SDK_LINUX_NAME     = "Housekeeper-PerCommit-IsolateAndroidSDKLinux"
	ISOLATE_WIN_TOOLCHAIN_NAME = "Housekeeper-PerCommit-IsolateWinToolchain"

	// Values used for the "os" Swarming dimension, plus the OS components
	// used when deriving compile task names.
	DEFAULT_OS_DEBIAN              = "Debian-10.10"
	DEFAULT_OS_LINUX_GCE           = "Debian-10.3"
	OLD_OS_LINUX_GCE               = "Debian-9.8"
	COMPILE_TASK_NAME_OS_LINUX     = "Debian10"
	COMPILE_TASK_NAME_OS_LINUX_OLD = "Debian9"
	DEFAULT_OS_MAC                 = "Mac-10.15.7"
	DEFAULT_OS_WIN                 = "Windows-Server-17763"

	// GCE machine types, used as the "machine_type" Swarming dimension.

	// Small is a 2-core machine.
	// TODO(dogben): Would n1-standard-1 or n1-standard-2 be sufficient?
	MACHINE_TYPE_SMALL = "n1-highmem-2"
	// Medium is a 16-core machine.
	MACHINE_TYPE_MEDIUM = "n1-standard-16"
	// Large is a 64-core machine. (We use "highcpu" because we don't need more than 57GB memory for
	// any of our tasks.)
	MACHINE_TYPE_LARGE = "n1-highcpu-64"

	// Swarming output dirs.
	OUTPUT_NONE  = "output_ignored" // This will result in outputs not being isolated.
	OUTPUT_BUILD = "build"
	OUTPUT_TEST  = "test"
	OUTPUT_PERF  = "perf"

	// Name prefix for upload jobs.
	PREFIX_UPLOAD = "Upload"
)
86
var (
	// "Constants"

	// Named caches used by tasks.

	// CACHES_GIT provides persistent named caches for Git data.
	CACHES_GIT = []*specs.Cache{
		{
			Name: "git",
			Path: "cache/git",
		},
		{
			Name: "git_cache",
			Path: "cache/git_cache",
		},
	}
	// CACHES_GO provides named caches for Go (build cache and GOPATH).
	CACHES_GO = []*specs.Cache{
		{
			Name: "go_cache",
			Path: "cache/go_cache",
		},
		{
			Name: "gopath",
			Path: "cache/gopath",
		},
	}
	// CACHES_WORKDIR is a persistent "work" directory cache.
	CACHES_WORKDIR = []*specs.Cache{
		{
			Name: "work",
			Path: "cache/work",
		},
	}
	// CACHES_CCACHE is the named cache used for ccache.
	CACHES_CCACHE = []*specs.Cache{
		{
			Name: "ccache",
			Path: "cache/ccache",
		},
	}
	// The "docker" cache is used as a persistent working directory for
	// tasks which use Docker. It is not to be confused with Docker's own
	// cache, which stores images. We do not currently use a named Swarming
	// cache for the latter.
	// TODO(borenet): We should ensure that any task which uses Docker does
	// not also use the normal "work" cache, to prevent issues like
	// https://bugs.chromium.org/p/skia/issues/detail?id=9749.
	CACHES_DOCKER = []*specs.Cache{
		{
			Name: "docker",
			Path: "cache/docker",
		},
	}

	// CAS_SPEC_LOTTIE_CI is a CasSpec which includes the files needed for
	// lottie-ci.  This is global so that it can be overridden by other
	// repositories which import this file.
	CAS_SPEC_LOTTIE_CI = &specs.CasSpec{
		Root: "..",
		Paths: []string{
			"skia/infra/bots/run_recipe.py",
			"skia/infra/lottiecap",
			"skia/tools/lottie-web-perf",
			"skia/tools/lottiecap",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	}

	// CAS_SPEC_WHOLE_REPO is a CasSpec which includes the entire repo. This is
	// global so that it can be overridden by other repositories which import
	// this file.
	CAS_SPEC_WHOLE_REPO = &specs.CasSpec{
		Root:     "..",
		Paths:    []string{"skia"},
		Excludes: []string{rbe.ExcludeGitDir},
	}

	// CIPD_PKG_LUCI_AUTH is the CIPD package for the luci-auth tool.
	// TODO(borenet): This hacky and bad.
	CIPD_PKG_LUCI_AUTH = cipd.MustGetPackage("infra/tools/luci-auth/${platform}")

	// CIPD_PKGS_GOLDCTL is the CIPD package for the goldctl tool.
	CIPD_PKGS_GOLDCTL = []*specs.CipdPackage{cipd.MustGetPackage("skia/tools/goldctl/${platform}")}

	// CIPD_PKGS_XCODE contains the CIPD packages needed to install XCode.
	CIPD_PKGS_XCODE = []*specs.CipdPackage{
		// https://chromium.googlesource.com/chromium/tools/build/+/e19b7d9390e2bb438b566515b141ed2b9ed2c7c2/scripts/slave/recipe_modules/ios/api.py#317
		// This package is really just an installer for XCode.
		{
			Name: "infra/tools/mac_toolchain/${platform}",
			Path: "mac_toolchain",
			// When this is updated, also update
			// https://skia.googlesource.com/skcms.git/+/f1e2b45d18facbae2dece3aca673fe1603077846/infra/bots/gen_tasks.go#56
			Version: "git_revision:796d2b92cff93fc2059623ce0a66284373ceea0a",
		},
	}

	// These properties are required by some tasks, eg. for running
	// bot_update, but they prevent de-duplication, so they should only be
	// used where necessary.
	EXTRA_PROPS = map[string]string{
		"buildbucket_build_id": specs.PLACEHOLDER_BUILDBUCKET_BUILD_ID,
		"patch_issue":          specs.PLACEHOLDER_ISSUE_INT,
		"patch_ref":            specs.PLACEHOLDER_PATCH_REF,
		"patch_repo":           specs.PLACEHOLDER_PATCH_REPO,
		"patch_set":            specs.PLACEHOLDER_PATCHSET_INT,
		"patch_storage":        specs.PLACEHOLDER_PATCH_STORAGE,
		"repository":           specs.PLACEHOLDER_REPO,
		"revision":             specs.PLACEHOLDER_REVISION,
		"task_id":              specs.PLACEHOLDER_TASK_ID,
	}

	// ISOLATE_ASSET_MAPPING maps the name of an asset to the configuration
	// for how the CIPD package should be installed for a given task.
	ISOLATE_ASSET_MAPPING = map[string]uploadAssetCASCfg{
		"gcloud_linux": {
			uploadTaskName: ISOLATE_GCLOUD_LINUX_NAME,
			path:           "gcloud_linux",
		},
		"skimage": {
			uploadTaskName: ISOLATE_SKIMAGE_NAME,
			path:           "skimage",
		},
		"skp": {
			uploadTaskName: ISOLATE_SKP_NAME,
			path:           "skp",
		},
		"svg": {
			uploadTaskName: ISOLATE_SVG_NAME,
			path:           "svg",
		},
		"mskp": {
			uploadTaskName: ISOLATE_MSKP_NAME,
			path:           "mskp",
		},
		"android_ndk_linux": {
			uploadTaskName: ISOLATE_NDK_LINUX_NAME,
			path:           "android_ndk_linux",
		},
		"android_sdk_linux": {
			uploadTaskName: ISOLATE_SDK_LINUX_NAME,
			path:           "android_sdk_linux",
		},
		"win_toolchain": {
			alwaysIsolate:  true,
			uploadTaskName: ISOLATE_WIN_TOOLCHAIN_NAME,
			path:           "win_toolchain",
		},
	}

	// Set dontReduceOpsTaskSplitting option on these models.
	DONT_REDUCE_OPS_TASK_SPLITTING_MODELS = []string{
		"NUC5PPYH",
	}
)
235
// Config contains general configuration information for generating tasks.json.
// It is typically loaded from a cfg.json file (see LoadConfig) but may be
// constructed programmatically by callers of GenTasks.
type Config struct {
	// Directory containing assets. Assumed to be relative to the directory
	// which contains the calling gen_tasks.go file. If not specified, uses
	// the infra/bots/assets from this repo.
	AssetsDir string `json:"assets_dir"`

	// Path to the builder name schema JSON file. Assumed to be relative to
	// the directory which contains the calling gen_tasks.go file. If not
	// specified, uses infra/bots/recipe_modules/builder_name_schema/builder_name_schema.json
	// from this repo.
	BuilderNameSchemaFile string `json:"builder_name_schema"`

	// URL of the Skia Gold known hashes endpoint.
	GoldHashesURL string `json:"gold_hashes_url"`

	// GCS bucket used for GM results.
	GsBucketGm string `json:"gs_bucket_gm"`

	// GCS bucket used for Nanobench results.
	GsBucketNano string `json:"gs_bucket_nano"`

	// Optional function which returns a bot ID for internal devices.
	// Excluded from JSON serialization (json:"-"); must be set in code.
	InternalHardwareLabel func(parts map[string]string) *int `json:"-"`

	// List of task names for which we'll never upload results.
	NoUpload []string `json:"no_upload"`

	// PathToSkia is the relative path from the root of the current checkout to
	// the root of the Skia checkout.
	PathToSkia string `json:"path_to_skia"`

	// Swarming pool used for triggering tasks.
	Pool string `json:"pool"`

	// LUCI project associated with this repo.
	Project string `json:"project"`

	// Service accounts.
	ServiceAccountCanary       string `json:"service_account_canary"`
	ServiceAccountCompile      string `json:"service_account_compile"`
	ServiceAccountHousekeeper  string `json:"service_account_housekeeper"`
	ServiceAccountRecreateSKPs string `json:"service_account_recreate_skps"`
	ServiceAccountUploadBinary string `json:"service_account_upload_binary"`
	ServiceAccountUploadGM     string `json:"service_account_upload_gm"`
	ServiceAccountUploadNano   string `json:"service_account_upload_nano"`

	// Optional override function which derives Swarming bot dimensions
	// from parts of task names. Excluded from JSON serialization
	// (json:"-"); must be set in code. See also swarmDimensions.
	SwarmDimensions func(parts map[string]string) []string `json:"-"`
}
287
// JobInfo is the type of each entry in the jobs.json file.
type JobInfo struct {
	// The name of the job.
	Name string `json:"name"`

	// The optional CQ config of this job. If the CQ config is missing then the
	// job will not be added to the CQ of this branch.
	CQConfig *specs.CommitQueueJobConfig `json:"cq_config,omitempty"`
}
297
// LoadConfig loads the Config from a cfg.json file which is the sibling of the
// calling gen_tasks.go file.
//
// NOTE: getCallingDirName inspects the call stack at a fixed depth, so this
// function must be called directly from the gen_tasks.go file whose sibling
// cfg.json should be loaded. Failure to read or parse the file is fatal.
func LoadConfig() *Config {
	cfgDir := getCallingDirName()
	var cfg Config
	LoadJson(filepath.Join(cfgDir, "cfg.json"), &cfg)
	return &cfg
}
306
307// CheckoutRoot is a wrapper around specs.GetCheckoutRoot which prevents the
308// caller from needing a dependency on the specs package.
309func CheckoutRoot() string {
310	root, err := specs.GetCheckoutRoot()
311	if err != nil {
312		log.Fatal(err)
313	}
314	return root
315}
316
317// LoadJson loads JSON from the given file and unmarshals it into the given
318// destination.
319func LoadJson(filename string, dest interface{}) {
320	b, err := ioutil.ReadFile(filename)
321	if err != nil {
322		log.Fatalf("Unable to read %q: %s", filename, err)
323	}
324	if err := json.Unmarshal(b, dest); err != nil {
325		log.Fatalf("Unable to parse %q: %s", filename, err)
326	}
327}
328
// In reports whether the string s is an element of the slice a.
// TODO(borenet): This is copied from go.skia.org/infra/go/util to avoid the
// huge set of additional dependencies added by that package.
func In(s string, a []string) bool {
	for i := range a {
		if a[i] == s {
			return true
		}
	}
	return false
}
340
// GenTasks regenerates the tasks.json file. Loads the job list from a jobs.json
// file which is the sibling of the calling gen_tasks.go file. If cfg is nil, it
// is similarly loaded from a cfg.json file which is the sibling of the calling
// gen_tasks.go file.
//
// NOTE: getCallingDirName inspects the call stack at a fixed depth, so this
// function must be called directly from a gen_tasks.go file.
func GenTasks(cfg *Config) {
	b := specs.MustNewTasksCfgBuilder()

	// Find the paths to the infra/bots directories in this repo and the
	// repo of the calling file.
	relpathTargetDir := getThisDirName()
	relpathBaseDir := getCallingDirName()

	// Parse jobs.json.
	var jobsWithInfo []*JobInfo
	LoadJson(filepath.Join(relpathBaseDir, "jobs.json"), &jobsWithInfo)
	// Create a slice with only job names.
	jobs := []string{}
	for _, j := range jobsWithInfo {
		jobs = append(jobs, j.Name)
	}

	// Load the config from cfg.json if it was not provided by the caller.
	if cfg == nil {
		cfg = new(Config)
		LoadJson(filepath.Join(relpathBaseDir, "cfg.json"), cfg)
	}

	// Create the JobNameSchema. The schema file from this repo is used
	// unless the config specifies an override.
	builderNameSchemaFile := filepath.Join(relpathTargetDir, "recipe_modules", "builder_name_schema", "builder_name_schema.json")
	if cfg.BuilderNameSchemaFile != "" {
		builderNameSchemaFile = filepath.Join(relpathBaseDir, cfg.BuilderNameSchemaFile)
	}
	schema, err := NewJobNameSchema(builderNameSchemaFile)
	if err != nil {
		log.Fatal(err)
	}

	// Set the assets dir. Defaults to this repo's assets unless overridden
	// by the config.
	assetsDir := filepath.Join(relpathTargetDir, "assets")
	if cfg.AssetsDir != "" {
		assetsDir = filepath.Join(relpathBaseDir, cfg.AssetsDir)
	}
	b.SetAssetsDir(assetsDir)

	// Create Tasks and Jobs.
	builder := &builder{
		TasksCfgBuilder: b,
		cfg:             cfg,
		jobNameSchema:   schema,
		jobs:            jobs,
	}
	for _, j := range jobsWithInfo {
		jb := newJobBuilder(builder, j.Name)
		jb.genTasksForJob()
		jb.finish()

		// Add the CQ spec if it is a CQ job.
		if j.CQConfig != nil {
			b.MustAddCQJob(j.Name, j.CQConfig)
		}
	}

	// Create CasSpecs. These define the input files for tasks, keyed by
	// the CAS_* constants above.
	b.MustAddCasSpec(CAS_CANVASKIT, &specs.CasSpec{
		Root: "..",
		Paths: []string{
			"skia/infra/bots/run_recipe.py",
			"skia/infra/canvaskit",
			"skia/modules/canvaskit",
			"skia/modules/pathkit/perf/perfReporter.js",
			"skia/modules/pathkit/tests/testReporter.js",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	})
	b.MustAddCasSpec(CAS_EMPTY, specs.EmptyCasSpec)
	b.MustAddCasSpec(CAS_LOTTIE_CI, CAS_SPEC_LOTTIE_CI)
	b.MustAddCasSpec(CAS_LOTTIE_WEB, &specs.CasSpec{
		Root: "..",
		Paths: []string{
			"skia/infra/bots/run_recipe.py",
			"skia/tools/lottie-web-perf",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	})
	b.MustAddCasSpec(CAS_PATHKIT, &specs.CasSpec{
		Root: "..",
		Paths: []string{
			"skia/infra/bots/run_recipe.py",
			"skia/infra/pathkit",
			"skia/modules/pathkit",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	})
	b.MustAddCasSpec(CAS_PERF, &specs.CasSpec{
		Root: "..",
		Paths: []string{
			"skia/infra/bots/assets",
			"skia/infra/bots/run_recipe.py",
			"skia/platform_tools/ios/bin",
			"skia/resources",
			"skia/tools/valgrind.supp",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	})
	b.MustAddCasSpec(CAS_PUPPETEER, &specs.CasSpec{
		Root: "../skia", // Needed for other repos.
		Paths: []string{
			"tools/perf-canvaskit-puppeteer",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	})
	b.MustAddCasSpec(CAS_RECIPES, &specs.CasSpec{
		Root: "..",
		Paths: []string{
			"skia/infra/config/recipes.cfg",
			"skia/infra/bots/bundle_recipes.sh",
			"skia/infra/bots/README.recipes.md",
			"skia/infra/bots/recipe_modules",
			"skia/infra/bots/recipes",
			"skia/infra/bots/recipes.py",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	})
	b.MustAddCasSpec(CAS_RUN_RECIPE, &specs.CasSpec{
		Root: "..",
		Paths: []string{
			"skia/infra/bots/run_recipe.py",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	})
	b.MustAddCasSpec(CAS_SKOTTIE_WASM, &specs.CasSpec{
		Root: "..",
		Paths: []string{
			"skia/infra/bots/run_recipe.py",
			"skia/tools/skottie-wasm-perf",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	})
	b.MustAddCasSpec(CAS_SKPBENCH, &specs.CasSpec{
		Root: "..",
		Paths: []string{
			"skia/infra/bots/assets",
			"skia/infra/bots/run_recipe.py",
			"skia/tools/skpbench",
			"skia/tools/valgrind.supp",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	})
	b.MustAddCasSpec(CAS_TASK_DRIVERS, &specs.CasSpec{
		Root: "..",
		Paths: []string{
			"skia/go.mod",
			"skia/go.sum",
			"skia/infra/bots/build_task_drivers.sh",
			"skia/infra/bots/run_recipe.py",
			"skia/infra/bots/task_drivers",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	})
	b.MustAddCasSpec(CAS_TEST, &specs.CasSpec{
		Root: "..",
		Paths: []string{
			"skia/infra/bots/assets",
			"skia/infra/bots/run_recipe.py",
			"skia/platform_tools/ios/bin",
			"skia/resources",
			"skia/tools/valgrind.supp",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	})
	b.MustAddCasSpec(CAS_WASM_GM, &specs.CasSpec{
		Root: "../skia", // Needed for other repos.
		Paths: []string{
			"resources",
			"tools/run-wasm-gm-tests",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	})
	b.MustAddCasSpec(CAS_WHOLE_REPO, CAS_SPEC_WHOLE_REPO)
	b.MustAddCasSpec(CAS_RECREATE_SKPS, &specs.CasSpec{
		Root: "..",
		Paths: []string{
			"skia/DEPS",
			"skia/bin/fetch-sk",
			"skia/infra/bots/assets/skp",
			"skia/infra/bots/utils.py",
			"skia/infra/config/recipes.cfg",
			"skia/tools/skp",
		},
		Excludes: []string{rbe.ExcludeGitDir},
	})
	generateCompileCAS(b, cfg)

	// Validate and write out tasks.json.
	builder.MustFinish()
}
535
// getThisDirName returns the infra/bots directory which is an ancestor of this
// file.
func getThisDirName() string {
	_, thisFile, _, ok := runtime.Caller(0)
	if !ok {
		log.Fatal("Unable to find path to current file.")
	}
	// This file lives one directory below infra/bots, so strip the file
	// name and one directory level.
	return filepath.Dir(filepath.Dir(thisFile))
}
545
// getCallingDirName returns the infra/bots directory which is an ancestor of
// the calling gen_tasks.go file. WARNING: assumes that the calling gen_tasks.go
// file appears two steps up the stack; do not call from a function which is not
// directly called by gen_tasks.go.
func getCallingDirName() string {
	// Caller(2) skips this function and its direct caller, landing on the
	// gen_tasks.go file which invoked that caller.
	_, callerFile, _, ok := runtime.Caller(2)
	if !ok {
		log.Fatal("Unable to find path to calling file.")
	}
	return filepath.Dir(callerFile)
}
557
// builder is a wrapper for specs.TasksCfgBuilder which carries the
// repo-specific configuration needed while generating tasks and jobs.
type builder struct {
	*specs.TasksCfgBuilder
	// cfg is the general configuration (see Config).
	cfg *Config
	// jobNameSchema is used to assemble job names from their parts.
	jobNameSchema *JobNameSchema
	// jobs is the list of all job names loaded from jobs.json.
	jobs []string
}
565
// marshalJson encodes the given data as JSON and fixes escaping of '<' which Go
// does by default. Encoding failure is fatal.
//
// Note: only '<' (escaped by json.Marshal as \u003c) is un-escaped; '>' and
// '&' remain in their escaped forms.
func marshalJson(data interface{}) string {
	j, err := json.Marshal(data)
	if err != nil {
		log.Fatal(err)
	}
	// strings.ReplaceAll is the idiomatic form of strings.Replace(..., -1).
	return strings.ReplaceAll(string(j), "\\u003c", "<")
}
575
// kitchenTaskNoBundle sets up the task to run a recipe via Kitchen, without the
// recipe bundle.
//
// recipe is the name of the recipe to run. outputDir is the Swarming output
// directory (one of the OUTPUT_* constants); pass OUTPUT_NONE to skip
// isolating outputs.
func (b *taskBuilder) kitchenTaskNoBundle(recipe string, outputDir string) {
	b.cipd(CIPD_PKG_LUCI_AUTH)
	b.cipd(cipd.MustGetPackage("infra/tools/luci/kitchen/${platform}"))
	b.usesPython()
	b.recipeProp("swarm_out_dir", outputDir)
	if outputDir != OUTPUT_NONE {
		b.output(outputDir)
	}
	// Run the recipe under vpython3 by default.
	python := "cipd_bin_packages/vpython3${EXECUTABLE_SUFFIX}"
	if b.role("Test", "Perf") && b.matchOs("Win7") && b.matchModel("Golo") {
		// TODO(borenet): Win7 machines in the Golo seem to be missing a
		// necessary DLL to make python3 work.
		python = "cipd_bin_packages/vpython"
	}
	b.cmd(python, "-u", "skia/infra/bots/run_recipe.py", "${ISOLATED_OUTDIR}", recipe, b.getRecipeProps(), b.cfg.Project)
	// Most recipes want this isolate; they can override if necessary.
	b.cas(CAS_RUN_RECIPE)
	b.timeout(time.Hour)
	b.addToPATH("cipd_bin_packages", "cipd_bin_packages/bin")
	// Tag the task with the LogDog location so logs can be found.
	b.Spec.ExtraTags = map[string]string{
		"log_location": fmt.Sprintf("logdog://logs.chromium.org/%s/${SWARMING_TASK_ID}/+/annotations", b.cfg.Project),
	}

	// Attempts.
	if !b.role("Build", "Upload") && b.extraConfig("ASAN", "HWASAN", "MSAN", "TSAN", "Valgrind") {
		// Sanitizers often find non-deterministic issues that retries would hide.
		b.attempts(1)
	} else {
		// Retry by default to hide random bot/hardware failures.
		b.attempts(2)
	}
}
610
// kitchenTask sets up the task to run a recipe via Kitchen, including the
// bundled recipes as a dependency.
func (b *taskBuilder) kitchenTask(recipe string, outputDir string) {
	b.kitchenTaskNoBundle(recipe, outputDir)
	// Depend on the task which bundles the recipes.
	b.dep(b.bundleRecipes())
}
616
// internalHardwareLabel returns the internal ID for the bot, if any. Returns
// nil when the config does not provide an InternalHardwareLabel function.
func (b *taskBuilder) internalHardwareLabel() *int {
	if b.cfg.InternalHardwareLabel != nil {
		return b.cfg.InternalHardwareLabel(b.parts)
	}
	return nil
}
624
// linuxGceDimensions adds the Swarming bot dimensions for Linux GCE instances.
// machineType is one of the MACHINE_TYPE_* constants.
func (b *taskBuilder) linuxGceDimensions(machineType string) {
	b.dimension(
		// Specify CPU to avoid running builds on bots with a more unique CPU.
		"cpu:x86-64-Haswell_GCE",
		"gpu:none",
		// The machine type determines how many cores the bot has.
		fmt.Sprintf("machine_type:%s", machineType),
		fmt.Sprintf("os:%s", DEFAULT_OS_LINUX_GCE),
		fmt.Sprintf("pool:%s", b.cfg.Pool),
	)
}
637
638// deriveCompileTaskName returns the name of a compile task based on the given
639// job name.
640func (b *jobBuilder) deriveCompileTaskName() string {
641	if b.role("Test", "Perf", "FM") {
642		task_os := b.parts["os"]
643		ec := []string{}
644		if val := b.parts["extra_config"]; val != "" {
645			ec = strings.Split(val, "_")
646			ignore := []string{
647				"Skpbench", "AbandonGpuContext", "PreAbandonGpuContext", "Valgrind",
648				"ReleaseAndAbandonGpuContext", "FSAA", "FAAA", "FDAA", "NativeFonts", "GDI",
649				"NoGPUThreads", "ProcDump", "DDL1", "DDL3", "OOPRDDL", "T8888",
650				"DDLTotal", "DDLRecord", "9x9", "BonusConfigs", "SkottieTracing", "SkottieWASM",
651				"GpuTess", "DMSAAStats", "Mskp", "Docker", "PDF", "SkVM", "Puppeteer",
652				"SkottieFrames", "RenderSKP", "CanvasPerf", "AllPathsVolatile", "WebGL2"}
653			keep := make([]string, 0, len(ec))
654			for _, part := range ec {
655				if !In(part, ignore) {
656					keep = append(keep, part)
657				}
658			}
659			ec = keep
660		}
661		if b.os("Android") {
662			if !In("Android", ec) {
663				ec = append([]string{"Android"}, ec...)
664			}
665			task_os = COMPILE_TASK_NAME_OS_LINUX
666		} else if b.os("ChromeOS") {
667			ec = append([]string{"Chromebook", "GLES"}, ec...)
668			task_os = COMPILE_TASK_NAME_OS_LINUX
669		} else if b.os("iOS") {
670			ec = append([]string{task_os}, ec...)
671			task_os = "Mac"
672		} else if b.matchOs("Win") {
673			task_os = "Win"
674		} else if b.compiler("GCC") {
675			// GCC compiles are now on a Docker container. We use the same OS and
676			// version to compile as to test.
677			ec = append(ec, "Docker")
678		} else if b.matchOs("Ubuntu", "Debian") {
679			task_os = COMPILE_TASK_NAME_OS_LINUX
680		} else if b.matchOs("Mac") {
681			task_os = "Mac"
682		}
683		jobNameMap := map[string]string{
684			"role":          "Build",
685			"os":            task_os,
686			"compiler":      b.parts["compiler"],
687			"target_arch":   b.parts["arch"],
688			"configuration": b.parts["configuration"],
689		}
690		if b.extraConfig("PathKit") {
691			ec = []string{"PathKit"}
692		}
693		if b.extraConfig("CanvasKit", "SkottieWASM", "Puppeteer") {
694			if b.cpu() {
695				ec = []string{"CanvasKit_CPU"}
696			} else {
697				ec = []string{"CanvasKit"}
698			}
699
700		}
701		if len(ec) > 0 {
702			jobNameMap["extra_config"] = strings.Join(ec, "_")
703		}
704		name, err := b.jobNameSchema.MakeJobName(jobNameMap)
705		if err != nil {
706			log.Fatal(err)
707		}
708		return name
709	} else if b.parts["role"] == "BuildStats" {
710		return strings.Replace(b.Name, "BuildStats", "Build", 1)
711	} else {
712		return b.Name
713	}
714}
715
// swarmDimensions generates swarming bot dimensions for the given task. If
// the config provides a SwarmDimensions override and it returns a non-nil
// result, that result is used; otherwise the default mapping is applied.
func (b *taskBuilder) swarmDimensions() {
	if b.cfg.SwarmDimensions != nil {
		dims := b.cfg.SwarmDimensions(b.parts)
		if dims != nil {
			b.dimension(dims...)
			return
		}
	}
	b.defaultSwarmDimensions()
}
727
// defaultSwarmDimensions generates default swarming bot dimensions for the
// given task. The dimensions are derived from the task name parts: the "os"
// part selects the Swarming OS dimension; Test/Perf tasks additionally select
// device, CPU, or GPU dimensions depending on the model and cpu_or_gpu_value
// parts; all other tasks run on CPU-only machines. An unrecognized part is
// fatal.
func (b *taskBuilder) defaultSwarmDimensions() {
	d := map[string]string{
		"pool": b.cfg.Pool,
	}
	if os, ok := b.parts["os"]; ok {
		// Map the OS part of the task name to a Swarming "os" dimension.
		d["os"], ok = map[string]string{
			"Android":    "Android",
			"ChromeOS":   "ChromeOS",
			"Debian9":    DEFAULT_OS_LINUX_GCE, // Runs in Deb9 Docker.
			"Debian10":   DEFAULT_OS_LINUX_GCE,
			"Mac":        DEFAULT_OS_MAC,
			"Mac10.13":   "Mac-10.13.6",
			"Mac10.14":   "Mac-10.14.3",
			"Mac10.15.1": "Mac-10.15.1",
			"Mac10.15.7": "Mac-10.15.7", // Same as 'Mac', but explicit.
			"Mac11":      "Mac-11.4",
			"Ubuntu18":   "Ubuntu-18.04",
			"Win":        DEFAULT_OS_WIN,
			"Win10":      "Windows-10-19041",
			"Win2019":    DEFAULT_OS_WIN,
			"Win7":       "Windows-7-SP1",
			"Win8":       "Windows-8.1-SP0",
			"iOS":        "iOS-13.3.1",
		}[os]
		if !ok {
			log.Fatalf("Entry %q not found in OS mapping.", os)
		}
		// Model-specific OS overrides.
		if os == "Win10" && b.parts["model"] == "Golo" {
			// ChOps-owned machines have Windows 10 21h1.
			d["os"] = "Windows-10-19043"
		}
		if os == "Mac10.14" && b.parts["model"] == "VMware7.1" {
			// ChOps VMs are at a newer version of MacOS.
			d["os"] = "Mac-10.14.6"
		}
		// NOTE(review): "Mac10.15" is not a key in the OS mapping above, so
		// this branch appears unreachable (such an os would have been fatal
		// already); confirm whether the intended key is "Mac10.15.1" or
		// "Mac10.15.7".
		if os == "Mac10.15" && b.parts["model"] == "VMware7.1" {
			// ChOps VMs are at a newer version of MacOS.
			d["os"] = "Mac-10.15.7"
		}
		if b.parts["model"] == "iPhone6" {
			// This is the latest iOS that supports iPhone6.
			d["os"] = "iOS-12.4.5"
		}
		if b.parts["model"] == "iPhone11" {
			d["os"] = "iOS-13.6"
		}
		if b.parts["model"] == "iPadPro" {
			d["os"] = "iOS-13.6"
		}
	} else {
		d["os"] = DEFAULT_OS_DEBIAN
	}
	// Test and Perf tasks need device/CPU/GPU dimensions; other tasks run
	// on CPU-only machines (see the else branch at the bottom).
	if b.role("Test", "Perf") {
		if b.os("Android") {
			// For Android, the device type is a better dimension
			// than CPU or GPU.
			// Maps model -> {device_type, device_os}.
			deviceInfo, ok := map[string][]string{
				"AndroidOne":      {"sprout", "MOB30Q"},
				"GalaxyS7_G930FD": {"herolte", "R16NW_G930FXXS2ERH6"}, // This is Oreo.
				"GalaxyS9":        {"starlte", "QP1A.190711.020"},     // This is Android10.
				"GalaxyS20":       {"exynos990", "QP1A.190711.020"},
				"Nexus5":          {"hammerhead", "M4B30Z_3437181"},
				"Nexus7":          {"grouper", "LMY47V_1836172"}, // 2012 Nexus 7
				"P30":             {"HWELE", "HUAWEIELE-L29"},
				"Pixel2XL":        {"taimen", "PPR1.180610.009"},
				"Pixel3":          {"blueline", "PQ1A.190105.004"},
				"Pixel3a":         {"sargo", "QP1A.190711.020"},
				"Pixel4":          {"flame", "RPB2.200611.009"},       // R Preview
				"Pixel4a":         {"sunfish", "AOSP.MASTER_7819821"}, // Pixel4a flashed with an Android HWASan build.
				"Pixel4XL":        {"coral", "QD1A.190821.011.C4"},
				"Pixel5":          {"redfin", "RD1A.200810.022.A4"},
				"TecnoSpark3Pro":  {"TECNO-KB8", "PPR1.180610.011"},
				"Wembley":         {"wembley", "SP2A.211004.001"},
			}[b.parts["model"]]
			if !ok {
				log.Fatalf("Entry %q not found in Android mapping.", b.parts["model"])
			}
			d["device_type"] = deviceInfo[0]
			d["device_os"] = deviceInfo[1]

			// Tests using Android's HWAddress Sanitizer require an HWASan build of Android.
			// See https://developer.android.com/ndk/guides/hwasan.
			if b.extraConfig("HWASAN") {
				d["android_hwasan_build"] = "1"
			}
		} else if b.os("iOS") {
			// Maps model -> Swarming device_type.
			device, ok := map[string]string{
				"iPadMini4": "iPad5,1",
				"iPhone6":   "iPhone7,2",
				"iPhone7":   "iPhone9,1",
				"iPhone8":   "iPhone10,1",
				"iPhone11":  "iPhone12,1",
				"iPadPro":   "iPad6,3",
			}[b.parts["model"]]
			if !ok {
				log.Fatalf("Entry %q not found in iOS mapping.", b.parts["model"])
			}
			d["device_type"] = device
		} else if b.cpu() || b.extraConfig("CanvasKit", "Docker", "SwiftShader") {
			// CPU-bound tasks: maps cpu_or_gpu_value -> model -> "cpu" dimension.
			modelMapping, ok := map[string]map[string]string{
				"AppleM1": {
					"MacMini9.1": "arm64-64-Apple_M1",
				},
				"AVX": {
					"VMware7.1": "x86-64",
				},
				"AVX2": {
					"GCE":            "x86-64-Haswell_GCE",
					"MacBookAir7.2":  "x86-64-i5-5350U",
					"MacBookPro11.5": "x86-64-i7-4870HQ",
					"NUC5i7RYH":      "x86-64-i7-5557U",
				},
				"AVX512": {
					"GCE":  "x86-64-Skylake_GCE",
					"Golo": "Intel64_Family_6_Model_85_Stepping_7__GenuineIntel",
				},
				"Rome": {
					"GCE": "x86-64-AMD_Rome_GCE",
				},
				"SwiftShader": {
					"GCE": "x86-64-Haswell_GCE",
				},
			}[b.parts["cpu_or_gpu_value"]]
			if !ok {
				log.Fatalf("Entry %q not found in CPU mapping.", b.parts["cpu_or_gpu_value"])
			}
			cpu, ok := modelMapping[b.parts["model"]]
			if !ok {
				log.Fatalf("Entry %q not found in %q model mapping.", b.parts["model"], b.parts["cpu_or_gpu_value"])
			}
			d["cpu"] = cpu
			if b.model("GCE") && b.matchOs("Debian") {
				d["os"] = DEFAULT_OS_LINUX_GCE
			}
			if b.model("GCE") && d["cpu"] == "x86-64-Haswell_GCE" {
				d["machine_type"] = MACHINE_TYPE_MEDIUM
			}
		} else {
			// GPU-bound tasks: maps cpu_or_gpu_value -> "gpu" dimension,
			// per OS family.
			if b.matchOs("Win") {
				gpu, ok := map[string]string{
					// At some point this might use the device ID, but for now it's like Chromebooks.
					"Adreno630":     "Adreno630",
					"GT610":         "10de:104a-23.21.13.9101",
					"GTX660":        "10de:11c0-26.21.14.4120",
					"GTX960":        "10de:1401-27.21.14.5671",
					"IntelHD4400":   "8086:0a16-20.19.15.4963",
					"IntelIris540":  "8086:1926-26.20.100.7463",
					"IntelIris6100": "8086:162b-20.19.15.4963",
					"IntelIris655":  "8086:3ea5-26.20.100.7463",
					"RadeonHD7770":  "1002:683d-26.20.13031.18002",
					"RadeonR9M470X": "1002:6646-26.20.13031.18002",
					"QuadroP400":    "10de:1cb3-30.0.14.7168",
				}[b.parts["cpu_or_gpu_value"]]
				if !ok {
					log.Fatalf("Entry %q not found in Win GPU mapping.", b.parts["cpu_or_gpu_value"])
				}
				d["gpu"] = gpu
			} else if b.isLinux() {
				gpu, ok := map[string]string{
					// Intel drivers come from CIPD, so no need to specify the version here.
					"IntelBayTrail": "8086:0f31",
					"IntelHD2000":   "8086:0102",
					"IntelHD405":    "8086:22b1",
					"IntelIris640":  "8086:5926",
					"QuadroP400":    "10de:1cb3-430.14",
				}[b.parts["cpu_or_gpu_value"]]
				if !ok {
					log.Fatalf("Entry %q not found in Ubuntu GPU mapping.", b.parts["cpu_or_gpu_value"])
				}
				d["gpu"] = gpu

				// The Debian10 machines in the skolo are 10.10, not 10.3.
				if b.matchOs("Debian") {
					d["os"] = DEFAULT_OS_DEBIAN
				}

			} else if b.matchOs("Mac") {
				gpu, ok := map[string]string{
					"AppleM1":       "AppleM1",
					"IntelHD6000":   "8086:1626",
					"IntelHD615":    "8086:591e",
					"IntelIris5100": "8086:0a2e",
					"RadeonHD8870M": "1002:6821-4.0.20-3.2.8",
				}[b.parts["cpu_or_gpu_value"]]
				if !ok {
					log.Fatalf("Entry %q not found in Mac GPU mapping.", b.parts["cpu_or_gpu_value"])
				}
				if gpu == "AppleM1" {
					// No GPU dimension yet, but we can constrain by CPU.
					d["cpu"] = "arm64-64-Apple_M1"
				} else {
					d["gpu"] = gpu
				}
				// Yuck. We have two different types of MacMini7,1 with the same GPU but different CPUs.
				if b.gpu("IntelIris5100") {
					// Run all tasks on Golo machines for now.
					d["cpu"] = "x86-64-i7-4578U"
				}
			} else if b.os("ChromeOS") {
				// ChromeOS constrains by GPU name plus release version.
				version, ok := map[string]string{
					"IntelUHDGraphics605": "14233.0.0",
					"RadeonVega3":         "14233.0.0",
					"Adreno618":           "14150.39.0",
					"MaliT860":            "14092.77.0",
				}[b.parts["cpu_or_gpu_value"]]
				if !ok {
					log.Fatalf("Entry %q not found in ChromeOS GPU mapping.", b.parts["cpu_or_gpu_value"])
				}
				d["gpu"] = b.parts["cpu_or_gpu_value"]
				d["release_version"] = version
			} else {
				log.Fatalf("Unknown GPU mapping for OS %q.", b.parts["os"])
			}
		}
	} else {
		// Non-Test/Perf tasks (Build, Housekeeper, etc.) run on CPU-only
		// machines.
		d["gpu"] = "none"
		if d["os"] == DEFAULT_OS_LINUX_GCE {
			if b.extraConfig("CanvasKit", "CMake", "Docker", "PathKit") || b.role("BuildStats") {
				b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
				return
			}
			// Use many-core machines for Build tasks.
			b.linuxGceDimensions(MACHINE_TYPE_LARGE)
			return
		} else if d["os"] == DEFAULT_OS_WIN {
			// Windows CPU bots.
			d["cpu"] = "x86-64-Haswell_GCE"
			// Use many-core machines for Build tasks.
			d["machine_type"] = MACHINE_TYPE_LARGE
		} else if d["os"] == DEFAULT_OS_MAC {
			// Mac CPU bots are no longer VMs.
			d["cpu"] = "x86-64"
			d["cores"] = "12"
			delete(d, "gpu")
		}
	}

	// Flatten the dimension map into sorted "key:value" strings.
	dims := make([]string, 0, len(d))
	for k, v := range d {
		dims = append(dims, fmt.Sprintf("%s:%s", k, v))
	}
	sort.Strings(dims)
	b.dimension(dims...)
}
973
// bundleRecipes generates the task to bundle and isolate the recipes. Returns
// the name of the task, which may be added as a dependency.
func (b *jobBuilder) bundleRecipes() string {
	b.addTask(BUNDLE_RECIPES_NAME, func(b *taskBuilder) {
		// Git and Python CIPD packages, presumably used by bundle_recipes.sh;
		// confirm against the script before removing either.
		b.cipd(specs.CIPD_PKGS_GIT_LINUX_AMD64...)
		b.cipd(specs.CIPD_PKGS_PYTHON_LINUX_AMD64...)
		b.cmd("/bin/bash", "skia/infra/bots/bundle_recipes.sh", specs.PLACEHOLDER_ISOLATED_OUTDIR)
		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
		b.addToPATH("cipd_bin_packages", "cipd_bin_packages/bin")
		// Mark the task as idempotent; identical runs may be deduplicated.
		b.idempotent()
		b.cas(CAS_RECIPES)
	})
	return BUNDLE_RECIPES_NAME
}
988
989// buildTaskDrivers generates the task to compile the task driver code to run on
990// all platforms. Returns the name of the task, which may be added as a
991// dependency.
992func (b *jobBuilder) buildTaskDrivers(goos, goarch string) string {
993	name := BUILD_TASK_DRIVERS_PREFIX + "_" + goos + "_" + goarch
994	b.addTask(name, func(b *taskBuilder) {
995		b.usesGo()
996		b.cmd("/bin/bash", "skia/infra/bots/build_task_drivers.sh",
997			specs.PLACEHOLDER_ISOLATED_OUTDIR,
998			goos,
999			goarch)
1000		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
1001		b.addToPATH("cipd_bin_packages", "cipd_bin_packages/bin")
1002		b.idempotent()
1003		b.cas(CAS_TASK_DRIVERS)
1004	})
1005	return name
1006}
1007
// updateGoDeps generates the task to update Go dependencies.
func (b *jobBuilder) updateGoDeps() {
	b.addTask(b.Name, func(b *taskBuilder) {
		b.usesGo()
		// protoc asset, presumably needed to regenerate protobuf code after
		// the dependency update — TODO confirm against the update_go_deps
		// task driver.
		b.asset("protoc")
		b.cmd(
			"./update_go_deps",
			"--project_id", "skia-swarming-bots",
			"--task_id", specs.PLACEHOLDER_TASK_ID,
			"--task_name", b.Name,
			"--workdir", ".",
			"--gerrit_project", "skia",
			"--gerrit_url", "https://skia-review.googlesource.com",
			"--repo", specs.PLACEHOLDER_REPO,
			"--revision", specs.PLACEHOLDER_REVISION,
			"--patch_issue", specs.PLACEHOLDER_ISSUE,
			"--patch_set", specs.PLACEHOLDER_PATCHSET,
			"--patch_server", specs.PLACEHOLDER_CODEREVIEW_SERVER,
		)
		// The update_go_deps binary is produced by the task-drivers build task.
		b.dep(b.buildTaskDrivers("linux", "amd64"))
		b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
		b.addToPATH("cipd_bin_packages", "cipd_bin_packages/bin")
		b.cas(CAS_EMPTY)
		// NOTE(review): reuses the RecreateSKPs service account; confirm this
		// is intentional rather than an oversight.
		b.serviceAccount(b.cfg.ServiceAccountRecreateSKPs)
	})
}
1034
// createDockerImage creates the specified docker image. Returns the name of the
// generated task.
func (b *jobBuilder) createDockerImage(wasm bool) string {
	// First, derive the name of the task.
	imageName := "skia-release"
	taskName := "Housekeeper-PerCommit-CreateDockerImage_Skia_Release"
	if wasm {
		imageName = "skia-wasm-release"
		taskName = "Housekeeper-PerCommit-CreateDockerImage_Skia_WASM_Release"
	}
	// path.Join (not filepath.Join): this is a forward-slash path consumed by
	// the task driver on a Linux bot, not a host-local path.
	imageDir := path.Join("docker", imageName)

	// Add the task.
	b.addTask(taskName, func(b *taskBuilder) {
		// TODO(borenet): Make this task not use Git.
		b.usesGit()
		b.cmd(
			"./build_push_docker_image",
			"--image_name", fmt.Sprintf("gcr.io/skia-public/%s", imageName),
			"--dockerfile_dir", imageDir,
			"--project_id", "skia-swarming-bots",
			"--task_id", specs.PLACEHOLDER_TASK_ID,
			"--task_name", b.Name,
			"--workdir", ".",
			"--gerrit_project", "skia",
			"--gerrit_url", "https://skia-review.googlesource.com",
			"--repo", specs.PLACEHOLDER_REPO,
			"--revision", specs.PLACEHOLDER_REVISION,
			"--patch_issue", specs.PLACEHOLDER_ISSUE,
			"--patch_set", specs.PLACEHOLDER_PATCHSET,
			"--patch_server", specs.PLACEHOLDER_CODEREVIEW_SERVER,
			"--swarm_out_dir", specs.PLACEHOLDER_ISOLATED_OUTDIR,
		)
		// The build_push_docker_image binary is produced by the task-drivers
		// build task.
		b.dep(b.buildTaskDrivers("linux", "amd64"))
		// TODO(borenet): Does this task need go/go/bin in PATH?
		b.addToPATH("cipd_bin_packages", "cipd_bin_packages/bin", "go/go/bin")
		b.cas(CAS_EMPTY)
		b.serviceAccount(b.cfg.ServiceAccountCompile)
		b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
		b.usesDocker()
		b.cache(CACHES_DOCKER...)
	})
	return taskName
}
1079
// createPushAppsFromSkiaDockerImage creates and pushes docker images of some apps
// (eg: fiddler, debugger, api) using the skia-release docker image.
func (b *jobBuilder) createPushAppsFromSkiaDockerImage() {
	b.addTask(b.Name, func(b *taskBuilder) {
		// TODO(borenet): Make this task not use Git.
		b.usesGit()
		b.cmd(
			"./push_apps_from_skia_image",
			"--project_id", "skia-swarming-bots",
			"--task_id", specs.PLACEHOLDER_TASK_ID,
			"--task_name", b.Name,
			"--workdir", ".",
			"--gerrit_project", "buildbot",
			"--gerrit_url", "https://skia-review.googlesource.com",
			"--repo", specs.PLACEHOLDER_REPO,
			"--revision", specs.PLACEHOLDER_REVISION,
			"--patch_issue", specs.PLACEHOLDER_ISSUE,
			"--patch_set", specs.PLACEHOLDER_PATCHSET,
			"--patch_server", specs.PLACEHOLDER_CODEREVIEW_SERVER,
		)
		b.dep(b.buildTaskDrivers("linux", "amd64"))
		// Depends on the non-WASM ("skia-release") image task.
		b.dep(b.createDockerImage(false))
		// TODO(borenet): Does this task need go/go/bin in PATH?
		b.addToPATH("cipd_bin_packages", "cipd_bin_packages/bin", "go/go/bin")
		b.cas(CAS_EMPTY)
		b.serviceAccount(b.cfg.ServiceAccountCompile)
		b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
		b.usesDocker()
		b.cache(CACHES_DOCKER...)
	})
}
1111
// createPushAppsFromWASMDockerImage creates and pushes docker images of some apps
// (eg: jsfiddle, skottie, particles) using the skia-wasm-release docker image.
func (b *jobBuilder) createPushAppsFromWASMDockerImage() {
	b.addTask(b.Name, func(b *taskBuilder) {
		// TODO(borenet): Make this task not use Git.
		b.usesGit()
		b.cmd(
			"./push_apps_from_wasm_image",
			"--project_id", "skia-swarming-bots",
			"--task_id", specs.PLACEHOLDER_TASK_ID,
			"--task_name", b.Name,
			"--workdir", ".",
			"--gerrit_project", "buildbot",
			"--gerrit_url", "https://skia-review.googlesource.com",
			"--repo", specs.PLACEHOLDER_REPO,
			"--revision", specs.PLACEHOLDER_REVISION,
			"--patch_issue", specs.PLACEHOLDER_ISSUE,
			"--patch_set", specs.PLACEHOLDER_PATCHSET,
			"--patch_server", specs.PLACEHOLDER_CODEREVIEW_SERVER,
		)
		b.dep(b.buildTaskDrivers("linux", "amd64"))
		// Depends on the WASM ("skia-wasm-release") image task.
		b.dep(b.createDockerImage(true))
		// TODO(borenet): Does this task need go/go/bin in PATH?
		b.addToPATH("cipd_bin_packages", "cipd_bin_packages/bin", "go/go/bin")
		b.cas(CAS_EMPTY)
		b.serviceAccount(b.cfg.ServiceAccountCompile)
		b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
		b.usesDocker()
		b.cache(CACHES_DOCKER...)
	})
}
1143
1144var iosRegex = regexp.MustCompile(`os:iOS-(.*)`)
1145
1146func (b *taskBuilder) maybeAddIosDevImage() {
1147	for _, dim := range b.Spec.Dimensions {
1148		if m := iosRegex.FindStringSubmatch(dim); len(m) >= 2 {
1149			var asset string
1150			switch m[1] {
1151			// Other patch versions can be added to the same case.
1152			case "11.4.1":
1153				asset = "ios-dev-image-11.4"
1154			case "12.4.5":
1155				asset = "ios-dev-image-12.4"
1156			case "13.3.1":
1157				asset = "ios-dev-image-13.3"
1158			case "13.4.1":
1159				asset = "ios-dev-image-13.4"
1160			case "13.5.1":
1161				asset = "ios-dev-image-13.5"
1162			case "13.6":
1163				asset = "ios-dev-image-13.6"
1164			default:
1165				log.Fatalf("Unable to determine correct ios-dev-image asset for %s. If %s is a new iOS release, you must add a CIPD package containing the corresponding iOS dev image; see ios-dev-image-11.4 for an example.", b.Name, m[1])
1166			}
1167			b.asset(asset)
1168			break
1169		} else if strings.Contains(dim, "iOS") {
1170			log.Fatalf("Must specify iOS version for %s to obtain correct dev image; os dimension is missing version: %s", b.Name, dim)
1171		}
1172	}
1173}
1174
// compile generates a compile task. Returns the name of the compile task.
func (b *jobBuilder) compile() string {
	name := b.deriveCompileTaskName()
	if b.extraConfig("WasmGMTests") {
		// WASM GM tests have their own dedicated compile path.
		b.compileWasmGMTests(name)
	} else {
		b.addTask(name, func(b *taskBuilder) {
			recipe := "compile"
			casSpec := CAS_COMPILE
			// These configs use the "sync_and_compile" recipe, which (per its
			// name) performs its own checkout, so they need Git and, unless
			// NoDEPS, a persistent working-dir cache.
			if b.extraConfig("NoDEPS", "CMake", "CommandBuffer", "Flutter") {
				recipe = "sync_and_compile"
				casSpec = CAS_RUN_RECIPE
				b.recipeProps(EXTRA_PROPS)
				b.usesGit()
				if !b.extraConfig("NoDEPS") {
					b.cache(CACHES_WORKDIR...)
				}
			} else {
				// Plain compiles may be deduplicated across identical runs.
				b.idempotent()
			}
			b.kitchenTask(recipe, OUTPUT_BUILD)
			b.cas(casSpec)
			b.serviceAccount(b.cfg.ServiceAccountCompile)
			b.swarmDimensions()
			if b.extraConfig("Docker", "LottieWeb", "CMake") || b.compiler("EMCC") {
				b.usesDocker()
				b.cache(CACHES_DOCKER...)
			}

			// Android bots require a toolchain.
			if b.extraConfig("Android") {
				if b.matchOs("Mac") {
					b.asset("android_ndk_darwin")
				} else if b.matchOs("Win") {
					pkg := b.MustGetCipdPackageFromAsset("android_ndk_windows")
					// Install under a one-letter path, presumably to stay
					// within Windows path-length limits — TODO confirm.
					pkg.Path = "n"
					b.cipd(pkg)
				} else {
					b.asset("android_ndk_linux")
				}
			} else if b.extraConfig("Chromebook") {
				b.asset("clang_linux")
				// GLES libraries/headers matching the target architecture.
				if b.arch("x86_64") {
					b.asset("chromebook_x86_64_gles")
				} else if b.arch("arm") {
					b.asset("armhf_sysroot")
					b.asset("chromebook_arm_gles")
				}
			} else if b.isLinux() {
				if b.compiler("Clang") {
					b.asset("clang_linux")
				}
				if b.extraConfig("SwiftShader") {
					b.asset("cmake_linux")
				}
				b.asset("ccache_linux")
				b.usesCCache()
			} else if b.matchOs("Win") {
				b.asset("win_toolchain")
				if b.compiler("Clang") {
					b.asset("clang_win")
				}
			} else if b.matchOs("Mac") {
				b.cipd(CIPD_PKGS_XCODE...)
				// Cache the installed Xcode.app across runs.
				b.Spec.Caches = append(b.Spec.Caches, &specs.Cache{
					Name: "xcode",
					Path: "cache/Xcode.app",
				})
				b.asset("ccache_mac")
				b.usesCCache()
				// CommandBuffer builds are slower; allow extra time.
				if b.extraConfig("CommandBuffer") {
					b.timeout(2 * time.Hour)
				}
				// iOS builds need a provisioning profile to sign binaries.
				if b.extraConfig("iOS") {
					b.asset("provisioning_profile_ios")
				}
			}
		})
	}

	// All compile tasks are runnable as their own Job. Assert that the Job
	// is listed in jobs.
	if !In(name, b.jobs) {
		log.Fatalf("Job %q is missing from the jobs list! Derived from: %q", name, b.Name)
	}

	return name
}
1263
// recreateSKPs generates a RecreateSKPs task.
func (b *jobBuilder) recreateSKPs() {
	cmd := []string{
		"./recreate_skps",
		"--local=false",
		"--project_id", "skia-swarming-bots",
		"--task_id", specs.PLACEHOLDER_TASK_ID,
		"--task_name", b.Name,
		"--skia_revision", specs.PLACEHOLDER_REVISION,
		"--patch_ref", specs.PLACEHOLDER_PATCH_REF,
		"--git_cache", "cache/git",
		"--checkout_root", "cache/work",
		"--dm_path", "build/dm",
	}
	// DryRun jobs pass --dry_run to the task driver.
	if b.matchExtraConfig("DryRun") {
		cmd = append(cmd, "--dry_run")
	}
	b.addTask(b.Name, func(b *taskBuilder) {
		b.cas(CAS_RECREATE_SKPS)
		b.dep(b.buildTaskDrivers("linux", "amd64"))
		b.dep("Build-Debian10-Clang-x86_64-Release") // To get DM.
		b.cmd(cmd...)
		b.cipd(CIPD_PKG_LUCI_AUTH)
		b.serviceAccount(b.cfg.ServiceAccountRecreateSKPs)
		// Runs in the dedicated SkiaCT pool rather than the default pool.
		b.dimension(
			"pool:SkiaCT",
			fmt.Sprintf("os:%s", DEFAULT_OS_LINUX_GCE),
		)
		b.usesGo()
		b.cache(CACHES_WORKDIR...)
		// SKP capture is slow; allow a long timeout and a second attempt.
		b.timeout(6 * time.Hour)
		b.usesPython()
		b.addToPATH("cipd_bin_packages", "cipd_bin_packages/bin")
		b.attempts(2)
	})
}
1300
// checkGeneratedFiles verifies that no generated SKSL files have been edited
// by hand.
func (b *jobBuilder) checkGeneratedFiles() {
	b.addTask(b.Name, func(b *taskBuilder) {
		b.recipeProps(EXTRA_PROPS)
		b.kitchenTask("check_generated_files", OUTPUT_NONE)
		b.serviceAccount(b.cfg.ServiceAccountCompile)
		b.linuxGceDimensions(MACHINE_TYPE_LARGE)
		b.usesGo()
		// Regenerating the files involves compiling, hence the compiler and
		// ccache assets.
		b.asset("clang_linux")
		b.asset("ccache_linux")
		b.usesCCache()
		b.cache(CACHES_WORKDIR...)
	})
}
1316
// checkGnToBp verifies that the gn_to_bp.py script continues to work.
func (b *jobBuilder) checkGnToBp() {
	b.addTask(b.Name, func(b *taskBuilder) {
		b.cas(CAS_COMPILE)
		// The run_gn_to_bp binary is produced by the task-drivers build task.
		b.dep(b.buildTaskDrivers("linux", "amd64"))
		b.cmd("./run_gn_to_bp",
			"--local=false",
			"--project_id", "skia-swarming-bots",
			"--task_id", specs.PLACEHOLDER_TASK_ID,
			"--task_name", b.Name,
		)
		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
		// Python is needed to execute gn_to_bp.py itself.
		b.usesPython()
		b.serviceAccount(b.cfg.ServiceAccountHousekeeper)
	})
}
1333
// housekeeper generates a Housekeeper task which runs the "housekeeper"
// recipe on a small Linux GCE machine.
func (b *jobBuilder) housekeeper() {
	b.addTask(b.Name, func(b *taskBuilder) {
		b.recipeProps(EXTRA_PROPS)
		b.kitchenTask("housekeeper", OUTPUT_NONE)
		b.serviceAccount(b.cfg.ServiceAccountHousekeeper)
		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
		b.usesGit()
		b.cache(CACHES_WORKDIR...)
	})
}
1345
// g3FrameworkCanary generates a G3 Framework Canary task, which runs the
// g3_canary task driver against the current revision/patch. Unlike the
// generic canary() path, G3 has its own dedicated driver and service account.
// (Note: despite an older comment, this function does not return a task name.)
func (b *jobBuilder) g3FrameworkCanary() {
	b.addTask(b.Name, func(b *taskBuilder) {
		b.cas(CAS_EMPTY)
		// The g3_canary binary is produced by the task-drivers build task.
		b.dep(b.buildTaskDrivers("linux", "amd64"))
		b.cmd("./g3_canary",
			"--local=false",
			"--project_id", "skia-swarming-bots",
			"--task_id", specs.PLACEHOLDER_TASK_ID,
			"--task_name", b.Name,
			"--repo", specs.PLACEHOLDER_REPO,
			"--revision", specs.PLACEHOLDER_REVISION,
			"--patch_issue", specs.PLACEHOLDER_ISSUE,
			"--patch_set", specs.PLACEHOLDER_PATCHSET,
			"--patch_server", specs.PLACEHOLDER_CODEREVIEW_SERVER,
		)
		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
		b.cipd(CIPD_PKG_LUCI_AUTH)
		b.serviceAccount("skia-g3-framework-compile@skia-swarming-bots.iam.gserviceaccount.com")
		// Canary rolls can be slow, but do not retry them.
		b.timeout(3 * time.Hour)
		b.attempts(1)
	})
}
1371
// infra generates an infra_tests task, which runs the infra test suite via
// the "infra" recipe.
func (b *jobBuilder) infra() {
	b.addTask(b.Name, func(b *taskBuilder) {
		if b.matchOs("Win") || b.matchExtraConfig("Win") {
			b.dimension(
				// Specify CPU to avoid running builds on bots with a more unique CPU.
				"cpu:x86-64-Haswell_GCE",
				"gpu:none",
				fmt.Sprintf("machine_type:%s", MACHINE_TYPE_MEDIUM), // We don't have any small Windows instances.
				fmt.Sprintf("os:%s", DEFAULT_OS_WIN),
				fmt.Sprintf("pool:%s", b.cfg.Pool),
			)
		} else {
			b.linuxGceDimensions(MACHINE_TYPE_SMALL)
		}
		b.recipeProp("repository", specs.PLACEHOLDER_REPO)
		b.kitchenTask("infra", OUTPUT_NONE)
		// The infra tests need the entire repo checked out.
		b.cas(CAS_WHOLE_REPO)
		b.serviceAccount(b.cfg.ServiceAccountCompile)
		b.cipd(specs.CIPD_PKGS_GSUTIL...)
		// Identical runs may be deduplicated.
		b.idempotent()
		b.usesGo()
	})
}
1396
// buildstats generates a buildstats task, which compiles code and generates
// statistics about the build.
func (b *jobBuilder) buildstats() {
	compileTaskName := b.compile()
	b.addTask(b.Name, func(b *taskBuilder) {
		b.recipeProps(EXTRA_PROPS)
		b.kitchenTask("compute_buildstats", OUTPUT_PERF)
		b.dep(compileTaskName)
		// bloaty is used to analyze binary size.
		b.asset("bloaty")
		b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
		b.usesDocker()
		b.usesGit()
		b.cache(CACHES_WORKDIR...)
	})
	// Upload release results (for tracking in perf)
	// We have some jobs that are FYI (e.g. Debug-CanvasKit, tree-map generator)
	if b.release() && !b.arch("x86_64") {
		uploadName := fmt.Sprintf("%s%s%s", PREFIX_UPLOAD, b.jobNameSchema.Sep, b.Name)
		depName := b.Name
		b.addTask(uploadName, func(b *taskBuilder) {
			b.recipeProp("gs_bucket", b.cfg.GsBucketNano)
			b.recipeProps(EXTRA_PROPS)
			// TODO(borenet): I'm not sure why the upload task is
			// using the BuildStats task name, but I've done this
			// to maintain existing behavior.
			b.Name = depName
			b.kitchenTask("upload_buildstats_results", OUTPUT_NONE)
			b.Name = uploadName
			b.serviceAccount(b.cfg.ServiceAccountUploadNano)
			b.linuxGceDimensions(MACHINE_TYPE_SMALL)
			b.cipd(specs.CIPD_PKGS_GSUTIL...)
			b.dep(depName)
		})
	}
}
1432
1433// doUpload indicates whether the given Job should upload its results.
1434func (b *jobBuilder) doUpload() bool {
1435	for _, s := range b.cfg.NoUpload {
1436		m, err := regexp.MatchString(s, b.Name)
1437		if err != nil {
1438			log.Fatal(err)
1439		}
1440		if m {
1441			return false
1442		}
1443	}
1444	return true
1445}
1446
1447// commonTestPerfAssets adds the assets needed by Test and Perf tasks.
1448func (b *taskBuilder) commonTestPerfAssets() {
1449	// Docker-based tests don't need the standard CIPD assets
1450	if b.extraConfig("CanvasKit", "PathKit") || (b.role("Test") && b.extraConfig("LottieWeb")) {
1451		return
1452	}
1453	if b.extraConfig("Skpbench") {
1454		// Skpbench only needs skps
1455		b.asset("skp", "mskp")
1456	} else if b.os("Android", "ChromeOS", "iOS") {
1457		b.asset("skp", "svg", "skimage")
1458	} else {
1459		// for desktop machines
1460		b.asset("skimage", "skp", "svg")
1461	}
1462
1463	if b.isLinux() && b.matchExtraConfig("SAN") {
1464		b.asset("clang_linux")
1465	}
1466
1467	if b.isLinux() {
1468		if b.extraConfig("Vulkan") {
1469			b.asset("linux_vulkan_sdk")
1470		}
1471		if b.matchGpu("Intel") {
1472			b.asset("mesa_intel_driver_linux")
1473		}
1474	}
1475	if b.matchOs("Win") && b.extraConfig("ProcDump") {
1476		b.asset("procdump_win")
1477	}
1478}
1479
// directUpload adds prerequisites for uploading to GCS from the task itself
// (as opposed to via a separate Upload-* task): the destination bucket recipe
// property, the service account with write access, and the gsutil CIPD
// packages.
func (b *taskBuilder) directUpload(gsBucket, serviceAccount string) {
	b.recipeProp("gs_bucket", gsBucket)
	b.serviceAccount(serviceAccount)
	b.cipd(specs.CIPD_PKGS_GSUTIL...)
}
1486
// dm generates a Test task using dm.
func (b *jobBuilder) dm() {
	compileTaskName := ""
	// LottieWeb doesn't require anything in Skia to be compiled.
	if !b.extraConfig("LottieWeb") {
		compileTaskName = b.compile()
	}
	// Set to true when the task uploads its own results; in that case no
	// separate Upload-* task is generated below.
	directUpload := false
	b.addTask(b.Name, func(b *taskBuilder) {
		cas := CAS_TEST
		recipe := "test"
		if b.extraConfig("PathKit") {
			cas = CAS_PATHKIT
			recipe = "test_pathkit"
			if b.doUpload() {
				b.directUpload(b.cfg.GsBucketGm, b.cfg.ServiceAccountUploadGM)
				directUpload = true
			}
		} else if b.extraConfig("CanvasKit") {
			cas = CAS_CANVASKIT
			recipe = "test_canvaskit"
			if b.doUpload() {
				b.directUpload(b.cfg.GsBucketGm, b.cfg.ServiceAccountUploadGM)
				directUpload = true
			}
		} else if b.extraConfig("LottieWeb") {
			// CAS_LOTTIE_CI differs from CAS_LOTTIE_WEB in that it includes
			// more of the files, especially those brought in via DEPS in the
			// lottie-ci repo. The main difference between Perf.+LottieWeb and
			// Test.+LottieWeb is that the former pulls in the lottie build via
			// npm and the latter always tests at lottie's
			// ToT.
			cas = CAS_LOTTIE_CI
			recipe = "test_lottie_web"
			if b.doUpload() {
				b.directUpload(b.cfg.GsBucketGm, b.cfg.ServiceAccountUploadGM)
				directUpload = true
			}
		} else {
			// Default recipe supports direct upload.
			// TODO(http://skbug.com/11785): Windows jobs are unable to extract gsutil.
			// https://bugs.chromium.org/p/chromium/issues/detail?id=1192611
			if b.doUpload() && !b.matchOs("Win") {
				b.directUpload(b.cfg.GsBucketGm, b.cfg.ServiceAccountUploadGM)
				directUpload = true
			}
		}
		b.recipeProp("gold_hashes_url", b.cfg.GoldHashesURL)
		b.recipeProps(EXTRA_PROPS)
		// Pass the internal hardware label (if any) through to the dm flags.
		iid := b.internalHardwareLabel()
		iidStr := ""
		if iid != nil {
			iidStr = strconv.Itoa(*iid)
		}
		if recipe == "test" {
			b.dmFlags(iidStr)
		}
		b.kitchenTask(recipe, OUTPUT_TEST)
		b.cas(cas)
		b.swarmDimensions()
		if b.extraConfig("CanvasKit", "Docker", "LottieWeb", "PathKit") {
			b.usesDocker()
		}
		if compileTaskName != "" {
			b.dep(compileTaskName)
		}
		// NOTE(review): presumably ASAN on Android needs NDK tooling (e.g.
		// symbolization) — confirm before removing.
		if b.os("Android") && b.extraConfig("ASAN") {
			b.asset("android_ndk_linux")
		}
		b.commonTestPerfAssets()
		if b.matchExtraConfig("Lottie") {
			b.asset("lottie-samples")
		}
		b.expiration(20 * time.Hour)

		b.timeout(4 * time.Hour)
		if b.extraConfig("Valgrind") {
			// Valgrind runs are much slower; extend both timeout and expiration.
			b.timeout(9 * time.Hour)
			b.expiration(48 * time.Hour)
			b.asset("valgrind")
			// Since Valgrind runs on the same bots as the CQ, we restrict Valgrind to a subset of the bots
			// to ensure there are always bots free for CQ tasks.
			b.dimension("valgrind:1")
		} else if b.extraConfig("MSAN") {
			b.timeout(9 * time.Hour)
		} else if b.arch("x86") && b.debug() {
			// skia:6737
			b.timeout(6 * time.Hour)
		}
		b.maybeAddIosDevImage()
	})

	// Upload results if necessary. TODO(kjlubick): If we do coverage analysis at the same
	// time as normal tests (which would be nice), cfg.json needs to have Coverage removed.
	if b.doUpload() && !directUpload {
		uploadName := fmt.Sprintf("%s%s%s", PREFIX_UPLOAD, b.jobNameSchema.Sep, b.Name)
		depName := b.Name
		b.addTask(uploadName, func(b *taskBuilder) {
			b.recipeProp("gs_bucket", b.cfg.GsBucketGm)
			b.recipeProps(EXTRA_PROPS)
			b.kitchenTask("upload_dm_results", OUTPUT_NONE)
			b.serviceAccount(b.cfg.ServiceAccountUploadGM)
			b.linuxGceDimensions(MACHINE_TYPE_SMALL)
			b.cipd(specs.CIPD_PKGS_GSUTIL...)
			b.dep(depName)
		})
	}
}
1595
1596func (b *jobBuilder) fm() {
1597	goos := "linux"
1598	if strings.Contains(b.parts["os"], "Win") {
1599		goos = "windows"
1600	}
1601	if strings.Contains(b.parts["os"], "Mac") {
1602		goos = "darwin"
1603	}
1604
1605	b.addTask(b.Name, func(b *taskBuilder) {
1606		b.asset("skimage", "skp", "svg")
1607		b.cas(CAS_TEST)
1608		b.dep(b.buildTaskDrivers(goos, "amd64"), b.compile())
1609		b.cmd("./fm_driver${EXECUTABLE_SUFFIX}",
1610			"--local=false",
1611			"--resources=skia/resources",
1612			"--imgs=skimage",
1613			"--skps=skp",
1614			"--svgs=svg",
1615			"--project_id", "skia-swarming-bots",
1616			"--task_id", specs.PLACEHOLDER_TASK_ID,
1617			"--bot", b.Name,
1618			"--gold="+strconv.FormatBool(!b.matchExtraConfig("SAN")),
1619			"--gold_hashes_url", b.cfg.GoldHashesURL,
1620			"build/fm${EXECUTABLE_SUFFIX}")
1621		b.serviceAccount(b.cfg.ServiceAccountUploadGM)
1622		b.swarmDimensions()
1623		b.attempts(1)
1624
1625		if b.isLinux() && b.matchExtraConfig("SAN") {
1626			b.asset("clang_linux")
1627			// Sanitizers may want to run llvm-symbolizer for readable stack traces.
1628			b.addToPATH("clang_linux/bin")
1629
1630			// Point sanitizer builds at our prebuilt libc++ for this sanitizer.
1631			if b.extraConfig("MSAN") {
1632				// We'd see false positives in std::basic_string<char> if this weren't set.
1633				b.env("LD_LIBRARY_PATH", "clang_linux/msan")
1634			} else if b.extraConfig("TSAN") {
1635				// Occasional false positives may crop up in the standard library without this.
1636				b.env("LD_LIBRARY_PATH", "clang_linux/tsan")
1637			} else {
1638				// This isn't strictly required, but we usually get better sanitizer
1639				// diagnostics from libc++ than the default OS-provided libstdc++.
1640				b.env("LD_LIBRARY_PATH", "clang_linux/lib")
1641			}
1642		}
1643	})
1644}
1645
// canary generates a task that uses TaskDrivers to trigger canary manual rolls on autorollers.
// Canary-G3 does not use this path because it is very different from other autorollers.
// rollerName identifies which autoroller to trigger.
func (b *jobBuilder) canary(rollerName string) {
	b.addTask(b.Name, func(b *taskBuilder) {
		b.cas(CAS_EMPTY)
		// The canary binary is produced by the task-drivers build task.
		b.dep(b.buildTaskDrivers("linux", "amd64"))
		b.cmd("./canary",
			"--local=false",
			"--project_id", "skia-swarming-bots",
			"--task_id", specs.PLACEHOLDER_TASK_ID,
			"--task_name", b.Name,
			"--roller_name", rollerName,
			"--repo", specs.PLACEHOLDER_REPO,
			"--revision", specs.PLACEHOLDER_REVISION,
			"--patch_issue", specs.PLACEHOLDER_ISSUE,
			"--patch_set", specs.PLACEHOLDER_PATCHSET,
			"--patch_server", specs.PLACEHOLDER_CODEREVIEW_SERVER,
		)
		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
		b.cipd(CIPD_PKG_LUCI_AUTH)
		b.serviceAccount(b.cfg.ServiceAccountCanary)
		// Canary rolls can be slow, but do not retry them.
		b.timeout(3 * time.Hour)
		b.attempts(1)
	})
}
1671
// puppeteer generates a task that uses TaskDrivers combined with a node script and puppeteer to
// benchmark something using Chromium (e.g. CanvasKit, LottieWeb), followed by
// an Upload-* task that pushes the results to Perf.
func (b *jobBuilder) puppeteer() {
	compileTaskName := b.compile()
	b.addTask(b.Name, func(b *taskBuilder) {
		b.defaultSwarmDimensions()
		b.usesNode()
		b.cipd(CIPD_PKG_LUCI_AUTH)
		b.dep(b.buildTaskDrivers("linux", "amd64"), compileTaskName)
		b.output(OUTPUT_PERF)
		b.timeout(60 * time.Minute)
		b.cas(CAS_PUPPETEER)
		b.serviceAccount(b.cfg.ServiceAccountCompile)

		// Default to WebGL 2 unless the job explicitly requests WebGL 1.
		webglversion := "2"
		if b.extraConfig("WebGL1") {
			webglversion = "1"
		}

		// Each extra config selects a different benchmark task driver; they
		// share most of their flags.
		if b.extraConfig("SkottieFrames") {
			b.cmd(
				"./perf_puppeteer_skottie_frames",
				"--project_id", "skia-swarming-bots",
				"--git_hash", specs.PLACEHOLDER_REVISION,
				"--task_id", specs.PLACEHOLDER_TASK_ID,
				"--task_name", b.Name,
				"--canvaskit_bin_path", "./build",
				"--lotties_path", "./lotties_with_assets",
				"--node_bin_path", "./node/node/bin",
				"--benchmark_path", "./tools/perf-canvaskit-puppeteer",
				"--output_path", OUTPUT_PERF,
				"--os_trace", b.parts["os"],
				"--model_trace", b.parts["model"],
				"--cpu_or_gpu_trace", b.parts["cpu_or_gpu"],
				"--cpu_or_gpu_value_trace", b.parts["cpu_or_gpu_value"],
				"--webgl_version", webglversion, // ignore when running with cpu backend
			)
			// This CIPD package was made by hand with the following invocation:
			//   cipd create -name skia/internal/lotties_with_assets -in ./lotties/ -tag version:0
			//   cipd acl-edit skia/internal/lotties_with_assets -reader group:project-skia-external-task-accounts
			//   cipd acl-edit skia/internal/lotties_with_assets -reader user:pool-skia@chromium-swarm.iam.gserviceaccount.com
			// Where lotties is a hand-selected set of lottie animations and (optionally) assets used in
			// them (e.g. fonts, images).
			b.cipd(&specs.CipdPackage{
				Name:    "skia/internal/lotties_with_assets",
				Path:    "lotties_with_assets",
				Version: "version:1",
			})
		} else if b.extraConfig("RenderSKP") {
			b.cmd(
				"./perf_puppeteer_render_skps",
				"--project_id", "skia-swarming-bots",
				"--git_hash", specs.PLACEHOLDER_REVISION,
				"--task_id", specs.PLACEHOLDER_TASK_ID,
				"--task_name", b.Name,
				"--canvaskit_bin_path", "./build",
				"--skps_path", "./skp",
				"--node_bin_path", "./node/node/bin",
				"--benchmark_path", "./tools/perf-canvaskit-puppeteer",
				"--output_path", OUTPUT_PERF,
				"--os_trace", b.parts["os"],
				"--model_trace", b.parts["model"],
				"--cpu_or_gpu_trace", b.parts["cpu_or_gpu"],
				"--cpu_or_gpu_value_trace", b.parts["cpu_or_gpu_value"],
				"--webgl_version", webglversion,
			)
			b.asset("skp")
		} else if b.extraConfig("CanvasPerf") { // refers to the canvas_perf.js test suite
			b.cmd(
				"./perf_puppeteer_canvas",
				"--project_id", "skia-swarming-bots",
				"--git_hash", specs.PLACEHOLDER_REVISION,
				"--task_id", specs.PLACEHOLDER_TASK_ID,
				"--task_name", b.Name,
				"--canvaskit_bin_path", "./build",
				"--node_bin_path", "./node/node/bin",
				"--benchmark_path", "./tools/perf-canvaskit-puppeteer",
				"--output_path", OUTPUT_PERF,
				"--os_trace", b.parts["os"],
				"--model_trace", b.parts["model"],
				"--cpu_or_gpu_trace", b.parts["cpu_or_gpu"],
				"--cpu_or_gpu_value_trace", b.parts["cpu_or_gpu_value"],
				"--webgl_version", webglversion,
			)
			b.asset("skp")
		}

	})

	// Upload results to Perf after.
	// TODO(kjlubick,borenet) deduplicate this with the logic in perf().
	uploadName := fmt.Sprintf("%s%s%s", PREFIX_UPLOAD, b.jobNameSchema.Sep, b.Name)
	depName := b.Name
	b.addTask(uploadName, func(b *taskBuilder) {
		b.recipeProp("gs_bucket", b.cfg.GsBucketNano)
		b.recipeProps(EXTRA_PROPS)
		// TODO(borenet): I'm not sure why the upload task is
		// using the Perf task name, but I've done this to
		// maintain existing behavior.
		b.Name = depName
		b.kitchenTask("upload_nano_results", OUTPUT_NONE)
		b.Name = uploadName
		b.serviceAccount(b.cfg.ServiceAccountUploadNano)
		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
		b.cipd(specs.CIPD_PKGS_GSUTIL...)
		b.dep(depName)
	})
}
1780
// perf generates a Perf (nanobench/skpbench benchmarking) task for the
// current job, plus a results-upload task when the build qualifies
// (Release configuration with uploading enabled).
func (b *jobBuilder) perf() {
	compileTaskName := ""
	// LottieWeb doesn't require anything in Skia to be compiled.
	if !b.extraConfig("LottieWeb") {
		compileTaskName = b.compile()
	}
	doUpload := b.release() && b.doUpload()
	b.addTask(b.Name, func(b *taskBuilder) {
		// Select the recipe and CAS input set based on the job's extra
		// config; the plain "perf" recipe with CAS_PERF is the default.
		recipe := "perf"
		cas := CAS_PERF
		if b.extraConfig("Skpbench") {
			recipe = "skpbench"
			cas = CAS_SKPBENCH
		} else if b.extraConfig("PathKit") {
			cas = CAS_PATHKIT
			recipe = "perf_pathkit"
		} else if b.extraConfig("CanvasKit") {
			cas = CAS_CANVASKIT
			recipe = "perf_canvaskit"
		} else if b.extraConfig("SkottieTracing") {
			recipe = "perf_skottietrace"
		} else if b.extraConfig("SkottieWASM") {
			recipe = "perf_skottiewasm_lottieweb"
			cas = CAS_SKOTTIE_WASM
		} else if b.extraConfig("LottieWeb") {
			recipe = "perf_skottiewasm_lottieweb"
			cas = CAS_LOTTIE_WEB
		}
		b.recipeProps(EXTRA_PROPS)
		// Only the native benchmarking recipes take command-line flags.
		if recipe == "perf" {
			b.nanobenchFlags(doUpload)
		} else if recipe == "skpbench" {
			b.skpbenchFlags()
		}
		b.kitchenTask(recipe, OUTPUT_PERF)
		b.cas(cas)
		b.swarmDimensions()
		if b.extraConfig("CanvasKit", "Docker", "PathKit") {
			b.usesDocker()
		}
		if compileTaskName != "" {
			b.dep(compileTaskName)
		}
		b.commonTestPerfAssets()
		// Default expiration/timeout; specific configs below override them.
		b.expiration(20 * time.Hour)
		b.timeout(4 * time.Hour)

		if b.extraConfig("Valgrind") {
			// Valgrind runs are much slower, so give them extra time.
			b.timeout(9 * time.Hour)
			b.expiration(48 * time.Hour)
			b.asset("valgrind")
			// Since Valgrind runs on the same bots as the CQ, we restrict Valgrind to a subset of the bots
			// to ensure there are always bots free for CQ tasks.
			b.dimension("valgrind:1")
		} else if b.extraConfig("MSAN") {
			b.timeout(9 * time.Hour)
		} else if b.parts["arch"] == "x86" && b.parts["configuration"] == "Debug" {
			// skia:6737
			b.timeout(6 * time.Hour)
		} else if b.extraConfig("LottieWeb", "SkottieWASM") {
			b.asset("node", "lottie-samples")
		} else if b.matchExtraConfig("Skottie") {
			b.asset("lottie-samples")
		}

		if b.os("Android") && b.cpu() {
			b.asset("text_blob_traces")
		}
		b.maybeAddIosDevImage()

		// Tasks running on internal hardware pass an extra command-line
		// argument so the recipe can identify the device pool.
		iid := b.internalHardwareLabel()
		if iid != nil {
			b.Spec.Command = append(b.Spec.Command, fmt.Sprintf("internal_hardware_label=%d", *iid))
		}
	})

	// Upload results if necessary.
	if doUpload {
		uploadName := fmt.Sprintf("%s%s%s", PREFIX_UPLOAD, b.jobNameSchema.Sep, b.Name)
		depName := b.Name
		b.addTask(uploadName, func(b *taskBuilder) {
			b.recipeProp("gs_bucket", b.cfg.GsBucketNano)
			b.recipeProps(EXTRA_PROPS)
			// TODO(borenet): I'm not sure why the upload task is
			// using the Perf task name, but I've done this to
			// maintain existing behavior.
			b.Name = depName
			b.kitchenTask("upload_nano_results", OUTPUT_NONE)
			b.Name = uploadName
			b.serviceAccount(b.cfg.ServiceAccountUploadNano)
			b.linuxGceDimensions(MACHINE_TYPE_SMALL)
			b.cipd(specs.CIPD_PKGS_GSUTIL...)
			b.dep(depName)
		})
	}
}
1878
1879// presubmit generates a task which runs the presubmit for this repo.
1880func (b *jobBuilder) presubmit() {
1881	b.addTask(b.Name, func(b *taskBuilder) {
1882		b.recipeProps(map[string]string{
1883			"category":         "cq",
1884			"patch_gerrit_url": "https://skia-review.googlesource.com",
1885			"patch_project":    "skia",
1886			"patch_ref":        specs.PLACEHOLDER_PATCH_REF,
1887			"reason":           "CQ",
1888			"repo_name":        "skia",
1889		})
1890		b.recipeProps(EXTRA_PROPS)
1891		b.kitchenTaskNoBundle("run_presubmit", OUTPUT_NONE)
1892		b.cas(CAS_RUN_RECIPE)
1893		b.serviceAccount(b.cfg.ServiceAccountCompile)
1894		// Use MACHINE_TYPE_LARGE because it seems to save time versus
1895		// MEDIUM and we want presubmit to be fast.
1896		b.linuxGceDimensions(MACHINE_TYPE_LARGE)
1897		b.usesGit()
1898		b.cipd(&specs.CipdPackage{
1899			Name:    "infra/recipe_bundles/chromium.googlesource.com/chromium/tools/build",
1900			Path:    "recipe_bundle",
1901			Version: "git_revision:a8bcedad6768e206c4d2bd1718caa849f29cd42d",
1902		})
1903	})
1904}
1905
1906// compileWasmGMTests uses a task driver to compile the GMs and unit tests for Web Assembly (WASM).
1907// We can use the same build for both CPU and GPU tests since the latter requires the code for the
1908// former anyway.
1909func (b *jobBuilder) compileWasmGMTests(compileName string) {
1910	b.addTask(compileName, func(b *taskBuilder) {
1911		b.attempts(1)
1912		b.usesDocker()
1913		b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
1914		b.cipd(CIPD_PKG_LUCI_AUTH)
1915		b.dep(b.buildTaskDrivers("linux", "amd64"))
1916		b.output("wasm_out")
1917		b.timeout(60 * time.Minute)
1918		b.cas(CAS_COMPILE)
1919		b.serviceAccount(b.cfg.ServiceAccountCompile)
1920		b.cache(CACHES_DOCKER...)
1921		// For now, we only have one compile mode - a GPU release mode. This should be sufficient to
1922		// run CPU, WebGL1, and WebGL2 tests. Debug mode is not needed for the waterfall because
1923		// when using puppeteer, stacktraces from exceptions are hard to get access to, so we do not
1924		// even bother.
1925		b.cmd(
1926			"./compile_wasm_gm_tests",
1927			"--project_id", "skia-swarming-bots",
1928			"--task_id", specs.PLACEHOLDER_TASK_ID,
1929			"--task_name", compileName,
1930			"--out_path", "./wasm_out",
1931			"--skia_path", "./skia",
1932			"--work_path", "./cache/docker/wasm_gm",
1933		)
1934	})
1935}
1936
// runWasmGMTests uses a task driver to run the WASM GMs and unit tests
// (built by compileWasmGMTests) and upload the GM results to Gold.
// NOTE(review): the previous comment here was copy-pasted from
// compileWasmGMTests and described compilation rather than running.
func (b *jobBuilder) runWasmGMTests() {
	compileTaskName := b.compile()

	b.addTask(b.Name, func(b *taskBuilder) {
		b.attempts(1)
		b.usesNode()
		b.swarmDimensions()
		b.cipd(CIPD_PKG_LUCI_AUTH)
		// goldctl is needed to upload GM results to Gold.
		b.cipd(CIPD_PKGS_GOLDCTL...)
		// The run_wasm_gm_tests task driver binary is built separately.
		b.dep(b.buildTaskDrivers("linux", "amd64"))
		b.dep(compileTaskName)
		b.timeout(60 * time.Minute)
		b.cas(CAS_WASM_GM)
		b.serviceAccount(b.cfg.ServiceAccountUploadGM)
		b.cmd(
			"./run_wasm_gm_tests",
			"--project_id", "skia-swarming-bots",
			"--task_id", specs.PLACEHOLDER_TASK_ID,
			"--task_name", b.Name,
			"--test_harness_path", "./tools/run-wasm-gm-tests",
			"--built_path", "./wasm_out",
			"--node_bin_path", "./node/node/bin",
			"--resource_path", "./resources",
			"--work_path", "./wasm_gm/work",
			"--gold_ctl_path", "./cipd_bin_packages/goldctl",
			"--gold_hashes_url", b.cfg.GoldHashesURL,
			"--git_commit", specs.PLACEHOLDER_REVISION,
			"--changelist_id", specs.PLACEHOLDER_ISSUE,
			"--patchset_order", specs.PLACEHOLDER_PATCHSET,
			"--tryjob_id", specs.PLACEHOLDER_BUILDBUCKET_BUILD_ID,
			// TODO(kjlubick, nifong) Make these not hard coded if we change the configs we test on.
			"--webgl_version", "2", // 0 means CPU ; this flag controls cpu_or_gpu and extra_config
			// Gold keys describing the hard-coded test configuration.
			"--gold_key", "alpha_type:Premul",
			"--gold_key", "arch:wasm",
			"--gold_key", "browser:Chrome",
			"--gold_key", "color_depth:8888",
			"--gold_key", "config:gles",
			"--gold_key", "configuration:Release",
			"--gold_key", "cpu_or_gpu_value:QuadroP400",
			"--gold_key", "model:Golo",
			"--gold_key", "os:Ubuntu18",
		)
	})
}
1984