1// Copyright 2016 The Chromium Authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5package gen_tasks_logic
6
7/*
8	Generate the tasks.json file.
9*/
10
11import (
12	"encoding/json"
13	"fmt"
14	"io/ioutil"
15	"log"
16	"path"
17	"path/filepath"
18	"regexp"
19	"runtime"
20	"sort"
21	"strconv"
22	"strings"
23	"time"
24
25	"go.skia.org/infra/go/cas/rbe"
26	"go.skia.org/infra/go/cipd"
27	"go.skia.org/infra/task_scheduler/go/specs"
28)
29
30const (
31	CAS_CANVASKIT    = "canvaskit"
32	CAS_COMPILE      = "compile"
33	CAS_EMPTY        = "empty" // TODO(borenet): It'd be nice if this wasn't necessary.
34	CAS_LOTTIE_CI    = "lottie-ci"
35	CAS_LOTTIE_WEB   = "lottie-web"
36	CAS_PATHKIT      = "pathkit"
37	CAS_PERF         = "perf"
38	CAS_PUPPETEER    = "puppeteer"
39	CAS_RUN_RECIPE   = "run-recipe"
40	CAS_RECIPES      = "recipes"
41	CAS_SKOTTIE_WASM = "skottie-wasm"
42	CAS_SKPBENCH     = "skpbench"
43	CAS_SKQP         = "skqp"
44	CAS_TASK_DRIVERS = "task-drivers"
45	CAS_TEST         = "test"
46	CAS_WASM_GM      = "wasm-gm"
47	CAS_WHOLE_REPO   = "whole-repo"
48
49	BUILD_TASK_DRIVERS_PREFIX  = "Housekeeper-PerCommit-BuildTaskDrivers"
50	BUNDLE_RECIPES_NAME        = "Housekeeper-PerCommit-BundleRecipes"
51	ISOLATE_GCLOUD_LINUX_NAME  = "Housekeeper-PerCommit-IsolateGCloudLinux"
52	ISOLATE_SKIMAGE_NAME       = "Housekeeper-PerCommit-IsolateSkImage"
53	ISOLATE_SKP_NAME           = "Housekeeper-PerCommit-IsolateSKP"
54	ISOLATE_MSKP_NAME          = "Housekeeper-PerCommit-IsolateMSKP"
55	ISOLATE_SVG_NAME           = "Housekeeper-PerCommit-IsolateSVG"
56	ISOLATE_NDK_LINUX_NAME     = "Housekeeper-PerCommit-IsolateAndroidNDKLinux"
57	ISOLATE_SDK_LINUX_NAME     = "Housekeeper-PerCommit-IsolateAndroidSDKLinux"
58	ISOLATE_WIN_TOOLCHAIN_NAME = "Housekeeper-PerCommit-IsolateWinToolchain"
59
60	DEFAULT_OS_DEBIAN              = "Debian-10.3"
61	DEFAULT_OS_LINUX_GCE           = "Debian-10.3"
62	OLD_OS_LINUX_GCE               = "Debian-9.8"
63	COMPILE_TASK_NAME_OS_LINUX     = "Debian10"
64	COMPILE_TASK_NAME_OS_LINUX_OLD = "Debian9"
65	DEFAULT_OS_MAC                 = "Mac-10.15.7"
66	DEFAULT_OS_WIN                 = "Windows-Server-17763"
67
68	// Small is a 2-core machine.
69	// TODO(dogben): Would n1-standard-1 or n1-standard-2 be sufficient?
70	MACHINE_TYPE_SMALL = "n1-highmem-2"
71	// Medium is a 16-core machine.
72	MACHINE_TYPE_MEDIUM = "n1-standard-16"
73	// Large is a 64-core machine. (We use "highcpu" because we don't need more than 57GB memory for
74	// any of our tasks.)
75	MACHINE_TYPE_LARGE = "n1-highcpu-64"
76
77	// Swarming output dirs.
78	OUTPUT_NONE  = "output_ignored" // This will result in outputs not being isolated.
79	OUTPUT_BUILD = "build"
80	OUTPUT_TEST  = "test"
81	OUTPUT_PERF  = "perf"
82
83	// Name prefix for upload jobs.
84	PREFIX_UPLOAD = "Upload"
85)
86
87var (
88	// "Constants"
89
90	// Named caches used by tasks.
91	CACHES_GIT = []*specs.Cache{
92		{
93			Name: "git",
94			Path: "cache/git",
95		},
96		{
97			Name: "git_cache",
98			Path: "cache/git_cache",
99		},
100	}
101	CACHES_GO = []*specs.Cache{
102		{
103			Name: "go_cache",
104			Path: "cache/go_cache",
105		},
106		{
107			Name: "gopath",
108			Path: "cache/gopath",
109		},
110	}
111	CACHES_WORKDIR = []*specs.Cache{
112		{
113			Name: "work",
114			Path: "cache/work",
115		},
116	}
117	CACHES_CCACHE = []*specs.Cache{
118		{
119			Name: "ccache",
120			Path: "cache/ccache",
121		},
122	}
123	// The "docker" cache is used as a persistent working directory for
124	// tasks which use Docker. It is not to be confused with Docker's own
125	// cache, which stores images. We do not currently use a named Swarming
126	// cache for the latter.
127	// TODO(borenet): We should ensure that any task which uses Docker does
128	// not also use the normal "work" cache, to prevent issues like
129	// https://bugs.chromium.org/p/skia/issues/detail?id=9749.
130	CACHES_DOCKER = []*specs.Cache{
131		{
132			Name: "docker",
133			Path: "cache/docker",
134		},
135	}
136
137	// CAS_SPEC_LOTTIE_CI is a CasSpec which includes the files needed for
138	// lottie-ci.  This is global so that it can be overridden by other
139	// repositories which import this file.
140	CAS_SPEC_LOTTIE_CI = &specs.CasSpec{
141		Root: "..",
142		Paths: []string{
143			"skia/infra/bots/run_recipe.py",
144			"skia/infra/lottiecap",
145			"skia/tools/lottie-web-perf",
146			"skia/tools/lottiecap",
147		},
148		Excludes: []string{rbe.ExcludeGitDir},
149	}
150
151	// CAS_SPEC_WHOLE_REPO is a CasSpec which includes the entire repo. This is
152	// global so that it can be overridden by other repositories which import
153	// this file; see the illustrative sketch after this var block.
154	CAS_SPEC_WHOLE_REPO = &specs.CasSpec{
155		Root:     "..",
156		Paths:    []string{"skia"},
157		Excludes: []string{rbe.ExcludeGitDir},
158	}
159
160	// TODO(borenet): This is hacky and bad.
161	CIPD_PKG_LUCI_AUTH = cipd.MustGetPackage("infra/tools/luci-auth/${platform}")
162
163	CIPD_PKGS_GOLDCTL = []*specs.CipdPackage{cipd.MustGetPackage("skia/tools/goldctl/${platform}")}
164
165	CIPD_PKGS_XCODE = []*specs.CipdPackage{
166		// https://chromium.googlesource.com/chromium/tools/build/+/e19b7d9390e2bb438b566515b141ed2b9ed2c7c2/scripts/slave/recipe_modules/ios/api.py#317
167		// This package is really just an installer for Xcode.
168		{
169			Name: "infra/tools/mac_toolchain/${platform}",
170			Path: "mac_toolchain",
171			// When this is updated, also update
172			// https://skia.googlesource.com/skcms.git/+/f1e2b45d18facbae2dece3aca673fe1603077846/infra/bots/gen_tasks.go#56
173			Version: "git_revision:796d2b92cff93fc2059623ce0a66284373ceea0a",
174		},
175	}
176
177	// These properties are required by some tasks, e.g. for running
178	// bot_update, but they prevent de-duplication, so they should only be
179	// used where necessary.
180	EXTRA_PROPS = map[string]string{
181		"buildbucket_build_id": specs.PLACEHOLDER_BUILDBUCKET_BUILD_ID,
182		"patch_issue":          specs.PLACEHOLDER_ISSUE_INT,
183		"patch_ref":            specs.PLACEHOLDER_PATCH_REF,
184		"patch_repo":           specs.PLACEHOLDER_PATCH_REPO,
185		"patch_set":            specs.PLACEHOLDER_PATCHSET_INT,
186		"patch_storage":        specs.PLACEHOLDER_PATCH_STORAGE,
187		"repository":           specs.PLACEHOLDER_REPO,
188		"revision":             specs.PLACEHOLDER_REVISION,
189		"task_id":              specs.PLACEHOLDER_TASK_ID,
190	}
191
192	// ISOLATE_ASSET_MAPPING maps the name of an asset to the configuration
193	// for how the CIPD package should be installed for a given task.
194	ISOLATE_ASSET_MAPPING = map[string]uploadAssetCASCfg{
195		"gcloud_linux": {
196			uploadTaskName: ISOLATE_GCLOUD_LINUX_NAME,
197			path:           "gcloud_linux",
198		},
199		"skimage": {
200			uploadTaskName: ISOLATE_SKIMAGE_NAME,
201			path:           "skimage",
202		},
203		"skp": {
204			uploadTaskName: ISOLATE_SKP_NAME,
205			path:           "skp",
206		},
207		"svg": {
208			uploadTaskName: ISOLATE_SVG_NAME,
209			path:           "svg",
210		},
211		"mskp": {
212			uploadTaskName: ISOLATE_MSKP_NAME,
213			path:           "mskp",
214		},
215		"android_ndk_linux": {
216			uploadTaskName: ISOLATE_NDK_LINUX_NAME,
217			path:           "android_ndk_linux",
218		},
219		"android_sdk_linux": {
220			uploadTaskName: ISOLATE_SDK_LINUX_NAME,
221			path:           "android_sdk_linux",
222		},
223		"win_toolchain": {
224			alwaysIsolate:  true,
225			uploadTaskName: ISOLATE_WIN_TOOLCHAIN_NAME,
226			path:           "win_toolchain",
227		},
228	}
229
230	// Set the dontReduceOpsTaskSplitting option on these models.
231	DONT_REDUCE_OPS_TASK_SPLITTING_MODELS = []string{
232		"NUC5PPYH",
233	}
234)
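
// Illustrative sketch (not part of this file's behavior): a repository which
// imports this package can replace the global CasSpecs above before calling
// GenTasks. The import path and the "myrepo" path below are assumptions used
// only for illustration.
//
//	package main
//
//	import (
//		"go.skia.org/infra/go/cas/rbe"
//		"go.skia.org/infra/task_scheduler/go/specs"
//
//		"go.skia.org/skia/infra/bots/gen_tasks_logic"
//	)
//
//	func main() {
//		// Include this repo's files alongside the Skia checkout in the CAS input.
//		gen_tasks_logic.CAS_SPEC_WHOLE_REPO = &specs.CasSpec{
//			Root:     "..",
//			Paths:    []string{"myrepo", "skia"},
//			Excludes: []string{rbe.ExcludeGitDir},
//		}
//		gen_tasks_logic.GenTasks(nil)
//	}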
235
236// Config contains general configuration information.
237type Config struct {
238	// Directory containing assets. Assumed to be relative to the directory
239	// which contains the calling gen_tasks.go file. If not specified, uses
240	// the infra/bots/assets from this repo.
241	AssetsDir string `json:"assets_dir"`
242
243	// Path to the builder name schema JSON file. Assumed to be relative to
244	// the directory which contains the calling gen_tasks.go file. If not
245	// specified, uses infra/bots/recipe_modules/builder_name_schema/builder_name_schema.json
246	// from this repo.
247	BuilderNameSchemaFile string `json:"builder_name_schema"`
248
249	// URL of the Skia Gold known hashes endpoint.
250	GoldHashesURL string `json:"gold_hashes_url"`
251
252	// GCS bucket used for GM results.
253	GsBucketGm string `json:"gs_bucket_gm"`
254
255	// GCS bucket used for Nanobench results.
256	GsBucketNano string `json:"gs_bucket_nano"`
257
258	// Optional function which returns a bot ID for internal devices.
259	InternalHardwareLabel func(parts map[string]string) *int `json:"-"`
260
261	// List of task names for which we'll never upload results.
262	NoUpload []string `json:"no_upload"`
263
264	// PathToSkia is the relative path from the root of the current checkout to
265	// the root of the Skia checkout.
266	PathToSkia string `json:"path_to_skia"`
267
268	// Swarming pool used for triggering tasks.
269	Pool string `json:"pool"`
270
271	// LUCI project associated with this repo.
272	Project string `json:"project"`
273
274	// Service accounts.
275	ServiceAccountCanary       string `json:"service_account_canary"`
276	ServiceAccountCompile      string `json:"service_account_compile"`
277	ServiceAccountHousekeeper  string `json:"service_account_housekeeper"`
278	ServiceAccountRecreateSKPs string `json:"service_account_recreate_skps"`
279	ServiceAccountUploadBinary string `json:"service_account_upload_binary"`
280	ServiceAccountUploadGM     string `json:"service_account_upload_gm"`
281	ServiceAccountUploadNano   string `json:"service_account_upload_nano"`
282
283	// Optional override function which derives Swarming bot dimensions
284	// from parts of task names.
285	SwarmDimensions func(parts map[string]string) []string `json:"-"`
286}
287
288// LoadConfig loads the Config from a cfg.json file which is the sibling of the
289// calling gen_tasks.go file.
290func LoadConfig() *Config {
291	cfgDir := getCallingDirName()
292	var cfg Config
293	LoadJson(filepath.Join(cfgDir, "cfg.json"), &cfg)
294	return &cfg
295}
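
// A minimal sketch of using LoadConfig from a repository's own gen_tasks.go
// (the Pool override is a hypothetical example, not a recommended value):
//
//	cfg := gen_tasks_logic.LoadConfig()
//	cfg.Pool = "ExamplePool"
//	gen_tasks_logic.GenTasks(cfg)
//
// Note that LoadConfig relies on getCallingDirName, so it must be called
// directly from the gen_tasks.go file whose sibling cfg.json should be read.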
296
297// CheckoutRoot is a wrapper around specs.GetCheckoutRoot which prevents the
298// caller from needing a dependency on the specs package.
299func CheckoutRoot() string {
300	root, err := specs.GetCheckoutRoot()
301	if err != nil {
302		log.Fatal(err)
303	}
304	return root
305}
306
307// LoadJson loads JSON from the given file and unmarshals it into the given
308// destination.
309func LoadJson(filename string, dest interface{}) {
310	b, err := ioutil.ReadFile(filename)
311	if err != nil {
312		log.Fatalf("Unable to read %q: %s", filename, err)
313	}
314	if err := json.Unmarshal(b, dest); err != nil {
315		log.Fatalf("Unable to parse %q: %s", filename, err)
316	}
317}
318
319// In returns true if |s| is in the slice |a|.
320// TODO(borenet): This is copied from go.skia.org/infra/go/util to avoid the
321// huge set of additional dependencies added by that package.
322func In(s string, a []string) bool {
323	for _, x := range a {
324		if x == s {
325			return true
326		}
327	}
328	return false
329}
330
331// GenTasks regenerates the tasks.json file. It loads the job list from a jobs.json
332// file which is the sibling of the calling gen_tasks.go file. If cfg is nil, it
333// is similarly loaded from a cfg.json file which is the sibling of the calling
334// gen_tasks.go file.
335func GenTasks(cfg *Config) {
336	b := specs.MustNewTasksCfgBuilder()
337
338	// Find the paths to the infra/bots directories in this repo and the
339	// repo of the calling file.
340	relpathTargetDir := getThisDirName()
341	relpathBaseDir := getCallingDirName()
342
343	var jobs []string
344	LoadJson(filepath.Join(relpathBaseDir, "jobs.json"), &jobs)
345
346	if cfg == nil {
347		cfg = new(Config)
348		LoadJson(filepath.Join(relpathBaseDir, "cfg.json"), cfg)
349	}
350
351	// Create the JobNameSchema.
352	builderNameSchemaFile := filepath.Join(relpathTargetDir, "recipe_modules", "builder_name_schema", "builder_name_schema.json")
353	if cfg.BuilderNameSchemaFile != "" {
354		builderNameSchemaFile = filepath.Join(relpathBaseDir, cfg.BuilderNameSchemaFile)
355	}
356	schema, err := NewJobNameSchema(builderNameSchemaFile)
357	if err != nil {
358		log.Fatal(err)
359	}
360
361	// Set the assets dir.
362	assetsDir := filepath.Join(relpathTargetDir, "assets")
363	if cfg.AssetsDir != "" {
364		assetsDir = filepath.Join(relpathBaseDir, cfg.AssetsDir)
365	}
366	b.SetAssetsDir(assetsDir)
367
368	// Create Tasks and Jobs.
369	builder := &builder{
370		TasksCfgBuilder: b,
371		cfg:             cfg,
372		jobNameSchema:   schema,
373		jobs:            jobs,
374	}
375	for _, name := range jobs {
376		jb := newJobBuilder(builder, name)
377		jb.genTasksForJob()
378		jb.finish()
379	}
380
381	// Create CasSpecs.
382	b.MustAddCasSpec(CAS_CANVASKIT, &specs.CasSpec{
383		Root: "..",
384		Paths: []string{
385			"skia/infra/bots/run_recipe.py",
386			"skia/infra/canvaskit",
387			"skia/modules/canvaskit",
388			"skia/modules/pathkit/perf/perfReporter.js",
389			"skia/modules/pathkit/tests/testReporter.js",
390		},
391		Excludes: []string{rbe.ExcludeGitDir},
392	})
393	b.MustAddCasSpec(CAS_EMPTY, specs.EmptyCasSpec)
394	b.MustAddCasSpec(CAS_LOTTIE_CI, CAS_SPEC_LOTTIE_CI)
395	b.MustAddCasSpec(CAS_LOTTIE_WEB, &specs.CasSpec{
396		Root: "..",
397		Paths: []string{
398			"skia/infra/bots/run_recipe.py",
399			"skia/tools/lottie-web-perf",
400		},
401		Excludes: []string{rbe.ExcludeGitDir},
402	})
403	b.MustAddCasSpec(CAS_PATHKIT, &specs.CasSpec{
404		Root: "..",
405		Paths: []string{
406			"skia/infra/bots/run_recipe.py",
407			"skia/infra/pathkit",
408			"skia/modules/pathkit",
409		},
410		Excludes: []string{rbe.ExcludeGitDir},
411	})
412	b.MustAddCasSpec(CAS_PERF, &specs.CasSpec{
413		Root: "..",
414		Paths: []string{
415			"skia/infra/bots/assets",
416			"skia/infra/bots/run_recipe.py",
417			"skia/platform_tools/ios/bin",
418			"skia/resources",
419			"skia/tools/valgrind.supp",
420		},
421		Excludes: []string{rbe.ExcludeGitDir},
422	})
423	b.MustAddCasSpec(CAS_PUPPETEER, &specs.CasSpec{
424		Root: "../skia", // Needed for other repos.
425		Paths: []string{
426			"tools/perf-canvaskit-puppeteer",
427		},
428		Excludes: []string{rbe.ExcludeGitDir},
429	})
430	b.MustAddCasSpec(CAS_RECIPES, &specs.CasSpec{
431		Root: "..",
432		Paths: []string{
433			"skia/infra/config/recipes.cfg",
434			"skia/infra/bots/bundle_recipes.sh",
435			"skia/infra/bots/README.recipes.md",
436			"skia/infra/bots/recipe_modules",
437			"skia/infra/bots/recipes",
438			"skia/infra/bots/recipes.py",
439		},
440		Excludes: []string{rbe.ExcludeGitDir},
441	})
442	b.MustAddCasSpec(CAS_RUN_RECIPE, &specs.CasSpec{
443		Root: "..",
444		Paths: []string{
445			"skia/infra/bots/run_recipe.py",
446		},
447		Excludes: []string{rbe.ExcludeGitDir},
448	})
449	b.MustAddCasSpec(CAS_SKOTTIE_WASM, &specs.CasSpec{
450		Root: "..",
451		Paths: []string{
452			"skia/infra/bots/run_recipe.py",
453			"skia/tools/skottie-wasm-perf",
454		},
455		Excludes: []string{rbe.ExcludeGitDir},
456	})
457	b.MustAddCasSpec(CAS_SKPBENCH, &specs.CasSpec{
458		Root: "..",
459		Paths: []string{
460			"skia/infra/bots/assets",
461			"skia/infra/bots/run_recipe.py",
462			"skia/tools/skpbench",
463			"skia/tools/valgrind.supp",
464		},
465		Excludes: []string{rbe.ExcludeGitDir},
466	})
467	b.MustAddCasSpec(CAS_SKQP, &specs.CasSpec{
468		Root: "..",
469		Paths: []string{
470			"skia/infra/bots/run_recipe.py",
471			"skia/infra/skqp",
472		},
473		Excludes: []string{rbe.ExcludeGitDir},
474	})
475	b.MustAddCasSpec(CAS_TASK_DRIVERS, &specs.CasSpec{
476		Root: "..",
477		Paths: []string{
478			"skia/go.mod",
479			"skia/go.sum",
480			"skia/infra/bots/build_task_drivers.sh",
481			"skia/infra/bots/run_recipe.py",
482			"skia/infra/bots/task_drivers",
483		},
484		Excludes: []string{rbe.ExcludeGitDir},
485	})
486	b.MustAddCasSpec(CAS_TEST, &specs.CasSpec{
487		Root: "..",
488		Paths: []string{
489			"skia/infra/bots/assets",
490			"skia/infra/bots/run_recipe.py",
491			"skia/platform_tools/ios/bin",
492			"skia/resources",
493			"skia/tools/valgrind.supp",
494		},
495		Excludes: []string{rbe.ExcludeGitDir},
496	})
497	b.MustAddCasSpec(CAS_WASM_GM, &specs.CasSpec{
498		Root: "../skia", // Needed for other repos.
499		Paths: []string{
500			"resources",
501			"tools/run-wasm-gm-tests",
502		},
503		Excludes: []string{rbe.ExcludeGitDir},
504	})
505	b.MustAddCasSpec(CAS_WHOLE_REPO, CAS_SPEC_WHOLE_REPO)
506	generateCompileCAS(b, cfg)
507
508	builder.MustFinish()
509}
510
511// getThisDirName returns the infra/bots directory which is an ancestor of this
512// file.
513func getThisDirName() string {
514	_, thisFileName, _, ok := runtime.Caller(0)
515	if !ok {
516		log.Fatal("Unable to find path to current file.")
517	}
518	return filepath.Dir(filepath.Dir(thisFileName))
519}
520
521// getCallingDirName returns the infra/bots directory which is an ancestor of
522// the calling gen_tasks.go file. WARNING: assumes that the calling gen_tasks.go
523// file appears two steps up the stack; do not call from a function which is not
524// directly called by gen_tasks.go.
525func getCallingDirName() string {
526	_, callingFileName, _, ok := runtime.Caller(2)
527	if !ok {
528		log.Fatal("Unable to find path to calling file.")
529	}
530	return filepath.Dir(callingFileName)
531}
532
533// builder is a wrapper for specs.TasksCfgBuilder.
534type builder struct {
535	*specs.TasksCfgBuilder
536	cfg           *Config
537	jobNameSchema *JobNameSchema
538	jobs          []string
539}
540
541// marshalJson encodes the given data as JSON and undoes the escaping of '<'
542// which Go applies by default.
543func marshalJson(data interface{}) string {
544	j, err := json.Marshal(data)
545	if err != nil {
546		log.Fatal(err)
547	}
548	return strings.Replace(string(j), "\\u003c", "<", -1)
549}
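
// For example (the key and value below are made up for illustration),
// marshalJson keeps '<' literal instead of Go's default "\u003c" escape:
//
//	marshalJson(map[string]string{"expr": "a<b"})
//	// returns: {"expr":"a<b"}   (json.Marshal alone would produce {"expr":"a\u003cb"})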
550
551// kitchenTaskNoBundle sets up the task to run a recipe via Kitchen, without the
552// recipe bundle.
553func (b *taskBuilder) kitchenTaskNoBundle(recipe string, outputDir string) {
554	b.cipd(CIPD_PKG_LUCI_AUTH)
555	b.cipd(cipd.MustGetPackage("infra/tools/luci/kitchen/${platform}"))
556	b.usesPython()
557	b.recipeProp("swarm_out_dir", outputDir)
558	if outputDir != OUTPUT_NONE {
559		b.output(outputDir)
560	}
561	python := "cipd_bin_packages/vpython${EXECUTABLE_SUFFIX}"
562	b.cmd(python, "-u", "skia/infra/bots/run_recipe.py", "${ISOLATED_OUTDIR}", recipe, b.getRecipeProps(), b.cfg.Project)
563	// Most recipes want this isolate; they can override if necessary.
564	b.cas(CAS_RUN_RECIPE)
565	b.timeout(time.Hour)
566	b.addToPATH("cipd_bin_packages", "cipd_bin_packages/bin")
567	b.Spec.ExtraTags = map[string]string{
568		"log_location": fmt.Sprintf("logdog://logs.chromium.org/%s/${SWARMING_TASK_ID}/+/annotations", b.cfg.Project),
569	}
570
571	// Attempts.
572	if !b.role("Build", "Upload") && b.extraConfig("ASAN", "MSAN", "TSAN", "Valgrind") {
573		// Sanitizers often find non-deterministic issues that retries would hide.
574		b.attempts(1)
575	} else {
576		// Retry by default to hide random bot/hardware failures.
577		b.attempts(2)
578	}
579}
580
581// kitchenTask sets up the task to run a recipe via Kitchen.
582func (b *taskBuilder) kitchenTask(recipe string, outputDir string) {
583	b.kitchenTaskNoBundle(recipe, outputDir)
584	b.dep(b.bundleRecipes())
585}
586
587// internalHardwareLabel returns the internal ID for the bot, if any.
588func (b *taskBuilder) internalHardwareLabel() *int {
589	if b.cfg.InternalHardwareLabel != nil {
590		return b.cfg.InternalHardwareLabel(b.parts)
591	}
592	return nil
593}
594
595// linuxGceDimensions adds the Swarming bot dimensions for Linux GCE instances.
596func (b *taskBuilder) linuxGceDimensions(machineType string) {
597	b.dimension(
598		// Specify CPU to avoid running builds on bots with a more unique CPU.
599		"cpu:x86-64-Haswell_GCE",
600		"gpu:none",
601		// The machine type (and thus the core count) is chosen by the caller.
602		fmt.Sprintf("machine_type:%s", machineType),
603		fmt.Sprintf("os:%s", DEFAULT_OS_LINUX_GCE),
604		fmt.Sprintf("pool:%s", b.cfg.Pool),
605	)
606}
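
// As an illustrative sketch (the pool name is an assumption; it comes from
// cfg.json), linuxGceDimensions(MACHINE_TYPE_SMALL) yields dimensions like:
//
//	cpu:x86-64-Haswell_GCE
//	gpu:none
//	machine_type:n1-highmem-2
//	os:Debian-10.3
//	pool:Skia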
607
608// deriveCompileTaskName returns the name of a compile task based on the given
609// job name.
610func (b *jobBuilder) deriveCompileTaskName() string {
611	if b.role("Test", "Perf", "FM") {
612		task_os := b.parts["os"]
613		ec := []string{}
614		if val := b.parts["extra_config"]; val != "" {
615			ec = strings.Split(val, "_")
616			ignore := []string{
617				"Skpbench", "AbandonGpuContext", "PreAbandonGpuContext", "Valgrind",
618				"ReleaseAndAbandonGpuContext", "FSAA", "FAAA", "FDAA", "NativeFonts", "GDI",
619				"NoGPUThreads", "ProcDump", "DDL1", "DDL3", "OOPRDDL", "T8888",
620				"DDLTotal", "DDLRecord", "9x9", "BonusConfigs", "SkottieTracing", "SkottieWASM",
621				"GpuTess", "DMSAA", "DMSAAStats", "Mskp", "Docker", "PDF", "SkVM", "Puppeteer",
622				"SkottieFrames", "RenderSKP", "CanvasPerf", "AllPathsVolatile", "WebGL2"}
623			keep := make([]string, 0, len(ec))
624			for _, part := range ec {
625				if !In(part, ignore) {
626					keep = append(keep, part)
627				}
628			}
629			ec = keep
630		}
631		if b.os("Android") {
632			if !In("Android", ec) {
633				ec = append([]string{"Android"}, ec...)
634			}
635			task_os = COMPILE_TASK_NAME_OS_LINUX
636		} else if b.os("ChromeOS") {
637			ec = append([]string{"Chromebook", "GLES"}, ec...)
638			task_os = COMPILE_TASK_NAME_OS_LINUX
639		} else if b.os("iOS") {
640			ec = append([]string{task_os}, ec...)
641			task_os = "Mac"
642		} else if b.matchOs("Win") {
643			task_os = "Win"
644		} else if b.compiler("GCC") {
645			// GCC compiles now run in a Docker container. We use the same OS and
646			// version to compile as to test.
647			ec = append(ec, "Docker")
648		} else if b.matchOs("Ubuntu", "Debian") {
649			task_os = COMPILE_TASK_NAME_OS_LINUX
650		} else if b.matchOs("Mac") {
651			task_os = "Mac"
652		}
653		jobNameMap := map[string]string{
654			"role":          "Build",
655			"os":            task_os,
656			"compiler":      b.parts["compiler"],
657			"target_arch":   b.parts["arch"],
658			"configuration": b.parts["configuration"],
659		}
660		if b.extraConfig("PathKit") {
661			ec = []string{"PathKit"}
662		}
663		if b.extraConfig("CanvasKit", "SkottieWASM", "Puppeteer") {
664			if b.cpu() {
665				ec = []string{"CanvasKit_CPU"}
666			} else {
667				ec = []string{"CanvasKit"}
668			}
669
670		}
671		if len(ec) > 0 {
672			jobNameMap["extra_config"] = strings.Join(ec, "_")
673		}
674		name, err := b.jobNameSchema.MakeJobName(jobNameMap)
675		if err != nil {
676			log.Fatal(err)
677		}
678		return name
679	} else if b.parts["role"] == "BuildStats" {
680		return strings.Replace(b.Name, "BuildStats", "Build", 1)
681	} else {
682		return b.Name
683	}
684}
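
// Worked example (the job names here are hypothetical): a BuildStats job maps
// to its Build counterpart by a simple string replacement,
//
//	BuildStats-Debian10-Clang-x86_64-Release  ->  Build-Debian10-Clang-x86_64-Release
//
// while Test/Perf/FM jobs first rewrite the OS and extra_config parts (for
// example, Android and ChromeOS jobs compile on Debian10) and then rebuild the
// name via the JobNameSchema.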
685
686// swarmDimensions generates swarming bot dimensions for the given task.
687func (b *taskBuilder) swarmDimensions() {
688	if b.cfg.SwarmDimensions != nil {
689		dims := b.cfg.SwarmDimensions(b.parts)
690		if dims != nil {
691			b.dimension(dims...)
692			return
693		}
694	}
695	b.defaultSwarmDimensions()
696}
697
698// defaultSwarmDimensions generates default swarming bot dimensions for the given task.
699func (b *taskBuilder) defaultSwarmDimensions() {
700	d := map[string]string{
701		"pool": b.cfg.Pool,
702	}
703	if os, ok := b.parts["os"]; ok {
704		d["os"], ok = map[string]string{
705			"Android":    "Android",
706			"ChromeOS":   "ChromeOS",
707			"Debian9":    DEFAULT_OS_LINUX_GCE, // Runs in Deb9 Docker.
708			"Debian10":   DEFAULT_OS_LINUX_GCE,
709			"Mac":        DEFAULT_OS_MAC,
710			"Mac10.13":   "Mac-10.13.6",
711			"Mac10.14":   "Mac-10.14.3",
712			"Mac10.15.1": "Mac-10.15.1",
713			"Mac10.15.7": "Mac-10.15.7", // Same as 'Mac', but explicit.
714			"Mac11":      "Mac-11.1",
715			"Ubuntu18":   "Ubuntu-18.04",
716			"Win":        DEFAULT_OS_WIN,
717			"Win10":      "Windows-10-19041",
718			"Win2019":    DEFAULT_OS_WIN,
719			"Win7":       "Windows-7-SP1",
720			"Win8":       "Windows-8.1-SP0",
721			"iOS":        "iOS-13.3.1",
722		}[os]
723		if !ok {
724			log.Fatalf("Entry %q not found in OS mapping.", os)
725		}
726		if os == "Win10" && b.parts["model"] == "Golo" {
727			// ChOps-owned machines have Windows 10 v1709.
728			d["os"] = "Windows-10-16299"
729		}
730		if os == "Mac10.14" && b.parts["model"] == "VMware7.1" {
731			// ChOps VMs are at a newer version of MacOS.
732			d["os"] = "Mac-10.14.6"
733		}
734		if os == "Mac10.15" && b.parts["model"] == "VMware7.1" {
735			// ChOps VMs are at a newer version of MacOS.
736			d["os"] = "Mac-10.15.7"
737		}
738		if b.parts["model"] == "iPhone6" {
739			// This is the latest iOS that supports iPhone6.
740			d["os"] = "iOS-12.4.5"
741		}
742		if b.parts["model"] == "iPhone11" {
743			d["os"] = "iOS-13.6"
744		}
745		if b.parts["model"] == "iPadPro" {
746			d["os"] = "iOS-13.6"
747		}
748	} else {
749		d["os"] = DEFAULT_OS_DEBIAN
750	}
751	if b.role("Test", "Perf") {
752		if b.os("Android") {
753			// For Android, the device type is a better dimension
754			// than CPU or GPU.
755			deviceInfo, ok := map[string][]string{
756				"AndroidOne": {"sprout", "MOB30Q"},
757				// S6 dimensions are more general than we would like. See skbug.com/11337 for context.
758				"GalaxyS6":        {"universal7420", "NRD90M"},
759				"GalaxyS7_G930FD": {"herolte", "R16NW_G930FXXS2ERH6"}, // This is Oreo.
760				"GalaxyS9":        {"starlte", "QP1A.190711.020"},     // This is Android10.
761				"GalaxyS20":       {"exynos990", "QP1A.190711.020"},
762				"MotoG4":          {"athene", "NPJS25.93-14.7-8"},
763				"NVIDIA_Shield":   {"foster", "OPR6.170623.010_3507953_1441.7411"},
764				"Nexus5":          {"hammerhead", "M4B30Z_3437181"},
765				"Nexus5x":         {"bullhead", "OPR6.170623.023"},
766				"Nexus7":          {"grouper", "LMY47V_1836172"}, // 2012 Nexus 7
767				"P30":             {"HWELE", "HUAWEIELE-L29"},
768				"Pixel":           {"sailfish", "PPR1.180610.009"},
769				"Pixel2XL":        {"taimen", "PPR1.180610.009"},
770				"Pixel3":          {"blueline", "PQ1A.190105.004"},
771				"Pixel3a":         {"sargo", "QP1A.190711.020"},
772				"Pixel4":          {"flame", "RPB2.200611.009"}, // R Preview
773				"Pixel4XL":        {"coral", "QD1A.190821.011.C4"},
774				"Pixel5":          {"redfin", "RD1A.200810.022.A4"},
775				"TecnoSpark3Pro":  {"TECNO-KB8", "PPR1.180610.011"},
776			}[b.parts["model"]]
777			if !ok {
778				log.Fatalf("Entry %q not found in Android mapping.", b.parts["model"])
779			}
780			d["device_type"] = deviceInfo[0]
781			d["device_os"] = deviceInfo[1]
782		} else if b.os("iOS") {
783			device, ok := map[string]string{
784				"iPadMini4": "iPad5,1",
785				"iPhone6":   "iPhone7,2",
786				"iPhone7":   "iPhone9,1",
787				"iPhone8":   "iPhone10,1",
788				"iPhone11":  "iPhone12,1",
789				"iPadPro":   "iPad6,3",
790			}[b.parts["model"]]
791			if !ok {
792				log.Fatalf("Entry %q not found in iOS mapping.", b.parts["model"])
793			}
794			d["device_type"] = device
795			// Temporarily use this dimension to ensure we only use the new libimobiledevice, since the
796			// old version won't work with current recipes.
797			d["libimobiledevice"] = "1582155448"
798		} else if b.extraConfig("SKQP") && b.cpu("Emulator") {
799			if !b.model("NUC7i5BNK") || d["os"] != DEFAULT_OS_DEBIAN {
800				log.Fatalf("Please update defaultSwarmDimensions for SKQP::Emulator %s %s.", b.parts["os"], b.parts["model"])
801			}
802			d["cpu"] = "x86-64-i5-7260U"
803			d["os"] = DEFAULT_OS_DEBIAN
804			// KVM stands for Kernel-based Virtual Machine and indicates whether the machine can virtualize,
805			// i.e. whether we can run an x86 Android emulator on it.
806			// kjlubick tried running this on GCE, but it was too slow for the large install,
807			// so we run on bare-metal machines in the Skolo (which should also have KVM).
808			d["kvm"] = "1"
809			d["docker_installed"] = "true"
810		} else if b.cpu() || b.extraConfig("CanvasKit", "Docker", "SwiftShader") {
811			modelMapping, ok := map[string]map[string]string{
812				"AppleM1": {
813					"MacMini9.1": "arm64-64-Apple_M1",
814				},
815				"AVX": {
816					"VMware7.1": "x86-64-E5-2697_v2",
817				},
818				"AVX2": {
819					"GCE":            "x86-64-Haswell_GCE",
820					"MacBookAir7.2":  "x86-64-i5-5350U",
821					"MacBookPro11.5": "x86-64-i7-4870HQ",
822					"NUC5i7RYH":      "x86-64-i7-5557U",
823				},
824				"AVX512": {
825					"GCE":  "x86-64-Skylake_GCE",
826					"Golo": "Intel64_Family_6_Model_85_Stepping_7__GenuineIntel",
827				},
828				"Rome": {
829					"GCE": "x86-64-AMD_Rome_GCE",
830				},
831				"SwiftShader": {
832					"GCE": "x86-64-Haswell_GCE",
833				},
834			}[b.parts["cpu_or_gpu_value"]]
835			if !ok {
836				log.Fatalf("Entry %q not found in CPU mapping.", b.parts["cpu_or_gpu_value"])
837			}
838			cpu, ok := modelMapping[b.parts["model"]]
839			if !ok {
840				log.Fatalf("Entry %q not found in %q model mapping.", b.parts["model"], b.parts["cpu_or_gpu_value"])
841			}
842			d["cpu"] = cpu
843			if b.model("GCE") && b.matchOs("Debian") {
844				d["os"] = DEFAULT_OS_LINUX_GCE
845			}
846			if b.model("GCE") && d["cpu"] == "x86-64-Haswell_GCE" {
847				d["machine_type"] = MACHINE_TYPE_MEDIUM
848			}
849		} else {
850			if b.matchOs("Win") {
851				gpu, ok := map[string]string{
852					// At some point this might use the device ID, but for now it's like Chromebooks.
853					"Adreno630":     "Adreno630",
854					"GT610":         "10de:104a-23.21.13.9101",
855					"GTX660":        "10de:11c0-26.21.14.4120",
856					"GTX960":        "10de:1401-27.21.14.5671",
857					"IntelHD4400":   "8086:0a16-20.19.15.4963",
858					"IntelIris540":  "8086:1926-26.20.100.7463",
859					"IntelIris6100": "8086:162b-20.19.15.4963",
860					"IntelIris655":  "8086:3ea5-26.20.100.7463",
861					"RadeonHD7770":  "1002:683d-26.20.13031.18002",
862					"RadeonR9M470X": "1002:6646-26.20.13031.18002",
863					"QuadroP400":    "10de:1cb3-25.21.14.1678",
864				}[b.parts["cpu_or_gpu_value"]]
865				if !ok {
866					log.Fatalf("Entry %q not found in Win GPU mapping.", b.parts["cpu_or_gpu_value"])
867				}
868				d["gpu"] = gpu
869			} else if b.isLinux() {
870				gpu, ok := map[string]string{
871					// Intel drivers come from CIPD, so no need to specify the version here.
872					"IntelBayTrail": "8086:0f31",
873					"IntelHD2000":   "8086:0102",
874					"IntelHD405":    "8086:22b1",
875					"IntelIris640":  "8086:5926",
876					"QuadroP400":    "10de:1cb3-430.14",
877				}[b.parts["cpu_or_gpu_value"]]
878				if !ok {
879					log.Fatalf("Entry %q not found in Ubuntu GPU mapping.", b.parts["cpu_or_gpu_value"])
880				}
881				d["gpu"] = gpu
882			} else if b.matchOs("Mac") {
883				gpu, ok := map[string]string{
884					"AppleM1":       "AppleM1",
885					"IntelHD6000":   "8086:1626",
886					"IntelHD615":    "8086:591e",
887					"IntelIris5100": "8086:0a2e",
888					"RadeonHD8870M": "1002:6821-4.0.20-3.2.8",
889				}[b.parts["cpu_or_gpu_value"]]
890				if !ok {
891					log.Fatalf("Entry %q not found in Mac GPU mapping.", b.parts["cpu_or_gpu_value"])
892				}
893				if gpu == "AppleM1" {
894					// No GPU dimension yet, but we can constrain by CPU.
895					d["cpu"] = "arm64-64-Apple_M1"
896				} else {
897					d["gpu"] = gpu
898				}
899				// Yuck. We have two different types of MacMini7,1 with the same GPU but different CPUs.
900				if b.gpu("IntelIris5100") {
901					// Run all tasks on Golo machines for now.
902					d["cpu"] = "x86-64-i7-4578U"
903				}
904			} else if b.os("ChromeOS") {
905				version, ok := map[string]string{
906					"MaliT604":            "10575.22.0",
907					"MaliT764":            "10575.22.0",
908					"MaliT860":            "10575.22.0",
909					"PowerVRGX6250":       "10575.22.0",
910					"TegraK1":             "10575.22.0",
911					"IntelHDGraphics615":  "10575.22.0",
912					"IntelUHDGraphics605": "13729.56.0",
913					"RadeonVega3":         "13729.56.0",
914				}[b.parts["cpu_or_gpu_value"]]
915				if !ok {
916					log.Fatalf("Entry %q not found in ChromeOS GPU mapping.", b.parts["cpu_or_gpu_value"])
917				}
918				d["gpu"] = b.parts["cpu_or_gpu_value"]
919				d["release_version"] = version
920			} else {
921				log.Fatalf("Unknown GPU mapping for OS %q.", b.parts["os"])
922			}
923		}
924	} else {
925		d["gpu"] = "none"
926		if d["os"] == DEFAULT_OS_LINUX_GCE {
927			if b.extraConfig("CanvasKit", "CMake", "Docker", "PathKit") || b.role("BuildStats") {
928				b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
929				return
930			}
931			// Use many-core machines for Build tasks.
932			b.linuxGceDimensions(MACHINE_TYPE_LARGE)
933			return
934		} else if d["os"] == DEFAULT_OS_WIN {
935			// Windows CPU bots.
936			d["cpu"] = "x86-64-Haswell_GCE"
937			// Use many-core machines for Build tasks.
938			d["machine_type"] = MACHINE_TYPE_LARGE
939		} else if d["os"] == DEFAULT_OS_MAC {
940			// Mac CPU bots.
941			d["cpu"] = "x86-64-E5-2697_v2"
942		}
943	}
944
945	dims := make([]string, 0, len(d))
946	for k, v := range d {
947		dims = append(dims, fmt.Sprintf("%s:%s", k, v))
948	}
949	sort.Strings(dims)
950	b.dimension(dims...)
951}
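
// Illustrative sketch (the parts values and pool are assumptions): a CPU-bound
// Test job with os=Debian10, model=GCE, cpu_or_gpu_value=AVX2 resolves to
// dimensions roughly like:
//
//	cpu:x86-64-Haswell_GCE
//	machine_type:n1-standard-16
//	os:Debian-10.3
//	pool:Skia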
952
953// bundleRecipes generates the task to bundle and isolate the recipes. Returns
954// the name of the task, which may be added as a dependency.
955func (b *jobBuilder) bundleRecipes() string {
956	b.addTask(BUNDLE_RECIPES_NAME, func(b *taskBuilder) {
957		b.cipd(specs.CIPD_PKGS_GIT_LINUX_AMD64...)
958		b.cipd(specs.CIPD_PKGS_PYTHON_LINUX_AMD64...)
959		b.cmd("/bin/bash", "skia/infra/bots/bundle_recipes.sh", specs.PLACEHOLDER_ISOLATED_OUTDIR)
960		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
961		b.addToPATH("cipd_bin_packages", "cipd_bin_packages/bin")
962		b.idempotent()
963		b.cas(CAS_RECIPES)
964	})
965	return BUNDLE_RECIPES_NAME
966}
967
968// buildTaskDrivers generates the task to compile the task driver code for the
969// given OS and architecture. Returns the name of the task, which may be added
970// as a dependency.
971func (b *jobBuilder) buildTaskDrivers(goos, goarch string) string {
972	name := BUILD_TASK_DRIVERS_PREFIX + "_" + goos + "_" + goarch
973	b.addTask(name, func(b *taskBuilder) {
974		b.usesGo()
975		b.cmd("/bin/bash", "skia/infra/bots/build_task_drivers.sh",
976			specs.PLACEHOLDER_ISOLATED_OUTDIR,
977			goos,
978			goarch)
979		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
980		b.addToPATH("cipd_bin_packages", "cipd_bin_packages/bin", "go/go/bin")
981		b.idempotent()
982		b.cas(CAS_TASK_DRIVERS)
983	})
984	return name
985}
986
987// updateGoDeps generates the task to update Go dependencies.
988func (b *jobBuilder) updateGoDeps() {
989	b.addTask(b.Name, func(b *taskBuilder) {
990		b.usesGo()
991		b.asset("protoc")
992		b.cmd(
993			"./update_go_deps",
994			"--project_id", "skia-swarming-bots",
995			"--task_id", specs.PLACEHOLDER_TASK_ID,
996			"--task_name", b.Name,
997			"--workdir", ".",
998			"--gerrit_project", "skia",
999			"--gerrit_url", "https://skia-review.googlesource.com",
1000			"--repo", specs.PLACEHOLDER_REPO,
1001			"--revision", specs.PLACEHOLDER_REVISION,
1002			"--patch_issue", specs.PLACEHOLDER_ISSUE,
1003			"--patch_set", specs.PLACEHOLDER_PATCHSET,
1004			"--patch_server", specs.PLACEHOLDER_CODEREVIEW_SERVER,
1005			"--alsologtostderr",
1006		)
1007		b.dep(b.buildTaskDrivers("linux", "amd64"))
1008		b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
1009		b.addToPATH("cipd_bin_packages", "cipd_bin_packages/bin", "go/go/bin")
1010		b.cas(CAS_EMPTY)
1011		b.serviceAccount(b.cfg.ServiceAccountRecreateSKPs)
1012	})
1013}
1014
1015// createDockerImage creates the specified docker image. Returns the name of the
1016// generated task.
1017func (b *jobBuilder) createDockerImage(wasm bool) string {
1018	// First, derive the name of the task.
1019	imageName := "skia-release"
1020	taskName := "Housekeeper-PerCommit-CreateDockerImage_Skia_Release"
1021	if wasm {
1022		imageName = "skia-wasm-release"
1023		taskName = "Housekeeper-PerCommit-CreateDockerImage_Skia_WASM_Release"
1024	}
1025	imageDir := path.Join("docker", imageName)
1026
1027	// Add the task.
1028	b.addTask(taskName, func(b *taskBuilder) {
1029		// TODO(borenet): Make this task not use Git.
1030		b.usesGit()
1031		b.cmd(
1032			"./build_push_docker_image",
1033			"--image_name", fmt.Sprintf("gcr.io/skia-public/%s", imageName),
1034			"--dockerfile_dir", imageDir,
1035			"--project_id", "skia-swarming-bots",
1036			"--task_id", specs.PLACEHOLDER_TASK_ID,
1037			"--task_name", b.Name,
1038			"--workdir", ".",
1039			"--gerrit_project", "skia",
1040			"--gerrit_url", "https://skia-review.googlesource.com",
1041			"--repo", specs.PLACEHOLDER_REPO,
1042			"--revision", specs.PLACEHOLDER_REVISION,
1043			"--patch_issue", specs.PLACEHOLDER_ISSUE,
1044			"--patch_set", specs.PLACEHOLDER_PATCHSET,
1045			"--patch_server", specs.PLACEHOLDER_CODEREVIEW_SERVER,
1046			"--swarm_out_dir", specs.PLACEHOLDER_ISOLATED_OUTDIR,
1047			"--alsologtostderr",
1048		)
1049		b.dep(b.buildTaskDrivers("linux", "amd64"))
1050		b.addToPATH("cipd_bin_packages", "cipd_bin_packages/bin", "go/go/bin")
1051		b.cas(CAS_EMPTY)
1052		b.serviceAccount(b.cfg.ServiceAccountCompile)
1053		b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
1054		b.usesDocker()
1055		b.cache(CACHES_DOCKER...)
1056	})
1057	return taskName
1058}
1059
1060// createPushAppsFromSkiaDockerImage creates and pushes docker images of some apps
1061// (eg: fiddler, debugger, api) using the skia-release docker image.
1062func (b *jobBuilder) createPushAppsFromSkiaDockerImage() {
1063	b.addTask(b.Name, func(b *taskBuilder) {
1064		// TODO(borenet): Make this task not use Git.
1065		b.usesGit()
1066		b.cmd(
1067			"./push_apps_from_skia_image",
1068			"--project_id", "skia-swarming-bots",
1069			"--task_id", specs.PLACEHOLDER_TASK_ID,
1070			"--task_name", b.Name,
1071			"--workdir", ".",
1072			"--gerrit_project", "buildbot",
1073			"--gerrit_url", "https://skia-review.googlesource.com",
1074			"--repo", specs.PLACEHOLDER_REPO,
1075			"--revision", specs.PLACEHOLDER_REVISION,
1076			"--patch_issue", specs.PLACEHOLDER_ISSUE,
1077			"--patch_set", specs.PLACEHOLDER_PATCHSET,
1078			"--patch_server", specs.PLACEHOLDER_CODEREVIEW_SERVER,
1079			"--alsologtostderr",
1080		)
1081		b.dep(b.buildTaskDrivers("linux", "amd64"))
1082		b.dep(b.createDockerImage(false))
1083		b.addToPATH("cipd_bin_packages", "cipd_bin_packages/bin", "go/go/bin")
1084		b.cas(CAS_EMPTY)
1085		b.serviceAccount(b.cfg.ServiceAccountCompile)
1086		b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
1087		b.usesDocker()
1088		b.cache(CACHES_DOCKER...)
1089	})
1090}
1091
1092// createPushAppsFromWASMDockerImage creates and pushes docker images of some apps
1093// (eg: jsfiddle, skottie, particles) using the skia-wasm-release docker image.
1094func (b *jobBuilder) createPushAppsFromWASMDockerImage() {
1095	b.addTask(b.Name, func(b *taskBuilder) {
1096		// TODO(borenet): Make this task not use Git.
1097		b.usesGit()
1098		b.cmd(
1099			"./push_apps_from_wasm_image",
1100			"--project_id", "skia-swarming-bots",
1101			"--task_id", specs.PLACEHOLDER_TASK_ID,
1102			"--task_name", b.Name,
1103			"--workdir", ".",
1104			"--gerrit_project", "buildbot",
1105			"--gerrit_url", "https://skia-review.googlesource.com",
1106			"--repo", specs.PLACEHOLDER_REPO,
1107			"--revision", specs.PLACEHOLDER_REVISION,
1108			"--patch_issue", specs.PLACEHOLDER_ISSUE,
1109			"--patch_set", specs.PLACEHOLDER_PATCHSET,
1110			"--patch_server", specs.PLACEHOLDER_CODEREVIEW_SERVER,
1111			"--alsologtostderr",
1112		)
1113		b.dep(b.buildTaskDrivers("linux", "amd64"))
1114		b.dep(b.createDockerImage(true))
1115		b.addToPATH("cipd_bin_packages", "cipd_bin_packages/bin", "go/go/bin")
1116		b.cas(CAS_EMPTY)
1117		b.serviceAccount(b.cfg.ServiceAccountCompile)
1118		b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
1119		b.usesDocker()
1120		b.cache(CACHES_DOCKER...)
1121	})
1122}
1123
1124var iosRegex = regexp.MustCompile(`os:iOS-(.*)`)
1125
1126func (b *taskBuilder) maybeAddIosDevImage() {
1127	for _, dim := range b.Spec.Dimensions {
1128		if m := iosRegex.FindStringSubmatch(dim); len(m) >= 2 {
1129			var asset string
1130			switch m[1] {
1131			// Other patch versions can be added to the same case.
1132			case "11.4.1":
1133				asset = "ios-dev-image-11.4"
1134			case "12.4.5":
1135				asset = "ios-dev-image-12.4"
1136			case "13.3.1":
1137				asset = "ios-dev-image-13.3"
1138			case "13.4.1":
1139				asset = "ios-dev-image-13.4"
1140			case "13.5.1":
1141				asset = "ios-dev-image-13.5"
1142			case "13.6":
1143				asset = "ios-dev-image-13.6"
1144			default:
1145				log.Fatalf("Unable to determine correct ios-dev-image asset for %s. If %s is a new iOS release, you must add a CIPD package containing the corresponding iOS dev image; see ios-dev-image-11.4 for an example.", b.Name, m[1])
1146			}
1147			b.asset(asset)
1148			break
1149		} else if strings.Contains(dim, "iOS") {
1150			log.Fatalf("Must specify iOS version for %s to obtain correct dev image; os dimension is missing version: %s", b.Name, dim)
1151		}
1152	}
1153}
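
// For instance, a task whose dimensions include "os:iOS-13.6" picks up the
// "ios-dev-image-13.6" asset. A hypothetical future release would be handled
// by adding another case to the switch above, e.g.:
//
//	case "14.0":
//		asset = "ios-dev-image-14.0" // assumes such a CIPD asset has been created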
1154
1155// compile generates a compile task. Returns the name of the compile task.
1156func (b *jobBuilder) compile() string {
1157	name := b.deriveCompileTaskName()
1158	if b.extraConfig("WasmGMTests") {
1159		b.compileWasmGMTests(name)
1160	} else {
1161		b.addTask(name, func(b *taskBuilder) {
1162			recipe := "compile"
1163			casSpec := CAS_COMPILE
1164			if b.extraConfig("NoDEPS", "CMake", "CommandBuffer", "Flutter", "SKQP") {
1165				recipe = "sync_and_compile"
1166				casSpec = CAS_RUN_RECIPE
1167				b.recipeProps(EXTRA_PROPS)
1168				b.usesGit()
1169				if !b.extraConfig("NoDEPS") {
1170					b.cache(CACHES_WORKDIR...)
1171				}
1172			} else {
1173				b.idempotent()
1174			}
1175			b.kitchenTask(recipe, OUTPUT_BUILD)
1176			b.cas(casSpec)
1177			b.serviceAccount(b.cfg.ServiceAccountCompile)
1178			b.swarmDimensions()
1179			if b.extraConfig("Docker", "LottieWeb", "SKQP", "CMake") || b.compiler("EMCC") {
1180				b.usesDocker()
1181				b.cache(CACHES_DOCKER...)
1182			}
1183
1184			// Android bots require a toolchain.
1185			if b.extraConfig("Android") {
1186				if b.matchOs("Mac") {
1187					b.asset("android_ndk_darwin")
1188				} else if b.matchOs("Win") {
1189					pkg := b.MustGetCipdPackageFromAsset("android_ndk_windows")
1190					pkg.Path = "n"
1191					b.cipd(pkg)
1192				} else if !b.extraConfig("SKQP") {
1193					b.asset("android_ndk_linux")
1194				}
1195			} else if b.extraConfig("Chromebook") {
1196				b.asset("clang_linux")
1197				if b.arch("x86_64") {
1198					b.asset("chromebook_x86_64_gles")
1199				} else if b.arch("arm") {
1200					b.asset("armhf_sysroot")
1201					b.asset("chromebook_arm_gles")
1202				}
1203			} else if b.isLinux() {
1204				if b.compiler("Clang") {
1205					b.asset("clang_linux")
1206				}
1207				if b.extraConfig("SwiftShader") {
1208					b.asset("cmake_linux")
1209				}
1210				if b.extraConfig("OpenCL") {
1211					b.asset("opencl_headers", "opencl_ocl_icd_linux")
1212				}
1213				b.asset("ccache_linux")
1214				b.usesCCache()
1215			} else if b.matchOs("Win") {
1216				b.asset("win_toolchain")
1217				if b.compiler("Clang") {
1218					b.asset("clang_win")
1219				}
1220				if b.extraConfig("OpenCL") {
1221					b.asset("opencl_headers")
1222				}
1223			} else if b.matchOs("Mac") {
1224				b.cipd(CIPD_PKGS_XCODE...)
1225				b.Spec.Caches = append(b.Spec.Caches, &specs.Cache{
1226					Name: "xcode",
1227					Path: "cache/Xcode.app",
1228				})
1229				b.asset("ccache_mac")
1230				b.usesCCache()
1231				if b.extraConfig("CommandBuffer") {
1232					b.timeout(2 * time.Hour)
1233				}
1234				if b.extraConfig("iOS") {
1235					b.asset("provisioning_profile_ios")
1236				}
1237			}
1238		})
1239	}
1240
1241	// All compile tasks are runnable as their own Job. Assert that the Job
1242	// is listed in jobs.
1243	if !In(name, b.jobs) {
1244		log.Fatalf("Job %q is missing from the jobs list! Derived from: %q", name, b.Name)
1245	}
1246
1247	return name
1248}
1249
1250// recreateSKPs generates a RecreateSKPs task.
1251func (b *jobBuilder) recreateSKPs() {
1252	b.addTask(b.Name, func(b *taskBuilder) {
1253		b.recipeProps(EXTRA_PROPS)
1254		b.kitchenTask("recreate_skps", OUTPUT_NONE)
1255		b.serviceAccount(b.cfg.ServiceAccountRecreateSKPs)
1256		b.dimension(
1257			"pool:SkiaCT",
1258			fmt.Sprintf("os:%s", DEFAULT_OS_LINUX_GCE),
1259		)
1260		b.usesGo()
1261		b.cache(CACHES_WORKDIR...)
1262		b.timeout(4 * time.Hour)
1263	})
1264}
1265
1266// checkGeneratedFiles verifies that no generated SKSL files have been edited
1267// by hand.
1268func (b *jobBuilder) checkGeneratedFiles() {
1269	b.addTask(b.Name, func(b *taskBuilder) {
1270		b.recipeProps(EXTRA_PROPS)
1271		b.kitchenTask("check_generated_files", OUTPUT_NONE)
1272		b.serviceAccount(b.cfg.ServiceAccountCompile)
1273		b.linuxGceDimensions(MACHINE_TYPE_LARGE)
1274		b.usesGo()
1275		b.asset("clang_linux")
1276		b.asset("ccache_linux")
1277		b.usesCCache()
1278		b.cache(CACHES_WORKDIR...)
1279	})
1280}
1281
1282// checkGnToBp verifies that the gn_to_bp.py script continues to work.
1283func (b *jobBuilder) checkGnToBp() {
1284	b.addTask(b.Name, func(b *taskBuilder) {
1285		b.cas(CAS_COMPILE)
1286		b.dep(b.buildTaskDrivers("linux", "amd64"))
1287		b.cmd("./run_gn_to_bp",
1288			"--local=false",
1289			"--project_id", "skia-swarming-bots",
1290			"--task_id", specs.PLACEHOLDER_TASK_ID,
1291			"--task_name", b.Name,
1292			"--alsologtostderr")
1293		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
1294		b.usesPython()
1295		b.serviceAccount(b.cfg.ServiceAccountHousekeeper)
1296	})
1297}
1298
1299// housekeeper generates a Housekeeper task.
1300func (b *jobBuilder) housekeeper() {
1301	b.addTask(b.Name, func(b *taskBuilder) {
1302		b.recipeProps(EXTRA_PROPS)
1303		b.kitchenTask("housekeeper", OUTPUT_NONE)
1304		b.serviceAccount(b.cfg.ServiceAccountHousekeeper)
1305		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
1306		b.usesGit()
1307		b.cache(CACHES_WORKDIR...)
1308	})
1309}
1310
1311// g3FrameworkCanary generates a G3 Framework Canary task. Unlike some other
1312// generators in this file, it does not return a task name; the task is added
1313// directly under the Job name.
1314func (b *jobBuilder) g3FrameworkCanary() {
1315	b.addTask(b.Name, func(b *taskBuilder) {
1316		b.cas(CAS_EMPTY)
1317		b.dep(b.buildTaskDrivers("linux", "amd64"))
1318		b.cmd("./g3_canary",
1319			"--local=false",
1320			"--project_id", "skia-swarming-bots",
1321			"--task_id", specs.PLACEHOLDER_TASK_ID,
1322			"--task_name", b.Name,
1323			"--repo", specs.PLACEHOLDER_REPO,
1324			"--revision", specs.PLACEHOLDER_REVISION,
1325			"--patch_issue", specs.PLACEHOLDER_ISSUE,
1326			"--patch_set", specs.PLACEHOLDER_PATCHSET,
1327			"--patch_server", specs.PLACEHOLDER_CODEREVIEW_SERVER,
1328			"--alsologtostderr")
1329		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
1330		b.cipd(CIPD_PKG_LUCI_AUTH)
1331		b.serviceAccount("skia-g3-framework-compile@skia-swarming-bots.iam.gserviceaccount.com")
1332		b.timeout(3 * time.Hour)
1333		b.attempts(1)
1334	})
1335}
1336
1337// infra generates an infra_tests task.
1338func (b *jobBuilder) infra() {
1339	b.addTask(b.Name, func(b *taskBuilder) {
1340		if b.matchOs("Win") || b.matchExtraConfig("Win") {
1341			b.dimension(
1342				// Specify CPU to avoid running builds on bots with a more unique CPU.
1343				"cpu:x86-64-Haswell_GCE",
1344				"gpu:none",
1345				fmt.Sprintf("machine_type:%s", MACHINE_TYPE_MEDIUM), // We don't have any small Windows instances.
1346				fmt.Sprintf("os:%s", DEFAULT_OS_WIN),
1347				fmt.Sprintf("pool:%s", b.cfg.Pool),
1348			)
1349		} else {
1350			b.linuxGceDimensions(MACHINE_TYPE_SMALL)
1351		}
1352		b.recipeProp("repository", specs.PLACEHOLDER_REPO)
1353		b.kitchenTask("infra", OUTPUT_NONE)
1354		b.cas(CAS_WHOLE_REPO)
1355		b.serviceAccount(b.cfg.ServiceAccountCompile)
1356		b.cipd(specs.CIPD_PKGS_GSUTIL...)
1357		b.idempotent()
1358		b.usesGo()
1359	})
1360}
1361
1362// buildstats generates a buildstats task, which compiles code and generates
1363// statistics about the build.
1364func (b *jobBuilder) buildstats() {
1365	compileTaskName := b.compile()
1366	b.addTask(b.Name, func(b *taskBuilder) {
1367		b.recipeProps(EXTRA_PROPS)
1368		b.kitchenTask("compute_buildstats", OUTPUT_PERF)
1369		b.dep(compileTaskName)
1370		b.asset("bloaty")
1371		b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
1372		b.usesDocker()
1373		b.usesGit()
1374		b.cache(CACHES_WORKDIR...)
1375	})
1376	// Upload release results (for tracking in perf)
1377	// We have some jobs that are FYI (e.g. Debug-CanvasKit, tree-map generator)
1378	if b.release() && !b.arch("x86_64") {
1379		uploadName := fmt.Sprintf("%s%s%s", PREFIX_UPLOAD, b.jobNameSchema.Sep, b.Name)
1380		depName := b.Name
1381		b.addTask(uploadName, func(b *taskBuilder) {
1382			b.recipeProp("gs_bucket", b.cfg.GsBucketNano)
1383			b.recipeProps(EXTRA_PROPS)
1384			// TODO(borenet): I'm not sure why the upload task is
1385			// using the BuildStats task name, but I've done this
1386			// to maintain existing behavior.
1387			b.Name = depName
1388			b.kitchenTask("upload_buildstats_results", OUTPUT_NONE)
1389			b.Name = uploadName
1390			b.serviceAccount(b.cfg.ServiceAccountUploadNano)
1391			b.linuxGceDimensions(MACHINE_TYPE_SMALL)
1392			b.cipd(specs.CIPD_PKGS_GSUTIL...)
1393			b.dep(depName)
1394		})
1395	}
1396}
1397
1398// doUpload indicates whether the given Job should upload its results.
1399func (b *jobBuilder) doUpload() bool {
1400	for _, s := range b.cfg.NoUpload {
1401		m, err := regexp.MatchString(s, b.Name)
1402		if err != nil {
1403			log.Fatal(err)
1404		}
1405		if m {
1406			return false
1407		}
1408	}
1409	return true
1410}
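
// Sketch of the corresponding cfg.json entry (the patterns are made-up
// examples, not real settings):
//
//	"no_upload": [
//		"ASAN",
//		"^Perf-.*-Valgrind"
//	]
//
// With such a config, doUpload returns false for any job whose name matches
// one of the patterns.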
1411
1412// commonTestPerfAssets adds the assets needed by Test and Perf tasks.
1413func (b *taskBuilder) commonTestPerfAssets() {
1414	// Docker-based tests don't need the standard CIPD assets
1415	if b.extraConfig("CanvasKit", "PathKit") || (b.role("Test") && b.extraConfig("LottieWeb")) {
1416		return
1417	}
1418	if b.extraConfig("Skpbench") {
1419		// Skpbench only needs skps
1420		b.asset("skp", "mskp")
1421	} else if b.os("Android", "ChromeOS", "iOS") {
1422		b.asset("skp", "svg", "skimage")
1423	} else {
1424		// for desktop machines
1425		b.asset("skimage", "skp", "svg")
1426	}
1427
1428	if b.isLinux() && b.matchExtraConfig("SAN") {
1429		b.asset("clang_linux")
1430	}
1431
1432	if b.isLinux() {
1433		if b.extraConfig("Vulkan") {
1434			b.asset("linux_vulkan_sdk")
1435		}
1436		if b.matchGpu("Intel") {
1437			b.asset("mesa_intel_driver_linux")
1438		}
1439		if b.extraConfig("OpenCL") {
1440			b.asset("opencl_ocl_icd_linux", "opencl_intel_neo_linux")
1441		}
1442	}
1443	if b.matchOs("Win") && b.extraConfig("ProcDump") {
1444		b.asset("procdump_win")
1445	}
1446}
1447
1448// directUpload adds prerequisites for uploading to GCS.
1449func (b *taskBuilder) directUpload(gsBucket, serviceAccount string) {
1450	b.recipeProp("gs_bucket", gsBucket)
1451	b.serviceAccount(serviceAccount)
1452	b.cipd(specs.CIPD_PKGS_GSUTIL...)
1453}
1454
1455// dm generates a Test task using dm.
1456func (b *jobBuilder) dm() {
1457	compileTaskName := ""
1458	// LottieWeb doesn't require anything in Skia to be compiled.
1459	if !b.extraConfig("LottieWeb") {
1460		compileTaskName = b.compile()
1461	}
1462	directUpload := false
1463	b.addTask(b.Name, func(b *taskBuilder) {
1464		cas := CAS_TEST
1465		recipe := "test"
1466		if b.extraConfig("SKQP") {
1467			cas = CAS_SKQP
1468			recipe = "skqp_test"
1469			if b.cpu("Emulator") {
1470				recipe = "test_skqp_emulator"
1471			}
1472		} else if b.extraConfig("OpenCL") {
1473			// TODO(dogben): Longer term we may not want this to be called a
1474			// "Test" task, but until we start running hs_bench or kx, it will
1475			// be easier to fit into the current job name schema.
1476			recipe = "compute_test"
1477		} else if b.extraConfig("PathKit") {
1478			cas = CAS_PATHKIT
1479			recipe = "test_pathkit"
1480			if b.doUpload() {
1481				b.directUpload(b.cfg.GsBucketGm, b.cfg.ServiceAccountUploadGM)
1482				directUpload = true
1483			}
1484		} else if b.extraConfig("CanvasKit") {
1485			cas = CAS_CANVASKIT
1486			recipe = "test_canvaskit"
1487			if b.doUpload() {
1488				b.directUpload(b.cfg.GsBucketGm, b.cfg.ServiceAccountUploadGM)
1489				directUpload = true
1490			}
1491		} else if b.extraConfig("LottieWeb") {
1492			// CAS_LOTTIE_CI differs from CAS_LOTTIE_WEB in that it includes
1493			// more of the files, especially those brought in via DEPS in the
1494			// lottie-ci repo. The main difference between Perf.+LottieWeb and
1495			// Test.+LottieWeb is that the former pulls in the lottie build via
1496			// npm and the latter always tests at lottie's
1497			// ToT.
1498			cas = CAS_LOTTIE_CI
1499			recipe = "test_lottie_web"
1500			if b.doUpload() {
1501				b.directUpload(b.cfg.GsBucketGm, b.cfg.ServiceAccountUploadGM)
1502				directUpload = true
1503			}
1504		} else {
1505			// Default recipe supports direct upload.
1506			// TODO(http://skbug.com/11785): Windows jobs are unable to extract gsutil.
1507			// https://bugs.chromium.org/p/chromium/issues/detail?id=1192611
1508			if b.doUpload() && !b.matchOs("Win") {
1509				b.directUpload(b.cfg.GsBucketGm, b.cfg.ServiceAccountUploadGM)
1510				directUpload = true
1511			}
1512		}
1513		b.recipeProp("gold_hashes_url", b.cfg.GoldHashesURL)
1514		b.recipeProps(EXTRA_PROPS)
1515		iid := b.internalHardwareLabel()
1516		iidStr := ""
1517		if iid != nil {
1518			iidStr = strconv.Itoa(*iid)
1519		}
1520		if recipe == "test" {
1521			b.dmFlags(iidStr)
1522		}
1523		b.kitchenTask(recipe, OUTPUT_TEST)
1524		b.cas(cas)
1525		b.swarmDimensions()
1526		if b.extraConfig("CanvasKit", "Docker", "LottieWeb", "PathKit", "SKQP") {
1527			b.usesDocker()
1528		}
1529		if compileTaskName != "" {
1530			b.dep(compileTaskName)
1531		}
1532		if b.os("Android") && b.extraConfig("ASAN") {
1533			b.asset("android_ndk_linux")
1534		}
1535		b.commonTestPerfAssets()
1536		if b.matchExtraConfig("Lottie") {
1537			b.asset("lottie-samples")
1538		}
1539		if b.extraConfig("SKQP") {
1540			if !b.cpu("Emulator") {
1541				b.asset("gcloud_linux")
1542			}
1543		}
1544		b.expiration(20 * time.Hour)
1545
1546		b.timeout(4 * time.Hour)
1547		if b.extraConfig("Valgrind") {
1548			b.timeout(9 * time.Hour)
1549			b.expiration(48 * time.Hour)
1550			b.asset("valgrind")
1551			// Since Valgrind runs on the same bots as the CQ, we restrict Valgrind to a subset of the bots
1552			// to ensure there are always bots free for CQ tasks.
1553			b.dimension("valgrind:1")
1554		} else if b.extraConfig("MSAN") {
1555			b.timeout(9 * time.Hour)
1556		} else if b.arch("x86") && b.debug() {
1557			// skia:6737
1558			b.timeout(6 * time.Hour)
1559		}
1560		b.maybeAddIosDevImage()
1561	})
1562
1563	// Upload results if necessary. TODO(kjlubick): If we do coverage analysis at the same
1564	// time as normal tests (which would be nice), cfg.json needs to have Coverage removed.
1565	if b.doUpload() && !directUpload {
1566		uploadName := fmt.Sprintf("%s%s%s", PREFIX_UPLOAD, b.jobNameSchema.Sep, b.Name)
1567		depName := b.Name
1568		b.addTask(uploadName, func(b *taskBuilder) {
1569			b.recipeProp("gs_bucket", b.cfg.GsBucketGm)
1570			b.recipeProps(EXTRA_PROPS)
1571			// TODO(borenet): I'm not sure why the upload task is
1572			// using the Test task name, but I've done this
1573			// to maintain existing behavior.
1574			b.Name = depName
1575			b.kitchenTask("upload_dm_results", OUTPUT_NONE)
1576			b.Name = uploadName
1577			b.serviceAccount(b.cfg.ServiceAccountUploadGM)
1578			b.linuxGceDimensions(MACHINE_TYPE_SMALL)
1579			b.cipd(specs.CIPD_PKGS_GSUTIL...)
1580			b.dep(depName)
1581		})
1582	}
1583}
1584
1585func (b *jobBuilder) fm() {
1586	goos := "linux"
1587	if strings.Contains(b.parts["os"], "Win") {
1588		goos = "windows"
1589	}
1590	if strings.Contains(b.parts["os"], "Mac") {
1591		goos = "darwin"
1592	}
1593
1594	b.addTask(b.Name, func(b *taskBuilder) {
1595		b.asset("skimage", "skp", "svg")
1596		b.cas(CAS_TEST)
1597		b.dep(b.buildTaskDrivers(goos, "amd64"), b.compile())
1598		b.cmd("./fm_driver${EXECUTABLE_SUFFIX}",
1599			"--local=false",
1600			"--resources=skia/resources",
1601			"--imgs=skimage",
1602			"--skps=skp",
1603			"--svgs=svg",
1604			"--project_id", "skia-swarming-bots",
1605			"--task_id", specs.PLACEHOLDER_TASK_ID,
1606			"--bot", b.Name,
1607			"--gold="+strconv.FormatBool(!b.matchExtraConfig("SAN")),
1608			"build/fm${EXECUTABLE_SUFFIX}")
1609		b.serviceAccount(b.cfg.ServiceAccountUploadGM)
1610		b.swarmDimensions()
1611		b.attempts(1)
1612
1613		if b.isLinux() && b.matchExtraConfig("SAN") {
1614			b.asset("clang_linux")
1615			// Sanitizers may want to run llvm-symbolizer for readable stack traces.
1616			b.addToPATH("clang_linux/bin")
1617
1618			// Point sanitizer builds at our prebuilt libc++ for this sanitizer.
1619			if b.extraConfig("MSAN") {
1620				// We'd see false positives in std::basic_string<char> if this weren't set.
1621				b.env("LD_LIBRARY_PATH", "clang_linux/msan")
1622			} else if b.extraConfig("TSAN") {
1623				// Occasional false positives may crop up in the standard library without this.
1624				b.env("LD_LIBRARY_PATH", "clang_linux/tsan")
1625			} else {
1626				// This isn't strictly required, but we usually get better sanitizer
1627				// diagnostics from libc++ than the default OS-provided libstdc++.
1628				b.env("LD_LIBRARY_PATH", "clang_linux/lib")
1629			}
1630		}
1631	})
1632}
1633
1634// canary generates a task that uses TaskDrivers to trigger canary manual rolls on autorollers.
1635// Canary-G3 does not use this path because it is very different from other autorollers.
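// As a hypothetical example, b.canary("some-autoroll-name") would generate a task asking that
// roller to attempt a canary roll of the revision/patch identified by the placeholders below.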
1636func (b *jobBuilder) canary(rollerName string) {
1637	b.addTask(b.Name, func(b *taskBuilder) {
1638		b.cas(CAS_EMPTY)
1639		b.dep(b.buildTaskDrivers("linux", "amd64"))
1640		b.cmd("./canary",
1641			"--local=false",
1642			"--project_id", "skia-swarming-bots",
1643			"--task_id", specs.PLACEHOLDER_TASK_ID,
1644			"--task_name", b.Name,
1645			"--roller_name", rollerName,
1646			"--repo", specs.PLACEHOLDER_REPO,
1647			"--revision", specs.PLACEHOLDER_REVISION,
1648			"--patch_issue", specs.PLACEHOLDER_ISSUE,
1649			"--patch_set", specs.PLACEHOLDER_PATCHSET,
1650			"--patch_server", specs.PLACEHOLDER_CODEREVIEW_SERVER,
1651			"--alsologtostderr")
1652		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
1653		b.cipd(CIPD_PKG_LUCI_AUTH)
1654		b.serviceAccount(b.cfg.ServiceAccountCanary)
1655		b.timeout(3 * time.Hour)
1656		b.attempts(1)
1657	})
1658}
1659
1660// puppeteer generates a task that uses TaskDrivers combined with a node script and puppeteer to
1661// benchmark something using Chromium (e.g. CanvasKit, LottieWeb).
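// The job's extra config selects the benchmark driver: SkottieFrames, RenderSKP, or CanvasPerf
// (see the branches below).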
1662func (b *jobBuilder) puppeteer() {
1663	compileTaskName := b.compile()
1664	b.addTask(b.Name, func(b *taskBuilder) {
1665		b.defaultSwarmDimensions()
1666		b.usesNode()
1667		b.cipd(CIPD_PKG_LUCI_AUTH)
1668		b.dep(b.buildTaskDrivers("linux", "amd64"), compileTaskName)
1669		b.output(OUTPUT_PERF)
1670		b.timeout(60 * time.Minute)
1671		b.cas(CAS_PUPPETEER)
1672		b.serviceAccount(b.cfg.ServiceAccountCompile)
1673
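		// Benchmarks run against WebGL 2 by default; the WebGL1 extra config selects WebGL 1.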
1674		webglversion := "2"
1675		if b.extraConfig("WebGL1") {
1676			webglversion = "1"
1677		}
1678
1679		if b.extraConfig("SkottieFrames") {
1680			b.cmd(
1681				"./perf_puppeteer_skottie_frames",
1682				"--project_id", "skia-swarming-bots",
1683				"--git_hash", specs.PLACEHOLDER_REVISION,
1684				"--task_id", specs.PLACEHOLDER_TASK_ID,
1685				"--task_name", b.Name,
1686				"--canvaskit_bin_path", "./build",
1687				"--lotties_path", "./lotties_with_assets",
1688				"--node_bin_path", "./node/node/bin",
1689				"--benchmark_path", "./tools/perf-canvaskit-puppeteer",
1690				"--output_path", OUTPUT_PERF,
1691				"--os_trace", b.parts["os"],
1692				"--model_trace", b.parts["model"],
1693				"--cpu_or_gpu_trace", b.parts["cpu_or_gpu"],
1694				"--cpu_or_gpu_value_trace", b.parts["cpu_or_gpu_value"],
1695				"--webgl_version", webglversion, // ignored when running with the CPU backend
1696				"--alsologtostderr",
1697			)
1698			// This CIPD package was made by hand with the following invocation:
1699			//   cipd create -name skia/internal/lotties_with_assets -in ./lotties/ -tag version:0
1700			//   cipd acl-edit skia/internal/lotties_with_assets -reader group:project-skia-external-task-accounts
1701			//   cipd acl-edit skia/internal/lotties_with_assets -reader user:pool-skia@chromium-swarm.iam.gserviceaccount.com
1702			// Where lotties is a hand-selected set of lottie animations and (optionally) assets used in
1703			// them (e.g. fonts, images).
1704			b.cipd(&specs.CipdPackage{
1705				Name:    "skia/internal/lotties_with_assets",
1706				Path:    "lotties_with_assets",
1707				Version: "version:0",
1708			})
1709		} else if b.extraConfig("RenderSKP") {
1710			b.cmd(
1711				"./perf_puppeteer_render_skps",
1712				"--project_id", "skia-swarming-bots",
1713				"--git_hash", specs.PLACEHOLDER_REVISION,
1714				"--task_id", specs.PLACEHOLDER_TASK_ID,
1715				"--task_name", b.Name,
1716				"--canvaskit_bin_path", "./build",
1717				"--skps_path", "./skp",
1718				"--node_bin_path", "./node/node/bin",
1719				"--benchmark_path", "./tools/perf-canvaskit-puppeteer",
1720				"--output_path", OUTPUT_PERF,
1721				"--os_trace", b.parts["os"],
1722				"--model_trace", b.parts["model"],
1723				"--cpu_or_gpu_trace", b.parts["cpu_or_gpu"],
1724				"--cpu_or_gpu_value_trace", b.parts["cpu_or_gpu_value"],
1725				"--webgl_version", webglversion,
1726				"--alsologtostderr",
1727			)
1728			b.asset("skp")
1729		} else if b.extraConfig("CanvasPerf") { // refers to the canvas_perf.js test suite
1730			b.cmd(
1731				"./perf_puppeteer_canvas",
1732				"--project_id", "skia-swarming-bots",
1733				"--git_hash", specs.PLACEHOLDER_REVISION,
1734				"--task_id", specs.PLACEHOLDER_TASK_ID,
1735				"--task_name", b.Name,
1736				"--canvaskit_bin_path", "./build",
1737				"--node_bin_path", "./node/node/bin",
1738				"--benchmark_path", "./tools/perf-canvaskit-puppeteer",
1739				"--output_path", OUTPUT_PERF,
1740				"--os_trace", b.parts["os"],
1741				"--model_trace", b.parts["model"],
1742				"--cpu_or_gpu_trace", b.parts["cpu_or_gpu"],
1743				"--cpu_or_gpu_value_trace", b.parts["cpu_or_gpu_value"],
1744				"--webgl_version", webglversion,
1745				"--alsologtostderr",
1746			)
1747			b.asset("skp")
1748		}
1749
1750	})
1751
1752	// Upload results to Perf after.
1753	// TODO(kjlubick,borenet) deduplicate this with the logic in perf().
1754	uploadName := fmt.Sprintf("%s%s%s", PREFIX_UPLOAD, b.jobNameSchema.Sep, b.Name)
1755	depName := b.Name
1756	b.addTask(uploadName, func(b *taskBuilder) {
1757		b.recipeProp("gs_bucket", b.cfg.GsBucketNano)
1758		b.recipeProps(EXTRA_PROPS)
1759		// TODO(borenet): I'm not sure why the upload task is
1760		// using the Perf task name, but I've done this to
1761		// maintain existing behavior.
1762		b.Name = depName
1763		b.kitchenTask("upload_nano_results", OUTPUT_NONE)
1764		b.Name = uploadName
1765		b.serviceAccount(b.cfg.ServiceAccountUploadNano)
1766		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
1767		b.cipd(specs.CIPD_PKGS_GSUTIL...)
1768		b.dep(depName)
1769	})
1770}
1771
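// cifuzz generates a task which runs the cifuzz task driver inside Docker against the whole repo,
// writing its fuzzing output to the cifuzz_out directory.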
1772func (b *jobBuilder) cifuzz() {
1773	b.addTask(b.Name, func(b *taskBuilder) {
1774		b.attempts(1)
1775		b.usesDocker()
1776		b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
1777		b.cipd(CIPD_PKG_LUCI_AUTH)
1778		b.cipd(specs.CIPD_PKGS_GIT_LINUX_AMD64...)
1779		b.dep(b.buildTaskDrivers("linux", "amd64"))
1780		b.output("cifuzz_out")
1781		b.timeout(60 * time.Minute)
1782		b.cas(CAS_WHOLE_REPO)
1783		b.serviceAccount(b.cfg.ServiceAccountCompile)
1784		b.cmd(
1785			"./cifuzz",
1786			"--project_id", "skia-swarming-bots",
1787			"--task_id", specs.PLACEHOLDER_TASK_ID,
1788			"--task_name", b.Name,
1789			"--git_exe_path", "./cipd_bin_packages/git",
1790			"--out_path", "./cifuzz_out",
1791			"--skia_path", "./skia",
1792			"--work_path", "./cifuzz_work",
1793			"--alsologtostderr",
1794		)
1795	})
1796}
1797
1798// perf generates a Perf task.
1799func (b *jobBuilder) perf() {
1800	compileTaskName := ""
1801	// LottieWeb doesn't require anything in Skia to be compiled.
1802	if !b.extraConfig("LottieWeb") {
1803		compileTaskName = b.compile()
1804	}
1805	doUpload := b.release() && b.doUpload()
1806	b.addTask(b.Name, func(b *taskBuilder) {
1807		recipe := "perf"
1808		cas := CAS_PERF
1809		if b.extraConfig("Skpbench") {
1810			recipe = "skpbench"
1811			cas = CAS_SKPBENCH
1812		} else if b.extraConfig("PathKit") {
1813			cas = CAS_PATHKIT
1814			recipe = "perf_pathkit"
1815		} else if b.extraConfig("CanvasKit") {
1816			cas = CAS_CANVASKIT
1817			recipe = "perf_canvaskit"
1818		} else if b.extraConfig("SkottieTracing") {
1819			recipe = "perf_skottietrace"
1820		} else if b.extraConfig("SkottieWASM") {
1821			recipe = "perf_skottiewasm_lottieweb"
1822			cas = CAS_SKOTTIE_WASM
1823		} else if b.extraConfig("LottieWeb") {
1824			recipe = "perf_skottiewasm_lottieweb"
1825			cas = CAS_LOTTIE_WEB
1826		}
1827		b.recipeProps(EXTRA_PROPS)
1828		if recipe == "perf" {
1829			b.nanobenchFlags(doUpload)
1830		} else if recipe == "skpbench" {
1831			b.skpbenchFlags()
1832		}
1833		b.kitchenTask(recipe, OUTPUT_PERF)
1834		b.cas(cas)
1835		b.swarmDimensions()
1836		if b.extraConfig("CanvasKit", "Docker", "PathKit") {
1837			b.usesDocker()
1838		}
1839		if compileTaskName != "" {
1840			b.dep(compileTaskName)
1841		}
1842		b.commonTestPerfAssets()
1843		b.expiration(20 * time.Hour)
1844		b.timeout(4 * time.Hour)
1845
1846		if b.extraConfig("Valgrind") {
1847			b.timeout(9 * time.Hour)
1848			b.expiration(48 * time.Hour)
1849			b.asset("valgrind")
1850			// Since Valgrind runs on the same bots as the CQ, we restrict Valgrind to a subset of the bots
1851			// to ensure there are always bots free for CQ tasks.
1852			b.dimension("valgrind:1")
1853		} else if b.extraConfig("MSAN") {
1854			b.timeout(9 * time.Hour)
1855		} else if b.parts["arch"] == "x86" && b.parts["configuration"] == "Debug" {
1856			// skia:6737
1857			b.timeout(6 * time.Hour)
1858		} else if b.extraConfig("LottieWeb", "SkottieWASM") {
1859			b.asset("node", "lottie-samples")
1860		} else if b.matchExtraConfig("Skottie") {
1861			b.asset("lottie-samples")
1862		}
1863
1864		if b.os("Android") && b.cpu() {
1865			b.asset("text_blob_traces")
1866		}
1867		b.maybeAddIosDevImage()
1868
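		// If this bot has an internal hardware label, pass it to the recipe on the command line.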
1869		iid := b.internalHardwareLabel()
1870		if iid != nil {
1871			b.Spec.Command = append(b.Spec.Command, fmt.Sprintf("internal_hardware_label=%d", *iid))
1872		}
1873	})
1874
1875	// Upload results if necessary.
1876	if doUpload {
1877		uploadName := fmt.Sprintf("%s%s%s", PREFIX_UPLOAD, b.jobNameSchema.Sep, b.Name)
1878		depName := b.Name
1879		b.addTask(uploadName, func(b *taskBuilder) {
1880			b.recipeProp("gs_bucket", b.cfg.GsBucketNano)
1881			b.recipeProps(EXTRA_PROPS)
1882			// TODO(borenet): I'm not sure why the upload task is
1883			// using the Perf task name, but I've done this to
1884			// maintain existing behavior.
1885			b.Name = depName
1886			b.kitchenTask("upload_nano_results", OUTPUT_NONE)
1887			b.Name = uploadName
1888			b.serviceAccount(b.cfg.ServiceAccountUploadNano)
1889			b.linuxGceDimensions(MACHINE_TYPE_SMALL)
1890			b.cipd(specs.CIPD_PKGS_GSUTIL...)
1891			b.dep(depName)
1892		})
1893	}
1894}
1895
1896// presubmit generates a task which runs the presubmit for this repo.
1897func (b *jobBuilder) presubmit() {
1898	b.addTask(b.Name, func(b *taskBuilder) {
1899		b.recipeProps(map[string]string{
1900			"category":         "cq",
1901			"patch_gerrit_url": "https://skia-review.googlesource.com",
1902			"patch_project":    "skia",
1903			"patch_ref":        specs.PLACEHOLDER_PATCH_REF,
1904			"reason":           "CQ",
1905			"repo_name":        "skia",
1906		})
1907		b.recipeProps(EXTRA_PROPS)
1908		b.kitchenTaskNoBundle("run_presubmit", OUTPUT_NONE)
1909		b.cas(CAS_RUN_RECIPE)
1910		b.serviceAccount(b.cfg.ServiceAccountCompile)
1911		// Use MACHINE_TYPE_LARGE because it seems to save time versus
1912		// MEDIUM and we want presubmit to be fast.
1913		b.linuxGceDimensions(MACHINE_TYPE_LARGE)
1914		b.usesGit()
1915		b.cipd(&specs.CipdPackage{
1916			Name:    "infra/recipe_bundles/chromium.googlesource.com/chromium/tools/build",
1917			Path:    "recipe_bundle",
1918			Version: "git_revision:a8bcedad6768e206c4d2bd1718caa849f29cd42d",
1919		})
1920	})
1921}
1922
1923// compileWasmGMTests uses a task driver to compile the GMs and unit tests for WebAssembly (WASM).
1924// We can use the same build for both CPU and GPU tests since the latter requires the code for the
1925// former anyway.
1926func (b *jobBuilder) compileWasmGMTests(compileName string) {
1927	b.addTask(compileName, func(b *taskBuilder) {
1928		b.attempts(1)
1929		b.usesDocker()
1930		b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
1931		b.cipd(CIPD_PKG_LUCI_AUTH)
1932		b.dep(b.buildTaskDrivers("linux", "amd64"))
1933		b.output("wasm_out")
1934		b.timeout(60 * time.Minute)
1935		b.cas(CAS_COMPILE)
1936		b.serviceAccount(b.cfg.ServiceAccountCompile)
1937		b.cache(CACHES_DOCKER...)
1938		// For now, we only have one compile mode: a GPU release build. This should be sufficient to
1939		// run the CPU, WebGL1, and WebGL2 tests. A debug build is not needed for the waterfall because
1940		// stack traces from exceptions are hard to access when running under puppeteer, so we do not
1941		// bother with one.
1942		b.cmd(
1943			"./compile_wasm_gm_tests",
1944			"--project_id", "skia-swarming-bots",
1945			"--task_id", specs.PLACEHOLDER_TASK_ID,
1946			"--task_name", compileName,
1947			"--out_path", "./wasm_out",
1948			"--skia_path", "./skia",
1949			"--work_path", "./cache/docker/wasm_gm",
1950			"--alsologtostderr",
1951		)
1952	})
1953}
1954
1955// runWasmGMTests uses a task driver to run the WebAssembly (WASM) GMs and unit tests built by
1956// compileWasmGMTests on real hardware, using the run-wasm-gm-tests harness under Node, and uploads
1957// the GM results to Gold via goldctl.
1958func (b *jobBuilder) runWasmGMTests() {
1959	compileTaskName := b.compile()
1960
1961	b.addTask(b.Name, func(b *taskBuilder) {
1962		b.attempts(1)
1963		b.usesNode()
1964		b.swarmDimensions()
1965		b.cipd(CIPD_PKG_LUCI_AUTH)
1966		b.cipd(CIPD_PKGS_GOLDCTL...)
1967		b.dep(b.buildTaskDrivers("linux", "amd64"))
1968		b.dep(compileTaskName)
1969		b.timeout(60 * time.Minute)
1970		b.cas(CAS_WASM_GM)
1971		b.serviceAccount(b.cfg.ServiceAccountUploadGM)
1972		b.cmd(
1973			"./run_wasm_gm_tests",
1974			"--project_id", "skia-swarming-bots",
1975			"--task_id", specs.PLACEHOLDER_TASK_ID,
1976			"--task_name", b.Name,
1977			"--test_harness_path", "./tools/run-wasm-gm-tests",
1978			"--built_path", "./wasm_out",
1979			"--node_bin_path", "./node/node/bin",
1980			"--resource_path", "./resources",
1981			"--work_path", "./wasm_gm/work",
1982			"--gold_ctl_path", "./cipd_bin_packages/goldctl",
1983			"--git_commit", specs.PLACEHOLDER_REVISION,
1984			"--changelist_id", specs.PLACEHOLDER_ISSUE,
1985			"--patchset_order", specs.PLACEHOLDER_PATCHSET,
1986			"--tryjob_id", specs.PLACEHOLDER_BUILDBUCKET_BUILD_ID,
1987			// TODO(kjlubick, nifong) Make these not hard coded if we change the configs we test on.
1988			"--webgl_version", "2", // 0 means CPU; this flag controls cpu_or_gpu and extra_config
1989			"--gold_key", "alpha_type:Premul",
1990			"--gold_key", "arch:wasm",
1991			"--gold_key", "browser:Chrome",
1992			"--gold_key", "color_depth:8888",
1993			"--gold_key", "config:gles",
1994			"--gold_key", "configuration:Release",
1995			"--gold_key", "cpu_or_gpu_value:QuadroP400",
1996			"--gold_key", "model:Golo",
1997			"--gold_key", "os:Ubuntu18",
1998			"--alsologtostderr",
1999		)
2000	})
2001}
2002