// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package gen_tasks_logic

/*
    Generate the tasks.json file.
*/

import (
    "encoding/json"
    "fmt"
    "io/ioutil"
    "log"
    "path"
    "path/filepath"
    "regexp"
    "runtime"
    "sort"
    "strconv"
    "strings"
    "time"

    "go.skia.org/infra/go/cas/rbe"
    "go.skia.org/infra/go/cipd"
    "go.skia.org/infra/task_scheduler/go/specs"
    "go.skia.org/skia/bazel/device_specific_configs"
)

const (
    CAS_BAZEL = "bazel"
    CAS_CANVASKIT = "canvaskit"
    CAS_COMPILE = "compile"
    CAS_EMPTY = "empty" // TODO(borenet): It'd be nice if this wasn't necessary.
    CAS_LOTTIE_CI = "lottie-ci"
    CAS_LOTTIE_WEB = "lottie-web"
    CAS_PATHKIT = "pathkit"
    CAS_PERF = "perf"
    CAS_PUPPETEER = "puppeteer"
    CAS_RUN_RECIPE = "run-recipe"
    CAS_RECIPES = "recipes"
    CAS_RECREATE_SKPS = "recreate-skps"
    CAS_SKOTTIE_WASM = "skottie-wasm"
    CAS_TASK_DRIVERS = "task-drivers"
    CAS_TEST = "test"
    CAS_WASM_GM = "wasm-gm"
    CAS_WHOLE_REPO = "whole-repo"

    BUILD_TASK_DRIVERS_PREFIX = "Housekeeper-PerCommit-BuildTaskDrivers"
    BUNDLE_RECIPES_NAME = "Housekeeper-PerCommit-BundleRecipes"
    ISOLATE_GCLOUD_LINUX_NAME = "Housekeeper-PerCommit-IsolateGCloudLinux"
    ISOLATE_SKIMAGE_NAME = "Housekeeper-PerCommit-IsolateSkImage"
    ISOLATE_SKP_NAME = "Housekeeper-PerCommit-IsolateSKP"
    ISOLATE_MSKP_NAME = "Housekeeper-PerCommit-IsolateMSKP"
    ISOLATE_SVG_NAME = "Housekeeper-PerCommit-IsolateSVG"
    ISOLATE_NDK_LINUX_NAME = "Housekeeper-PerCommit-IsolateAndroidNDKLinux"
    ISOLATE_SDK_LINUX_NAME = "Housekeeper-PerCommit-IsolateAndroidSDKLinux"
    ISOLATE_WIN_TOOLCHAIN_NAME = "Housekeeper-PerCommit-IsolateWinToolchain"

    DEBIAN_11_OS = "Debian-11.5"
    DEFAULT_OS_DEBIAN = "Debian-10.10"
    DEFAULT_OS_LINUX_GCE = "Debian-10.3"
    OLD_OS_LINUX_GCE = "Debian-9.8"
    COMPILE_TASK_NAME_OS_LINUX = "Debian10"
    COMPILE_TASK_NAME_OS_LINUX_OLD = "Debian9"
    DEFAULT_OS_MAC = "Mac-14.5"
    DEFAULT_OS_WIN_GCE = "Windows-Server-17763"
    UBUNTU_20_04_OS = "Ubuntu-20.04"
    UBUNTU_22_04_OS = "Ubuntu-22.04"
    UBUNTU_24_04_OS = "Ubuntu-24.04"

    // Small is a 2-core machine.
    // TODO(dogben): Would n1-standard-1 or n1-standard-2 be sufficient?
    MACHINE_TYPE_SMALL = "n1-highmem-2"
    // Medium is a 16-core machine.
    MACHINE_TYPE_MEDIUM = "n1-standard-16"
    // Large is a 64-core machine. (We use "highcpu" because we don't need more than 57GB memory for
    // any of our tasks.)
    MACHINE_TYPE_LARGE = "n1-highcpu-64"

    // Swarming output dirs.
    OUTPUT_NONE = "output_ignored" // This will result in outputs not being isolated.
    OUTPUT_BUILD = "build"
    OUTPUT_BUILD_NOPATCH = "build_nopatch"
    OUTPUT_TEST = "test"
    OUTPUT_PERF = "perf"
    OUTPUT_BAZEL = "bazel_output"

    // Name prefix for upload jobs.
    PREFIX_UPLOAD = "Upload"

    // This will have to be kept in sync with the kMin_Version in
    // src/core/SkPicturePriv.h.
    // See the comment in that file on how to find the version to use here.
    oldestSupportedSkpVersion = 293

    // bazelCacheDirOnGCELinux is the path where Bazel should write its cache on Linux GCE machines.
    // The Bazel cache can grow large (>10GB), so this should be in a partition with enough free
    // space. On Linux GCE machines, the partition mounted at /mnt/pd0 is significantly larger than
    // the partition mounted at /.
    bazelCacheDirOnGCELinux = "/mnt/pd0/bazel_cache"

    // bazelCacheDirOnSkoloLinux is like bazelCacheDirOnGCELinux for Skolo Linux machines. Unlike GCE
    // Linux machines, the partition mounted at / on Skolo Linux machines is large enough. While
    // using the default Bazel cache path would work, our Bazel task drivers demand an explicit path.
    // We store the Bazel cache at /home/chrome-bot/bazel_cache rather than in the default location
    // of /home/chrome-bot/cache/.bazel to make it obvious to someone examining a Skolo machine that
    // we are overriding the default location.
    bazelCacheDirOnSkoloLinux = "/home/chrome-bot/bazel_cache"

    // bazelCacheDirOnWindows is like bazelCacheDirOnSkoloLinux. Unlike GCE Linux machines, we only
    // have a single partition. While using the default cache path would work, our Bazel task
    // drivers demand an explicit path. We store the Bazel cache at /home/chrome-bot/bazel_cache
    // rather than in the default location of %APPDATA% to make it obvious to someone examining a
    // Skolo machine that we are overriding the default location. Note that double-escaping the
    // path separator is necessary because this string is passed to Bazel via multiple levels of
    // subprocesses.
    bazelCacheDirOnWindows = `C:\\Users\\chrome-bot\\bazel_cache`
)

var (
    // "Constants"

    // Named caches used by tasks.
    CACHES_GIT = []*specs.Cache{
        {
            Name: "git",
            Path: "cache/git",
        },
        {
            Name: "git_cache",
            Path: "cache/git_cache",
        },
    }
    CACHES_GO = []*specs.Cache{
        {
            Name: "go_cache",
            Path: "cache/go_cache",
        },
        {
            Name: "gopath",
            Path: "cache/gopath",
        },
    }
    CACHES_WORKDIR = []*specs.Cache{
        {
            Name: "work",
            Path: "cache/work",
        },
    }
    CACHES_CCACHE = []*specs.Cache{
        {
            Name: "ccache",
            Path: "cache/ccache",
        },
    }
    // The "docker" cache is used as a persistent working directory for
    // tasks which use Docker. It is not to be confused with Docker's own
    // cache, which stores images. We do not currently use a named Swarming
    // cache for the latter.
    // TODO(borenet): We should ensure that any task which uses Docker does
    // not also use the normal "work" cache, to prevent issues like
    // https://bugs.chromium.org/p/skia/issues/detail?id=9749.
    CACHES_DOCKER = []*specs.Cache{
        {
            Name: "docker",
            Path: "cache/docker",
        },
    }

    // CAS_SPEC_LOTTIE_CI is a CasSpec which includes the files needed for
    // lottie-ci. This is global so that it can be overridden by other
    // repositories which import this file.
    CAS_SPEC_LOTTIE_CI = &specs.CasSpec{
        Root: "..",
        Paths: []string{
            "skia/.vpython3",
            "skia/infra/bots/run_recipe.py",
            "skia/infra/lottiecap",
            "skia/tools/lottie-web-perf",
            "skia/tools/lottiecap",
        },
        Excludes: []string{rbe.ExcludeGitDir},
    }

    // CAS_SPEC_WHOLE_REPO is a CasSpec which includes the entire repo. This is
    // global so that it can be overridden by other repositories which import
    // this file.
    CAS_SPEC_WHOLE_REPO = &specs.CasSpec{
        Root: "..",
        Paths: []string{"skia"},
        Excludes: []string{rbe.ExcludeGitDir},
    }

    // TODO(borenet): This is hacky and bad.
    CIPD_PKG_LUCI_AUTH = cipd.MustGetPackage("infra/tools/luci-auth/${platform}")

    CIPD_PKGS_GOLDCTL = cipd.MustGetPackage("skia/tools/goldctl/${platform}")

    CIPD_PKGS_XCODE = []*specs.CipdPackage{
        // https://chromium.googlesource.com/chromium/tools/build/+/e19b7d9390e2bb438b566515b141ed2b9ed2c7c2/scripts/slave/recipe_modules/ios/api.py#317
        // This package is really just an installer for XCode.
        {
            Name: "infra/tools/mac_toolchain/${platform}",
            Path: "mac_toolchain",
            // When this is updated, also update
            // https://skia.googlesource.com/skcms.git/+/f1e2b45d18facbae2dece3aca673fe1603077846/infra/bots/gen_tasks.go#56
            Version: "git_revision:e6f45bde6c5ee56924b1f905159b6a1a48ef25dd",
        },
    }

    // These properties are required by some tasks, e.g. for running
    // bot_update, but they prevent de-duplication, so they should only be
    // used where necessary.
    EXTRA_PROPS = map[string]string{
        "buildbucket_build_id": specs.PLACEHOLDER_BUILDBUCKET_BUILD_ID,
        "patch_issue": specs.PLACEHOLDER_ISSUE_INT,
        "patch_ref": specs.PLACEHOLDER_PATCH_REF,
        "patch_repo": specs.PLACEHOLDER_PATCH_REPO,
        "patch_set": specs.PLACEHOLDER_PATCHSET_INT,
        "patch_storage": specs.PLACEHOLDER_PATCH_STORAGE,
        "repository": specs.PLACEHOLDER_REPO,
        "revision": specs.PLACEHOLDER_REVISION,
        "task_id": specs.PLACEHOLDER_TASK_ID,
    }

    // ISOLATE_ASSET_MAPPING maps the name of an asset to the configuration
    // for how the CIPD package should be installed for a given task.
    ISOLATE_ASSET_MAPPING = map[string]uploadAssetCASCfg{
        "gcloud_linux": {
            uploadTaskName: ISOLATE_GCLOUD_LINUX_NAME,
            path: "gcloud_linux",
        },
        "skimage": {
            uploadTaskName: ISOLATE_SKIMAGE_NAME,
            path: "skimage",
        },
        "skp": {
            uploadTaskName: ISOLATE_SKP_NAME,
            path: "skp",
        },
        "svg": {
            uploadTaskName: ISOLATE_SVG_NAME,
            path: "svg",
        },
        "mskp": {
            uploadTaskName: ISOLATE_MSKP_NAME,
            path: "mskp",
        },
        "android_ndk_linux": {
            uploadTaskName: ISOLATE_NDK_LINUX_NAME,
            path: "android_ndk_linux",
        },
        "android_sdk_linux": {
            uploadTaskName: ISOLATE_SDK_LINUX_NAME,
            path: "android_sdk_linux",
        },
        "win_toolchain": {
            alwaysIsolate: true,
            uploadTaskName: ISOLATE_WIN_TOOLCHAIN_NAME,
            path: "win_toolchain",
        },
    }

    // Set the dontReduceOpsTaskSplitting option on these models.
    DONT_REDUCE_OPS_TASK_SPLITTING_MODELS = []string{
        "NUC5PPYH",
    }
)

// Config contains general configuration information.
type Config struct {
    // Directory containing assets. Assumed to be relative to the directory
    // which contains the calling gen_tasks.go file. If not specified, uses
    // the infra/bots/assets from this repo.
    AssetsDir string `json:"assets_dir"`

    // Path to the builder name schema JSON file. Assumed to be relative to
    // the directory which contains the calling gen_tasks.go file. If not
    // specified, uses infra/bots/recipe_modules/builder_name_schema/builder_name_schema.json
    // from this repo.
    BuilderNameSchemaFile string `json:"builder_name_schema"`

    // URL of the Skia Gold known hashes endpoint.
    GoldHashesURL string `json:"gold_hashes_url"`

    // GCS bucket used for GM results.
    GsBucketGm string `json:"gs_bucket_gm"`

    // GCS bucket used for Nanobench results.
    GsBucketNano string `json:"gs_bucket_nano"`

    // Optional function which returns a bot ID for internal devices.
    InternalHardwareLabel func(parts map[string]string) *int `json:"-"`

    // List of task names for which we'll never upload results.
    NoUpload []string `json:"no_upload"`

    // PathToSkia is the relative path from the root of the current checkout to
    // the root of the Skia checkout.
    PathToSkia string `json:"path_to_skia"`

    // Swarming pool used for triggering tasks.
    Pool string `json:"pool"`

    // LUCI project associated with this repo.
    Project string `json:"project"`

    // Service accounts.
    ServiceAccountCanary string `json:"service_account_canary"`
    ServiceAccountCompile string `json:"service_account_compile"`
    ServiceAccountHousekeeper string `json:"service_account_housekeeper"`
    ServiceAccountRecreateSKPs string `json:"service_account_recreate_skps"`
    ServiceAccountUploadBinary string `json:"service_account_upload_binary"`
    ServiceAccountUploadGM string `json:"service_account_upload_gm"`
    ServiceAccountUploadNano string `json:"service_account_upload_nano"`

    // Optional override function which derives Swarming bot dimensions
    // from parts of task names.
    SwarmDimensions func(parts map[string]string) []string `json:"-"`
}

// JobInfo is the type of each entry in the jobs.json file.
type JobInfo struct {
    // The name of the job.
    Name string `json:"name"`

    // The optional CQ config of this job. If the CQ config is missing then the
    // job will not be added to the CQ of this branch.
    CQConfig *specs.CommitQueueJobConfig `json:"cq_config,omitempty"`
}

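// For illustration only: the two sibling JSON files consumed by GenTasks
// (below) might look roughly like the following. The job names and values are
// hypothetical; real repos define many more jobs and config fields.
//
// cfg.json (decoded into Config):
//
//    {
//      "gold_hashes_url": "https://example.com/hashes",
//      "gs_bucket_gm": "example-gm-bucket",
//      "gs_bucket_nano": "example-nano-bucket",
//      "pool": "Skia",
//      "project": "skia"
//    }
//
// jobs.json (decoded into []*JobInfo):
//
//    [
//      {"name": "Build-Debian10-Clang-x86_64-Release"},
//      {"name": "Housekeeper-PerCommit-InfraTests", "cq_config": {}}
//    ]
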
// LoadConfig loads the Config from a cfg.json file which is the sibling of the
// calling gen_tasks.go file.
func LoadConfig() *Config {
    cfgDir := getCallingDirName()
    var cfg Config
    LoadJson(filepath.Join(cfgDir, "cfg.json"), &cfg)
    return &cfg
}

// CheckoutRoot is a wrapper around specs.GetCheckoutRoot which prevents the
// caller from needing a dependency on the specs package.
func CheckoutRoot() string {
    root, err := specs.GetCheckoutRoot()
    if err != nil {
        log.Fatal(err)
    }
    return root
}

// LoadJson loads JSON from the given file and unmarshals it into the given
// destination.
func LoadJson(filename string, dest interface{}) {
    b, err := ioutil.ReadFile(filename)
    if err != nil {
        log.Fatalf("Unable to read %q: %s", filename, err)
    }
    if err := json.Unmarshal(b, dest); err != nil {
        log.Fatalf("Unable to parse %q: %s", filename, err)
    }
}

// In returns true if |s| is *in* |a| slice.
// TODO(borenet): This is copied from go.skia.org/infra/go/util to avoid the
// huge set of additional dependencies added by that package.
func In(s string, a []string) bool {
    for _, x := range a {
        if x == s {
            return true
        }
    }
    return false
}

// GenTasks regenerates the tasks.json file. Loads the job list from a jobs.json
// file which is the sibling of the calling gen_tasks.go file. If cfg is nil, it
// is similarly loaded from a cfg.json file which is the sibling of the calling
// gen_tasks.go file.
func GenTasks(cfg *Config) {
    b := specs.MustNewTasksCfgBuilder()

    // Find the paths to the infra/bots directories in this repo and the
    // repo of the calling file.
    relpathTargetDir := getThisDirName()
    relpathBaseDir := getCallingDirName()

    // Parse jobs.json.
    var jobsWithInfo []*JobInfo
    LoadJson(filepath.Join(relpathBaseDir, "jobs.json"), &jobsWithInfo)
    // Create a slice with only job names.
    jobs := []string{}
    for _, j := range jobsWithInfo {
        jobs = append(jobs, j.Name)
    }

    if cfg == nil {
        cfg = new(Config)
        LoadJson(filepath.Join(relpathBaseDir, "cfg.json"), cfg)
    }

    // Create the JobNameSchema.
    builderNameSchemaFile := filepath.Join(relpathTargetDir, "recipe_modules", "builder_name_schema", "builder_name_schema.json")
    if cfg.BuilderNameSchemaFile != "" {
        builderNameSchemaFile = filepath.Join(relpathBaseDir, cfg.BuilderNameSchemaFile)
    }
    schema, err := NewJobNameSchema(builderNameSchemaFile)
    if err != nil {
        log.Fatal(err)
    }

    // Set the assets dir.
    assetsDir := filepath.Join(relpathTargetDir, "assets")
    if cfg.AssetsDir != "" {
        assetsDir = filepath.Join(relpathBaseDir, cfg.AssetsDir)
    }
    b.SetAssetsDir(assetsDir)

    // Create Tasks and Jobs.
    builder := &builder{
        TasksCfgBuilder: b,
        cfg: cfg,
        jobNameSchema: schema,
        jobs: jobs,
    }
    for _, j := range jobsWithInfo {
        jb := newJobBuilder(builder, j.Name)
        jb.genTasksForJob()
        jb.finish()

        // Add the CQ spec if it is a CQ job.
        if j.CQConfig != nil {
            b.MustAddCQJob(j.Name, j.CQConfig)
        }
    }

    // Create CasSpecs.
    b.MustAddCasSpec(CAS_BAZEL, &specs.CasSpec{
        Root: "..",
        Paths: []string{
            // Source code.
            "skia/example",
            "skia/experimental/rust_png",
            "skia/include",
            "skia/modules",
            "skia/src",
            "skia/tests",
            "skia/third_party",
            "skia/tools",
            // Needed for tests.
            "skia/bench", // Needed to run benchmark tests with Bazel.
            "skia/gm", // Needed to run GMs with Bazel.
            "skia/gn", // Some Python scripts still live here.
            "skia/resources",
            "skia/package.json",
            "skia/package-lock.json",
            "skia/DEPS", // Needed to check generation.
            "skia/infra", // Many Go tests and Bazel tools live here.
            "skia/go.mod", // Needed by Gazelle.
            "skia/go.sum", // Needed by Gazelle.
            // Needed to run Bazel.
463 "skia/.bazelignore", 464 "skia/.bazelrc", 465 "skia/.bazelversion", 466 "skia/BUILD.bazel", 467 "skia/LICENSE", // Referred to by default_applicable_licenses 468 "skia/WORKSPACE.bazel", 469 "skia/bazel", 470 "skia/go_repositories.bzl", 471 "skia/requirements.txt", 472 "skia/toolchain", 473 }, 474 Excludes: []string{ 475 rbe.ExcludeGitDir, 476 "skia/third_party/externals", 477 }, 478 }) 479 b.MustAddCasSpec(CAS_CANVASKIT, &specs.CasSpec{ 480 Root: "..", 481 Paths: []string{ 482 "skia/.vpython3", 483 "skia/infra/bots/run_recipe.py", 484 "skia/infra/canvaskit", 485 "skia/modules/canvaskit", 486 "skia/modules/pathkit/perf/perfReporter.js", 487 "skia/modules/pathkit/tests/testReporter.js", 488 }, 489 Excludes: []string{rbe.ExcludeGitDir}, 490 }) 491 b.MustAddCasSpec(CAS_EMPTY, specs.EmptyCasSpec) 492 b.MustAddCasSpec(CAS_LOTTIE_CI, CAS_SPEC_LOTTIE_CI) 493 b.MustAddCasSpec(CAS_LOTTIE_WEB, &specs.CasSpec{ 494 Root: "..", 495 Paths: []string{ 496 "skia/.vpython3", 497 "skia/infra/bots/run_recipe.py", 498 "skia/tools/lottie-web-perf", 499 }, 500 Excludes: []string{rbe.ExcludeGitDir}, 501 }) 502 b.MustAddCasSpec(CAS_PATHKIT, &specs.CasSpec{ 503 Root: "..", 504 Paths: []string{ 505 "skia/.vpython3", 506 "skia/infra/bots/run_recipe.py", 507 "skia/infra/pathkit", 508 "skia/modules/pathkit", 509 }, 510 Excludes: []string{rbe.ExcludeGitDir}, 511 }) 512 b.MustAddCasSpec(CAS_PERF, &specs.CasSpec{ 513 Root: "..", 514 Paths: []string{ 515 "skia/.vpython3", 516 "skia/infra/bots/assets", 517 "skia/infra/bots/run_recipe.py", 518 "skia/platform_tools/ios/bin", 519 "skia/resources", 520 "skia/tools/valgrind.supp", 521 }, 522 Excludes: []string{rbe.ExcludeGitDir}, 523 }) 524 b.MustAddCasSpec(CAS_PUPPETEER, &specs.CasSpec{ 525 Root: "../skia", // Needed for other repos. 
        Paths: []string{
            ".vpython3",
            "tools/perf-canvaskit-puppeteer",
        },
        Excludes: []string{rbe.ExcludeGitDir},
    })
    b.MustAddCasSpec(CAS_RECIPES, &specs.CasSpec{
        Root: "..",
        Paths: []string{
            "skia/.vpython3",
            "skia/infra/config/recipes.cfg",
            "skia/infra/bots/bundle_recipes.sh",
            "skia/infra/bots/README.recipes.md",
            "skia/infra/bots/recipe_modules",
            "skia/infra/bots/recipes",
            "skia/infra/bots/recipes.py",
        },
        Excludes: []string{rbe.ExcludeGitDir},
    })
    b.MustAddCasSpec(CAS_RUN_RECIPE, &specs.CasSpec{
        Root: "..",
        Paths: []string{
            "skia/.vpython3",
            "skia/infra/bots/run_recipe.py",
        },
        Excludes: []string{rbe.ExcludeGitDir},
    })
    b.MustAddCasSpec(CAS_SKOTTIE_WASM, &specs.CasSpec{
        Root: "..",
        Paths: []string{
            "skia/.vpython3",
            "skia/infra/bots/run_recipe.py",
            "skia/tools/skottie-wasm-perf",
        },
        Excludes: []string{rbe.ExcludeGitDir},
    })
    b.MustAddCasSpec(CAS_TASK_DRIVERS, &specs.CasSpec{
        Root: "..",
        Paths: []string{
            // Deps needed to use Bazel
            "skia/.bazelrc",
            "skia/.bazelversion",
            "skia/BUILD.bazel",
            "skia/LICENSE",
            "skia/WORKSPACE.bazel",
            "skia/bazel",
            "skia/go_repositories.bzl",
            "skia/include/config", // There's a WORKSPACE.bazel in here
            "skia/requirements.txt",
            "skia/toolchain",
            // TODO(kjlubick, lukasza) remove after rust's png crate is updated
            // and we don't need the patches anymore
            "skia/experimental/rust_png",
            // Actually needed to build the task drivers
            "skia/infra/bots/BUILD.bazel",
            "skia/infra/bots/build_task_drivers.sh",
            "skia/infra/bots/task_drivers",
        },
        Excludes: []string{rbe.ExcludeGitDir},
    })
    b.MustAddCasSpec(CAS_TEST, &specs.CasSpec{
        Root: "..",
        Paths: []string{
            "skia/.vpython3",
            "skia/infra/bots/assets",
            "skia/infra/bots/run_recipe.py",
            "skia/platform_tools/ios/bin",
            "skia/resources",
            "skia/tools/valgrind.supp",
        },
        Excludes: []string{rbe.ExcludeGitDir},
    })
    b.MustAddCasSpec(CAS_WASM_GM, &specs.CasSpec{
        Root: "../skia", // Needed for other repos.
        Paths: []string{
            ".vpython3",
            "resources",
            "tools/run-wasm-gm-tests",
        },
        Excludes: []string{rbe.ExcludeGitDir},
    })
    b.MustAddCasSpec(CAS_WHOLE_REPO, CAS_SPEC_WHOLE_REPO)
    b.MustAddCasSpec(CAS_RECREATE_SKPS, &specs.CasSpec{
        Root: "..",
        Paths: []string{
            "skia/.vpython3",
            "skia/DEPS",
            "skia/bin/fetch-sk",
            "skia/infra/bots/assets/skp",
            "skia/infra/bots/utils.py",
            "skia/tools/skp",
        },
        Excludes: []string{rbe.ExcludeGitDir},
    })
    generateCompileCAS(b, cfg)

    builder.MustFinish()
}

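// For illustration only: a repo that imports this package typically drives it
// from its own gen_tasks.go. A minimal sketch, assuming the importing repo
// keeps cfg.json and jobs.json next to that file (the import path below is how
// this repo is imported; other repos may differ):
//
//    package main
//
//    import "go.skia.org/skia/infra/bots/gen_tasks_logic"
//
//    func main() {
//        // Passing nil loads cfg.json from the caller's directory.
//        gen_tasks_logic.GenTasks(nil)
//    }
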
// getThisDirName returns the infra/bots directory which is an ancestor of this
// file.
func getThisDirName() string {
    _, thisFileName, _, ok := runtime.Caller(0)
    if !ok {
        log.Fatal("Unable to find path to current file.")
    }
    return filepath.Dir(filepath.Dir(thisFileName))
}

// getCallingDirName returns the infra/bots directory which is an ancestor of
// the calling gen_tasks.go file. WARNING: assumes that the calling gen_tasks.go
// file appears two steps up the stack; do not call from a function which is not
// directly called by gen_tasks.go.
func getCallingDirName() string {
    _, callingFileName, _, ok := runtime.Caller(2)
    if !ok {
        log.Fatal("Unable to find path to calling file.")
    }
    return filepath.Dir(callingFileName)
}

// builder is a wrapper for specs.TasksCfgBuilder.
type builder struct {
    *specs.TasksCfgBuilder
    cfg *Config
    jobNameSchema *JobNameSchema
    jobs []string
}

// marshalJson encodes the given data as JSON and fixes escaping of '<' which Go
// does by default.
func marshalJson(data interface{}) string {
    j, err := json.Marshal(data)
    if err != nil {
        log.Fatal(err)
    }
    return strings.Replace(string(j), "\\u003c", "<", -1)
}

// kitchenTaskNoBundle sets up the task to run a recipe via Kitchen, without the
// recipe bundle.
func (b *taskBuilder) kitchenTaskNoBundle(recipe string, outputDir string) {
    b.usesLUCIAuth()
    b.cipd(cipd.MustGetPackage("infra/tools/luci/kitchen/${platform}"))
    b.env("RECIPES_USE_PY3", "true")
    b.envPrefixes("VPYTHON_DEFAULT_SPEC", "skia/.vpython3")
    b.usesPython()
    b.recipeProp("swarm_out_dir", outputDir)
    if outputDir != OUTPUT_NONE {
        b.output(outputDir)
    }
    const python = "cipd_bin_packages/vpython3${EXECUTABLE_SUFFIX}"
    b.cmd(python, "-u", "skia/infra/bots/run_recipe.py", "${ISOLATED_OUTDIR}", recipe, b.getRecipeProps(), b.cfg.Project)
    // Most recipes want this isolate; they can override if necessary.
    b.cas(CAS_RUN_RECIPE)
    b.timeout(time.Hour)
    b.Spec.ExtraTags = map[string]string{
        "log_location": fmt.Sprintf("logdog://logs.chromium.org/%s/${SWARMING_TASK_ID}/+/annotations", b.cfg.Project),
    }

    // Attempts.
    if !b.role("Build", "Upload") && b.extraConfig("ASAN", "HWASAN", "MSAN", "TSAN", "Valgrind") {
        // Sanitizers often find non-deterministic issues that retries would hide.
        b.attempts(1)
    } else {
        // Retry by default to hide random bot/hardware failures.
        b.attempts(2)
    }
}

// kitchenTask sets up the task to run a recipe via Kitchen.
func (b *taskBuilder) kitchenTask(recipe string, outputDir string) {
    b.kitchenTaskNoBundle(recipe, outputDir)
    b.dep(b.bundleRecipes())
}

// internalHardwareLabel returns the internal ID for the bot, if any.
func (b *taskBuilder) internalHardwareLabel() *int {
    if b.cfg.InternalHardwareLabel != nil {
        return b.cfg.InternalHardwareLabel(b.parts)
    }
    return nil
}

// linuxGceDimensions adds the Swarming bot dimensions for Linux GCE instances.
func (b *taskBuilder) linuxGceDimensions(machineType string) {
    b.dimension(
        // Specify CPU to avoid running builds on bots with a more unique CPU.
        "cpu:x86-64-Haswell_GCE",
        "gpu:none",
        // Currently all Linux GCE tasks run on 16-CPU machines.
        fmt.Sprintf("machine_type:%s", machineType),
        fmt.Sprintf("os:%s", DEFAULT_OS_LINUX_GCE),
        fmt.Sprintf("pool:%s", b.cfg.Pool),
    )
}

// codesizeTaskNameRegexp captures the "CodeSize-<binary name>-" prefix of a CodeSize task name.
var codesizeTaskNameRegexp = regexp.MustCompile("^CodeSize-[a-zA-Z0-9_]+-")

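// For illustration (hypothetical task name): deriveCompileTaskName below uses
// this regexp to rewrite, e.g.,
//
//    CodeSize-dm-Debian10-Clang-x86_64-Release
//
// into
//
//    Build-Debian10-Clang-x86_64-Release
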
// deriveCompileTaskName returns the name of a compile task based on the given
// job name.
func (b *jobBuilder) deriveCompileTaskName() string {
    if b.role("Test", "Perf") {
        task_os := b.parts["os"]
        ec := []string{}
        if val := b.parts["extra_config"]; val != "" {
            ec = strings.Split(val, "_")
            ignore := []string{
                "AbandonGpuContext", "PreAbandonGpuContext", "Valgrind",
                "FailFlushTimeCallbacks", "ReleaseAndAbandonGpuContext",
                "NativeFonts", "GDI", "NoGPUThreads", "DDL1", "DDL3",
                "DDLRecord", "BonusConfigs", "ColorSpaces", "GL",
                "SkottieTracing", "SkottieWASM", "GpuTess", "DMSAAStats", "Docker", "PDF",
                "Puppeteer", "SkottieFrames", "RenderSKP", "CanvasPerf", "AllPathsVolatile",
                "WebGL2", "i5", "OldestSupportedSkpVersion", "FakeWGPU", "TintIR", "Protected",
                "AndroidNDKFonts", "Upload"}
            keep := make([]string, 0, len(ec))
            for _, part := range ec {
                if !In(part, ignore) {
                    keep = append(keep, part)
                }
            }
            ec = keep
        }
        if b.matchOs("Android") {
            if !In("Android", ec) {
                ec = append([]string{"Android"}, ec...)
            }
            task_os = COMPILE_TASK_NAME_OS_LINUX
        } else if b.os("ChromeOS") {
            ec = append([]string{"Chromebook", "GLES"}, ec...)
            task_os = COMPILE_TASK_NAME_OS_LINUX
        } else if b.os("iOS") {
            ec = append([]string{task_os}, ec...)
            if b.parts["compiler"] == "Xcode11.4.1" {
                task_os = "Mac10.15.7"
            } else {
                task_os = "Mac"
            }
        } else if b.matchOs("Win") {
            task_os = "Win"
        } else if b.compiler("GCC") {
            // GCC compiles are now done in a Docker container. We use the same OS and
            // version to compile as to test.
            ec = append(ec, "Docker")
        } else if b.matchOs("Debian11") {
            // We compile using the Debian11 machines in the skolo.
            task_os = "Debian11"
        } else if b.matchOs("Ubuntu", "Debian") {
            task_os = COMPILE_TASK_NAME_OS_LINUX
        } else if b.matchOs("Mac") {
            task_os = "Mac"
        }
        jobNameMap := map[string]string{
            "role": "Build",
            "os": task_os,
            "compiler": b.parts["compiler"],
            "target_arch": b.parts["arch"],
            "configuration": b.parts["configuration"],
        }
        if b.extraConfig("PathKit") {
            ec = []string{"PathKit"}
            // We prefer to compile this in the cloud because we have more resources there.
            jobNameMap["os"] = "Debian10"
        }
        if b.extraConfig("CanvasKit", "SkottieWASM", "Puppeteer") {
            if b.cpu() {
                ec = []string{"CanvasKit_CPU"}
            } else {
                ec = []string{"CanvasKit"}
            }
            // We prefer to compile this in the cloud because we have more resources there.
            jobNameMap["os"] = "Debian10"
        }
        if len(ec) > 0 {
            jobNameMap["extra_config"] = strings.Join(ec, "_")
        }
        name, err := b.jobNameSchema.MakeJobName(jobNameMap)
        if err != nil {
            log.Fatal(err)
        }
        return name
    } else if b.role("BuildStats") {
        return strings.Replace(b.Name, "BuildStats", "Build", 1)
    } else if b.role("CodeSize") {
        return codesizeTaskNameRegexp.ReplaceAllString(b.Name, "Build-")
    } else {
        return b.Name
    }
}

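// For illustration (hypothetical job name): a job such as
//
//    Test-Android-Clang-Pixel7-GPU-MaliG710-arm64-Release-All-Android
//
// derives a compile task name along the lines of
//
//    Build-Debian10-Clang-arm64-Release-Android
//
// because Android test binaries are cross-compiled on Linux GCE machines
// (COMPILE_TASK_NAME_OS_LINUX above). The exact name depends on the builder
// name schema in use.
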
// swarmDimensions generates swarming bot dimensions for the given task.
func (b *taskBuilder) swarmDimensions() {
    if b.cfg.SwarmDimensions != nil {
        dims := b.cfg.SwarmDimensions(b.parts)
        if dims != nil {
            b.dimension(dims...)
            return
        }
    }
    b.defaultSwarmDimensions()
}

// androidDeviceInfos maps Android models (as in the "model" part of a task) to the device_type and
// device_os Swarming dimensions.
var androidDeviceInfos = map[string][]string{
    "AndroidOne": {"sprout", "MOB30Q"},
    "GalaxyS7_G930FD": {"herolte", "R16NW_G930FXXS2ERH6"}, // This is Oreo.
    "GalaxyS20": {"exynos990", "QP1A.190711.020"},
    "GalaxyS24": {"pineapple", "UP1A.231005.007"},
    "JioNext": {"msm8937", "RKQ1.210602.002"},
    "Mokey": {"mokey", "UDC_11161052"},
    "MokeyGo32": {"mokey_go32", "UQ1A.240105.003.A1_11159138"},
    "MotoG73": {"devonf", "U1TNS34.82-12-7-6"},
    "Nexus5": {"hammerhead", "M4B30Z_3437181"},
    "Nexus7": {"grouper", "LMY47V_1836172"}, // 2012 Nexus 7
    "P30": {"HWELE", "HUAWEIELE-L29"},
    "Pixel3a": {"sargo", "QP1A.190711.020"},
    "Pixel4": {"flame", "RPB2.200611.009"}, // R Preview
    "Pixel4a": {"sunfish", "AOSP.MASTER_7819821"}, // Pixel4a flashed with an Android HWASan build.
    "Pixel4XL": {"coral", "QD1A.190821.011.C4"},
    "Pixel5": {"redfin", "RD1A.200810.022.A4"},
    "Pixel6": {"oriole", "SD1A.210817.037"},
    "Pixel7": {"panther", "AP4A.241205.013"},
    "Pixel7Pro": {"cheetah", "TD1A.221105.002"},
    "Pixel9": {"tokay", "AP4A.241205.013"},
    "TecnoSpark3Pro": {"TECNO-KB8", "PPR1.180610.011"},
    "Wembley": {"wembley", "SP2A.220505.008"},
}

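// For illustration (values taken from the map above): a Test task whose
// "model" part is "Pixel7" is assigned the Swarming dimensions
//
//    device_type:panther
//    device_os:AP4A.241205.013
//
// by defaultSwarmDimensions below.
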
// defaultSwarmDimensions generates default swarming bot dimensions for the given task.
func (b *taskBuilder) defaultSwarmDimensions() {
    d := map[string]string{
        "pool": b.cfg.Pool,
    }
    if os, ok := b.parts["os"]; ok {
        d["os"], ok = map[string]string{
            "Android": "Android",
            "Android12": "Android",
            "ChromeOS": "ChromeOS",
            "Debian9": DEFAULT_OS_LINUX_GCE, // Runs in Deb9 Docker.
            "Debian10": DEFAULT_OS_LINUX_GCE,
            "Debian11": DEBIAN_11_OS,
            "Mac": DEFAULT_OS_MAC,
            "Mac10.15.1": "Mac-10.15.1",
            "Mac10.15.7": "Mac-10.15.7",
            "Mac11": "Mac-11.4",
            "Mac12": "Mac-12",
            "Mac13": "Mac-13",
            "Mokey": "Android",
            "MokeyGo32": "Android",
            "Ubuntu18": "Ubuntu-18.04",
            "Ubuntu20.04": UBUNTU_20_04_OS,
            "Ubuntu22.04": UBUNTU_22_04_OS,
            "Ubuntu24.04": UBUNTU_24_04_OS,
            "Win": DEFAULT_OS_WIN_GCE,
            "Win10": "Windows-10-19045",
            "Win11": "Windows-11-26100.1742",
            "Win2019": DEFAULT_OS_WIN_GCE,
            "iOS": "iOS-13.3.1",
        }[os]
        if !ok {
            log.Fatalf("Entry %q not found in OS mapping.", os)
        }
        if os == "Debian11" && b.extraConfig("Docker") {
            d["os"] = DEFAULT_OS_LINUX_GCE
        }
        if os == "Win10" && b.parts["model"] == "Golo" {
            // ChOps-owned machines have Windows 10 22H2.
            d["os"] = "Windows-10-19045"
        }
        if b.parts["model"] == "iPadPro" {
            d["os"] = "iOS-13.6"
        }
    } else {
        d["os"] = DEFAULT_OS_DEBIAN
    }
    if b.role("Test", "Perf") {
        if b.os("Android") {
            // For Android, the device type is a better dimension
            // than CPU or GPU.
            deviceInfo, ok := androidDeviceInfos[b.parts["model"]]
            if !ok {
                log.Fatalf("Entry %q not found in Android mapping.", b.parts["model"])
            }
            d["device_type"] = deviceInfo[0]
            d["device_os"] = deviceInfo[1]

            // Tests using Android's HWAddress Sanitizer require an HWASan build of Android.
            // See https://developer.android.com/ndk/guides/hwasan.
            if b.extraConfig("HWASAN") {
                d["android_hwasan_build"] = "1"
            }
        } else if b.os("Android12") {
            // For Android, the device type is a better dimension
            // than CPU or GPU.
            deviceInfo, ok := map[string][]string{
                "Pixel5": {"redfin", "SP2A.220305.012"},
            }[b.parts["model"]]
            if !ok {
                log.Fatalf("Entry %q not found in Android mapping.", b.parts["model"])
            }
            d["device_type"] = deviceInfo[0]
            d["device_os"] = deviceInfo[1]

            // Tests using Android's HWAddress Sanitizer require an HWASan build of Android.
            // See https://developer.android.com/ndk/guides/hwasan.
            if b.extraConfig("HWASAN") {
                d["android_hwasan_build"] = "1"
            }
        } else if b.os("iOS") {
            device, ok := map[string]string{
                "iPadMini4": "iPad5,1",
                "iPhone7": "iPhone9,1",
                "iPhone8": "iPhone10,1",
                "iPadPro": "iPad6,3",
            }[b.parts["model"]]
            if !ok {
                log.Fatalf("Entry %q not found in iOS mapping.", b.parts["model"])
            }
            d["device_type"] = device
        } else if b.cpu() || b.extraConfig("CanvasKit", "Docker", "SwiftShader") {
            modelMapping, ok := map[string]map[string]string{
                "AppleM1": {
                    "MacMini9.1": "arm64-64-Apple_M1",
                },
                "AppleM3": {
                    "MacBookPro15.3": "arm64-64-Apple_M3",
                },
                "AppleIntel": {
                    "MacBookPro16.2": "x86-64",
                },
                "AVX": {
                    "VMware7.1": "x86-64",
                },
                "AVX2": {
                    "GCE": "x86-64-Haswell_GCE",
                    "MacBookAir7.2": "x86-64-i5-5350U",
                    "MacBookPro11.5": "x86-64-i7-4870HQ",
                    "MacMini7.1": "x86-64-i5-4278U",
                    "NUC5i7RYH": "x86-64-i7-5557U",
                    "NUC9i7QN": "x86-64-i7-9750H",
                    "NUC11TZi5": "x86-64-i5-1135G7",
                },
                "AVX512": {
                    "GCE": "x86-64-Skylake_GCE",
                    "Golo": "Intel64_Family_6_Model_85_Stepping_7__GenuineIntel",
                },
                "Rome": {
                    "GCE": "x86-64-AMD_Rome_GCE",
                },
                "SwiftShader": {
                    "GCE": "x86-64-Haswell_GCE",
                },
            }[b.parts["cpu_or_gpu_value"]]
            if !ok {
                log.Fatalf("Entry %q not found in CPU mapping.", b.parts["cpu_or_gpu_value"])
            }
            cpu, ok := modelMapping[b.parts["model"]]
            if !ok {
                log.Fatalf("Entry %q not found in %q model mapping.", b.parts["model"], b.parts["cpu_or_gpu_value"])
            }
            d["cpu"] = cpu
            if b.model("GCE") && b.matchOs("Debian") {
                d["os"] = DEFAULT_OS_LINUX_GCE
            }
            if b.model("GCE") && d["cpu"] == "x86-64-Haswell_GCE" {
                d["machine_type"] = MACHINE_TYPE_MEDIUM
            }
        } else {
            // It's a GPU job.
            if b.matchOs("Win") {
                gpu, ok := map[string]string{
                    "GTX1660": "10de:2184-31.0.15.4601",
                    "IntelHD4400": "8086:0a16-20.19.15.4963",
                    "IntelIris540": "8086:1926-31.0.101.2115",
                    "IntelIris6100": "8086:162b-20.19.15.4963",
                    "IntelIris655": "8086:3ea5-26.20.100.7463",
                    "IntelIrisXe": "8086:9a49-32.0.101.5972",
                    "RadeonHD7770": "1002:683d-26.20.13031.18002",
                    "RadeonR9M470X": "1002:6646-26.20.13031.18002",
                    "QuadroP400": "10de:1cb3-31.0.15.5222",
                    "RadeonVega6": "1002:1636-31.0.14057.5006",
                    "RadeonVega8": "1002:1638-31.0.21916.2",
                    "RTX3060": "10de:2489-32.0.15.6094",
                }[b.parts["cpu_or_gpu_value"]]
                if !ok {
                    log.Fatalf("Entry %q not found in Win GPU mapping.", b.parts["cpu_or_gpu_value"])
                }
                d["gpu"] = gpu
            } else if b.isLinux() {
                gpu, ok := map[string]string{
                    // Intel drivers come from CIPD, so no need to specify the version here.
1020 "IntelHD2000": "8086:0102", 1021 "IntelHD405": "8086:22b1", 1022 "IntelIris640": "8086:5926", 1023 "QuadroP400": "10de:1cb3-510.60.02", 1024 "RTX3060": "10de:2489-470.182.03", 1025 "IntelIrisXe": "8086:9a49", 1026 "RadeonVega6": "1002:1636", 1027 "RadeonVega8": "1002:1638-23.2.1", 1028 }[b.parts["cpu_or_gpu_value"]] 1029 if !ok { 1030 log.Fatalf("Entry %q not found in Linux GPU mapping.", b.parts["cpu_or_gpu_value"]) 1031 } 1032 d["gpu"] = gpu 1033 1034 if b.matchOs("Debian11") { 1035 d["os"] = DEBIAN_11_OS 1036 } else if b.matchOs("Debian") { 1037 // The Debian10 machines in the skolo are 10.10, not 10.3. 1038 d["os"] = DEFAULT_OS_DEBIAN 1039 } 1040 if b.parts["cpu_or_gpu_value"] == "IntelIrisXe" { 1041 // The Intel Iris Xe devices are Debian 11.3. 1042 d["os"] = "Debian-bookworm/sid" 1043 } 1044 } else if b.matchOs("Mac") { 1045 gpu, ok := map[string]string{ 1046 "AppleM1": "AppleM1", 1047 "AppleM3": "apple:m3", 1048 "IntelHD6000": "8086:1626", 1049 "IntelHD615": "8086:591e", 1050 "IntelIris5100": "8086:0a2e", 1051 "IntelIrisPlus": "8086:8a53", 1052 "RadeonHD8870M": "1002:6821-4.0.20-3.2.8", 1053 }[b.parts["cpu_or_gpu_value"]] 1054 if !ok { 1055 log.Fatalf("Entry %q not found in Mac GPU mapping.", b.parts["cpu_or_gpu_value"]) 1056 } 1057 if gpu == "AppleM1" { 1058 // No GPU dimension yet, but we can constrain by CPU. 1059 d["cpu"] = "arm64-64-Apple_M1" 1060 } else { 1061 d["gpu"] = gpu 1062 } 1063 // We have two different types of MacMini7,1 with the same GPU but different CPUs. 1064 if b.gpu("IntelIris5100") { 1065 if b.extraConfig("i5") { 1066 // If we say "i5", run on our MacMini7,1s in the Skolo: 1067 d["cpu"] = "x86-64-i5-4278U" 1068 } else { 1069 // Otherwise, run on Golo machines, just because that's 1070 // where those jobs have always run. Plus, some of them 1071 // are Perf jobs, which we want to keep consistent. 1072 d["cpu"] = "x86-64-i7-4578U" 1073 } 1074 } 1075 } else if b.os("ChromeOS") { 1076 version, ok := map[string]string{ 1077 "IntelUHDGraphics605": "15236.2.0", 1078 "RadeonVega3": "14233.0.0", 1079 "Adreno618": "14150.39.0", 1080 "MaliT860": "14092.77.0", 1081 }[b.parts["cpu_or_gpu_value"]] 1082 if !ok { 1083 log.Fatalf("Entry %q not found in ChromeOS GPU mapping.", b.parts["cpu_or_gpu_value"]) 1084 } 1085 d["gpu"] = b.parts["cpu_or_gpu_value"] 1086 d["release_version"] = version 1087 } else { 1088 log.Fatalf("Unknown GPU mapping for OS %q.", b.parts["os"]) 1089 } 1090 } 1091 } else { 1092 if d["os"] == DEBIAN_11_OS { 1093 // The Debian11 compile machines in the skolo have 1094 // GPUs, but we still use them for compiles also. 1095 1096 // Dodge Raspberry Pis. 1097 d["cpu"] = "x86-64" 1098 // Target the AMDRyzen 5 4500U machines, as they are beefy and we have 1099 // 19 of them, and they are setup to compile. 1100 d["gpu"] = "1002:1636" 1101 } else { 1102 d["gpu"] = "none" 1103 } 1104 if d["os"] == DEFAULT_OS_LINUX_GCE { 1105 if b.extraConfig("CanvasKit", "CMake", "Docker", "PathKit") || b.role("BuildStats", "CodeSize") { 1106 b.linuxGceDimensions(MACHINE_TYPE_MEDIUM) 1107 return 1108 } 1109 // Use many-core machines for Build tasks. 1110 b.linuxGceDimensions(MACHINE_TYPE_LARGE) 1111 return 1112 } else if d["os"] == DEFAULT_OS_WIN_GCE { 1113 // Windows CPU bots. 1114 d["cpu"] = "x86-64-Haswell_GCE" 1115 // Use many-core machines for Build tasks. 1116 d["machine_type"] = MACHINE_TYPE_LARGE 1117 } else if d["os"] == DEFAULT_OS_MAC || d["os"] == "Mac-10.15.7" { 1118 // Mac CPU bots are no longer VMs. 
1119 d["cpu"] = "x86-64" 1120 d["cores"] = "12" 1121 delete(d, "gpu") 1122 } 1123 } 1124 1125 dims := make([]string, 0, len(d)) 1126 for k, v := range d { 1127 dims = append(dims, fmt.Sprintf("%s:%s", k, v)) 1128 } 1129 sort.Strings(dims) 1130 b.dimension(dims...) 1131} 1132 1133// bundleRecipes generates the task to bundle and isolate the recipes. Returns 1134// the name of the task, which may be added as a dependency. 1135func (b *jobBuilder) bundleRecipes() string { 1136 b.addTask(BUNDLE_RECIPES_NAME, func(b *taskBuilder) { 1137 b.usesGit() 1138 b.cmd("/bin/bash", "skia/infra/bots/bundle_recipes.sh", specs.PLACEHOLDER_ISOLATED_OUTDIR) 1139 b.linuxGceDimensions(MACHINE_TYPE_SMALL) 1140 b.idempotent() 1141 b.cas(CAS_RECIPES) 1142 b.usesPython() 1143 }) 1144 return BUNDLE_RECIPES_NAME 1145} 1146 1147// buildTaskDrivers generates the task to compile the task driver code to run on 1148// all platforms. Returns the name of the task, which may be added as a 1149// dependency. 1150func (b *jobBuilder) buildTaskDrivers(goos, goarch string) string { 1151 name := BUILD_TASK_DRIVERS_PREFIX + "_" + goos + "_" + goarch 1152 b.addTask(name, func(b *taskBuilder) { 1153 b.cmd("/bin/bash", "skia/infra/bots/build_task_drivers.sh", 1154 specs.PLACEHOLDER_ISOLATED_OUTDIR, 1155 goos+"_"+goarch) 1156 b.linuxGceDimensions(MACHINE_TYPE_MEDIUM) 1157 b.usesBazel("linux_x64") 1158 b.idempotent() 1159 b.cas(CAS_TASK_DRIVERS) 1160 }) 1161 return name 1162} 1163 1164// createDockerImage creates the specified docker image. Returns the name of the 1165// generated task. 1166func (b *jobBuilder) createDockerImage(wasm bool) string { 1167 // First, derive the name of the task. 1168 imageName := "skia-release" 1169 taskName := "Housekeeper-PerCommit-CreateDockerImage_Skia_Release" 1170 if wasm { 1171 imageName = "skia-wasm-release" 1172 taskName = "Housekeeper-PerCommit-CreateDockerImage_Skia_WASM_Release" 1173 } 1174 imageDir := path.Join("docker", imageName) 1175 1176 // Add the task. 1177 b.addTask(taskName, func(b *taskBuilder) { 1178 // TODO(borenet): Make this task not use Git. 1179 b.usesGit() 1180 b.cmd( 1181 b.taskDriver("build_push_docker_image", false), 1182 "--image_name", fmt.Sprintf("gcr.io/skia-public/%s", imageName), 1183 "--dockerfile_dir", imageDir, 1184 "--project_id", "skia-swarming-bots", 1185 "--task_id", specs.PLACEHOLDER_TASK_ID, 1186 "--task_name", b.Name, 1187 "--workdir", ".", 1188 "--gerrit_project", "skia", 1189 "--gerrit_url", "https://skia-review.googlesource.com", 1190 "--repo", specs.PLACEHOLDER_REPO, 1191 "--revision", specs.PLACEHOLDER_REVISION, 1192 "--patch_issue", specs.PLACEHOLDER_ISSUE, 1193 "--patch_set", specs.PLACEHOLDER_PATCHSET, 1194 "--patch_server", specs.PLACEHOLDER_CODEREVIEW_SERVER, 1195 "--swarm_out_dir", specs.PLACEHOLDER_ISOLATED_OUTDIR, 1196 ) 1197 b.cas(CAS_EMPTY) 1198 b.serviceAccount(b.cfg.ServiceAccountCompile) 1199 b.linuxGceDimensions(MACHINE_TYPE_MEDIUM) 1200 b.usesDocker() 1201 b.cache(CACHES_DOCKER...) 1202 b.timeout(time.Hour) 1203 }) 1204 return taskName 1205} 1206 1207// createPushAppsFromSkiaDockerImage creates and pushes docker images of some apps 1208// (eg: fiddler, api) using the skia-release docker image. 1209func (b *jobBuilder) createPushAppsFromSkiaDockerImage() { 1210 b.addTask(b.Name, func(b *taskBuilder) { 1211 // TODO(borenet): Make this task not use Git. 
// createDockerImage creates the specified docker image. Returns the name of the
// generated task.
func (b *jobBuilder) createDockerImage(wasm bool) string {
    // First, derive the name of the task.
    imageName := "skia-release"
    taskName := "Housekeeper-PerCommit-CreateDockerImage_Skia_Release"
    if wasm {
        imageName = "skia-wasm-release"
        taskName = "Housekeeper-PerCommit-CreateDockerImage_Skia_WASM_Release"
    }
    imageDir := path.Join("docker", imageName)

    // Add the task.
    b.addTask(taskName, func(b *taskBuilder) {
        // TODO(borenet): Make this task not use Git.
        b.usesGit()
        b.cmd(
            b.taskDriver("build_push_docker_image", false),
            "--image_name", fmt.Sprintf("gcr.io/skia-public/%s", imageName),
            "--dockerfile_dir", imageDir,
            "--project_id", "skia-swarming-bots",
            "--task_id", specs.PLACEHOLDER_TASK_ID,
            "--task_name", b.Name,
            "--workdir", ".",
            "--gerrit_project", "skia",
            "--gerrit_url", "https://skia-review.googlesource.com",
            "--repo", specs.PLACEHOLDER_REPO,
            "--revision", specs.PLACEHOLDER_REVISION,
            "--patch_issue", specs.PLACEHOLDER_ISSUE,
            "--patch_set", specs.PLACEHOLDER_PATCHSET,
            "--patch_server", specs.PLACEHOLDER_CODEREVIEW_SERVER,
            "--swarm_out_dir", specs.PLACEHOLDER_ISOLATED_OUTDIR,
        )
        b.cas(CAS_EMPTY)
        b.serviceAccount(b.cfg.ServiceAccountCompile)
        b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
        b.usesDocker()
        b.cache(CACHES_DOCKER...)
        b.timeout(time.Hour)
    })
    return taskName
}

// createPushAppsFromSkiaDockerImage creates and pushes docker images of some apps
// (e.g. fiddler, api) using the skia-release docker image.
func (b *jobBuilder) createPushAppsFromSkiaDockerImage() {
    b.addTask(b.Name, func(b *taskBuilder) {
        // TODO(borenet): Make this task not use Git.
        b.usesGit()
        b.cmd(
            b.taskDriver("push_apps_from_skia_image", false),
            "--project_id", "skia-swarming-bots",
            "--task_id", specs.PLACEHOLDER_TASK_ID,
            "--task_name", b.Name,
            "--workdir", ".",
            "--repo", specs.PLACEHOLDER_REPO,
            "--revision", specs.PLACEHOLDER_REVISION,
            "--patch_issue", specs.PLACEHOLDER_ISSUE,
            "--patch_set", specs.PLACEHOLDER_PATCHSET,
            "--patch_server", specs.PLACEHOLDER_CODEREVIEW_SERVER,
            "--bazel_cache_dir", bazelCacheDirOnGCELinux,
        )
        b.dep(b.createDockerImage(false))
        b.cas(CAS_EMPTY)
        b.usesBazel("linux_x64")
        b.serviceAccount(b.cfg.ServiceAccountCompile)
        b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
        b.usesDocker()
        b.cache(CACHES_DOCKER...)
        b.timeout(2 * time.Hour)
    })
}

var iosRegex = regexp.MustCompile(`os:iOS-(.*)`)

func (b *taskBuilder) maybeAddIosDevImage() {
    for _, dim := range b.Spec.Dimensions {
        if m := iosRegex.FindStringSubmatch(dim); len(m) >= 2 {
            var asset string
            switch m[1] {
            // Other patch versions can be added to the same case.
            case "11.4.1":
                asset = "ios-dev-image-11.4"
            case "13.3.1":
                asset = "ios-dev-image-13.3"
            case "13.4.1":
                asset = "ios-dev-image-13.4"
            case "13.5.1":
                asset = "ios-dev-image-13.5"
            case "13.6":
                asset = "ios-dev-image-13.6"
            default:
                log.Fatalf("Unable to determine correct ios-dev-image asset for %s. If %s is a new iOS release, you must add a CIPD package containing the corresponding iOS dev image; see ios-dev-image-11.4 for an example.", b.Name, m[1])
            }
            b.asset(asset)
            break
        } else if strings.Contains(dim, "iOS") {
            log.Fatalf("Must specify iOS version for %s to obtain correct dev image; os dimension is missing version: %s", b.Name, dim)
        }
    }
}

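// For illustration (mapping taken from the switch above): a task carrying the
// dimension "os:iOS-13.6" picks up the "ios-dev-image-13.6" CIPD asset, while a
// bare "os:iOS" dimension without a version is a fatal error.
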
1304 if b.extraConfig("Android") { 1305 if b.matchOs("Mac") { 1306 b.asset("android_ndk_darwin") 1307 } else if b.matchOs("Win") { 1308 pkg := b.MustGetCipdPackageFromAsset("android_ndk_windows") 1309 pkg.Path = "n" 1310 b.cipd(pkg) 1311 } else { 1312 b.asset("android_ndk_linux") 1313 } 1314 } else if b.extraConfig("Chromebook") { 1315 b.asset("clang_linux") 1316 if b.arch("x86_64") { 1317 b.asset("chromebook_x86_64_gles") 1318 } else if b.arch("arm") { 1319 b.asset("armhf_sysroot") 1320 b.asset("chromebook_arm_gles") 1321 } 1322 } else if b.isLinux() { 1323 if b.compiler("Clang") { 1324 b.asset("clang_linux") 1325 } 1326 if b.extraConfig("SwiftShader") { 1327 b.asset("cmake_linux") 1328 } 1329 b.asset("ccache_linux") 1330 b.usesCCache() 1331 if b.shellsOutToBazel() { 1332 b.usesBazel("linux_x64") 1333 b.attempts(1) 1334 } 1335 } else if b.matchOs("Win") { 1336 b.asset("win_toolchain") 1337 if b.compiler("Clang") { 1338 b.asset("clang_win") 1339 } 1340 if b.extraConfig("DWriteCore") { 1341 b.asset("dwritecore") 1342 } 1343 } else if b.matchOs("Mac") { 1344 b.cipd(CIPD_PKGS_XCODE...) 1345 b.Spec.Caches = append(b.Spec.Caches, &specs.Cache{ 1346 Name: "xcode", 1347 Path: "cache/Xcode.app", 1348 }) 1349 b.asset("ccache_mac") 1350 b.usesCCache() 1351 if b.extraConfig("iOS") { 1352 b.asset("provisioning_profile_ios") 1353 } 1354 if b.shellsOutToBazel() { 1355 // All of our current Mac compile machines are x64 Mac only. 1356 b.usesBazel("mac_x64") 1357 b.attempts(1) 1358 } 1359 } 1360 }) 1361 } 1362 1363 // All compile tasks are runnable as their own Job. Assert that the Job 1364 // is listed in jobs. 1365 if !In(name, b.jobs) { 1366 log.Fatalf("Job %q is missing from the jobs list! Derived from: %q", name, b.Name) 1367 } 1368 1369 return name 1370} 1371 1372// recreateSKPs generates a RecreateSKPs task. 1373func (b *jobBuilder) recreateSKPs() { 1374 b.addTask(b.Name, func(b *taskBuilder) { 1375 cmd := []string{ 1376 b.taskDriver("recreate_skps", false), 1377 "--local=false", 1378 "--project_id", "skia-swarming-bots", 1379 "--task_id", specs.PLACEHOLDER_TASK_ID, 1380 "--task_name", b.Name, 1381 "--skia_revision", specs.PLACEHOLDER_REVISION, 1382 "--patch_ref", specs.PLACEHOLDER_PATCH_REF, 1383 "--git_cache", "cache/git", 1384 "--checkout_root", "cache/work", 1385 "--dm_path", "build/dm", 1386 } 1387 if b.matchExtraConfig("DryRun") { 1388 cmd = append(cmd, "--dry_run") 1389 } 1390 1391 b.cas(CAS_RECREATE_SKPS) 1392 b.dep("Build-Debian10-Clang-x86_64-Release") // To get DM. 1393 b.cmd(cmd...) 1394 b.usesLUCIAuth() 1395 b.serviceAccount(b.cfg.ServiceAccountRecreateSKPs) 1396 b.dimension( 1397 "pool:SkiaCT", 1398 fmt.Sprintf("os:%s", DEFAULT_OS_LINUX_GCE), 1399 ) 1400 b.usesGo() 1401 b.cache(CACHES_WORKDIR...) 1402 b.timeout(6 * time.Hour) 1403 b.usesPython() 1404 b.attempts(2) 1405 }) 1406} 1407 1408// checkGeneratedFiles verifies that no generated SKSL files have been edited by hand, and that 1409// we do not get any diffs after regenerating all files (go generate, Gazelle, etc.). 
// recreateSKPs generates a RecreateSKPs task.
func (b *jobBuilder) recreateSKPs() {
    b.addTask(b.Name, func(b *taskBuilder) {
        cmd := []string{
            b.taskDriver("recreate_skps", false),
            "--local=false",
            "--project_id", "skia-swarming-bots",
            "--task_id", specs.PLACEHOLDER_TASK_ID,
            "--task_name", b.Name,
            "--skia_revision", specs.PLACEHOLDER_REVISION,
            "--patch_ref", specs.PLACEHOLDER_PATCH_REF,
            "--git_cache", "cache/git",
            "--checkout_root", "cache/work",
            "--dm_path", "build/dm",
        }
        if b.matchExtraConfig("DryRun") {
            cmd = append(cmd, "--dry_run")
        }

        b.cas(CAS_RECREATE_SKPS)
        b.dep("Build-Debian10-Clang-x86_64-Release") // To get DM.
        b.cmd(cmd...)
        b.usesLUCIAuth()
        b.serviceAccount(b.cfg.ServiceAccountRecreateSKPs)
        b.dimension(
            "pool:SkiaCT",
            fmt.Sprintf("os:%s", DEFAULT_OS_LINUX_GCE),
        )
        b.usesGo()
        b.cache(CACHES_WORKDIR...)
        b.timeout(6 * time.Hour)
        b.usesPython()
        b.attempts(2)
    })
}

// checkGeneratedFiles verifies that no generated SKSL files have been edited by hand, and that
// we do not get any diffs after regenerating all files (go generate, Gazelle, etc.).
func (b *jobBuilder) checkGeneratedFiles() {
    b.addTask(b.Name, func(b *taskBuilder) {
        b.cas(CAS_BAZEL)
        b.cmd(
            b.taskDriver("check_generated_files", false),
            "--local=false",
            "--git_path=cipd_bin_packages/git",
            "--project_id", "skia-swarming-bots",
            "--task_id", specs.PLACEHOLDER_TASK_ID,
            "--task_name", b.Name,
            "--bazel_cache_dir", bazelCacheDirOnGCELinux,
            "--bazel_arg=--config=for_linux_x64_with_rbe",
            "--bazel_arg=--jobs=100",
        )
        b.usesBazel("linux_x64")
        b.usesGit()
        b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
        b.serviceAccount(b.cfg.ServiceAccountHousekeeper)
    })
}

// goLinters runs various Go linters (gofmt, errcheck, etc.) and fails if there are any errors or
// diffs.
func (b *jobBuilder) goLinters() {
    b.addTask(b.Name, func(b *taskBuilder) {
        b.cas(CAS_BAZEL)
        b.cmd(
            b.taskDriver("go_linters", false),
            "--local=false",
            "--git_path=cipd_bin_packages/git",
            "--project_id", "skia-swarming-bots",
            "--task_id", specs.PLACEHOLDER_TASK_ID,
            "--task_name", b.Name,
            "--bazel_cache_dir", bazelCacheDirOnGCELinux,
            "--bazel_arg=--config=for_linux_x64_with_rbe",
            "--bazel_arg=--jobs=100",
        )
        b.usesBazel("linux_x64")
        b.usesGit()
        b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
        b.serviceAccount(b.cfg.ServiceAccountHousekeeper)
    })
}

// checkGnToBp verifies that the gn_to_bp.py script continues to work.
func (b *jobBuilder) checkGnToBp() {
    b.addTask(b.Name, func(b *taskBuilder) {
        b.cas(CAS_COMPILE)
        b.cmd(
            b.taskDriver("run_gn_to_bp", false),
            "--local=false",
            "--project_id", "skia-swarming-bots",
            "--task_id", specs.PLACEHOLDER_TASK_ID,
            "--task_name", b.Name,
        )
        b.linuxGceDimensions(MACHINE_TYPE_SMALL)
        b.usesPython()
        b.serviceAccount(b.cfg.ServiceAccountHousekeeper)
    })
}

// housekeeper generates a Housekeeper task.
func (b *jobBuilder) housekeeper() {
    b.addTask(b.Name, func(b *taskBuilder) {
        b.recipeProps(EXTRA_PROPS)
        b.kitchenTask("housekeeper", OUTPUT_NONE)
        b.serviceAccount(b.cfg.ServiceAccountHousekeeper)
        b.linuxGceDimensions(MACHINE_TYPE_SMALL)
        b.usesGit()
        b.cache(CACHES_WORKDIR...)
    })
}

// g3FrameworkCanary generates a G3 Framework Canary task. Returns
// the name of the last task in the generated chain of tasks, which the Job
// should add as a dependency.
func (b *jobBuilder) g3FrameworkCanary() {
    b.addTask(b.Name, func(b *taskBuilder) {
        b.cas(CAS_EMPTY)
        b.cmd(
            b.taskDriver("g3_canary", false),
            "--local=false",
            "--project_id", "skia-swarming-bots",
            "--task_id", specs.PLACEHOLDER_TASK_ID,
            "--task_name", b.Name,
            "--repo", specs.PLACEHOLDER_REPO,
            "--revision", specs.PLACEHOLDER_REVISION,
            "--patch_issue", specs.PLACEHOLDER_ISSUE,
            "--patch_set", specs.PLACEHOLDER_PATCHSET,
            "--patch_server", specs.PLACEHOLDER_CODEREVIEW_SERVER,
        )
        b.linuxGceDimensions(MACHINE_TYPE_SMALL)
        b.usesLUCIAuth()
        b.serviceAccount("skia-g3-framework-compile@skia-swarming-bots.iam.gserviceaccount.com")
        b.timeout(3 * time.Hour)
        b.attempts(1)
    })
}

// infra generates an infra_tests task.
func (b *jobBuilder) infra() {
    b.addTask(b.Name, func(b *taskBuilder) {
        if b.matchOs("Win") || b.matchExtraConfig("Win") {
            b.dimension(
                // Specify CPU to avoid running builds on bots with a more unique CPU.
                "cpu:x86-64-Haswell_GCE",
                "gpu:none",
                fmt.Sprintf("machine_type:%s", MACHINE_TYPE_MEDIUM), // We don't have any small Windows instances.
                fmt.Sprintf("os:%s", DEFAULT_OS_WIN_GCE),
                fmt.Sprintf("pool:%s", b.cfg.Pool),
            )
        } else {
            b.linuxGceDimensions(MACHINE_TYPE_SMALL)
        }
        b.recipeProp("repository", specs.PLACEHOLDER_REPO)
        b.kitchenTask("infra", OUTPUT_NONE)
        b.cas(CAS_WHOLE_REPO)
        b.serviceAccount(b.cfg.ServiceAccountCompile)
        b.usesGSUtil()
        b.idempotent()
        b.usesGo()
    })
}

// buildstats generates a buildstats task, which compiles code and generates
// statistics about the build.
func (b *jobBuilder) buildstats() {
    compileTaskName := b.compile()
    b.addTask(b.Name, func(b *taskBuilder) {
        b.recipeProps(EXTRA_PROPS)
        b.kitchenTask("compute_buildstats", OUTPUT_PERF)
        b.dep(compileTaskName)
        b.asset("bloaty")
        b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
        b.usesDocker()
        b.usesGit()
        b.cache(CACHES_WORKDIR...)
    })
    // Upload release results (for tracking in perf).
    // We have some jobs that are FYI (e.g. Debug-CanvasKit, tree-map generator).
    if b.release() && !b.arch("x86_64") {
        uploadName := fmt.Sprintf("%s%s%s", PREFIX_UPLOAD, b.jobNameSchema.Sep, b.Name)
        depName := b.Name
        b.addTask(uploadName, func(b *taskBuilder) {
            b.recipeProp("gs_bucket", b.cfg.GsBucketNano)
            b.recipeProps(EXTRA_PROPS)
            // TODO(borenet): I'm not sure why the upload task is
            // using the BuildStats task name, but I've done this
            // to maintain existing behavior.
            b.Name = depName
            b.kitchenTask("upload_buildstats_results", OUTPUT_NONE)
            b.Name = uploadName
            b.serviceAccount(b.cfg.ServiceAccountUploadNano)
            b.linuxGceDimensions(MACHINE_TYPE_SMALL)
            b.usesGSUtil()
            b.dep(depName)
        })
    }
}

// codesize generates a codesize task, which takes a binary produced by a
// compile task, runs Bloaty against it, and uploads the resulting code size
// statistics to the GCS bucket belonging to the codesize.skia.org service.
func (b *jobBuilder) codesize() {
    compileTaskName := b.compile()
    compileTaskNameNoPatch := compileTaskName
    if b.extraConfig("Android") {
        compileTaskNameNoPatch += "_NoPatch" // add a second "extra config"
    } else {
        compileTaskNameNoPatch += "-NoPatch" // add the only "extra config"
    }

    bloatyCipdPkg := b.MustGetCipdPackageFromAsset("bloaty")

    b.addTask(b.Name, func(b *taskBuilder) {
        b.cas(CAS_EMPTY)
        b.dep(compileTaskName)
        b.dep(compileTaskNameNoPatch)
        cmd := []string{
            b.taskDriver("codesize", false),
            "--local=false",
            "--project_id", "skia-swarming-bots",
            "--task_id", specs.PLACEHOLDER_TASK_ID,
            "--task_name", b.Name,
            "--compile_task_name", compileTaskName,
            "--compile_task_name_no_patch", compileTaskNameNoPatch,
            // Note: the binary name cannot contain dashes, otherwise the naming
            // schema logic will partition it into multiple parts.
            //
            // If we ever need to define a CodeSize-* task for a binary with
"my-binary"), a potential workaround is to 1601 // create a mapping from a new, non-dashed binary name (e.g. "my_binary") 1602 // to the actual binary name with dashes. This mapping can be hardcoded 1603 // in this function; no changes to the task driver would be necessary. 1604 "--binary_name", b.parts["binary_name"], 1605 "--bloaty_cipd_version", bloatyCipdPkg.Version, 1606 "--bloaty_binary", "bloaty/bloaty", 1607 1608 "--repo", specs.PLACEHOLDER_REPO, 1609 "--revision", specs.PLACEHOLDER_REVISION, 1610 "--patch_issue", specs.PLACEHOLDER_ISSUE, 1611 "--patch_set", specs.PLACEHOLDER_PATCHSET, 1612 "--patch_server", specs.PLACEHOLDER_CODEREVIEW_SERVER, 1613 } 1614 if strings.Contains(compileTaskName, "Android") { 1615 b.asset("android_ndk_linux") 1616 cmd = append(cmd, "--strip_binary", 1617 "android_ndk_linux/toolchains/llvm/prebuilt/linux-x86_64/bin/llvm-strip") 1618 } else { 1619 b.asset("binutils_linux_x64") 1620 cmd = append(cmd, "--strip_binary", "binutils_linux_x64/strip") 1621 } 1622 b.cmd(cmd...) 1623 b.linuxGceDimensions(MACHINE_TYPE_SMALL) 1624 b.cache(CACHES_WORKDIR...) 1625 b.usesLUCIAuth() 1626 b.asset("bloaty") 1627 b.serviceAccount("skia-external-codesize@skia-swarming-bots.iam.gserviceaccount.com") 1628 b.timeout(20 * time.Minute) 1629 b.attempts(1) 1630 }) 1631} 1632 1633// doUpload indicates whether the given Job should upload its results. 1634func (b *jobBuilder) doUpload() bool { 1635 if b.extraConfig("Upload") { 1636 return true 1637 } 1638 for _, s := range b.cfg.NoUpload { 1639 m, err := regexp.MatchString(s, b.Name) 1640 if err != nil { 1641 log.Fatal(err) 1642 } 1643 if m { 1644 return false 1645 } 1646 } 1647 return true 1648} 1649 1650// commonTestPerfAssets adds the assets needed by Test and Perf tasks. 1651func (b *taskBuilder) commonTestPerfAssets() { 1652 // Docker-based tests don't need the standard CIPD assets 1653 if b.extraConfig("CanvasKit", "PathKit") || (b.role("Test") && b.extraConfig("LottieWeb")) { 1654 return 1655 } 1656 if b.os("Android", "ChromeOS", "iOS") { 1657 b.asset("skp", "svg", "skimage") 1658 } else if b.extraConfig("OldestSupportedSkpVersion") { 1659 b.assetWithVersion("skp", oldestSupportedSkpVersion) 1660 } else { 1661 // for desktop machines 1662 b.asset("skimage", "skp", "svg") 1663 } 1664 1665 if b.isLinux() && b.matchExtraConfig("SAN") { 1666 b.asset("clang_linux") 1667 } 1668 1669 if b.isLinux() { 1670 if b.extraConfig("Vulkan") { 1671 b.asset("linux_vulkan_sdk") 1672 } 1673 if b.matchGpu("Intel") { 1674 if b.matchGpu("IrisXe") { 1675 b.asset("mesa_intel_driver_linux_22") 1676 } else { 1677 // Use this for legacy drivers that were culled in v22 of Mesa. 1678 // https://www.phoronix.com/scan.php?page=news_item&px=Mesa-22.0-Drops-OpenSWR 1679 b.asset("mesa_intel_driver_linux") 1680 } 1681 } 1682 } 1683 1684 if b.matchOs("Win") && b.extraConfig("DWriteCore") { 1685 b.asset("dwritecore") 1686 } 1687} 1688 1689// directUpload adds prerequisites for uploading to GCS. 1690func (b *taskBuilder) directUpload(gsBucket, serviceAccount string) { 1691 b.recipeProp("gs_bucket", gsBucket) 1692 b.serviceAccount(serviceAccount) 1693 b.usesGSUtil() 1694} 1695 1696// dm generates a Test task using dm. 1697func (b *jobBuilder) dm() { 1698 compileTaskName := "" 1699 // LottieWeb doesn't require anything in Skia to be compiled. 
// dm generates a Test task using dm.
func (b *jobBuilder) dm() {
	compileTaskName := ""
	// LottieWeb doesn't require anything in Skia to be compiled.
	if !b.extraConfig("LottieWeb") {
		compileTaskName = b.compile()
	}
	directUpload := false
	b.addTask(b.Name, func(b *taskBuilder) {
		cas := CAS_TEST
		recipe := "test"
		if b.extraConfig("PathKit") {
			cas = CAS_PATHKIT
			recipe = "test_pathkit"
			if b.doUpload() {
				b.directUpload(b.cfg.GsBucketGm, b.cfg.ServiceAccountUploadGM)
				directUpload = true
			}
		} else if b.extraConfig("CanvasKit") {
			cas = CAS_CANVASKIT
			recipe = "test_canvaskit"
			if b.doUpload() {
				b.directUpload(b.cfg.GsBucketGm, b.cfg.ServiceAccountUploadGM)
				directUpload = true
			}
		} else if b.extraConfig("LottieWeb") {
			// CAS_LOTTIE_CI differs from CAS_LOTTIE_WEB in that it includes
			// more of the files, especially those brought in via DEPS in the
			// lottie-ci repo. The main difference between Perf.+LottieWeb and
			// Test.+LottieWeb is that the former pulls in the lottie build via
			// npm and the latter always tests at lottie's ToT.
			cas = CAS_LOTTIE_CI
			recipe = "test_lottie_web"
			if b.doUpload() {
				b.directUpload(b.cfg.GsBucketGm, b.cfg.ServiceAccountUploadGM)
				directUpload = true
			}
		} else {
			// Default recipe supports direct upload.
			// TODO(http://skbug.com/11785): Windows jobs are unable to extract gsutil.
			// https://bugs.chromium.org/p/chromium/issues/detail?id=1192611
			if b.doUpload() && !b.matchOs("Win") {
				b.directUpload(b.cfg.GsBucketGm, b.cfg.ServiceAccountUploadGM)
				directUpload = true
			}
		}
		b.recipeProp("gold_hashes_url", b.cfg.GoldHashesURL)
		b.recipeProps(EXTRA_PROPS)
		iid := b.internalHardwareLabel()
		iidStr := ""
		if iid != nil {
			iidStr = strconv.Itoa(*iid)
		}
		if recipe == "test" {
			b.dmFlags(iidStr)
		}
		b.kitchenTask(recipe, OUTPUT_TEST)
		b.cas(cas)
		b.swarmDimensions()
		if b.extraConfig("CanvasKit", "Docker", "LottieWeb", "PathKit") {
			b.usesDocker()
		}
		if compileTaskName != "" {
			b.dep(compileTaskName)
		}
		if b.matchOs("Android") && b.extraConfig("ASAN") {
			b.asset("android_ndk_linux")
		}
		if b.extraConfig("NativeFonts") && !b.matchOs("Android") {
			b.needsFontsForParagraphTests()
		}
		if b.extraConfig("Fontations") {
			b.cipd(&specs.CipdPackage{
				Name:    "chromium/third_party/googlefonts_testdata",
				Path:    "googlefonts_testdata",
				Version: "version:20230913",
			})
		}
		b.commonTestPerfAssets()
		if b.matchExtraConfig("Lottie") {
			b.asset("lottie-samples")
		}
		b.expiration(20 * time.Hour)

		b.timeout(4 * time.Hour)
		if b.extraConfig("Valgrind") {
			b.timeout(9 * time.Hour)
			b.expiration(48 * time.Hour)
			b.asset("valgrind")
			// Since Valgrind runs on the same bots as the CQ, we restrict Valgrind to a subset of the bots
			// to ensure there are always bots free for CQ tasks.
			b.dimension("valgrind:1")
		} else if b.extraConfig("MSAN") {
			b.timeout(9 * time.Hour)
		} else if b.arch("x86") && b.debug() {
			// skia:6737
			b.timeout(6 * time.Hour)
		} else if b.matchOs("Mac11") {
			b.timeout(30 * time.Minute)
		}
		b.maybeAddIosDevImage()
	})

	// Upload results if necessary. TODO(kjlubick): If we do coverage analysis at the same
	// time as normal tests (which would be nice), cfg.json needs to have Coverage removed.
	if b.doUpload() && !directUpload {
		uploadName := fmt.Sprintf("%s%s%s", PREFIX_UPLOAD, b.jobNameSchema.Sep, b.Name)
		depName := b.Name
		b.addTask(uploadName, func(b *taskBuilder) {
			b.recipeProp("gs_bucket", b.cfg.GsBucketGm)
			b.recipeProps(EXTRA_PROPS)
			b.kitchenTask("upload_dm_results", OUTPUT_NONE)
			b.serviceAccount(b.cfg.ServiceAccountUploadGM)
			b.linuxGceDimensions(MACHINE_TYPE_SMALL)
			b.usesGSUtil()
			b.dep(depName)
		})
	}
}

// canary generates a task that uses TaskDrivers to trigger canary manual rolls on autorollers.
// Canary-G3 does not use this path because it is very different from other autorollers.
func (b *jobBuilder) canary(rollerName, canaryCQKeyword, targetProjectBaseURL string) {
	b.addTask(b.Name, func(b *taskBuilder) {
		b.cas(CAS_EMPTY)
		b.cmd(
			b.taskDriver("canary", false),
			"--local=false",
			"--project_id", "skia-swarming-bots",
			"--task_id", specs.PLACEHOLDER_TASK_ID,
			"--task_name", b.Name,
			"--roller_name", rollerName,
			"--cq_keyword", canaryCQKeyword,
			"--target_project_base_url", targetProjectBaseURL,
			"--repo", specs.PLACEHOLDER_REPO,
			"--revision", specs.PLACEHOLDER_REVISION,
			"--patch_issue", specs.PLACEHOLDER_ISSUE,
			"--patch_set", specs.PLACEHOLDER_PATCHSET,
			"--patch_server", specs.PLACEHOLDER_CODEREVIEW_SERVER,
		)
		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
		b.usesLUCIAuth()
		b.serviceAccount(b.cfg.ServiceAccountCanary)
		b.timeout(3 * time.Hour)
		b.attempts(1)
	})
}

// puppeteer generates a task that uses TaskDrivers combined with a node script and puppeteer to
// benchmark something using Chromium (e.g. CanvasKit, LottieWeb).
func (b *jobBuilder) puppeteer() {
	compileTaskName := b.compile()
	b.addTask(b.Name, func(b *taskBuilder) {
		b.defaultSwarmDimensions()
		b.usesNode()
		b.usesLUCIAuth()
		b.dep(compileTaskName)
		b.output(OUTPUT_PERF)
		b.timeout(60 * time.Minute)
		b.cas(CAS_PUPPETEER)
		b.serviceAccount(b.cfg.ServiceAccountCompile)

		webglversion := "2"
		if b.extraConfig("WebGL1") {
			webglversion = "1"
		}

		if b.extraConfig("SkottieFrames") {
			b.cmd(
				b.taskDriver("perf_puppeteer_skottie_frames", false),
				"--project_id", "skia-swarming-bots",
				"--git_hash", specs.PLACEHOLDER_REVISION,
				"--task_id", specs.PLACEHOLDER_TASK_ID,
				"--task_name", b.Name,
				"--canvaskit_bin_path", "./build",
				"--lotties_path", "./lotties_with_assets",
				"--node_bin_path", "./node/node/bin",
				"--benchmark_path", "./tools/perf-canvaskit-puppeteer",
				"--output_path", OUTPUT_PERF,
				"--os_trace", b.parts["os"],
				"--model_trace", b.parts["model"],
				"--cpu_or_gpu_trace", b.parts["cpu_or_gpu"],
				"--cpu_or_gpu_value_trace", b.parts["cpu_or_gpu_value"],
				"--webgl_version", webglversion, // ignored when running with the CPU backend
			)
			b.needsLottiesWithAssets()
		} else if b.extraConfig("RenderSKP") {
			b.cmd(
				b.taskDriver("perf_puppeteer_render_skps", false),
				"--project_id", "skia-swarming-bots",
				"--git_hash", specs.PLACEHOLDER_REVISION,
				"--task_id", specs.PLACEHOLDER_TASK_ID,
				"--task_name", b.Name,
				"--canvaskit_bin_path", "./build",
				"--skps_path", "./skp",
				"--node_bin_path", "./node/node/bin",
				"--benchmark_path", "./tools/perf-canvaskit-puppeteer",
				"--output_path", OUTPUT_PERF,
				"--os_trace", b.parts["os"],
				"--model_trace", b.parts["model"],
				"--cpu_or_gpu_trace", b.parts["cpu_or_gpu"],
				"--cpu_or_gpu_value_trace", b.parts["cpu_or_gpu_value"],
				"--webgl_version", webglversion,
			)
			b.asset("skp")
		} else if b.extraConfig("CanvasPerf") { // refers to the canvas_perf.js test suite
			b.cmd(
				b.taskDriver("perf_puppeteer_canvas", false),
				"--project_id", "skia-swarming-bots",
				"--git_hash", specs.PLACEHOLDER_REVISION,
				"--task_id", specs.PLACEHOLDER_TASK_ID,
				"--task_name", b.Name,
				"--canvaskit_bin_path", "./build",
				"--node_bin_path", "./node/node/bin",
				"--benchmark_path", "./tools/perf-canvaskit-puppeteer",
				"--output_path", OUTPUT_PERF,
				"--os_trace", b.parts["os"],
				"--model_trace", b.parts["model"],
				"--cpu_or_gpu_trace", b.parts["cpu_or_gpu"],
				"--cpu_or_gpu_value_trace", b.parts["cpu_or_gpu_value"],
				"--webgl_version", webglversion,
			)
			b.asset("skp")
		}
	})

	// Upload results to Perf afterward.
	// TODO(kjlubick,borenet) deduplicate this with the logic in perf().
	uploadName := fmt.Sprintf("%s%s%s", PREFIX_UPLOAD, b.jobNameSchema.Sep, b.Name)
	depName := b.Name
	b.addTask(uploadName, func(b *taskBuilder) {
		b.recipeProp("gs_bucket", b.cfg.GsBucketNano)
		b.recipeProps(EXTRA_PROPS)
		// TODO(borenet): I'm not sure why the upload task is
		// using the Perf task name, but I've done this to
		// maintain existing behavior.
		b.Name = depName
		b.kitchenTask("upload_nano_results", OUTPUT_NONE)
		b.Name = uploadName
		b.serviceAccount(b.cfg.ServiceAccountUploadNano)
		b.linuxGceDimensions(MACHINE_TYPE_SMALL)
		b.usesGSUtil()
		b.dep(depName)
	})
}

// perf generates a Perf task.
func (b *jobBuilder) perf() {
	compileTaskName := ""
	// LottieWeb doesn't require anything in Skia to be compiled.
	if !b.extraConfig("LottieWeb") {
		compileTaskName = b.compile()
	}
	doUpload := !b.debug() && b.doUpload()
	b.addTask(b.Name, func(b *taskBuilder) {
		recipe := "perf"
		cas := CAS_PERF
		if b.extraConfig("PathKit") {
			cas = CAS_PATHKIT
			recipe = "perf_pathkit"
		} else if b.extraConfig("CanvasKit") {
			cas = CAS_CANVASKIT
			recipe = "perf_canvaskit"
		} else if b.extraConfig("SkottieTracing") {
			recipe = "perf_skottietrace"
		} else if b.extraConfig("SkottieWASM") {
			recipe = "perf_skottiewasm_lottieweb"
			cas = CAS_SKOTTIE_WASM
		} else if b.extraConfig("LottieWeb") {
			recipe = "perf_skottiewasm_lottieweb"
			cas = CAS_LOTTIE_WEB
		}
		b.recipeProps(EXTRA_PROPS)
		if recipe == "perf" {
			b.nanobenchFlags(doUpload)
		}
		b.kitchenTask(recipe, OUTPUT_PERF)
		b.cas(cas)
		b.swarmDimensions()
		if b.extraConfig("Docker") {
			b.usesDocker()
		}
		if compileTaskName != "" {
			b.dep(compileTaskName)
		}
		b.commonTestPerfAssets()
		b.expiration(20 * time.Hour)
		b.timeout(4 * time.Hour)

		if b.extraConfig("Valgrind") {
			b.timeout(9 * time.Hour)
			b.expiration(48 * time.Hour)
			b.asset("valgrind")
			// Since Valgrind runs on the same bots as the CQ, we restrict Valgrind to a subset of the bots
			// to ensure there are always bots free for CQ tasks.
			b.dimension("valgrind:1")
		} else if b.extraConfig("MSAN") {
			b.timeout(9 * time.Hour)
		} else if b.parts["arch"] == "x86" && b.parts["configuration"] == "Debug" {
			// skia:6737
			b.timeout(6 * time.Hour)
		} else if b.matchOs("Mac11") {
			b.timeout(30 * time.Minute)
		}

		if b.extraConfig("LottieWeb", "SkottieWASM") {
			b.asset("node", "lottie-samples")
		} else if b.matchExtraConfig("SkottieTracing") {
			b.needsLottiesWithAssets()
		} else if b.matchExtraConfig("Skottie") {
			b.asset("lottie-samples")
		}

		if b.matchOs("Android") && b.cpu() {
			b.asset("text_blob_traces")
		}
		b.maybeAddIosDevImage()

		iid := b.internalHardwareLabel()
		if iid != nil {
			b.Spec.Command = append(b.Spec.Command, fmt.Sprintf("internal_hardware_label=%d", *iid))
		}
	})

	// Upload results if necessary.
	if doUpload {
		uploadName := fmt.Sprintf("%s%s%s", PREFIX_UPLOAD, b.jobNameSchema.Sep, b.Name)
		depName := b.Name
		b.addTask(uploadName, func(b *taskBuilder) {
			b.recipeProp("gs_bucket", b.cfg.GsBucketNano)
			b.recipeProps(EXTRA_PROPS)
			// TODO(borenet): I'm not sure why the upload task is
			// using the Perf task name, but I've done this to
			// maintain existing behavior.
			b.Name = depName
			b.kitchenTask("upload_nano_results", OUTPUT_NONE)
			b.Name = uploadName
			b.serviceAccount(b.cfg.ServiceAccountUploadNano)
			b.linuxGceDimensions(MACHINE_TYPE_SMALL)
			b.usesGSUtil()
			b.dep(depName)
		})
	}
}

// presubmit generates a task which runs the presubmit for this repo.
func (b *jobBuilder) presubmit() {
	b.addTask(b.Name, func(b *taskBuilder) {
		b.recipeProps(map[string]string{
			"category":         "cq",
			"patch_gerrit_url": "https://skia-review.googlesource.com",
			"patch_project":    "skia",
			"patch_ref":        specs.PLACEHOLDER_PATCH_REF,
			"reason":           "CQ",
			"repo_name":        "skia",
		})
		b.recipeProps(EXTRA_PROPS)
		b.kitchenTaskNoBundle("run_presubmit", OUTPUT_NONE)
		b.cas(CAS_RUN_RECIPE)
		b.serviceAccount(b.cfg.ServiceAccountCompile)
		// Use MACHINE_TYPE_LARGE because it seems to save time versus
		// MEDIUM and we want presubmit to be fast.
		b.linuxGceDimensions(MACHINE_TYPE_LARGE)
		b.usesGit()
		b.cipd(&specs.CipdPackage{
			Name:    "infra/recipe_bundles/chromium.googlesource.com/chromium/tools/build",
			Path:    "recipe_bundle",
			Version: "git_revision:bb122cd16700ab80bfcbd494b605dd11d4f5902d",
		})
	})
}

// compileWasmGMTests uses a task driver to compile the GMs and unit tests for WebAssembly (WASM).
// We can use the same build for both CPU and GPU tests since the latter requires the code for the
// former anyway.
func (b *jobBuilder) compileWasmGMTests(compileName string) {
	b.addTask(compileName, func(b *taskBuilder) {
		b.attempts(1)
		b.usesDocker()
		b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
		b.usesLUCIAuth()
		b.output("wasm_out")
		b.timeout(60 * time.Minute)
		b.cas(CAS_COMPILE)
		b.serviceAccount(b.cfg.ServiceAccountCompile)
		b.cache(CACHES_DOCKER...)
		// For now, we only have one compile mode: a GPU release mode. This should be sufficient to
		// run CPU, WebGL1, and WebGL2 tests. Debug mode is not needed for the waterfall because,
		// when using puppeteer, stacktraces from exceptions are hard to access, so we do not
		// even bother.
		b.cmd(
			b.taskDriver("compile_wasm_gm_tests", false),
			"--project_id", "skia-swarming-bots",
			"--task_id", specs.PLACEHOLDER_TASK_ID,
			"--task_name", compileName,
			"--out_path", "./wasm_out",
			"--skia_path", "./skia",
			"--work_path", "./cache/docker/wasm_gm",
		)
	})
}

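// Note (added for clarity): the wasm_out directory produced by compileWasmGMTests above is the
// build output that runWasmGMTests (below) consumes via its --built_path flag.
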
// runWasmGMTests uses a task driver to run the previously compiled WASM GMs and unit tests and
// upload the results to Gold.
func (b *jobBuilder) runWasmGMTests() {
	compileTaskName := b.compile()

	b.addTask(b.Name, func(b *taskBuilder) {
		b.attempts(1)
		b.usesNode()
		b.swarmDimensions()
		b.usesLUCIAuth()
		b.cipd(CIPD_PKGS_GOLDCTL)
		b.dep(compileTaskName)
		b.timeout(60 * time.Minute)
		b.cas(CAS_WASM_GM)
		b.serviceAccount(b.cfg.ServiceAccountUploadGM)
		b.cmd(
			b.taskDriver("run_wasm_gm_tests", false),
			"--project_id", "skia-swarming-bots",
			"--task_id", specs.PLACEHOLDER_TASK_ID,
			"--task_name", b.Name,
			"--test_harness_path", "./tools/run-wasm-gm-tests",
			"--built_path", "./wasm_out",
			"--node_bin_path", "./node/node/bin",
			"--resource_path", "./resources",
			"--work_path", "./wasm_gm/work",
			"--gold_ctl_path", "./cipd_bin_packages/goldctl",
			"--gold_hashes_url", b.cfg.GoldHashesURL,
			"--git_commit", specs.PLACEHOLDER_REVISION,
			"--changelist_id", specs.PLACEHOLDER_ISSUE,
			"--patchset_order", specs.PLACEHOLDER_PATCHSET,
			"--tryjob_id", specs.PLACEHOLDER_BUILDBUCKET_BUILD_ID,
			// TODO(kjlubick, nifong): Make these not hard-coded if we change the configs we test on.
			"--webgl_version", "2", // 0 means CPU; this flag controls cpu_or_gpu and extra_config
			"--gold_key", "alpha_type:Premul",
			"--gold_key", "arch:wasm",
			"--gold_key", "browser:Chrome",
			"--gold_key", "color_depth:8888",
			"--gold_key", "config:gles",
			"--gold_key", "configuration:Release",
			"--gold_key", "cpu_or_gpu_value:QuadroP400",
			"--gold_key", "model:Golo",
			"--gold_key", "os:Ubuntu18",
		)
	})
}

// labelAndSavedOutputDir contains a Bazel label (e.g. //tests:some_test) and a //bazel-bin
// subdirectory that should be stored into CAS.
type labelAndSavedOutputDir struct {
	label          string
	savedOutputDir string
}

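// For example, the shorthandToLabel entry {"//gm:hello_bazel_world_android_test", "gm"} below
// causes the //bazel-bin/gm subdirectory to be stored into CAS so that a subsequent task can run
// the precompiled test on a device.
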
// shorthandToLabel maps a shorthand version of a label (which can be an arbitrary string) to an
// absolute Bazel label or "target pattern"
// (https://bazel.build/docs/build#specifying-build-targets).
// We need this mapping because Buildbucket build names cannot contain / or :.
// TODO(borenet/kjlubick): Is there a way to generate a mapping using `bazel query`?
var shorthandToLabel = map[string]labelAndSavedOutputDir{
	"all_tests":                  {"//tests:linux_rbe_tests", ""},
	"core":                       {"//:core", ""},
	"cpu_8888_benchmark_test":    {"//bench:cpu_8888_test", ""},
	"cpu_gms":                    {"//gm:cpu_gm_tests", ""},
	"full_library":               {"//tools:full_build", ""},
	"ganesh_gl":                  {"//:ganesh_gl", ""},
	"hello_bazel_world_test":     {"//gm:hello_bazel_world_test", ""},
	"modules_canvaskit":          {"//modules/canvaskit:canvaskit", ""},
	"modules_canvaskit_js_tests": {"//modules/canvaskit:canvaskit_js_tests", ""},
	"skottie_tool_gpu":           {"//modules/skottie:skottie_tool_gpu", ""},
	"viewer":                     {"//tools/viewer:viewer", ""},
	"decode_everything":          {"//example/external_client:decode_everything", ""},
	"path_combiner":              {"//example/external_client:path_combiner", ""},
	"png_decoder":                {"//example/external_client:png_decoder", ""},
	"shape_text":                 {"//example/external_client:shape_text", ""},
	"svg_with_harfbuzz":          {"//example/external_client:svg_with_harfbuzz", ""},
	"svg_with_primitive":         {"//example/external_client:svg_with_primitive", ""},
	"use_ganesh_gl":              {"//example/external_client:use_ganesh_gl", ""},
	"use_ganesh_vulkan":          {"//example/external_client:use_ganesh_vulkan", ""},
	"use_graphite_native_vulkan": {"//example/external_client:use_graphite_native_vulkan", ""},
	"use_skresources":            {"//example/external_client:use_skresources", ""},
	"write_text_to_png":          {"//example/external_client:write_text_to_png", ""},
	"write_to_pdf":               {"//example/external_client:write_to_pdf", ""},
	"play_skottie":               {"//example/external_client:play_skottie", ""},

	// Currently there is no way to tell Bazel "only test go_test targets", so we must group them
	// under a test_suite.
	//
	// Alternatives:
	//
	// - Use --test_lang_filters, which currently does not work for non-native rules. See
	//   https://github.com/bazelbuild/bazel/issues/12618.
	//
	// - As suggested in the same GitHub issue, "bazel query 'kind(go_test, //...)'" would normally
	//   return the list of labels. However, this fails due to BUILD.bazel files in
	//   //third_party/externals and //bazel/external/vello. We could try either fixing those files
	//   when possible, or adding them to //.bazelignore (either permanently or temporarily inside a
	//   specialized task driver just for Go tests).
	//
	// - Have Gazelle add a tag to all Go tests: go_test(name = "foo_test", tag = "go", ... ). Then,
	//   we can use a wildcard label such as //... and tell Bazel to only test those targets with
	//   said tag, e.g. "bazel test //... --test_tag_filters=go"
	//   (https://bazel.build/reference/command-line-reference#flag--test_tag_filters). Today this
	//   does not work due to the third party and external BUILD.bazel files mentioned in the
	//   previous bullet point.
	"all_go_tests": {"//:all_go_tests", ""},

	// Android tests that run on a device. We store the corresponding //bazel-bin subdirectory into
	// CAS for use by subsequent CI tasks.
	"android_math_test":               {"//tests:android_math_test", "tests"},
	"hello_bazel_world_android_test":  {"//gm:hello_bazel_world_android_test", "gm"},
	"cpu_8888_benchmark_android_test": {"//bench:cpu_8888_android_test", "bench"},
}

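// BazelBuild job names embed these shorthands (e.g. "BazelBuild-<shorthand>-<build config>-linux_x64",
// as used by the b.dep call in bazelTest below), and bazelBuild/bazelTest resolve the shorthand back
// to the label it stands for.
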
// bazelBuild adds a task which builds the specified single-target label (//foo:bar) or
// multi-target label (//foo/...) using Bazel. Depending on the host we run this on, we may
// specify additional Bazel args to build faster. Optionally, a subset of the //bazel-bin directory
// will be stored into CAS for use by subsequent tasks.
func (b *jobBuilder) bazelBuild() {
	shorthand, config, host := b.parts.bazelBuildParts()
	labelAndSavedOutputDir, ok := shorthandToLabel[shorthand]
	if !ok {
		panic("unsupported Bazel label shorthand " + shorthand)
	}

	b.addTask(b.Name, func(b *taskBuilder) {
		bazelCacheDir, ok := map[string]string{
			// We only run builds in GCE.
			"linux_x64":   bazelCacheDirOnGCELinux,
			"windows_x64": bazelCacheDirOnWindows,
		}[host]
		if !ok {
			panic("unknown Bazel cache dir for Bazel host " + host)
		}

		// Bazel git_repository rules shell out to git. Use the version from
		// CIPD to ensure that we're not using an old locally-installed version.
		b.usesGit()
		b.addToPATH("cipd_bin_packages", "cipd_bin_packages/bin")

		cmd := []string{
			b.taskDriver("bazel_build", host != "windows_x64"),
			"--project_id=skia-swarming-bots",
			"--task_id=" + specs.PLACEHOLDER_TASK_ID,
			"--task_name=" + b.Name,
			"--bazel_label=" + labelAndSavedOutputDir.label,
			"--bazel_config=" + config,
			"--bazel_cache_dir=" + bazelCacheDir,
			"--workdir=./skia",
		}

		if labelAndSavedOutputDir.savedOutputDir != "" {
			cmd = append(cmd,
				"--out_path="+OUTPUT_BAZEL,
				// Which //bazel-bin subdirectory to copy into the output dir (flag --out_path).
				"--saved_output_dir="+labelAndSavedOutputDir.savedOutputDir,
			)
		}

		if host == "linux_x64" {
			b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
			b.usesBazel("linux_x64")
			if labelAndSavedOutputDir.savedOutputDir != "" {
				// We assume that builds which require storing a subset of //bazel-bin to CAS are Android
				// builds. We want such builds to use RBE, and we want to download the built top-level
				// artifacts. Also, we need the adb_test runner to be cross-compiled to run on a Raspberry
				// Pi.
				cmd = append(cmd, "--bazel_arg=--config=linux_rbe")
				cmd = append(cmd, "--bazel_arg=--jobs=100")
				cmd = append(cmd, "--bazel_arg=--remote_download_toplevel")
				cmd = append(cmd, "--bazel_arg=--adb_platform=linux_arm64")
			} else {
				// We want all Linux Bazel Builds to use RBE
				cmd = append(cmd, "--bazel_arg=--config=for_linux_x64_with_rbe")
				cmd = append(cmd, "--bazel_arg=--jobs=100")
				cmd = append(cmd, "--bazel_arg=--remote_download_minimal")
			}
		} else if host == "windows_x64" {
			b.dimension(
				"cpu:x86-64-Haswell_GCE",
				"gpu:none",
				fmt.Sprintf("machine_type:%s", MACHINE_TYPE_LARGE),
				fmt.Sprintf("os:%s", DEFAULT_OS_WIN_GCE),
				"pool:Skia",
			)
			b.usesBazel("windows_x64")
			cmd = append(cmd, "--bazel_arg=--experimental_scale_timeouts=2.0")
		} else {
			panic("unsupported Bazel host " + host)
		}
		b.cmd(cmd...)

		b.idempotent()
		b.cas(CAS_BAZEL)
		b.attempts(1)
		b.serviceAccount(b.cfg.ServiceAccountCompile)
		if labelAndSavedOutputDir.savedOutputDir != "" {
			b.output(OUTPUT_BAZEL)
		}
	})
}

type precompiledBazelTestKind int

const (
	precompiledBazelTestNone precompiledBazelTestKind = iota
	precompiledBenchmarkTest
	precompiledGMTest
	precompiledUnitTest
)

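// bazelTest generates a task which runs the given Bazel label via one of several Bazel-aware task
// drivers (e.g. bazel_test_gm, bazel_test_benchmark, bazel_test_precompiled, external_client).
// The precompiled variants do not invoke Bazel at test time; they run a binary which a BazelBuild
// task has already built and stored into CAS.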
func (b *jobBuilder) bazelTest() {
	taskdriverName, shorthand, buildConfig, host, testConfig := b.parts.bazelTestParts()
	labelAndSavedOutputDir, ok := shorthandToLabel[shorthand]
	if !ok {
		panic("unsupported Bazel label shorthand " + shorthand)
	}

	// Expand the shorthand task driver names (kept short to keep job names short) into the full
	// task driver names.
	precompiledKind := precompiledBazelTestNone
	if taskdriverName == "precompiled_benchmark" {
		taskdriverName = "bazel_test_precompiled"
		precompiledKind = precompiledBenchmarkTest
	}
	if taskdriverName == "precompiled_gm" {
		taskdriverName = "bazel_test_precompiled"
		precompiledKind = precompiledGMTest
	}
	if taskdriverName == "precompiled_test" {
		taskdriverName = "bazel_test_precompiled"
		precompiledKind = precompiledUnitTest
	}
	if taskdriverName == "gm" {
		taskdriverName = "bazel_test_gm"
	}
	if taskdriverName == "benchmark" {
		taskdriverName = "bazel_test_benchmark"
	}

	var deviceSpecificBazelConfig *device_specific_configs.Config
	if testConfig != "" {
		if config, ok := device_specific_configs.Configs[testConfig]; ok {
			deviceSpecificBazelConfig = &config
		} else {
			panic(fmt.Sprintf("Unknown device-specific Bazel config: %q", testConfig))
		}
	}

	bazelCacheDir := bazelCacheDirOnGCELinux
	if deviceSpecificBazelConfig != nil && deviceSpecificBazelConfig.Keys["model"] != "GCE" {
		bazelCacheDir = bazelCacheDirOnSkoloLinux
	}

	b.addTask(b.Name, func(b *taskBuilder) {
		cmd := []string{
			b.taskDriver(taskdriverName, false),
			"--project_id=skia-swarming-bots",
			"--task_id=" + specs.PLACEHOLDER_TASK_ID,
			"--task_name=" + b.Name,
			"--workdir=.",
		}

		switch taskdriverName {
		case "canvaskit_gold":
			cmd = append(cmd,
				"--bazel_label="+labelAndSavedOutputDir.label,
				"--bazel_config="+buildConfig,
				"--bazel_cache_dir="+bazelCacheDir,
				"--goldctl_path=./cipd_bin_packages/goldctl",
				"--git_commit="+specs.PLACEHOLDER_REVISION,
				"--changelist_id="+specs.PLACEHOLDER_ISSUE,
				"--patchset_order="+specs.PLACEHOLDER_PATCHSET,
				"--tryjob_id="+specs.PLACEHOLDER_BUILDBUCKET_BUILD_ID)
			b.cipd(CIPD_PKGS_GOLDCTL)
			switch buildConfig {
			case "ck_full_cpu_release_chrome":
				cmd = append(cmd, "--cpu_or_gpu=CPU", "--cpu_or_gpu_value=CPU",
					"--compilation_mode=Release", "--browser=Chrome")
			case "ck_full_webgl2_release_chrome":
				cmd = append(cmd, "--cpu_or_gpu=GPU", "--cpu_or_gpu_value=WebGL2",
					"--compilation_mode=Release", "--browser=Chrome")
			default:
				panic("Gold keys not specified for config " + buildConfig)
			}

		case "cpu_tests":
			cmd = append(cmd,
				"--bazel_label="+labelAndSavedOutputDir.label,
				"--bazel_config="+buildConfig,
				"--bazel_cache_dir="+bazelCacheDir)

		case "toolchain_layering_check":
			cmd = append(cmd,
				"--bazel_label="+labelAndSavedOutputDir.label,
				"--bazel_config="+buildConfig,
				"--bazel_cache_dir="+bazelCacheDir)

		case "bazel_test_precompiled":
			// Compute the file name of the test based on its Bazel label. The file name will be relative to
			// the bazel-bin directory, which we receive a subset of as a CAS input.
			command := strings.ReplaceAll(labelAndSavedOutputDir.label, "//", "")
			command = strings.ReplaceAll(command, ":", "/")
			command = path.Join(OUTPUT_BAZEL, command)

			// The test's working directory will be its runfiles directory, which simulates the behavior of
			// the "bazel run" command.
			commandWorkDir := path.Join(command+".runfiles", "skia")

			cmd = append(cmd,
				"--command="+command,
				"--command_workdir="+commandWorkDir)

			switch precompiledKind {
			case precompiledBenchmarkTest:
				cmd = append(cmd,
					"--kind=benchmark",
					"--git_commit="+specs.PLACEHOLDER_REVISION,
					"--changelist_id="+specs.PLACEHOLDER_ISSUE,
					"--patchset_order="+specs.PLACEHOLDER_PATCHSET)

			case precompiledGMTest:
				cmd = append(cmd,
					"--kind=gm",
					"--bazel_label="+labelAndSavedOutputDir.label,
					"--goldctl_path=./cipd_bin_packages/goldctl",
					"--git_commit="+specs.PLACEHOLDER_REVISION,
					"--changelist_id="+specs.PLACEHOLDER_ISSUE,
					"--patchset_order="+specs.PLACEHOLDER_PATCHSET,
					"--tryjob_id="+specs.PLACEHOLDER_BUILDBUCKET_BUILD_ID)
				b.cipd(CIPD_PKGS_GOLDCTL)

			case precompiledUnitTest:
				cmd = append(cmd, "--kind=unit")

			default:
				panic(fmt.Sprintf("Unknown precompiled test kind: %v", precompiledKind))
			}

		case "bazel_test_gm":
			cmd = append(cmd,
				"--bazel_label="+labelAndSavedOutputDir.label,
				"--bazel_config="+buildConfig,
				"--bazel_cache_dir="+bazelCacheDir,
				"--goldctl_path=./cipd_bin_packages/goldctl",
				"--git_commit="+specs.PLACEHOLDER_REVISION,
				"--changelist_id="+specs.PLACEHOLDER_ISSUE,
				"--patchset_order="+specs.PLACEHOLDER_PATCHSET,
				"--tryjob_id="+specs.PLACEHOLDER_BUILDBUCKET_BUILD_ID)
			b.cipd(CIPD_PKGS_GOLDCTL)

		case "bazel_test_benchmark":
			// Note that these tasks run on Skolo machines.
			cmd = append(cmd,
				"--bazel_label="+labelAndSavedOutputDir.label,
				"--bazel_config="+buildConfig,
				"--bazel_cache_dir="+bazelCacheDirOnSkoloLinux,
				"--git_commit="+specs.PLACEHOLDER_REVISION,
				"--changelist_id="+specs.PLACEHOLDER_ISSUE,
				"--patchset_order="+specs.PLACEHOLDER_PATCHSET)

		case "external_client":
			// For external_client, we want to test how an external user would
			// build using Skia. Therefore, we change to the workspace in that
			// directory and use labels relative to it.
			pathInSkia := "example/external_client"
			label := strings.Replace(labelAndSavedOutputDir.label, pathInSkia, "", -1)
			cmd = append(cmd,
				"--bazel_label="+label,
				"--path_in_skia="+pathInSkia,
				"--bazel_cache_dir="+bazelCacheDir)
			b.usesDocker()

		default:
			panic("Unsupported Bazel taskdriver " + taskdriverName)
		}

		if deviceSpecificBazelConfig != nil {
			cmd = append(cmd, "--device_specific_bazel_config="+deviceSpecificBazelConfig.Name)
		}

		if host == "linux_x64" {
			b.usesBazel("linux_x64")
		} else if host == "linux_arm64" || host == "on_rpi" {
			// The RPIs do not run Bazel directly; they run a precompiled binary instead.
		} else {
			panic("unsupported Bazel host " + host)
		}

		if taskdriverName == "bazel_test_gm" ||
			taskdriverName == "bazel_test_benchmark" ||
			taskdriverName == "bazel_test_precompiled" {
			if taskdriverName == "bazel_test_precompiled" {
				// Depend on the BazelBuild task which precompiled this test and stored it to CAS.
				b.dep(fmt.Sprintf("BazelBuild-%s-%s-linux_x64", shorthand, buildConfig))
			}

			// Set dimensions.
			if deviceSpecificBazelConfig == nil {
				log.Fatalf("While processing job %q: task driver %q requires a device-specific Bazel config.", b.Name, taskdriverName)
			}
			if len(deviceSpecificBazelConfig.SwarmingDimensions) == 0 {
				log.Fatalf("While processing job %q: device-specific Bazel config %q does not provide Swarming dimensions.", b.Name, deviceSpecificBazelConfig.Name)
			}
			var dimensions []string
			for name, value := range deviceSpecificBazelConfig.SwarmingDimensions {
				dimensions = append(dimensions, fmt.Sprintf("%s:%s", name, value))
			}
			dimensions = append(dimensions, fmt.Sprintf("pool:%s", b.cfg.Pool))
			sort.Strings(dimensions)
			b.dimension(dimensions...)
		} else {
			b.linuxGceDimensions(MACHINE_TYPE_MEDIUM)
		}

		b.cmd(cmd...)
		b.idempotent()
		b.cas(CAS_BAZEL)
		b.attempts(1)
		b.serviceAccount(b.cfg.ServiceAccountCompile)
	})
}