#!/usr/bin/env python3
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.


"""Top-level presubmit script for Skia.

See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into depot_tools.
"""

import difflib
import os
import re
import subprocess
import sys


RELEASE_NOTES_DIR = 'relnotes'
RELEASE_NOTES_FILE_NAME = 'RELEASE_NOTES.md'
RELEASE_NOTES_README = '//relnotes/README.md'

GOLD_TRYBOT_URL = 'https://gold.skia.org/search?issue='

SERVICE_ACCOUNT_SUFFIX = [
    '@%s.iam.gserviceaccount.com' % project for project in [
        'skia-buildbots.google.com', 'skia-swarming-bots', 'skia-public',
        'skia-corp.google.com', 'chops-service-accounts']]

USE_PYTHON3 = True


def _CheckChangeHasEol(input_api, output_api, source_file_filter=None):
  """Checks that files end with at least one \n (LF)."""
  eof_files = []
  for f in input_api.AffectedSourceFiles(source_file_filter):
    contents = input_api.ReadFile(f, 'rb')
    # Check that the file ends in at least one newline character.
    if len(contents) > 1 and contents[-1:] != '\n':
      eof_files.append(f.LocalPath())

  if eof_files:
    return [output_api.PresubmitPromptWarning(
        'These files should end in a newline character:',
        items=eof_files)]
  return []


def _JsonChecks(input_api, output_api):
  """Run checks on any modified json files."""
  failing_files = []
  for affected_file in input_api.AffectedFiles(None):
    affected_file_path = affected_file.LocalPath()
    is_json = affected_file_path.endswith('.json')
    is_metadata = (affected_file_path.startswith('site/') and
                   affected_file_path.endswith('/METADATA'))
    if is_json or is_metadata:
      try:
        input_api.json.load(open(affected_file_path, 'r'))
      except ValueError as ve:
        failing_files.append(f'{affected_file_path}\t\t{ve}')

  results = []
  if failing_files:
    results.append(
        output_api.PresubmitError(
            'The following files contain invalid json:\n%s\n' %
            '\n'.join(failing_files)))
  return results


def _IfDefChecks(input_api, output_api):
  """Ensures if/ifdef are not before includes. See skbug/3362 for details."""
  comment_block_start_pattern = re.compile('^\s*\/\*.*$')
  comment_block_middle_pattern = re.compile('^\s+\*.*')
  comment_block_end_pattern = re.compile('^\s+\*\/.*$')
  single_line_comment_pattern = re.compile('^\s*//.*$')
  def is_comment(line):
    return (comment_block_start_pattern.match(line) or
            comment_block_middle_pattern.match(line) or
            comment_block_end_pattern.match(line) or
            single_line_comment_pattern.match(line))

  empty_line_pattern = re.compile('^\s*$')
  def is_empty_line(line):
    return empty_line_pattern.match(line)

  failing_files = []
  for affected_file in input_api.AffectedSourceFiles(None):
    affected_file_path = affected_file.LocalPath()
    if affected_file_path.endswith('.cpp') or affected_file_path.endswith('.h'):
      f = open(affected_file_path)
      for line in f:
        if is_comment(line) or is_empty_line(line):
          continue
        # The below will be the first real line after comments and newlines.
        if line.startswith('#if 0 '):
          pass
        elif line.startswith('#if ') or line.startswith('#ifdef '):
          failing_files.append(affected_file_path)
        break

  results = []
  if failing_files:
    results.append(
        output_api.PresubmitError(
            'The following files have #if or #ifdef before includes:\n%s\n\n'
            'See https://bug.skia.org/3362 for why this should be fixed.' %
            '\n'.join(failing_files)))
  return results


def _CopyrightChecks(input_api, output_api, source_file_filter=None):
  results = []
  year_pattern = r'\d{4}'
  year_range_pattern = r'%s(-%s)?' % (year_pattern, year_pattern)
  years_pattern = r'%s(,%s)*,?' % (year_range_pattern, year_range_pattern)
  copyright_pattern = (
      r'Copyright (\([cC]\) )?%s \w+' % years_pattern)

  for affected_file in input_api.AffectedSourceFiles(source_file_filter):
    if ('third_party/' in affected_file.LocalPath() or
        'tests/sksl/' in affected_file.LocalPath() or
        'bazel/rbe/' in affected_file.LocalPath() or
        'bazel/external/' in affected_file.LocalPath() or
        'bazel/exporter/interfaces/mocks/' in affected_file.LocalPath() or
        affected_file.LocalPath().endswith('gen.go')):
      continue
    contents = input_api.ReadFile(affected_file, 'rb')
    if not re.search(copyright_pattern, contents):
      results.append(output_api.PresubmitError(
          '%s is missing a correct copyright header.' % affected_file))
  return results
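
# Illustrative examples only (not headers from real files in the tree): lines
# like the following satisfy copyright_pattern above, while a header without a
# four-digit year would be reported by _CopyrightChecks.
#
#   Copyright 2023 Google LLC
#   Copyright (C) 2011-2013,2015 Skia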


def _InfraTests(input_api, output_api):
  """Run the infra tests."""
  results = []
  if not any(f.LocalPath().startswith('infra')
             for f in input_api.AffectedFiles()):
    return results

  cmd = ['python3', os.path.join('infra', 'bots', 'infra_tests.py')]
  try:
    subprocess.check_output(cmd)
  except subprocess.CalledProcessError as e:
    results.append(output_api.PresubmitError(
        '`%s` failed:\n%s' % (' '.join(cmd), e.output)))
  return results


def _CheckGNFormatted(input_api, output_api):
  """Make sure any .gn files we're changing have been formatted."""
  files = []
  for f in input_api.AffectedFiles(include_deletes=False):
    if (f.LocalPath().endswith('.gn') or
        f.LocalPath().endswith('.gni')):
      files.append(f)
  if not files:
    return []

  cmd = ['python3', os.path.join('bin', 'fetch-gn')]
  try:
    subprocess.check_output(cmd)
  except subprocess.CalledProcessError as e:
    return [output_api.PresubmitError(
        '`%s` failed:\n%s' % (' '.join(cmd), e.output))]

  results = []
  for f in files:
    gn = 'gn.exe' if 'win32' in sys.platform else 'gn'
    gn = os.path.join(input_api.PresubmitLocalPath(), 'bin', gn)
    cmd = [gn, 'format', '--dry-run', f.LocalPath()]
    try:
      subprocess.check_output(cmd)
    except subprocess.CalledProcessError:
      fix = 'bin/gn format ' + f.LocalPath()
      results.append(output_api.PresubmitError(
          '`%s` failed, try\n\t%s' % (' '.join(cmd), fix)))
  return results


def _CheckGitConflictMarkers(input_api, output_api):
  pattern = input_api.re.compile('^(?:<<<<<<<|>>>>>>>) |^=======$')
  results = []
  for f in input_api.AffectedFiles():
    for line_num, line in f.ChangedContents():
      if f.LocalPath().endswith('.md'):
        # First-level headers in markdown look a lot like version control
        # conflict markers. http://daringfireball.net/projects/markdown/basics
        continue
      if pattern.match(line):
        results.append(
            output_api.PresubmitError(
                'Git conflict markers found in %s:%d %s' % (
                    f.LocalPath(), line_num, line)))
  return results


def _CheckIncludesFormatted(input_api, output_api):
  """Make sure #includes in files we're changing have been formatted."""
  files = [str(f) for f in input_api.AffectedFiles() if f.Action() != 'D']
  cmd = ['python3',
         'tools/rewrite_includes.py',
         '--dry-run'] + files
  if 0 != subprocess.call(cmd):
    return [output_api.PresubmitError('`%s` failed' % ' '.join(cmd))]
  return []


class _WarningsAsErrors():
  def __init__(self, output_api):
    self.output_api = output_api
    self.old_warning = None
  def __enter__(self):
    self.old_warning = self.output_api.PresubmitPromptWarning
    self.output_api.PresubmitPromptWarning = self.output_api.PresubmitError
    return self.output_api
  def __exit__(self, ex_type, ex_value, ex_traceback):
    self.output_api.PresubmitPromptWarning = self.old_warning
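
# Sketch of how the context manager above is used (see _CommonChecks below):
# while the with-block is active, checks that would normally produce
# PresubmitPromptWarning results produce PresubmitError instead, so those
# warnings become blocking.
#
#   with _WarningsAsErrors(output_api):
#     results.extend(input_api.canned_checks.CheckChangeHasNoCR(
#         input_api, output_api, source_file_filter=sources))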


def _RegenerateAllExamplesCPP(input_api, output_api):
  """Regenerates all_examples.cpp if an example was added or deleted."""
  if not any(f.LocalPath().startswith('docs/examples/')
             for f in input_api.AffectedFiles()):
    return []
  command_str = 'tools/fiddle/make_all_examples_cpp.py'
  cmd = ['python3', command_str, '--print-diff']
  proc = subprocess.run(cmd, capture_output=True)
  if proc.returncode != 0:
    return [output_api.PresubmitError('`%s` failed' % ' '.join(cmd))]

  results = []
  diff_output = proc.stdout.decode('utf-8').strip()
  if diff_output:
    results += [output_api.PresubmitError(
        'Diffs found after running "%s":\n\n%s\n'
        'Please commit or discard the above changes.' % (
            command_str,
            diff_output,
        )
    )]
  return results


def _CheckIncludeForOutsideDeps(input_api, output_api):
  """The include directory should consist of only public APIs.

  This check makes sure we don't have anything in the include directory
  depend on outside folders. If we had include/core/SkDonut.h depend on
  src/core/SkPastry.h, then clients would have transitive access to the
  private SkPastry class and any symbols in there, even if they don't
  directly include src/core/SkPastry.h (which can be detected/blocked
  with build systems like GN or Bazel). By keeping include/ self-contained,
  we keep a tighter grip on our public API and make Skia easier to distribute
  (one can ship a .a/.so and a single directory of .h files).
  """
  banned_includes = [
      input_api.re.compile(r'#\s*include\s+("src/.*)'),
      input_api.re.compile(r'#\s*include\s+("tools/.*)'),
  ]
  file_filter = lambda x: (x.LocalPath().startswith('include/'))
  errors = []
  for affected_file in input_api.AffectedSourceFiles(file_filter):
    affected_filepath = affected_file.LocalPath()
    for (line_num, line) in affected_file.ChangedContents():
      for re in banned_includes:
        match = re.search(line)
        if match:
          errors.append(('%s:%s: include/* should only depend on other things in include/*. ' +
                         'Please remove #include of %s, perhaps making it a forward-declare.') % (
                             affected_filepath, line_num, match.group(1)))

  if errors:
    return [output_api.PresubmitError('\n'.join(errors))]
  return []
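
# For illustration, a changed line like the following in a header under
# include/ would be reported by _CheckIncludeForOutsideDeps (SkPastry.h is the
# hypothetical header from the docstring above); the suggested fix is a
# forward declaration, or moving the dependency out of include/.
#
#   #include "src/core/SkPastry.h"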


def _CheckExamplesForPrivateAPIs(input_api, output_api):
  """We only want our checked-in examples (aka fiddles) to show public API."""
  banned_includes = [
      input_api.re.compile(r'#\s*include\s+("src/.*)'),
      input_api.re.compile(r'#\s*include\s+("include/private/.*)'),
  ]
  file_filter = lambda x: (x.LocalPath().startswith('docs/examples/'))
  errors = []
  for affected_file in input_api.AffectedSourceFiles(file_filter):
    affected_filepath = affected_file.LocalPath()
    for (line_num, line) in affected_file.ChangedContents():
      for re in banned_includes:
        match = re.search(line)
        if match:
          errors.append('%s:%s: Fiddles should not use private/internal API like %s.' % (
              affected_filepath, line_num, match.group(1)))

  if errors:
    return [output_api.PresubmitError('\n'.join(errors))]
  return []


def _CheckGeneratedBazelBUILDFiles(input_api, output_api):
  if 'win32' in sys.platform:
    # TODO(crbug.com/skia/12541): Remove when Bazel builds work on Windows.
    # Note: `make` is not installed on Windows by default.
    return []
  if 'darwin' in sys.platform:
    # This takes too long on Mac with default settings. Probably due to sandboxing.
    return []
  files = []
  for affected_file in input_api.AffectedFiles(include_deletes=True):
    affected_file_path = affected_file.LocalPath()
    if (affected_file_path.endswith('.go') or
        affected_file_path.endswith('BUILD.bazel')):
      files.append(affected_file)
  if not files:
    return []
  return _RunCommandAndCheckDiff(
      output_api, ['make', '-C', 'bazel', 'generate_go'], files
  )


def _CheckBazelBUILDFiles(input_api, output_api):
  """Makes sure our BUILD.bazel files are compatible with G3."""
  results = []
  for affected_file in input_api.AffectedFiles(include_deletes=False):
    affected_file_path = affected_file.LocalPath()
    is_bazel = affected_file_path.endswith('BUILD.bazel')
    # This list lines up with the one in autoroller_lib.py (see G3).
    excluded_paths = ["infra/", "bazel/rbe/", "bazel/external/", "bazel/common_config_settings/",
                      "modules/canvaskit/go/", "experimental/", "bazel/platform", "third_party/",
                      "tests/", "resources/", "bazel/deps_parser/", "bazel/exporter_tool/",
                      "tools/gpu/gl/interface/", "bazel/utils/", "include/config/",
                      "bench/", "example/external_client/"]
    is_excluded = any(affected_file_path.startswith(n) for n in excluded_paths)
    if is_bazel and not is_excluded:
      with open(affected_file_path, 'r') as file:
        contents = file.read()
        if 'licenses(["notice"])' not in contents:
          results.append(output_api.PresubmitError(
              ('%s needs to have\nlicenses(["notice"])\nimmediately after ' +
               'the load() calls to comply with G3 policies.') % affected_file_path
          ))
        if 'cc_library(' in contents and '"skia_cc_library"' not in contents:
          results.append(output_api.PresubmitError(
              ('%s needs to load skia_cc_library from skia_rules.bzl instead of using the ' +
               'native one. This allows us to build differently for G3.\n' +
               'Add "skia_cc_library" to load("//bazel:skia_rules.bzl", ...)')
              % affected_file_path
          ))
        if 'default_applicable_licenses' not in contents:
          # See https://opensource.google/documentation/reference/thirdparty/new_license_rules
          results.append(output_api.PresubmitError(
              ('%s needs to have\npackage(default_applicable_licenses = ["//:license"])\n' +
               'to comply with G3 policies') % affected_file_path
          ))
  return results
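
# A minimal sketch (not a real file from the tree) of a BUILD.bazel preamble
# that satisfies the three checks above; the ordering follows the error
# messages in _CheckBazelBUILDFiles, and any targets below the preamble would
# be hypothetical:
#
#   load("//bazel:skia_rules.bzl", "skia_cc_library")
#
#   licenses(["notice"])
#
#   package(default_applicable_licenses = ["//:license"])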


def _RunCommandAndCheckDiff(output_api, command, files_to_check):
  """Run an arbitrary command. Fail if it produces any diffs on the given files."""
  prev_contents = {}
  for file in files_to_check:
    # NewContents just reads the file.
    prev_contents[file] = file.NewContents()

  command_str = ' '.join(command)
  results = []

  try:
    subprocess.check_output(
        command,
        stderr=subprocess.STDOUT, encoding='utf-8')
  except subprocess.CalledProcessError as e:
    results += [output_api.PresubmitError(
        'Command "%s" returned non-zero exit code %d. Output: \n\n%s' % (
            command_str,
            e.returncode,
            e.output,
        )
    )]

  # Compare the new content to the previous content.
  diffs = []
  for file, prev_content in prev_contents.items():
    new_content = file.NewContents(flush_cache=True)
    if new_content != prev_content:
      path = file.LocalPath()
      diff = difflib.unified_diff(prev_content, new_content, path, path, lineterm='')
      diffs.append('\n'.join(diff))

  if diffs:
    results += [output_api.PresubmitError(
        'Diffs found after running "%s":\n\n%s\n\n'
        'Please commit or discard the above changes.' % (
            command_str,
            '\n'.join(diffs),
        )
    )]

  return results


def _CheckGNIGenerated(input_api, output_api):
  """Ensures that the generated *.gni files are current.

  The Bazel project files are authoritative and some *.gni files are
  generated from them using the exporter_tool. This check ensures they
  are still current.
  """
  if 'win32' in sys.platform:
    # TODO(crbug.com/skia/12541): Remove when Bazel builds work on Windows.
    # Note: `make` is not installed on Windows by default.
    return [
        output_api.PresubmitNotifyResult(
            'Skipping Bazel=>GNI export check on Windows (unsupported platform).'
        )
    ]
  if 'darwin' in sys.platform:
    # This takes too long on Mac with default settings. Probably due to sandboxing.
    return []
  files = []
  for affected_file in input_api.AffectedFiles(include_deletes=True):
    affected_file_path = affected_file.LocalPath()
    if affected_file_path.endswith('BUILD.bazel') or affected_file_path.endswith('.gni'):
      files.append(affected_file)
  # Generate GNI files and verify no changes.
  if not files:
    # No Bazel build files changed.
    return []
  return _RunCommandAndCheckDiff(
      output_api, ['make', '-C', 'bazel', 'generate_gni'], files
  )


def _CheckBuildifier(input_api, output_api):
  """Runs Buildifier and fails on linting errors, or if it produces any diffs.

  This check only runs if the affected files include any WORKSPACE, BUILD,
  BUILD.bazel or *.bzl files.
  """
  files = []
  # Please keep the below exclude patterns in sync with those in the //:buildifier rule definition.
  for affected_file in input_api.AffectedFiles(include_deletes=False):
    affected_file_path = affected_file.LocalPath()
    if affected_file_path.endswith('BUILD.bazel') or affected_file_path.endswith('.bzl'):
      if not affected_file_path.endswith('public.bzl') and \
          not affected_file_path.endswith('go_repositories.bzl') and \
          not "bazel/rbe/gce_linux/" in affected_file_path and \
          not affected_file_path.startswith("third_party/externals/") and \
          not "node_modules/" in affected_file_path:  # Skip generated files.
        files.append(affected_file)
  if not files:
    return []
  try:
    subprocess.check_output(
        ['buildifier', '--version'],
        stderr=subprocess.STDOUT)
  except:
    return [output_api.PresubmitNotifyResult(
        'Skipping buildifier check because it is not on PATH. \n' +
        'You can download it from https://github.com/bazelbuild/buildtools/releases')]

  return _RunCommandAndCheckDiff(
      # Please keep the below arguments in sync with those in the //:buildifier rule definition.
      output_api, [
          'buildifier',
          '--mode=fix',
          '--lint=fix',
          '--warnings',
          ','.join([
              '-native-android',
              '-native-cc',
              '-native-py',
          ])
      ] + [f.LocalPath() for f in files], files)


def _CheckBannedAPIs(input_api, output_api):
  """Check source code for functions, packages, and symbols that should not be used."""

  # A list of tuples of a regex to match an API and a suggested replacement for
  # that API. There is an optional third parameter for files which *can* use this
  # API without warning.
  banned_replacements = [
    (r'std::stof\(', 'std::strtof(), which does not throw'),
    (r'std::stod\(', 'std::strtod(), which does not throw'),
    (r'std::stold\(', 'std::strtold(), which does not throw'),
    # go/cstyle#Disallowed_Stdlib
    (r'std::barrier', ''),
    (r'std::condition_variable', ''),
    (r'std::counting_semaphore', ''),
    (r'std::future', ''),
    (r'std::jthread', ''),
    (r'std::latch', ''),
    (r'std::mutex', 'SkMutex'),
    (r'std::shared_mutex', 'SkSharedMutex'),
    (r'std::stop_token', ''),
    (r'std::thread', '', ['^tests/']),

    # We used to have separate symbols for this, but coalesced them to make the
    # Bazel build easier.
    (r'GR_TEST_UTILS', 'GPU_TEST_UTILS'),
    (r'GRAPHITE_TEST_UTILS', 'GPU_TEST_UTILS'),

    # This form of multi line string can unintentionally cause Skia to ship with
    # extraneous spaces and newlines in its SkSL (or generated) code, which slightly
    # increases code size and parse time. Instead, use normal quotes and C++'s
    # auto-concatenation
    #     "this string"
    #     "and this"
    #     "string will be joined without extra spaces"
    (r'R"\(', 'implied string concatenation',
     ['^bench/',
      '^docs/',
      '^gm/',
      '^modules/skottie/tests/',
      '^src/sksl/lex/Main.cpp',
      '^tests/',
      '^tools/']
    ),

    # Our Bazel rules have special copies of our cc_library rules with GPU_TEST_UTILS
    # set. If GPU_TEST_UTILS is used outside of those files in Skia proper, the build
    # will break/crash in mysterious ways (because files may get compiled in multiple
    # conflicting ways as a result of the define being inconsistently set).
    (r'GPU_TEST_UTILS', 'use only in GPU code and tests',
     ['^include/core/SkTypes.h',
      '^include/gpu/',
      '^include/private/gpu/',
      '^src/gpu/ganesh',
      '^src/gpu/graphite',
      '^tests/',
      '^tools/']
    ),
  ]

  # These defines are either there or not, and using them with just an #if is a
  # subtle, frustrating bug.
  existence_defines = ['SK_GANESH', 'SK_GRAPHITE', 'SK_GL', 'SK_VULKAN', 'SK_DAWN', 'SK_METAL',
                       'SK_DIRECT3D', 'SK_DEBUG', 'GPU_TEST_UTILS']
  for d in existence_defines:
    banned_replacements.append(('#if {}'.format(d),
                                '#if defined({})'.format(d)))
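  # For illustration: a changed line containing '#if SK_GANESH' is reported
  # with the suggestion to write '#if defined(SK_GANESH)' instead, since the
  # preprocessor silently treats an undefined identifier as 0.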
  compiled_replacements = []
  for rep in banned_replacements:
    exceptions = []
    if len(rep) == 3:
      (re, replacement, exceptions) = rep
    else:
      (re, replacement) = rep

    compiled_re = input_api.re.compile(re)
    compiled_exceptions = [input_api.re.compile(exc) for exc in exceptions]
    compiled_replacements.append(
        (compiled_re, replacement, compiled_exceptions))

  errors = []
  file_filter = lambda x: (x.LocalPath().endswith('.h') or
                           x.LocalPath().endswith('.cpp') or
                           x.LocalPath().endswith('.cc') or
                           x.LocalPath().endswith('.m') or
                           x.LocalPath().endswith('.mm'))
  for affected_file in input_api.AffectedSourceFiles(file_filter):
    affected_filepath = affected_file.LocalPath()
    for (line_num, line) in affected_file.ChangedContents():
      for (re, replacement, exceptions) in compiled_replacements:
        match = re.search(line)
        if match:
          for exc in exceptions:
            if exc.search(affected_filepath):
              break
          else:
            errors.append('%s:%s: Instead of %s, please use %s.' % (
                affected_filepath, line_num, match.group(), replacement))

  if errors:
    return [output_api.PresubmitError('\n'.join(errors))]

  return []


def _CheckDEPS(input_api, output_api):
  """If DEPS was modified, run the deps_parser to update bazel/deps.bzl"""
  files = []
  for affected_file in input_api.AffectedFiles(include_deletes=False):
    affected_file_path = affected_file.LocalPath()
    if affected_file_path.endswith('DEPS') or affected_file_path.endswith('deps.bzl'):
      files.append(affected_file)
  if not files:
    return []
  try:
    subprocess.check_output(
        ['bazelisk', '--version'],
        stderr=subprocess.STDOUT)
  except:
    return [output_api.PresubmitNotifyResult(
        'Skipping DEPS check because bazelisk is not on PATH. \n' +
        'You can download it from https://github.com/bazelbuild/bazelisk/releases/tag/v1.14.0')]

  return _RunCommandAndCheckDiff(
      output_api, ['bazelisk', 'run', '//bazel/deps_parser'], files
  )


def _CommonChecks(input_api, output_api):
  """Presubmit checks common to upload and commit."""
  results = []
  sources = lambda x: (x.LocalPath().endswith('.h') or
                       x.LocalPath().endswith('.py') or
                       x.LocalPath().endswith('.sh') or
                       x.LocalPath().endswith('.m') or
                       x.LocalPath().endswith('.mm') or
                       x.LocalPath().endswith('.go') or
                       x.LocalPath().endswith('.c') or
                       x.LocalPath().endswith('.cc') or
                       x.LocalPath().endswith('.cpp'))
  results.extend(_CheckChangeHasEol(
      input_api, output_api, source_file_filter=sources))
  with _WarningsAsErrors(output_api):
    results.extend(input_api.canned_checks.CheckChangeHasNoCR(
        input_api, output_api, source_file_filter=sources))
    results.extend(input_api.canned_checks.CheckChangeHasNoStrayWhitespace(
        input_api, output_api, source_file_filter=sources))
  results.extend(_JsonChecks(input_api, output_api))
  results.extend(_IfDefChecks(input_api, output_api))
  results.extend(_CopyrightChecks(input_api, output_api,
                                  source_file_filter=sources))
  results.extend(_CheckIncludesFormatted(input_api, output_api))
  results.extend(_CheckGNFormatted(input_api, output_api))
  results.extend(_CheckGitConflictMarkers(input_api, output_api))
  results.extend(_RegenerateAllExamplesCPP(input_api, output_api))
  results.extend(_CheckExamplesForPrivateAPIs(input_api, output_api))
  results.extend(_CheckIncludeForOutsideDeps(input_api, output_api))
  results.extend(_CheckBazelBUILDFiles(input_api, output_api))
  results.extend(_CheckBannedAPIs(input_api, output_api))
  return results


def CheckChangeOnUpload(input_api, output_api):
  """Presubmit checks for the change on upload."""
  results = []
  results.extend(_CommonChecks(input_api, output_api))
  # Run on upload, not commit, since the presubmit bot apparently doesn't have
  # coverage or Go installed.
  results.extend(_InfraTests(input_api, output_api))
  results.extend(_CheckTopReleaseNotesChanged(input_api, output_api))
  results.extend(_CheckReleaseNotesForPublicAPI(input_api, output_api))
  # Buildifier might not be on the CI machines.
  results.extend(_CheckBuildifier(input_api, output_api))
  # We don't want this to block the CQ (for now).
  results.extend(_CheckDEPS(input_api, output_api))
  # Bazelisk is not yet included in the Presubmit job.
  results.extend(_CheckGeneratedBazelBUILDFiles(input_api, output_api))
  results.extend(_CheckGNIGenerated(input_api, output_api))
  return results


class CodeReview(object):
  """Abstracts which codereview tool is used for the specified issue."""

  def __init__(self, input_api):
    self._issue = input_api.change.issue
    self._gerrit = input_api.gerrit

  def GetOwnerEmail(self):
    return self._gerrit.GetChangeOwner(self._issue)

  def GetSubject(self):
    return self._gerrit.GetChangeInfo(self._issue)['subject']

  def GetDescription(self):
    return self._gerrit.GetChangeDescription(self._issue)

  def GetReviewers(self):
    code_review_label = (
        self._gerrit.GetChangeInfo(self._issue)['labels']['Code-Review'])
    return [r['email'] for r in code_review_label.get('all', [])]

  def GetApprovers(self):
    approvers = []
    code_review_label = (
        self._gerrit.GetChangeInfo(self._issue)['labels']['Code-Review'])
    for m in code_review_label.get('all', []):
      if m.get("value") == 1:
        approvers.append(m["email"])
    return approvers


def _CheckReleaseNotesForPublicAPI(input_api, output_api):
  """Checks to see if a release notes file is added or edited with public API changes."""
  results = []
  public_api_changed = False
  release_file_changed = False
  for affected_file in input_api.AffectedFiles():
    affected_file_path = affected_file.LocalPath()
    file_path, file_ext = os.path.splitext(affected_file_path)
    # We only care about files that end in .h and are under the top-level
    # include dir, but not include/private.
    if (file_ext == '.h' and
        file_path.split(os.path.sep)[0] == 'include' and
        'private' not in file_path):
      public_api_changed = True
    elif os.path.dirname(file_path) == RELEASE_NOTES_DIR:
      release_file_changed = True

  if public_api_changed and not release_file_changed:
    results.append(output_api.PresubmitPromptWarning(
        'If this change affects a client API, please add a new summary '
        'file in the %s directory. More information can be found in '
        '%s.' % (RELEASE_NOTES_DIR, RELEASE_NOTES_README)))
  return results


def _CheckTopReleaseNotesChanged(input_api, output_api):
  """Warns if the top level release notes file was changed.

  The top level file is now auto-edited, and new release notes should
  be added to the RELEASE_NOTES_DIR directory"""
  results = []
  top_relnotes_changed = False
  release_file_changed = False
  for affected_file in input_api.AffectedFiles():
    affected_file_path = affected_file.LocalPath()
    file_path, file_ext = os.path.splitext(affected_file_path)
    if affected_file_path == RELEASE_NOTES_FILE_NAME:
      top_relnotes_changed = True
    elif os.path.dirname(file_path) == RELEASE_NOTES_DIR:
      release_file_changed = True
  # When relnotes_util is run it will modify RELEASE_NOTES_FILE_NAME
  # and delete the individual note files in RELEASE_NOTES_DIR.
  # So, if both paths are modified do not emit a warning.
  if top_relnotes_changed and not release_file_changed:
    results.append(output_api.PresubmitPromptWarning(
        'Do not edit %s directly. %s is automatically edited during the '
        'release process. Release notes should be added as new files in '
        'the %s directory. More information can be found in %s.'
        % (RELEASE_NOTES_FILE_NAME,
           RELEASE_NOTES_FILE_NAME,
           RELEASE_NOTES_DIR,
           RELEASE_NOTES_README)))
  return results


def PostUploadHook(gerrit, change, output_api):
  """git cl upload will call this hook after the issue is created/modified.

  This hook does the following:
  * Adds 'No-Try: true' if the CL contains only docs changes.
  """
  if not change.issue:
    return []

  # Skip PostUploadHooks for all auto-commit service account bots. New
  # patchsets (caused by PostUploadHooks) invalidate the CQ+2 vote from
  # the "--use-commit-queue" flag to "git cl upload".
  for suffix in SERVICE_ACCOUNT_SUFFIX:
    if change.author_email.endswith(suffix):
      return []

  results = []
  at_least_one_docs_change = False
  all_docs_changes = True
  for affected_file in change.AffectedFiles():
    affected_file_path = affected_file.LocalPath()
    file_path, _ = os.path.splitext(affected_file_path)
    if 'site' == file_path.split(os.path.sep)[0]:
      at_least_one_docs_change = True
    else:
      all_docs_changes = False
    if at_least_one_docs_change and not all_docs_changes:
      break

  footers = change.GitFootersFromDescription()
  description_changed = False

  # If the change includes only doc changes then add No-Try: true in the
  # CL's description if it does not exist yet.
  if all_docs_changes and 'true' not in footers.get('No-Try', []):
    description_changed = True
    change.AddDescriptionFooter('No-Try', 'true')
    results.append(
        output_api.PresubmitNotifyResult(
            'This change has only doc changes. Automatically added '
            '\'No-Try: true\' to the CL\'s description'))

  # If the description has changed, update it.
  if description_changed:
    gerrit.UpdateDescription(
        change.FullDescriptionText(), change.issue)

  return results


def CheckChangeOnCommit(input_api, output_api):
  """Presubmit checks for the change on commit."""
  results = []
  results.extend(_CommonChecks(input_api, output_api))
  # Checks for the presence of 'DO NOT''SUBMIT' in CL description and in
  # content of files.
  results.extend(
      input_api.canned_checks.CheckDoNotSubmit(input_api, output_api))
  return results