• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1#!/usr/bin/env vpython3
2
3# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
4#
5# Use of this source code is governed by a BSD-style license
6# that can be found in the LICENSE file in the root of the source
7# tree. An additional intellectual property rights grant can be found
8# in the file PATENTS.  All contributing project authors may
9# be found in the AUTHORS file in the root of the source tree.
10
11import json
12import os
13import re
14import sys
15from collections import defaultdict
16from contextlib import contextmanager
17
# Runs PRESUBMIT.py in py3 mode by git cl presubmit.
USE_PYTHON3 = True

# Files and directories that are *skipped* by cpplint in the presubmit script.
CPPLINT_EXCEPTIONS = [
    'api/video_codecs/video_decoder.h',
    'common_types.cc',
    'common_types.h',
    'examples/objc',
    'media/base/stream_params.h',
    'media/base/video_common.h',
    'modules/audio_coding',
    'modules/audio_device',
    'modules/audio_processing',
    'modules/desktop_capture',
    'modules/include/module_common_types.h',
    'modules/utility',
    'modules/video_capture',
    'p2p/base/pseudo_tcp.cc',
    'p2p/base/pseudo_tcp.h',
    'PRESUBMIT.py',
    'presubmit_test_mocks.py',
    'presubmit_test.py',
    'rtc_base',
    'sdk/android/src/jni',
    'sdk/objc',
    'system_wrappers',
    'test',
    'tools_webrtc',
    'voice_engine',
]

# These filters will always be removed, even if the caller specifies a filter
# set, as they are problematic or broken in some way.
#
# Justifications for each filter:
# - build/c++11         : Rvalue ref checks are unreliable (false positives),
#                         include file and feature blocklists are
#                         google3-specific.
# - runtime/references  : Mutable references are not banned by the Google
#                         C++ style guide anymore (starting from May 2020).
# - whitespace/operators: Same as above (doesn't seem sufficient to eliminate
#                         all move-related errors).
DISABLED_LINT_FILTERS = [
    '-build/c++11',
    '-runtime/references',
    '-whitespace/operators',
]

# List of directories of "supported" native APIs. That means changes to headers
# will be done in a compatible way following this scheme:
# 1. Non-breaking changes are made.
# 2. The old APIs are marked as deprecated (with comments).
# 3. Deprecation is announced to discuss-webrtc@googlegroups.com and
#    webrtc-users@google.com (internal list).
# 4. (later) The deprecated APIs are removed.
NATIVE_API_DIRS = (
    'api',  # All subdirectories of api/ are included as well.
    'media/base',
    'media/engine',
    'modules/audio_device/include',
    'pc',
)

# These directories should not be used but are maintained only to avoid breaking
# some legacy downstream code.
LEGACY_API_DIRS = (
    'common_audio/include',
    'modules/audio_coding/include',
    'modules/audio_processing/include',
    'modules/congestion_controller/include',
    'modules/include',
    'modules/remote_bitrate_estimator/include',
    'modules/rtp_rtcp/include',
    'modules/rtp_rtcp/source',
    'modules/utility/include',
    'modules/video_coding/codecs/h264/include',
    'modules/video_coding/codecs/vp8/include',
    'modules/video_coding/codecs/vp9/include',
    'modules/video_coding/include',
    'rtc_base',
    'system_wrappers/include',
)

# NOTE: The set of directories in API_DIRS should be the same as those
# listed in the table in native-api.md.
API_DIRS = NATIVE_API_DIRS[:] + LEGACY_API_DIRS[:]

# TARGET_RE matches a GN target, and extracts the target name and the contents.
TARGET_RE = re.compile(
    r'(?P<indent>\s*)(?P<target_type>\w+)\("(?P<target_name>\w+)"\) {'
    r'(?P<target_contents>.*?)'
    r'(?P=indent)}', re.MULTILINE | re.DOTALL)

# SOURCES_RE matches a block of sources inside a GN target.
SOURCES_RE = re.compile(r'sources \+?= \[(?P<sources>.*?)\]',
                        re.MULTILINE | re.DOTALL)

# DEPS_RE matches a block of deps inside a GN target.
DEPS_RE = re.compile(r'\bdeps \+?= \[(?P<deps>.*?)\]',
                     re.MULTILINE | re.DOTALL)

# FILE_PATH_RE matches a quoted file path plus extension, e.g. "foo/bar.cc".
FILE_PATH_RE = re.compile(r'"(?P<file_path>(\w|\/)+)(?P<extension>\.\w+)"')
122
123
def FindSrcDirPath(starting_dir):
  """Returns the abs path to the src/ dir of the project.

  Walks up from `starting_dir` until a directory whose basename is 'src' is
  found.

  Args:
    starting_dir: Path (usually absolute) to start the upward search from.

  Returns:
    The closest ancestor of `starting_dir` (or `starting_dir` itself) named
    'src'.

  Raises:
    FileNotFoundError: if no ancestor directory is named 'src'. The previous
      implementation would spin forever in that case, since at the filesystem
      root `normpath(join(root, '..'))` keeps returning the root.
  """
  src_dir = starting_dir
  while os.path.basename(src_dir) != 'src':
    parent = os.path.normpath(os.path.join(src_dir, os.pardir))
    # Stop when the path no longer changes (absolute root reached) or when a
    # relative path has degenerated into '..' components.
    if parent == src_dir or os.path.basename(parent) == os.pardir:
      raise FileNotFoundError('No src/ directory found above %r' %
                              starting_dir)
    src_dir = parent
  return src_dir
130
131
132@contextmanager
133def _AddToPath(*paths):
134  original_sys_path = sys.path
135  sys.path.extend(paths)
136  try:
137    yield
138  finally:
139    # Restore sys.path to what it was before.
140    sys.path = original_sys_path
141
142
def VerifyNativeApiHeadersListIsValid(input_api, output_api):
  """Ensures the list of native API header directories is up to date."""
  local_path = input_api.PresubmitLocalPath()
  full_paths = (input_api.os_path.join(local_path, *api_dir.split('/'))
                for api_dir in API_DIRS)
  # Any entry of API_DIRS that no longer exists on disk means the list here
  # has gone stale and must be updated.
  missing_paths = [p for p in full_paths if not os.path.isdir(p)]
  if not missing_paths:
    return []
  return [
      output_api.PresubmitError(
          'Directories to native API headers have changed which has made '
          'the list in PRESUBMIT.py outdated.\nPlease update it to the '
          'current location of our native APIs.', missing_paths)
  ]
161
162
# Notification text shown when a change touches files under the native API
# directories, reminding the author of the WebRTC deprecation process.
API_CHANGE_MSG = """
You seem to be changing native API header files. Please make sure that you:
  1. Make compatible changes that don't break existing clients. Usually
     this is done by keeping the existing method signatures unchanged.
  2. Mark the old stuff as deprecated (use the ABSL_DEPRECATED macro).
  3. Create a timeline and plan for when the deprecated stuff will be
     removed. (The amount of time we give users to change their code
     should be informed by how much work it is for them. If they just
     need to replace one name with another or something equally
     simple, 1-2 weeks might be good; if they need to do serious work,
     up to 3 months may be called for.)
  4. Update/inform existing downstream code owners to stop using the
     deprecated stuff. (Send announcements to
     discuss-webrtc@googlegroups.com and webrtc-users@google.com.)
  5. Remove the deprecated stuff, once the agreed-upon amount of time
     has passed.
Related files:
"""
181
182
def CheckNativeApiHeaderChanges(input_api, output_api):
  """Checks to remind proper changing of native APIs."""
  source_file_filter = lambda x: input_api.FilterSourceFile(
      x, files_to_check=[r'.+\.(gn|gni|h)$'])
  affected = []
  for f in input_api.AffectedSourceFiles(source_file_filter):
    dir_name = os.path.dirname(f.LocalPath())
    for api_dir in API_DIRS:
      if api_dir == 'api':
        # Special case: all of api/ including its subdirectories counts.
        if dir_name == 'api' or dir_name.startswith('api/'):
          affected.append(f.LocalPath())
      elif dir_name == api_dir:
        # Normal case: subdirectories are not included.
        affected.append(f.LocalPath())

  if affected:
    return [output_api.PresubmitNotifyResult(API_CHANGE_MSG, affected)]
  return []
203
204
def CheckNoIOStreamInHeaders(input_api, output_api, source_file_filter):
  """Checks to make sure no .h files include <iostream>."""
  pattern = input_api.re.compile(r'^#include\s*<iostream>',
                                 input_api.re.MULTILINE)
  file_filter = lambda x: (input_api.FilterSourceFile(x) and source_file_filter(
      x))
  # Only header files are inspected; their contents are read lazily thanks to
  # the short-circuiting `and`.
  offending_headers = [
      f for f in input_api.AffectedSourceFiles(file_filter)
      if f.LocalPath().endswith('.h')
      and pattern.search(input_api.ReadFile(f))
  ]
  if offending_headers:
    return [
        output_api.PresubmitError(
            'Do not #include <iostream> in header files, since it inserts '
            'static initialization into every file including the header. '
            'Instead, #include <ostream>. See http://crbug.com/94794',
            offending_headers)
    ]
  return []
227
228
def CheckNoPragmaOnce(input_api, output_api, source_file_filter):
  """Checks that no header files use '#pragma once' as an include guard.

  Note: the previous docstring ("Make sure that banned functions are not
  used.") was a copy-paste error from another check.
  """
  files = []
  pattern = input_api.re.compile(r'^#pragma\s+once', input_api.re.MULTILINE)
  file_filter = lambda x: (input_api.FilterSourceFile(x) and source_file_filter(
      x))
  for f in input_api.AffectedSourceFiles(file_filter):
    # Only header files are relevant for #pragma once.
    if not f.LocalPath().endswith('.h'):
      continue
    contents = input_api.ReadFile(f)
    if pattern.search(contents):
      files.append(f)

  if files:
    return [
        output_api.PresubmitError(
            'Do not use #pragma once in header files.\n'
            'See http://www.chromium.org/developers/coding-style'
            '#TOC-File-headers', files)
    ]
  return []
250
251
def CheckNoFRIEND_TEST(# pylint: disable=invalid-name
        input_api,
        output_api,
        source_file_filter):
  """Make sure that gtest's FRIEND_TEST() macro is not used, the
  FRIEND_TEST_ALL_PREFIXES() macro from testsupport/gtest_prod_util.h should be
  used instead since that allows for FLAKY_, FAILS_ and DISABLED_ prefixes."""
  file_filter = lambda f: (f.LocalPath().endswith(('.cc', '.h')) and
                           source_file_filter(f))
  # Collect '    path:line' entries for every changed line using the macro.
  problems = [
      '    %s:%d' % (f.LocalPath(), line_num)
      for f in input_api.AffectedFiles(file_filter=file_filter)
      for line_num, line in f.ChangedContents()
      if 'FRIEND_TEST(' in line
  ]

  if problems:
    return [
        output_api.PresubmitPromptWarning(
            'WebRTC\'s code should not use gtest\'s FRIEND_TEST() macro. '
            'Include testsupport/gtest_prod_util.h and use '
            'FRIEND_TEST_ALL_PREFIXES() instead.\n' + '\n'.join(problems))
    ]
  return []
276
277
def IsLintDisabled(disabled_paths, file_path):
  """Checks if a file is exempt from the cpplint check.

  Args:
    disabled_paths: File paths and directory prefixes (joined with os.sep,
      as built from CPPLINT_EXCEPTIONS) that are exempt from linting.
    file_path: Local path of the file to look up.

  Returns:
    True if `file_path` is itself a disabled path or lives inside a disabled
    directory.
  """
  directory = os.path.dirname(file_path)
  for path in disabled_paths:
    # Compare whole path components. The previous bare
    # `dirname(file_path).startswith(path)` wrongly matched sibling
    # directories sharing a name prefix, e.g. 'rtc_base_extra/foo.cc'
    # against the 'rtc_base' exception.
    if (file_path == path or directory == path
        or directory.startswith(path + os.sep)):
      return True
  return False
284
285
def CheckApprovedFilesLintClean(input_api, output_api,
                                source_file_filter=None):
  """Checks that all new or non-exempt .cc and .h files pass cpplint.py.
  This check is based on CheckChangeLintsClean in
  depot_tools/presubmit_canned_checks.py but has less filters and only checks
  added files."""
  result = []

  # Initialize cpplint. Imported lazily because it is only available once the
  # presubmit environment has set up sys.path.
  import cpplint
  # Access to a protected member _XX of a client class
  # pylint: disable=W0212
  cpplint._cpplint_state.ResetErrorCounts()

  # Extend the default filter set with the always-disabled filters.
  lint_filters = cpplint._Filters()
  lint_filters.extend(DISABLED_LINT_FILTERS)
  cpplint._SetFilters(','.join(lint_filters))

  # Create a platform independent exempt list for cpplint
  # (CPPLINT_EXCEPTIONS uses '/' separators; re-join with the local os.sep).
  disabled_paths = [
      input_api.os_path.join(*path.split('/')) for path in CPPLINT_EXCEPTIONS
  ]

  # Use the strictest verbosity level for cpplint.py (level 1) which is the
  # default when running cpplint.py from command line. To make it possible to
  # work with not-yet-converted code, we're only applying it to new (or
  # moved/renamed) files and files not listed in CPPLINT_EXCEPTIONS.
  verbosity_level = 1
  files = []
  for f in input_api.AffectedSourceFiles(source_file_filter):
    # Note that moved/renamed files also count as added.
    if f.Action() == 'A' or not IsLintDisabled(disabled_paths, f.LocalPath()):
      files.append(f.AbsoluteLocalPath())

  for file_name in files:
    # ProcessFile() accumulates into the global cpplint._cpplint_state.
    cpplint.ProcessFile(file_name, verbosity_level)

  if cpplint._cpplint_state.error_count > 0:
    # Hard error when committing; only a prompt warning during upload.
    if input_api.is_committing:
      res_type = output_api.PresubmitError
    else:
      res_type = output_api.PresubmitPromptWarning
    result = [res_type('Changelist failed cpplint.py check.')]

  return result
331
332
def CheckNoSourcesAbove(input_api, gn_files, output_api):
  # Disallow referencing source files with paths above the GN file location.
  source_pattern = input_api.re.compile(r' +sources \+?= \[(.*?)\]',
                                        re.MULTILINE | re.DOTALL)
  file_pattern = input_api.re.compile(r'"((\.\./.*?)|(//.*?))"')
  offending_gn_files = set()
  offending_entries = []
  for gn_file in gn_files:
    contents = input_api.ReadFile(gn_file)
    for sources_block in source_pattern.finditer(contents):
      # Find all source list entries starting with ../ in the source block;
      # entries containing 'overrides/' are deliberately allowed.
      for entry_match in file_pattern.finditer(sources_block.group(1)):
        entry = entry_match.group(1)
        if 'overrides/' in entry:
          continue
        offending_entries.append(entry)
        offending_gn_files.add(gn_file)
  if not offending_gn_files:
    return []
  return [
      output_api.PresubmitError(
          'Referencing source files above the directory of the GN file '
          'is not allowed. Please introduce new GN targets in the proper '
          'location instead.\n'
          'Invalid source entries:\n'
          '%s\n'
          'Violating GN files:' % '\n'.join(offending_entries),
          items=offending_gn_files)
  ]
362
363
def CheckAbseilDependencies(input_api, gn_files, output_api):
  """Checks that Abseil dependencies are declared in `absl_deps`."""
  absl_re = re.compile(r'third_party/abseil-cpp', re.MULTILINE | re.DOTALL)
  checked_target_types = (
      'rtc_library',
      'rtc_source_set',
      'rtc_static_library',
      'webrtc_fuzzer_test',
  )
  error_msg = ('Abseil dependencies in target "%s" (file: %s) '
               'should be moved to the "absl_deps" parameter.')
  errors = []

  for gn_file in gn_files:
    file_content = input_api.ReadFile(gn_file)
    for target in TARGET_RE.finditer(file_content):
      if target.group('target_type') not in checked_target_types:
        continue
      target_name = target.group('target_name')
      for deps_block in DEPS_RE.finditer(target.group('target_contents')):
        dep_lines = deps_block.group('deps').splitlines()
        # At most one error per deps block (any() mirrors the original
        # inner-loop break).
        if any(absl_re.search(dep) for dep in dep_lines):
          errors.append(
              output_api.PresubmitError(error_msg %
                                        (target_name, gn_file.LocalPath())))
  return errors
394
395
def CheckNoMixingSources(input_api, gn_files, output_api):
  """Disallow mixing C, C++ and Obj-C/Obj-C++ in the same target.

  See bugs.webrtc.org/7743 for more context.
  """

  def _MoreThanOneSourceUsed(*sources_lists):
    # Returns True if more than one of the given lists is non-empty.
    sources_used = 0
    for source_list in sources_lists:
      if len(source_list) > 0:
        sources_used += 1
    return sources_used > 1

  # Maps GN file path -> list of (target_name, mixed_sources) violations.
  errors = defaultdict(lambda: [])
  for gn_file in gn_files:
    gn_file_content = input_api.ReadFile(gn_file)
    for target_match in TARGET_RE.finditer(gn_file_content):
      # list_of_sources is a list of tuples of the form
      # (c_files, cc_files, objc_files) that keeps track of all the
      # sources defined in a target. A GN target can have more than
      # one definition of sources (since it supports if/else statements).
      # E.g.:
      # rtc_static_library("foo") {
      #   if (is_win) {
      #     sources = [ "foo.cc" ]
      #   } else {
      #     sources = [ "foo.mm" ]
      #   }
      # }
      # This is allowed and the presubmit check should support this case.
      list_of_sources = []
      c_files = []
      cc_files = []
      objc_files = []
      target_name = target_match.group('target_name')
      target_contents = target_match.group('target_contents')
      for sources_match in SOURCES_RE.finditer(target_contents):
        # A plain 'sources =' assignment starts a new (branch-local) set of
        # sources; flush whatever was accumulated so far. 'sources +=' keeps
        # extending the current set.
        if '+=' not in sources_match.group(0):
          if c_files or cc_files or objc_files:
            list_of_sources.append((c_files, cc_files, objc_files))
          c_files = []
          cc_files = []
          objc_files = []
        for file_match in FILE_PATH_RE.finditer(sources_match.group(1)):
          file_path = file_match.group('file_path')
          extension = file_match.group('extension')
          if extension == '.c':
            c_files.append(file_path + extension)
          if extension == '.cc':
            cc_files.append(file_path + extension)
          if extension in ['.m', '.mm']:
            objc_files.append(file_path + extension)
      # Flush the final accumulated set of sources.
      list_of_sources.append((c_files, cc_files, objc_files))
      for c_files_list, cc_files_list, objc_files_list in list_of_sources:
        if _MoreThanOneSourceUsed(c_files_list, cc_files_list, objc_files_list):
          all_sources = sorted(c_files_list + cc_files_list + objc_files_list)
          errors[gn_file.LocalPath()].append((target_name, all_sources))
  if errors:
    return [
        output_api.PresubmitError(
            'GN targets cannot mix .c, .cc and .m (or .mm) source files.\n'
            'Please create a separate target for each collection of '
            'sources.\n'
            'Mixed sources: \n'
            '%s\n'
            'Violating GN files:\n%s\n' %
            (json.dumps(errors, indent=2), '\n'.join(list(errors.keys()))))
    ]
  return []
465
466
def CheckNoPackageBoundaryViolations(input_api, gn_files, output_api):
  """Runs the package-boundary checker over the affected GN files."""
  cwd = input_api.PresubmitLocalPath()
  with _AddToPath(
      input_api.os_path.join(cwd, 'tools_webrtc', 'presubmit_checks_lib')):
    from check_package_boundaries import CheckPackageBoundaries
  build_files = [
      os.path.join(cwd, gn_file.LocalPath()) for gn_file in gn_files
  ]
  # Report at most the first five violations to keep the output readable.
  errors = CheckPackageBoundaries(cwd, build_files)[:5]
  if not errors:
    return []
  return [
      output_api.PresubmitError(
          'There are package boundary violations in the following GN '
          'files:',
          long_text='\n\n'.join(str(err) for err in errors))
  ]
482
483
484def _ReportFileAndLine(filename, line_num):
485  """Default error formatter for _FindNewViolationsOfRule."""
486  return '%s (line %s)' % (filename, line_num)
487
488
def CheckNoWarningSuppressionFlagsAreAdded(gn_files,
                                           input_api,
                                           output_api,
                                           error_formatter=_ReportFileAndLine):
  """Ensure warning suppression flags are not added without a reason."""
  msg = ('Usage of //build/config/clang:extra_warnings is discouraged '
         'in WebRTC.\n'
         'If you are not adding this code (e.g. you are just moving '
         'existing code) or you want to add an exception,\n'
         'you can add a comment on the line that causes the problem:\n\n'
         '"-Wno-odr"  # no-presubmit-check TODO(bugs.webrtc.org/BUG_ID)\n'
         '\n'
         'Affected files:\n')
  clang_warn_re = input_api.re.compile(r'//build/config/clang:extra_warnings')
  # pylint: disable-next=fixme
  no_presubmit_re = input_api.re.compile(
      r'# no-presubmit-check TODO\(bugs\.webrtc\.org/\d+\)')
  # A changed line is a problem unless it carries the suppression comment.
  problems = [
      error_formatter(f.LocalPath(), line_num)
      for f in gn_files
      for line_num, line in f.ChangedContents()
      if clang_warn_re.search(line) and not no_presubmit_re.search(line)
  ]
  if problems:
    return [output_api.PresubmitError(msg, problems)]
  return []
514
515
def CheckNoTestCaseUsageIsAdded(input_api,
                                output_api,
                                source_file_filter,
                                error_formatter=_ReportFileAndLine):
  """Checks that no legacy TEST_CASE GoogleTest API usage is added."""
  error_msg = ('Usage of legacy GoogleTest API detected!\nPlease use the '
               'new API: https://github.com/google/googletest/blob/master/'
               'googletest/docs/primer.md#beware-of-the-nomenclature.\n'
               'Affected files:\n')
  test_case_re = input_api.re.compile(r'TEST_CASE')
  file_filter = lambda f: (source_file_filter(f) and f.LocalPath().endswith(
      '.cc'))
  problems = [
      error_formatter(f.LocalPath(), line_num)
      for f in input_api.AffectedSourceFiles(file_filter)
      for line_num, line in f.ChangedContents()
      if test_case_re.search(line)
  ]
  if problems:
    return [output_api.PresubmitError(error_msg, problems)]
  return []
535
536
def CheckNoStreamUsageIsAdded(input_api,
                              output_api,
                              source_file_filter,
                              error_formatter=_ReportFileAndLine):
  """Make sure that no more dependencies on stringstream are added."""
  error_msg = ('Usage of <sstream>, <istream> and <ostream> in WebRTC is '
               'deprecated.\n'
               'This includes the following types:\n'
               'std::istringstream, std::ostringstream, std::wistringstream, '
               'std::wostringstream,\n'
               'std::wstringstream, std::ostream, std::wostream, std::istream,'
               'std::wistream,\n'
               'std::iostream, std::wiostream.\n'
               'If you are not adding this code (e.g. you are just moving '
               'existing code),\n'
               'you can add a comment on the line that causes the problem:\n\n'
               '#include <sstream>  // no-presubmit-check TODO(webrtc:8982)\n'
               'std::ostream& F() {  // no-presubmit-check TODO(webrtc:8982)\n'
               '\n'
               'If you are adding new code, consider using '
               'rtc::SimpleStringBuilder\n'
               '(in rtc_base/strings/string_builder.h).\n'
               'Affected files:\n')
  include_re = input_api.re.compile(r'#include <(i|o|s)stream>')
  usage_re = input_api.re.compile(r'std::(w|i|o|io|wi|wo|wio)(string)*stream')
  no_presubmit_re = input_api.re.compile(
      r'// no-presubmit-check TODO\(webrtc:8982\)')
  file_filter = lambda x: (input_api.FilterSourceFile(x) and source_file_filter(
      x))

  def _IsException(file_path):
    # Stream usage is allowed under examples/, under test/ and in unit tests.
    test_suffixes = ('_test.cc', '_tests.cc', '_unittest.cc', '_unittests.cc')
    return (file_path.startswith('examples')
            or file_path.startswith('test')
            or file_path.endswith(test_suffixes))

  problems = []
  for f in input_api.AffectedSourceFiles(file_filter):
    local_path = f.LocalPath()
    # This file itself mentions the forbidden tokens; skip it and exceptions.
    if local_path == 'PRESUBMIT.py' or _IsException(local_path):
      continue
    for line_num, line in f.ChangedContents():
      has_stream = include_re.search(line) or usage_re.search(line)
      if has_stream and not no_presubmit_re.search(line):
        problems.append(error_formatter(local_path, line_num))
  if problems:
    return [output_api.PresubmitError(error_msg, problems)]
  return []
586
587
def CheckPublicDepsIsNotUsed(gn_files, input_api, output_api):
  """Checks that public_deps is not used without a good reason."""
  results = []
  no_presubmit_check_re = input_api.re.compile(
      r'# no-presubmit-check TODO\(webrtc:\d+\)')
  error_msg = ('public_deps is not recommended in WebRTC BUILD.gn files '
               'because it doesn\'t map well to downstream build systems.\n'
               'Used in: %s (line %d).\n'
               'If you are not adding this code (e.g. you are just moving '
               'existing code) or you have a good reason, you can add this '
               'comment (verbatim) on the line that causes the problem:\n\n'
               'public_deps = [  # no-presubmit-check TODO(webrtc:8603)\n')
  for gn_file in gn_files:
    for line_number, line in gn_file.ChangedContents():
      if 'public_deps' not in line:
        continue
      # An explicit no-presubmit-check comment suppresses the error.
      if not no_presubmit_check_re.search(line):
        results.append(
            output_api.PresubmitError(
                error_msg % (gn_file.LocalPath(), line_number)))
  return results
609
610
def CheckCheckIncludesIsNotUsed(gn_files, input_api, output_api):
  """Disallows `check_includes` overrides in GN files without approval."""
  error_msg = ('check_includes overrides are not allowed since it can cause '
               'incorrect dependencies to form. It effectively means that your '
               'module can include any .h file without depending on its '
               'corresponding target. There are some exceptional cases when '
               'this is allowed: if so, get approval from a .gn owner in the '
               'root OWNERS file.\n'
               'Used in: %s (line %d).')
  # pylint: disable-next=fixme
  no_presubmit_re = input_api.re.compile(
      r'# no-presubmit-check TODO\(bugs\.webrtc\.org/\d+\)')
  results = []
  for gn_file in gn_files:
    offending_line_numbers = [
        line_number for line_number, line in gn_file.ChangedContents()
        if 'check_includes' in line and not no_presubmit_re.search(line)
    ]
    results.extend(
        output_api.PresubmitError(error_msg %
                                  (gn_file.LocalPath(), line_number))
        for line_number in offending_line_numbers)
  return results
631
632
def CheckGnChanges(input_api, output_api):
  """Runs all GN-specific presubmit checks over affected .gn/.gni files."""
  file_filter = lambda x: (input_api.FilterSourceFile(
      x,
      files_to_check=(r'.+\.(gn|gni)$', ),
      files_to_skip=(r'.*/presubmit_checks_lib/testdata/.*', )))

  gn_files = list(input_api.AffectedSourceFiles(file_filter))
  if not gn_files:
    return []

  result = []
  result.extend(CheckNoSourcesAbove(input_api, gn_files, output_api))
  result.extend(CheckNoMixingSources(input_api, gn_files, output_api))
  result.extend(CheckAbseilDependencies(input_api, gn_files, output_api))
  result.extend(
      CheckNoPackageBoundaryViolations(input_api, gn_files, output_api))
  result.extend(CheckPublicDepsIsNotUsed(gn_files, input_api, output_api))
  result.extend(CheckCheckIncludesIsNotUsed(gn_files, input_api, output_api))
  result.extend(
      CheckNoWarningSuppressionFlagsAreAdded(gn_files, input_api, output_api))
  return result
655
656
def CheckGnGen(input_api, output_api):
  """Runs `gn gen --check` with default args to detect mismatches between
  #includes and dependencies in the BUILD.gn files, as well as general build
  errors.
  """
  helpers_dir = input_api.os_path.join(input_api.PresubmitLocalPath(),
                                       'tools_webrtc', 'presubmit_checks_lib')
  with _AddToPath(helpers_dir):
    from build_helpers import RunGnCheck
  # Only the first five errors are reported to keep the message readable.
  errors = RunGnCheck(FindSrcDirPath(input_api.PresubmitLocalPath()))[:5]
  if not errors:
    return []
  return [
      output_api.PresubmitPromptWarning(
          'Some #includes do not match the build dependency graph. '
          'Please run:\n'
          '  gn gen --check <out_dir>',
          long_text='\n\n'.join(errors))
  ]
676
677
def CheckUnwantedDependencies(input_api, output_api, source_file_filter):
  """Runs checkdeps on #include statements added in this
  change. Breaking - rules is an error, breaking ! rules is a
  warning.
  """
  # Copied from Chromium's src/PRESUBMIT.py.

  # We need to wait until we have an input_api object and use this
  # roundabout construct to import checkdeps because this file is
  # eval-ed and thus doesn't have __file__.
  src_path = FindSrcDirPath(input_api.PresubmitLocalPath())
  checkdeps_path = input_api.os_path.join(src_path, 'buildtools', 'checkdeps')
  if not os.path.exists(checkdeps_path):
    return [
        output_api.PresubmitError(
            'Cannot find checkdeps at %s\nHave you run "gclient sync" to '
            'download all the DEPS entries?' % checkdeps_path)
    ]
  with _AddToPath(checkdeps_path):
    import checkdeps
    from cpp_checker import CppChecker
    from rules import Rule

  # Build the [path, changed_lines] payload checkdeps expects, restricted to
  # C/C++ files since only those can contain relevant #include directives.
  added_includes = []
  for f in input_api.AffectedFiles(file_filter=source_file_filter):
    if not CppChecker.IsCppFile(f.LocalPath()):
      continue

    changed_lines = [line for _, line in f.ChangedContents()]
    added_includes.append([f.LocalPath(), changed_lines])

  deps_checker = checkdeps.DepsChecker(input_api.PresubmitLocalPath())

  # DISALLOW ('-') rule violations are hard errors; everything else
  # (e.g. temp-allowed '!' rules) only warns.
  error_descriptions = []
  warning_descriptions = []
  for path, rule_type, rule_description in deps_checker.CheckAddedCppIncludes(
      added_includes):
    description_with_path = '%s\n    %s' % (path, rule_description)
    if rule_type == Rule.DISALLOW:
      error_descriptions.append(description_with_path)
    else:
      warning_descriptions.append(description_with_path)

  results = []
  if error_descriptions:
    results.append(
        output_api.PresubmitError(
            'You added one or more #includes that violate checkdeps rules.'
            '\nCheck that the DEPS files in these locations contain valid '
            'rules.\nSee '
            'https://cs.chromium.org/chromium/src/buildtools/checkdeps/ '
            'for more details about checkdeps.', error_descriptions))
  if warning_descriptions:
    results.append(
        output_api.PresubmitPromptOrNotify(
            'You added one or more #includes of files that are temporarily'
            '\nallowed but being removed. Can you avoid introducing the\n'
            '#include? See relevant DEPS file(s) for details and contacts.'
            '\nSee '
            'https://cs.chromium.org/chromium/src/buildtools/checkdeps/ '
            'for more details about checkdeps.', warning_descriptions))
  return results
740
741
def CheckCommitMessageBugEntry(input_api, output_api):
  """Check that bug entries are well-formed in commit message."""
  bogus_bug_msg = (
      'Bogus Bug entry: %s. Please specify the issue tracker prefix and the '
      'issue number, separated by a colon, e.g. webrtc:123 or chromium:12345.')
  problems = []
  for bug in input_api.change.BugsFromDescription():
    bug = bug.strip()
    # 'None' (any case) is an accepted way of saying there is no bug.
    if bug.lower() == 'none':
      continue
    if 'b/' not in bug and ':' not in bug:
      # A bare number: guess which tracker it belongs to.
      try:
        prefix_guess = 'chromium' if int(bug) > 100000 else 'webrtc'
        problems.append('Bug entry requires issue tracker prefix, e.g. %s:%s' %
                        (prefix_guess, bug))
      except ValueError:
        problems.append(bogus_bug_msg % bug)
    elif not (re.match(r'\w+:\d+', bug) or re.match(r'b/\d+', bug)):
      problems.append(bogus_bug_msg % bug)
  return [output_api.PresubmitError(problem) for problem in problems]
766
767
def CheckChangeHasBugField(input_api, output_api):
  """Requires that the changelist is associated with a bug.

  This check is stricter than the one in depot_tools/presubmit_canned_checks.py
  since it fails the presubmit if the bug field is missing or doesn't contain
  a bug reference.

  This supports both 'BUG=' and 'Bug:' since we are in the process of migrating
  to Gerrit and it encourages the usage of 'Bug:'.
  """
  if not input_api.change.BugsFromDescription():
    return [
        output_api.PresubmitError(
            'The "Bug: [bug number]" footer is mandatory. Please create a '
            'bug and reference it using either of:\n'
            ' * https://bugs.webrtc.org - reference it using Bug: '
            'webrtc:XXXX\n'
            ' * https://crbug.com - reference it using Bug: chromium:XXXXXX')
    ]
  # At least one bug reference was found: nothing to report.
  return []
788
789
def CheckJSONParseErrors(input_api, output_api, source_file_filter):
  """Check that JSON files do not contain syntax errors."""

  def IsJsonFile(affected_file):
    # Restrict the check to .json files accepted by the caller's filter.
    path = affected_file.LocalPath()
    return (input_api.os_path.splitext(path)[1] == '.json'
            and source_file_filter(affected_file))

  def ParseError(filename):
    """Returns the ValueError raised while parsing, or None on success."""
    try:
      input_api.json.loads(input_api.ReadFile(filename))
      return None
    except ValueError as e:
      return e

  errors = []
  for affected_file in input_api.AffectedFiles(file_filter=IsJsonFile,
                                               include_deletes=False):
    parse_error = ParseError(affected_file.AbsoluteLocalPath())
    if parse_error:
      errors.append(
          output_api.PresubmitError('%s could not be parsed: %s' %
                                    (affected_file.LocalPath(), parse_error)))
  return errors
815
816
def RunPythonTests(input_api, output_api):
  """Collects and runs the Python unit tests of the repository."""

  def Join(*args):
    # Path relative to the directory containing this PRESUBMIT.py.
    return input_api.os_path.join(input_api.PresubmitLocalPath(), *args)

  excluded_files = [
      # These tests should be run manually after webrtc_dashboard_upload target
      # has been built.
      'catapult_uploader_test.py',
      'process_perf_results_test.py',
  ]

  def HasRunnableTests(files):
    return any(
        name.endswith('_test.py') and name not in excluded_files
        for name in files)

  test_directories = [
      input_api.PresubmitLocalPath(),
      Join('rtc_tools', 'py_event_log_analyzer'),
      Join('audio', 'test', 'unittests'),
  ]
  # Pick up every tools_webrtc subdirectory that contains runnable tests.
  for root, _, files in os.walk(Join('tools_webrtc')):
    if HasRunnableTests(files):
      test_directories.append(root)

  tests = []
  for directory in test_directories:
    tests.extend(
        input_api.canned_checks.GetUnitTestsInDirectory(
            input_api,
            output_api,
            directory,
            files_to_check=[r'.+_test\.py$'],
            run_on_python2=False))
  return input_api.RunTests(tests, parallel=True)
848
849
def CheckUsageOfGoogleProtobufNamespace(input_api, output_api,
                                        source_file_filter):
  """Checks that the namespace google::protobuf has not been used."""
  pattern = input_api.re.compile(r'google::protobuf')
  proto_utils_path = os.path.join('rtc_base', 'protobuf_utils.h')
  # These files are allowed to mention the namespace directly.
  exempt_paths = [proto_utils_path, 'PRESUBMIT.py']
  file_filter = lambda x: (input_api.FilterSourceFile(x) and source_file_filter(
      x))

  offending_files = []
  for f in input_api.AffectedSourceFiles(file_filter):
    if f.LocalPath() in exempt_paths:
      continue
    if pattern.search(input_api.ReadFile(f)):
      offending_files.append(f)

  if not offending_files:
    return []
  return [
      output_api.PresubmitError(
          'Please avoid to use namespace `google::protobuf` directly.\n'
          'Add a using directive in `%s` and include that header instead.' %
          proto_utils_path, offending_files)
  ]
873
874
def _LicenseHeader(input_api):
  """Returns the license header regexp."""
  # Accept any year number from 2003 to the current year, newest first.
  current_year = int(input_api.time.strftime('%Y'))
  years_re = '(%s)' % '|'.join(
      str(year) for year in range(current_year, 2002, -1))
  return (
      r'.*? Copyright( \(c\))? %(year)s The WebRTC [Pp]roject [Aa]uthors\. '
      r'All [Rr]ights [Rr]eserved\.\n'
      r'.*?\n'
      r'.*? Use of this source code is governed by a BSD-style license\n'
      r'.*? that can be found in the LICENSE file in the root of the source\n'
      r'.*? tree\. An additional intellectual property rights grant can be '
      r'found\n'
      r'.*? in the file PATENTS\.  All contributing project authors may\n'
      r'.*? be found in the AUTHORS file in the root of the source tree\.\n'
  ) % {
      'year': years_re,
  }
895
896
def CommonChecks(input_api, output_api):
  """Checks common to both upload and commit.

  Aggregates the WebRTC-specific checks defined in this file together with
  hand-picked canned checks from depot_tools, and returns the combined list
  of presubmit results.
  """
  results = []
  # Filter out files that are in objc or ios dirs from being cpplint-ed since
  # they do not follow C++ lint rules.
  exception_list = input_api.DEFAULT_FILES_TO_SKIP + (
      r".*\bobjc[\\\/].*",
      r".*objc\.[hcm]+$",
  )
  source_file_filter = lambda x: input_api.FilterSourceFile(
      x, None, exception_list)
  results.extend(
      CheckApprovedFilesLintClean(input_api, output_api, source_file_filter))
  results.extend(
      input_api.canned_checks.CheckLicense(input_api, output_api,
                                           _LicenseHeader(input_api)))

  # TODO(bugs.webrtc.org/12114): Delete this filter and run pylint on
  # all python files. This is a temporary solution.
  python_file_filter = lambda f: (f.LocalPath().endswith('.py') and
                                  source_file_filter(f))
  # Only lint Python files touched by this CL (not the whole tree).
  python_changed_files = [
      f.LocalPath()
      for f in input_api.AffectedFiles(include_deletes=False,
                                       file_filter=python_file_filter)
  ]

  results.extend(
      input_api.canned_checks.RunPylint(
          input_api,
          output_api,
          files_to_check=python_changed_files,
          files_to_skip=(
              r'^base[\\\/].*\.py$',
              r'^build[\\\/].*\.py$',
              r'^buildtools[\\\/].*\.py$',
              r'^infra[\\\/].*\.py$',
              r'^ios[\\\/].*\.py$',
              r'^out.*[\\\/].*\.py$',
              r'^testing[\\\/].*\.py$',
              r'^third_party[\\\/].*\.py$',
              r'^tools[\\\/].*\.py$',
              r'^xcodebuild.*[\\\/].*\.py$',
          ),
          pylintrc='pylintrc',
          version='2.7'))

  # TODO(bugs.webrtc.org/13606): talk/ is no more, so make below checks simpler?
  # WebRTC can't use the presubmit_canned_checks.PanProjectChecks function
  # since we need to have different license checks
  # in talk/ and webrtc/directories.
  # Instead, hand-picked checks are included below.

  # .m and .mm files are ObjC files. For simplicity we will consider
  # .h files in ObjC subdirectories ObjC headers.
  objc_filter_list = (r'.+\.m$', r'.+\.mm$', r'.+objc\/.+\.h$')
  # Skip long-lines check for DEPS and GN files.
  build_file_filter_list = (r'.+\.gn$', r'.+\.gni$', 'DEPS')
  # Also we will skip most checks for third_party directory.
  third_party_filter_list = (r'(^|.*[\\\/])third_party[\\\/].+', )
  # Most sources are limited to 80 columns; ObjC sources get 100 (below).
  eighty_char_sources = lambda x: input_api.FilterSourceFile(
      x,
      files_to_skip=build_file_filter_list + objc_filter_list +
      third_party_filter_list)
  hundred_char_sources = lambda x: input_api.FilterSourceFile(
      x, files_to_check=objc_filter_list)
  non_third_party_sources = lambda x: input_api.FilterSourceFile(
      x, files_to_skip=third_party_filter_list)

  results.extend(
      input_api.canned_checks.CheckLongLines(
          input_api,
          output_api,
          maxlen=80,
          source_file_filter=eighty_char_sources))
  results.extend(
      input_api.canned_checks.CheckLongLines(
          input_api,
          output_api,
          maxlen=100,
          source_file_filter=hundred_char_sources))
  results.extend(
      input_api.canned_checks.CheckChangeHasNoTabs(
          input_api, output_api, source_file_filter=non_third_party_sources))
  results.extend(
      input_api.canned_checks.CheckChangeHasNoStrayWhitespace(
          input_api, output_api, source_file_filter=non_third_party_sources))
  results.extend(
      input_api.canned_checks.CheckAuthorizedAuthor(
          input_api,
          output_api,
          bot_allowlist=[
              'chromium-webrtc-autoroll@webrtc-ci.iam.gserviceaccount.com',
              'webrtc-version-updater@webrtc-ci.iam.gserviceaccount.com',
          ]))
  results.extend(
      input_api.canned_checks.CheckChangeTodoHasOwner(
          input_api, output_api, source_file_filter=non_third_party_sources))
  results.extend(
      input_api.canned_checks.CheckPatchFormatted(input_api, output_api))
  results.extend(CheckNativeApiHeaderChanges(input_api, output_api))
  results.extend(
      CheckNoIOStreamInHeaders(input_api,
                               output_api,
                               source_file_filter=non_third_party_sources))
  results.extend(
      CheckNoPragmaOnce(input_api,
                        output_api,
                        source_file_filter=non_third_party_sources))
  results.extend(
      CheckNoFRIEND_TEST(input_api,
                         output_api,
                         source_file_filter=non_third_party_sources))
  results.extend(CheckGnChanges(input_api, output_api))
  results.extend(
      CheckUnwantedDependencies(input_api,
                                output_api,
                                source_file_filter=non_third_party_sources))
  results.extend(
      CheckJSONParseErrors(input_api,
                           output_api,
                           source_file_filter=non_third_party_sources))
  results.extend(RunPythonTests(input_api, output_api))
  results.extend(
      CheckUsageOfGoogleProtobufNamespace(
          input_api, output_api, source_file_filter=non_third_party_sources))
  results.extend(
      CheckOrphanHeaders(input_api,
                         output_api,
                         source_file_filter=non_third_party_sources))
  results.extend(
      CheckNewlineAtTheEndOfProtoFiles(
          input_api, output_api, source_file_filter=non_third_party_sources))
  results.extend(
      CheckNoStreamUsageIsAdded(input_api, output_api, non_third_party_sources))
  results.extend(
      CheckNoTestCaseUsageIsAdded(input_api, output_api,
                                  non_third_party_sources))
  results.extend(CheckAddedDepsHaveTargetApprovals(input_api, output_api))
  results.extend(CheckApiDepsFileIsUpToDate(input_api, output_api))
  results.extend(
      CheckAbslMemoryInclude(input_api, output_api, non_third_party_sources))
  results.extend(
      CheckAssertUsage(input_api, output_api, non_third_party_sources))
  results.extend(
      CheckBannedAbslMakeUnique(input_api, output_api, non_third_party_sources))
  results.extend(
      CheckObjcApiSymbols(input_api, output_api, non_third_party_sources))
  return results
1046
1047
def CheckApiDepsFileIsUpToDate(input_api, output_api):
  """Check that 'include_rules' in api/DEPS is up to date.

  The file api/DEPS must be kept up to date in order to avoid including
  internal headers from WebRTC's api/ headers.

  This check is focused on ensuring that 'include_rules' contains a deny
  rule for each root level directory. More focused allow rules can be
  added to 'specific_include_rules'.
  """
  results = []
  api_deps = os.path.join(input_api.PresubmitLocalPath(), 'api', 'DEPS')
  with open(api_deps) as f:
    deps_content = _ParseDeps(f.read())

  include_rules = deps_content.get('include_rules', [])
  # Top-level directories that never need a deny rule in api/DEPS.
  dirs_to_skip = set(['api', 'docs'])

  # Only check top level directories affected by the current CL.
  dirs_to_check = set()
  for f in input_api.AffectedFiles():
    path_tokens = [t for t in f.LocalPath().split(os.sep) if t]
    if len(path_tokens) > 1:
      # Only count real top-level directories, not top-level files.
      if (path_tokens[0] not in dirs_to_skip and os.path.isdir(
          os.path.join(input_api.PresubmitLocalPath(), path_tokens[0]))):
        dirs_to_check.add(path_tokens[0])

  # Each checked directory must have a '-<dir>' (deny) entry.
  missing_include_rules = set()
  for p in dirs_to_check:
    rule = '-%s' % p
    if rule not in include_rules:
      missing_include_rules.add(rule)

  if missing_include_rules:
    # Build an example 'include_rules' snippet for the error message.
    error_msg = [
        'include_rules = [\n',
        '  ...\n',
    ]

    for r in sorted(missing_include_rules):
      error_msg.append('  "%s",\n' % str(r))

    error_msg.append('  ...\n')
    error_msg.append(']\n')

    results.append(
        output_api.PresubmitError(
            'New root level directory detected! WebRTC api/ headers should '
            'not #include headers from \n'
            'the new directory, so please update "include_rules" in file\n'
            '"%s". Example:\n%s\n' % (api_deps, ''.join(error_msg))))

  return results
1101
1102
def CheckBannedAbslMakeUnique(input_api, output_api, source_file_filter):
  """Checks that no changed C++ line introduces absl::make_unique."""
  file_filter = lambda f: (f.LocalPath().endswith(('.cc', '.h')) and
                           source_file_filter(f))

  offending_files = []
  for f in input_api.AffectedFiles(include_deletes=False,
                                   file_filter=file_filter):
    if any('absl::make_unique' in line for _, line in f.ChangedContents()):
      offending_files.append(f)

  if not offending_files:
    return []
  return [
      output_api.PresubmitError(
          'Please use std::make_unique instead of absl::make_unique.\n'
          'Affected files:', offending_files)
  ]
1122
1123
def CheckObjcApiSymbols(input_api, output_api, source_file_filter):
  """Checks that exported ObjC API symbols use the RTC_OBJC_TYPE() macro.

  Scans affected headers under sdk/objc and reports files where an
  RTC_OBJC_EXPORT marker is not followed by RTC_OBJC_TYPE within the next
  26 characters (presumably enough context to cover the declaration start
  — TODO confirm against RTCMacros.h usage).
  """
  rtc_objc_export = re.compile(r'RTC_OBJC_EXPORT(.|\n){26}',
                               re.MULTILINE | re.DOTALL)
  # Note: the original code also built an unused '.h'-suffix filter here;
  # the suffix is checked inside the loop instead, so it has been removed.
  file_filter = lambda x: (input_api.FilterSourceFile(x) and source_file_filter(
      x))

  files = []
  for f in input_api.AffectedSourceFiles(file_filter):
    # Only headers under sdk/objc are subject to this check.
    if not f.LocalPath().endswith('.h') or 'sdk/objc' not in f.LocalPath():
      continue
    # RTCMacros.h is where the macros themselves live; skip it.
    if f.LocalPath().endswith('sdk/objc/base/RTCMacros.h'):
      continue
    contents = input_api.ReadFile(f)
    for match in rtc_objc_export.finditer(contents):
      export_block = match.group(0)
      if 'RTC_OBJC_TYPE' not in export_block:
        files.append(f.LocalPath())

  if len(files) > 0:
    return [
        output_api.PresubmitError(
            'RTC_OBJC_EXPORT types must be wrapped into an RTC_OBJC_TYPE() ' +
            'macro.\n\n' + 'For example:\n' +
            'RTC_OBJC_EXPORT @protocol RTC_OBJC_TYPE(RtcFoo)\n\n' +
            'RTC_OBJC_EXPORT @interface RTC_OBJC_TYPE(RtcFoo)\n\n' +
            'Please fix the following files:', files)
    ]
  return []
1154
1155
def CheckAssertUsage(input_api, output_api, source_file_filter):
  """Checks that changed C++/ObjC lines do not add assert() calls."""
  pattern = input_api.re.compile(r'\bassert\(')
  file_filter = lambda f: (f.LocalPath().endswith(('.cc', '.h', '.m', '.mm'))
                           and source_file_filter(f))

  offending_files = []
  for f in input_api.AffectedFiles(include_deletes=False,
                                   file_filter=file_filter):
    if any(pattern.search(line) for _, line in f.ChangedContents()):
      offending_files.append(f.LocalPath())

  if not offending_files:
    return []
  return [
      output_api.PresubmitError(
          'Usage of assert() has been detected in the following files, '
          'please use RTC_DCHECK() instead.\n Files:', offending_files)
  ]
1176
1177
def CheckAbslMemoryInclude(input_api, output_api, source_file_filter):
  """Checks files adding absl::WrapUnique include absl/memory/memory.h."""
  include_pattern = input_api.re.compile(
      r'^#include\s*"absl/memory/memory.h"', input_api.re.MULTILINE)
  file_filter = lambda f: (f.LocalPath().endswith(('.cc', '.h')) and
                           source_file_filter(f))

  offending_files = []
  for f in input_api.AffectedFiles(include_deletes=False,
                                   file_filter=file_filter):
    if include_pattern.search(input_api.ReadFile(f)):
      # The header is already included; nothing to flag for this file.
      continue
    if any('absl::WrapUnique' in line for _, line in f.ChangedContents()):
      offending_files.append(f)

  if not offending_files:
    return []
  return [
      output_api.PresubmitError(
          'Please include "absl/memory/memory.h" header for '
          'absl::WrapUnique.\nThis header may or may not be included '
          'transitively depending on the C++ standard version.',
          offending_files)
  ]
1203
1204
def CheckChangeOnUpload(input_api, output_api):
  """Presubmit entry point run on `git cl upload`."""
  results = []
  for check_results in (
      CommonChecks(input_api, output_api),
      CheckGnGen(input_api, output_api),
      input_api.canned_checks.CheckGNFormatted(input_api, output_api),
  ):
    results.extend(check_results)
  return results
1212
1213
def CheckChangeOnCommit(input_api, output_api):
  """Presubmit entry point run when committing the change."""
  # Each entry is the result list of one check; they run in this order.
  all_check_results = [
      CommonChecks(input_api, output_api),
      VerifyNativeApiHeadersListIsValid(input_api, output_api),
      input_api.canned_checks.CheckOwners(input_api, output_api),
      input_api.canned_checks.CheckChangeWasUploaded(input_api, output_api),
      input_api.canned_checks.CheckChangeHasDescription(input_api, output_api),
      CheckChangeHasBugField(input_api, output_api),
      CheckCommitMessageBugEntry(input_api, output_api),
      input_api.canned_checks.CheckTreeIsOpen(
          input_api,
          output_api,
          json_url='http://webrtc-status.appspot.com/current?format=json'),
  ]
  return [result for check in all_check_results for result in check]
1231
1232
def CheckOrphanHeaders(input_api, output_api, source_file_filter):
  """Checks that each affected .h file is listed in a BUILD.gn file."""
  # We need to wait until we have an input_api object and use this
  # roundabout construct to import presubmit_checks_lib because this file is
  # eval-ed and thus doesn't have __file__.
  error_msg = """{} should be listed in {}."""
  results = []
  exempt_paths = [re.escape(os.path.join('tools_webrtc', 'ios', 'SDK'))]

  with _AddToPath(
      input_api.os_path.join(input_api.PresubmitLocalPath(), 'tools_webrtc',
                             'presubmit_checks_lib')):
    from check_orphan_headers import GetBuildGnPathFromFilePath
    from check_orphan_headers import IsHeaderInBuildGn

  file_filter = lambda x: input_api.FilterSourceFile(
      x, files_to_skip=exempt_paths) and source_file_filter(x)

  # Loop-invariant: compute the working directory once, not per file.
  root_dir = os.getcwd()
  for f in input_api.AffectedSourceFiles(file_filter):
    if f.LocalPath().endswith('.h'):
      file_path = os.path.abspath(f.LocalPath())
      gn_file_path = GetBuildGnPathFromFilePath(file_path, os.path.exists,
                                                root_dir)
      in_build_gn = IsHeaderInBuildGn(file_path, gn_file_path)
      if not in_build_gn:
        results.append(
            output_api.PresubmitError(
                error_msg.format(f.LocalPath(), os.path.relpath(gn_file_path))))
  return results
1261
1262
def CheckNewlineAtTheEndOfProtoFiles(input_api, output_api,
                                     source_file_filter):
  """Checks that all .proto files are terminated with a newline."""
  error_msg = 'File {} must end with exactly one newline.'
  results = []
  file_filter = lambda x: input_api.FilterSourceFile(
      x, files_to_check=(r'.+\.proto$', )) and source_file_filter(x)
  for f in input_api.AffectedSourceFiles(file_filter):
    file_path = f.LocalPath()
    # Use a distinct handle name: the original shadowed the loop variable `f`.
    with open(file_path) as proto_file:
      lines = proto_file.readlines()
      if len(lines) > 0 and not lines[-1].endswith('\n'):
        results.append(output_api.PresubmitError(error_msg.format(file_path)))
  return results
1277
1278
def _ExtractAddRulesFromParsedDeps(parsed_deps):
  """Extract the rules that add dependencies from a parsed DEPS file.

  Args:
    parsed_deps: the locals dictionary from evaluating the DEPS file.

  Returns:
    A set of rule paths with the leading '+' or '!' stripped.
  """

  def _IsAddRule(rule):
    # Rules starting with '+' or '!' add a dependency; '-' rules deny one.
    return rule.startswith(('+', '!'))

  add_rules = {
      rule[1:]
      for rule in parsed_deps.get('include_rules', []) if _IsAddRule(rule)
  }
  for rules in parsed_deps.get('specific_include_rules', {}).values():
    add_rules.update(rule[1:] for rule in rules if _IsAddRule(rule))
  return add_rules
1295
1296
def _ParseDeps(contents):
  """Simple helper for parsing DEPS files.

  Evaluates the DEPS file contents (trusted, repository-local input) and
  returns the resulting locals dictionary.
  """
  local_scope = {}

  def _LookupVar(var_name):
    """Implements the Var() syntax used in DEPS files."""
    try:
      return local_scope['vars'][var_name]
    except KeyError as var_not_defined:
      raise Exception('Var is not defined: %s' %
                      var_name) from var_not_defined

  # Stub for handling the special Var() syntax in the root DEPS file.
  global_scope = {
      'Var': _LookupVar,
  }
  exec(contents, global_scope, local_scope)
  return local_scope
1319
1320
def _CalculateAddedDeps(os_path, old_contents, new_contents):
  """Helper method for CheckAddedDepsHaveTargetApprovals. Returns
  a set of DEPS entries that we should look up.

  For a directory (rather than a specific filename) we fake a path to
  a specific filename by adding /DEPS. This is chosen as a file that
  will seldom or never be subject to per-file include_rules.
  """
  # We ignore deps entries on auto-generated directories.
  auto_generated_dirs = ['grit', 'jni']

  old_deps = _ExtractAddRulesFromParsedDeps(_ParseDeps(old_contents))
  new_deps = _ExtractAddRulesFromParsedDeps(_ParseDeps(new_contents))

  results = set()
  for added_dep in new_deps - old_deps:
    if added_dep.split('/')[0] in auto_generated_dirs:
      continue
    if added_dep.endswith('.h'):
      # Assume that a rule that ends in .h is a rule for a specific file.
      results.add(added_dep)
    else:
      # Directory rule: point at its DEPS file instead.
      results.add(os_path.join(added_dep, 'DEPS'))
  return results
1347
1348
def CheckAddedDepsHaveTargetApprovals(input_api, output_api):
  """When a dependency prefixed with + is added to a DEPS file, we
    want to make sure that the change is reviewed by an OWNER of the
    target file or directory, to avoid layering violations from being
    introduced. This check verifies that this happens.
    """
  virtual_depended_on_files = set()

  # Blink-related third_party paths are exempt from this check.
  file_filter = lambda f: not input_api.re.match(
      r"^third_party[\\\/](WebKit|blink)[\\\/].*", f.LocalPath())
  for f in input_api.AffectedFiles(include_deletes=False,
                                   file_filter=file_filter):
    filename = input_api.os_path.basename(f.LocalPath())
    if filename == 'DEPS':
      # Diff old vs. new DEPS contents to find newly added '+'/'!' rules.
      virtual_depended_on_files.update(
          _CalculateAddedDeps(input_api.os_path, '\n'.join(f.OldContents()),
                              '\n'.join(f.NewContents())))

  if not virtual_depended_on_files:
    return []

  if input_api.is_committing:
    if input_api.tbr:
      return [
          output_api.PresubmitNotifyResult(
              '--tbr was specified, skipping OWNERS check for DEPS '
              'additions')
      ]
    if input_api.dry_run:
      return [
          output_api.PresubmitNotifyResult(
              'This is a dry run, skipping OWNERS check for DEPS '
              'additions')
      ]
    if not input_api.change.issue:
      return [
          output_api.PresubmitError(
              "DEPS approval by OWNERS check failed: this change has "
              "no change number, so we can't check it for approvals.")
      ]
    # Missing approvals block a commit, but only notify on upload.
    output = output_api.PresubmitError
  else:
    output = output_api.PresubmitNotifyResult

  owner_email, reviewers = (
      input_api.canned_checks.GetCodereviewOwnerAndReviewers(
          input_api, None, approval_needed=input_api.is_committing))

  # Fall back to the CL author when no owner email was returned.
  owner_email = owner_email or input_api.change.author_email

  approval_status = input_api.owners_client.GetFilesApprovalStatus(
      virtual_depended_on_files, reviewers.union([owner_email]), [])
  missing_files = [
      f for f in virtual_depended_on_files
      if approval_status[f] != input_api.owners_client.APPROVED
  ]

  # We strip the /DEPS part that was added by
  # _CalculateAddedDeps to fake a path to a file in a
  # directory.
  def StripDeps(path):
    start_deps = path.rfind('/DEPS')
    if start_deps != -1:
      return path[:start_deps]
    return path

  unapproved_dependencies = [
      "'+%s'," % StripDeps(path) for path in missing_files
  ]

  if unapproved_dependencies:
    output_list = [
        output('You need LGTM from owners of depends-on paths in DEPS that '
               ' were modified in this CL:\n    %s' %
               '\n    '.join(sorted(unapproved_dependencies)))
    ]
    suggested_owners = input_api.owners_client.SuggestOwners(
        missing_files, exclude=[owner_email])
    output_list.append(
        output('Suggested missing target path OWNERS:\n    %s' %
               '\n    '.join(suggested_owners or [])))
    return output_list

  return []
1433