• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
2#
3# Use of this source code is governed by a BSD-style license
4# that can be found in the LICENSE file in the root of the source
5# tree. An additional intellectual property rights grant can be found
6# in the file PATENTS.  All contributing project authors may
7# be found in the AUTHORS file in the root of the source tree.
8
9import json
10import os
11import re
12import sys
13from collections import defaultdict
14from contextlib import contextmanager
15
# Files and directories that are *skipped* by cpplint in the presubmit script.
# A directory entry exempts every file below it (see IsLintDisabled). Paths
# use '/' separators and are converted to the platform's separator before use
# (see CheckApprovedFilesLintClean).
CPPLINT_EXCEPTIONS = [
  'api/video_codecs/video_decoder.h',
  'common_types.cc',
  'common_types.h',
  'examples/objc',
  'media/base/stream_params.h',
  'media/base/video_common.h',
  'media/sctp/sctp_transport.cc',
  'modules/audio_coding',
  'modules/audio_device',
  'modules/audio_processing',
  'modules/desktop_capture',
  'modules/include/module_common_types.h',
  'modules/utility',
  'modules/video_capture',
  'p2p/base/pseudo_tcp.cc',
  'p2p/base/pseudo_tcp.h',
  'rtc_base',
  'sdk/android/src/jni',
  'sdk/objc',
  'system_wrappers',
  'test',
  'tools_webrtc',
  'voice_engine',
]
42
# These filters will always be removed, even if the caller specifies a filter
# set, as they are problematic or broken in some way. They are appended to the
# default cpplint filter set in CheckApprovedFilesLintClean.
#
# Justifications for each filter:
# - build/c++11         : Rvalue ref checks are unreliable (false positives),
#                         include file and feature blocklists are
#                         google3-specific.
# - runtime/references  : Mutable references are not banned by the Google
#                         C++ style guide anymore (starting from May 2020).
# - whitespace/operators: Same as above (doesn't seem sufficient to eliminate
#                         all move-related errors).
DISABLED_LINT_FILTERS = [
  '-build/c++11',
  '-runtime/references',
  '-whitespace/operators',
]
59
# List of directories of "supported" native APIs. That means changes to headers
# will be done in a compatible way following this scheme:
# 1. Non-breaking changes are made.
# 2. The old APIs are marked as deprecated (with comments).
# 3. Deprecation is announced to discuss-webrtc@googlegroups.com and
#    webrtc-users@google.com (internal list).
# 4. (later) The deprecated APIs are removed.
NATIVE_API_DIRS = (
  'api',  # All subdirectories of api/ are included as well.
  'media/base',
  'media/engine',
  'modules/audio_device/include',
  'pc',
)
74
# These directories should not be used but are maintained only to avoid
# breaking some legacy downstream code. They are merged into API_DIRS below,
# so header changes here also trigger the native-API reminder.
LEGACY_API_DIRS = (
  'common_audio/include',
  'modules/audio_coding/include',
  'modules/audio_processing/include',
  'modules/congestion_controller/include',
  'modules/include',
  'modules/remote_bitrate_estimator/include',
  'modules/rtp_rtcp/include',
  'modules/rtp_rtcp/source',
  'modules/utility/include',
  'modules/video_coding/codecs/h264/include',
  'modules/video_coding/codecs/vp8/include',
  'modules/video_coding/codecs/vp9/include',
  'modules/video_coding/include',
  'rtc_base',
  'system_wrappers/include',
)
94
# NOTE: The set of directories in API_DIRS should be the same as those
# listed in the table in native-api.md.
# Tuple concatenation already produces a new tuple, so the previous
# defensive `[:]` copies were redundant.
API_DIRS = NATIVE_API_DIRS + LEGACY_API_DIRS
98
# TARGET_RE matches a GN target definition, capturing its leading indentation,
# target type (e.g. rtc_library), target name and body. The (?P=indent)
# backreference makes the match end at a closing brace indented the same
# amount as the opening line.
TARGET_RE = re.compile(
  r'(?P<indent>\s*)(?P<target_type>\w+)\("(?P<target_name>\w+)"\) {'
  r'(?P<target_contents>.*?)'
  r'(?P=indent)}',
  re.MULTILINE | re.DOTALL)
105
# SOURCES_RE matches a `sources = [...]` (or `sources += [...]`) block inside
# a GN target.
SOURCES_RE = re.compile(r'sources \+?= \[(?P<sources>.*?)\]',
                        re.MULTILINE | re.DOTALL)
109
# DEPS_RE matches a `deps = [...]` (or `deps += [...]`) block inside a GN
# target. (The old comment said "sources" — a copy-paste from SOURCES_RE.)
DEPS_RE = re.compile(r'\bdeps \+?= \[(?P<deps>.*?)\]',
                     re.MULTILINE | re.DOTALL)
113
# FILE_PATH_RE matches a double-quoted file path, capturing the path and its
# extension (including the dot) separately.
FILE_PATH_RE = re.compile(r'"(?P<file_path>(\w|\/)+)(?P<extension>\.\w+)"')
116
117
def FindSrcDirPath(starting_dir):
  """Returns the abs path to the src/ dir of the project.

  Walks up from |starting_dir| until a directory named 'src' is found.

  Args:
    starting_dir: A directory path inside the checkout; relative paths are
      resolved against the current working directory.

  Returns:
    Absolute path of the closest directory named 'src', starting with
    |starting_dir| itself.

  Raises:
    RuntimeError: If the filesystem root is reached without finding a
      directory named 'src'. (The previous implementation looped forever in
      that case, and never terminated at all for relative paths.)
  """
  src_dir = os.path.abspath(starting_dir)
  while os.path.basename(src_dir) != 'src':
    parent = os.path.dirname(src_dir)
    if parent == src_dir:  # Reached the filesystem root.
      raise RuntimeError('No src/ directory found above %s' % starting_dir)
    src_dir = parent
  return src_dir
124
125
126@contextmanager
127def _AddToPath(*paths):
128  original_sys_path = sys.path
129  sys.path.extend(paths)
130  try:
131    yield
132  finally:
133    # Restore sys.path to what it was before.
134    sys.path = original_sys_path
135
136
def VerifyNativeApiHeadersListIsValid(input_api, output_api):
  """Ensures the list of native API header directories is up to date.

  Returns a PresubmitError naming any API_DIRS entry that no longer exists
  on disk, or an empty list when everything is present.
  """
  native_api_full_paths = [
      input_api.os_path.join(input_api.PresubmitLocalPath(),
                             *path.split('/')) for path in API_DIRS]
  # Consistency fix: use input_api.os_path (like the join above) instead of
  # the global os.path, so the whole function goes through the presubmit API.
  non_existing_paths = [path for path in native_api_full_paths
                        if not input_api.os_path.isdir(path)]
  if non_existing_paths:
    return [output_api.PresubmitError(
        'Directories to native API headers have changed which has made the '
        'list in PRESUBMIT.py outdated.\nPlease update it to the current '
        'location of our native APIs.',
        non_existing_paths)]
  return []
153
154
# Notification message shown when a change touches files under the native API
# directories (see CheckNativeApiHeaderChanges); the list of affected files is
# appended after it.
API_CHANGE_MSG = """
You seem to be changing native API header files. Please make sure that you:
  1. Make compatible changes that don't break existing clients. Usually
     this is done by keeping the existing method signatures unchanged.
  2. Mark the old stuff as deprecated (see RTC_DEPRECATED macro).
  3. Create a timeline and plan for when the deprecated stuff will be
     removed. (The amount of time we give users to change their code
     should be informed by how much work it is for them. If they just
     need to replace one name with another or something equally
     simple, 1-2 weeks might be good; if they need to do serious work,
     up to 3 months may be called for.)
  4. Update/inform existing downstream code owners to stop using the
     deprecated stuff. (Send announcements to
     discuss-webrtc@googlegroups.com and webrtc-users@google.com.)
  5. Remove the deprecated stuff, once the agreed-upon amount of time
     has passed.
Related files:
"""
173
174
def CheckNativeApiHeaderChanges(input_api, output_api):
  """Checks to remind proper changing of native APIs."""
  source_file_filter = lambda x: input_api.FilterSourceFile(
      x, allow_list=[r'.+\.(gn|gni|h)$'])

  def _IsNativeApiFile(local_path):
    """True if the file lives directly in one of the API_DIRS."""
    directory = os.path.dirname(local_path)
    for api_dir in API_DIRS:
      if api_dir == 'api':
        # Special case: all subdirectories of api/ are included as well.
        if directory == 'api' or directory.startswith('api/'):
          return True
      elif directory == api_dir:
        # Normal case: subdirectories are not included.
        return True
    return False

  files = [f.LocalPath()
           for f in input_api.AffectedSourceFiles(source_file_filter)
           if _IsNativeApiFile(f.LocalPath())]
  if files:
    return [output_api.PresubmitNotifyResult(API_CHANGE_MSG, files)]
  return []
195
196
def CheckNoIOStreamInHeaders(input_api, output_api,
                             source_file_filter):
  """Checks to make sure no .h files include <iostream>.

  Returns a one-element list with a PresubmitError listing the offending
  header files, or an empty list.
  """
  files = []
  pattern = input_api.re.compile(r'^#include\s*<iostream>',
                                 input_api.re.MULTILINE)
  file_filter = lambda x: (input_api.FilterSourceFile(x)
                           and source_file_filter(x))
  for f in input_api.AffectedSourceFiles(file_filter):
    # Only header files are checked; .cc files may include <iostream>.
    if not f.LocalPath().endswith('.h'):
      continue
    contents = input_api.ReadFile(f)
    if pattern.search(contents):
      files.append(f)

  if files:  # Idiomatic truthiness test instead of `if len(files):`.
    return [output_api.PresubmitError(
        'Do not #include <iostream> in header files, since it inserts static ' +
        'initialization into every file including the header. Instead, ' +
        '#include <ostream>. See http://crbug.com/94794',
        files)]
  return []
219
220
def CheckNoPragmaOnce(input_api, output_api,
                      source_file_filter):
  """Checks to make sure no .h files use '#pragma once'.

  Include guards are required instead; see the style-guide link in the error
  message. (The old docstring was copy-pasted from an unrelated check about
  banned functions.)
  """
  files = []
  pattern = input_api.re.compile(r'^#pragma\s+once',
                                 input_api.re.MULTILINE)
  file_filter = lambda x: (input_api.FilterSourceFile(x)
                           and source_file_filter(x))
  for f in input_api.AffectedSourceFiles(file_filter):
    # Only header files can contain #pragma once guards.
    if not f.LocalPath().endswith('.h'):
      continue
    contents = input_api.ReadFile(f)
    if pattern.search(contents):
      files.append(f)

  if files:
    return [output_api.PresubmitError(
        'Do not use #pragma once in header files.\n'
        'See http://www.chromium.org/developers/coding-style#TOC-File-headers',
        files)]
  return []
242
243
def CheckNoFRIEND_TEST(input_api, output_api,  # pylint: disable=invalid-name
                       source_file_filter):
  """Make sure that gtest's FRIEND_TEST() macro is not used, the
  FRIEND_TEST_ALL_PREFIXES() macro from testsupport/gtest_prod_util.h should be
  used instead since that allows for FLAKY_, FAILS_ and DISABLED_ prefixes."""
  file_filter = lambda f: (f.LocalPath().endswith(('.cc', '.h'))
                           and source_file_filter(f))

  problems = []
  for affected_file in input_api.AffectedFiles(file_filter=file_filter):
    problems.extend(
        '    %s:%d' % (affected_file.LocalPath(), line_num)
        for line_num, line in affected_file.ChangedContents()
        if 'FRIEND_TEST(' in line)

  if problems:
    return [output_api.PresubmitPromptWarning('WebRTC\'s code should not use '
        'gtest\'s FRIEND_TEST() macro. Include testsupport/gtest_prod_util.h and '
        'use FRIEND_TEST_ALL_PREFIXES() instead.\n' + '\n'.join(problems))]
  return []
263
264
def IsLintDisabled(disabled_paths, file_path):
  """Returns True if |file_path| is exempt from the cpplint check.

  A file is exempt when it is listed verbatim in |disabled_paths| or when its
  containing directory starts with one of the listed path prefixes.
  """
  containing_dir = os.path.dirname(file_path)
  return any(file_path == disabled or containing_dir.startswith(disabled)
             for disabled in disabled_paths)
271
272
def CheckApprovedFilesLintClean(input_api, output_api,
                                source_file_filter=None):
  """Checks that all new or non-exempt .cc and .h files pass cpplint.py.
  This check is based on CheckChangeLintsClean in
  depot_tools/presubmit_canned_checks.py but has less filters and only checks
  added files."""
  result = []

  # Initialize cpplint. Note that cpplint keeps its configuration and error
  # counts in module-level state, so the reset/configure calls below must
  # happen before any ProcessFile() call.
  import cpplint
  # Access to a protected member _XX of a client class
  # pylint: disable=W0212
  cpplint._cpplint_state.ResetErrorCounts()

  lint_filters = cpplint._Filters()
  lint_filters.extend(DISABLED_LINT_FILTERS)
  cpplint._SetFilters(','.join(lint_filters))

  # Create a platform independent exempt list for cpplint.
  disabled_paths = [input_api.os_path.join(*path.split('/'))
                     for path in CPPLINT_EXCEPTIONS]

  # Use the strictest verbosity level for cpplint.py (level 1) which is the
  # default when running cpplint.py from command line. To make it possible to
  # work with not-yet-converted code, we're only applying it to new (or
  # moved/renamed) files and files not listed in CPPLINT_EXCEPTIONS.
  verbosity_level = 1
  files = []
  for f in input_api.AffectedSourceFiles(source_file_filter):
    # Note that moved/renamed files also count as added.
    if f.Action() == 'A' or not IsLintDisabled(disabled_paths,
                                                  f.LocalPath()):
      files.append(f.AbsoluteLocalPath())

  for file_name in files:
    cpplint.ProcessFile(file_name, verbosity_level)

  # cpplint accumulates errors in its global state; any error makes the
  # check fail. When committing this is a hard error, otherwise a warning.
  if cpplint._cpplint_state.error_count > 0:
    if input_api.is_committing:
      res_type = output_api.PresubmitError
    else:
      res_type = output_api.PresubmitPromptWarning
    result = [res_type('Changelist failed cpplint.py check.')]

  return result
318
319
def CheckNoSourcesAbove(input_api, gn_files, output_api):
  """Disallow referencing source files with paths above the GN file location.

  Consistency fix: the regex flags now come from input_api.re like the
  compile() calls, instead of mixing in the bare `re` module.
  """
  source_pattern = input_api.re.compile(
      r' +sources \+?= \[(.*?)\]',
      input_api.re.MULTILINE | input_api.re.DOTALL)
  file_pattern = input_api.re.compile(r'"((\.\./.*?)|(//.*?))"')
  violating_gn_files = set()
  violating_source_entries = []
  for gn_file in gn_files:
    contents = input_api.ReadFile(gn_file)
    for source_block_match in source_pattern.finditer(contents):
      # Find all source list entries starting with ../ in the source block
      # (exclude overrides entries).
      for file_list_match in file_pattern.finditer(source_block_match.group(1)):
        source_file = file_list_match.group(1)
        if 'overrides/' not in source_file:
          violating_source_entries.append(source_file)
          violating_gn_files.add(gn_file)
  if violating_gn_files:
    return [output_api.PresubmitError(
        'Referencing source files above the directory of the GN file is not '
        'allowed. Please introduce new GN targets in the proper location '
        'instead.\n'
        'Invalid source entries:\n'
        '%s\n'
        'Violating GN files:' % '\n'.join(violating_source_entries),
        items=violating_gn_files)]
  return []
347
348
def CheckAbseilDependencies(input_api, gn_files, output_api):
  """Checks that Abseil dependencies are declared in `absl_deps`."""
  # Plain substring pattern; the previous MULTILINE|DOTALL flags had no
  # effect on a literal search and were dropped.
  absl_re = re.compile(r'third_party/abseil-cpp')
  target_types_to_check = [
      'rtc_library',
      'rtc_source_set',
      'rtc_static_library',
      'webrtc_fuzzer_test',
  ]
  error_msg = ('Abseil dependencies in target "%s" (file: %s) '
               'should be moved to the "absl_deps" parameter.')
  errors = []

  for gn_file in gn_files:
    gn_file_content = input_api.ReadFile(gn_file)
    for target_match in TARGET_RE.finditer(gn_file_content):
      target_type = target_match.group('target_type')
      target_name = target_match.group('target_name')
      target_contents = target_match.group('target_contents')
      if target_type not in target_types_to_check:
        continue
      for deps_match in DEPS_RE.finditer(target_contents):
        deps = deps_match.group('deps').splitlines()
        for dep in deps:
          # Idiomatic compiled-pattern search (was re.search(absl_re, dep)).
          if absl_re.search(dep):
            errors.append(
                output_api.PresubmitError(error_msg % (target_name,
                                                       gn_file.LocalPath())))
            break  # no need to warn more than once per target
  return errors
378
379
def CheckNoMixingSources(input_api, gn_files, output_api):
  """Disallow mixing C, C++ and Obj-C/Obj-C++ in the same target.

  See bugs.webrtc.org/7743 for more context.

  Returns a one-element list with a PresubmitError describing every
  (target, mixed sources) pair, or an empty list when no target mixes
  source languages.
  """

  def _MoreThanOneSourceUsed(*sources_lists):
    # Counts how many of the per-language source lists are non-empty.
    sources_used = 0
    for source_list in sources_lists:
      if len(source_list):
        sources_used += 1
    return sources_used > 1

  # Maps GN file path -> list of (target_name, sorted mixed sources).
  errors = defaultdict(lambda: [])
  for gn_file in gn_files:
    gn_file_content = input_api.ReadFile(gn_file)
    for target_match in TARGET_RE.finditer(gn_file_content):
      # list_of_sources is a list of tuples of the form
      # (c_files, cc_files, objc_files) that keeps track of all the sources
      # defined in a target. A GN target can have more that on definition of
      # sources (since it supports if/else statements).
      # E.g.:
      # rtc_static_library("foo") {
      #   if (is_win) {
      #     sources = [ "foo.cc" ]
      #   } else {
      #     sources = [ "foo.mm" ]
      #   }
      # }
      # This is allowed and the presubmit check should support this case.
      list_of_sources = []
      c_files = []
      cc_files = []
      objc_files = []
      target_name = target_match.group('target_name')
      target_contents = target_match.group('target_contents')
      for sources_match in SOURCES_RE.finditer(target_contents):
        # A plain `sources =` starts an alternative definition, so the lists
        # collected so far are stashed; `sources +=` extends the current one.
        if '+=' not in sources_match.group(0):
          if c_files or cc_files or objc_files:
            list_of_sources.append((c_files, cc_files, objc_files))
          c_files = []
          cc_files = []
          objc_files = []
        for file_match in FILE_PATH_RE.finditer(sources_match.group(1)):
          file_path = file_match.group('file_path')
          extension = file_match.group('extension')
          if extension == '.c':
            c_files.append(file_path + extension)
          if extension == '.cc':
            cc_files.append(file_path + extension)
          if extension in ['.m', '.mm']:
            objc_files.append(file_path + extension)
      # Flush the last (or only) sources definition of the target.
      list_of_sources.append((c_files, cc_files, objc_files))
      for c_files_list, cc_files_list, objc_files_list in list_of_sources:
        if _MoreThanOneSourceUsed(c_files_list, cc_files_list, objc_files_list):
          all_sources = sorted(c_files_list + cc_files_list + objc_files_list)
          errors[gn_file.LocalPath()].append((target_name, all_sources))
  if errors:
    return [output_api.PresubmitError(
        'GN targets cannot mix .c, .cc and .m (or .mm) source files.\n'
        'Please create a separate target for each collection of sources.\n'
        'Mixed sources: \n'
        '%s\n'
        'Violating GN files:\n%s\n' % (json.dumps(errors, indent=2),
                                       '\n'.join(errors.keys())))]
  return []
446
447
def CheckNoPackageBoundaryViolations(input_api, gn_files, output_api):
  """Checks the affected GN files for package boundary violations."""
  root = input_api.PresubmitLocalPath()
  checks_lib = input_api.os_path.join(root, 'tools_webrtc',
                                      'presubmit_checks_lib')
  with _AddToPath(checks_lib):
    from check_package_boundaries import CheckPackageBoundaries
  build_files = [os.path.join(root, f.LocalPath()) for f in gn_files]
  # Only report the first five violations to keep the message readable.
  errors = CheckPackageBoundaries(root, build_files)[:5]
  if not errors:
    return []
  return [output_api.PresubmitError(
      'There are package boundary violations in the following GN files:',
      long_text='\n\n'.join(str(err) for err in errors))]
460
461
462def _ReportFileAndLine(filename, line_num):
463  """Default error formatter for _FindNewViolationsOfRule."""
464  return '%s (line %s)' % (filename, line_num)
465
466
def CheckNoWarningSuppressionFlagsAreAdded(gn_files, input_api, output_api,
                                           error_formatter=_ReportFileAndLine):
  """Make sure that warning suppression flags are not added without a reason."""
  msg = ('Usage of //build/config/clang:extra_warnings is discouraged '
         'in WebRTC.\n'
         'If you are not adding this code (e.g. you are just moving '
         'existing code) or you want to add an exception,\n'
         'you can add a comment on the line that causes the problem:\n\n'
         '"-Wno-odr"  # no-presubmit-check TODO(bugs.webrtc.org/BUG_ID)\n'
         '\n'
         'Affected files:\n')
  errors = []  # Strings produced by error_formatter, one per offending line.
  clang_warn_re = input_api.re.compile(r'//build/config/clang:extra_warnings')
  # A matching no-presubmit-check TODO on the same line suppresses the error.
  no_presubmit_re = input_api.re.compile(
      r'# no-presubmit-check TODO\(bugs\.webrtc\.org/\d+\)')
  for f in gn_files:
    for line_num, line in f.ChangedContents():
      if clang_warn_re.search(line) and not no_presubmit_re.search(line):
        errors.append(error_formatter(f.LocalPath(), line_num))
  if errors:
    return [output_api.PresubmitError(msg, errors)]
  return []
489
490
def CheckNoTestCaseUsageIsAdded(input_api, output_api, source_file_filter,
                                error_formatter=_ReportFileAndLine):
  """Warns about new usages of the legacy TEST_CASE GoogleTest API in .cc
  files."""
  error_msg = ('Usage of legacy GoogleTest API detected!\nPlease use the '
               'new API: https://github.com/google/googletest/blob/master/'
               'googletest/docs/primer.md#beware-of-the-nomenclature.\n'
               'Affected files:\n')
  test_case_re = input_api.re.compile(r'TEST_CASE')
  file_filter = lambda f: (source_file_filter(f)
                           and f.LocalPath().endswith('.cc'))
  errors = [
      error_formatter(f.LocalPath(), line_num)
      for f in input_api.AffectedSourceFiles(file_filter)
      for line_num, line in f.ChangedContents()
      if test_case_re.search(line)
  ]
  if errors:
    return [output_api.PresubmitError(error_msg, errors)]
  return []
508
509
def CheckNoStreamUsageIsAdded(input_api, output_api,
                              source_file_filter,
                              error_formatter=_ReportFileAndLine):
  """Make sure that no more dependencies on stringstream are added."""
  error_msg = ('Usage of <sstream>, <istream> and <ostream> in WebRTC is '
               'deprecated.\n'
               'This includes the following types:\n'
               'std::istringstream, std::ostringstream, std::wistringstream, '
               'std::wostringstream,\n'
               'std::wstringstream, std::ostream, std::wostream, std::istream,'
               'std::wistream,\n'
               'std::iostream, std::wiostream.\n'
               'If you are not adding this code (e.g. you are just moving '
               'existing code),\n'
               'you can add a comment on the line that causes the problem:\n\n'
               '#include <sstream>  // no-presubmit-check TODO(webrtc:8982)\n'
               'std::ostream& F() {  // no-presubmit-check TODO(webrtc:8982)\n'
               '\n'
               'If you are adding new code, consider using '
               'rtc::SimpleStringBuilder\n'
               '(in rtc_base/strings/string_builder.h).\n'
               'Affected files:\n')
  include_re = input_api.re.compile(r'#include <(i|o|s)stream>')
  usage_re = input_api.re.compile(r'std::(w|i|o|io|wi|wo|wio)(string)*stream')
  no_presubmit_re = input_api.re.compile(
      r'// no-presubmit-check TODO\(webrtc:8982\)')
  file_filter = lambda x: (input_api.FilterSourceFile(x)
                           and source_file_filter(x))

  def _IsException(file_path):
    # Streams remain allowed under examples/, test/ and in unit-test files.
    return (file_path.startswith(('examples', 'test')) or
            file_path.endswith(('_test.cc', '_tests.cc',
                                '_unittest.cc', '_unittests.cc')))

  errors = []
  for f in input_api.AffectedSourceFiles(file_filter):
    local_path = f.LocalPath()
    # Usage of stringstream is allowed under examples/ and in tests; this
    # presubmit file mentions the patterns itself and is skipped as well.
    if local_path == 'PRESUBMIT.py' or _IsException(local_path):
      continue
    for line_num, line in f.ChangedContents():
      mentions_stream = include_re.search(line) or usage_re.search(line)
      if mentions_stream and not no_presubmit_re.search(line):
        errors.append(error_formatter(local_path, line_num))
  if errors:
    return [output_api.PresubmitError(error_msg, errors)]
  return []
560
561
def CheckPublicDepsIsNotUsed(gn_files, input_api, output_api):
  """Checks that public_deps is not used without a good reason."""
  no_presubmit_check_re = input_api.re.compile(
      r'# no-presubmit-check TODO\(webrtc:\d+\)')
  error_msg = ('public_deps is not recommended in WebRTC BUILD.gn files '
               'because it doesn\'t map well to downstream build systems.\n'
               'Used in: %s (line %d).\n'
               'If you are not adding this code (e.g. you are just moving '
               'existing code) or you have a good reason, you can add this '
               'comment (verbatim) on the line that causes the problem:\n\n'
               'public_deps = [  # no-presubmit-check TODO(webrtc:8603)\n')
  result = []
  for gn_file in gn_files:
    for line_number, line in gn_file.ChangedContents():
      if 'public_deps' not in line:
        continue
      # A verbatim no-presubmit-check comment suppresses the error.
      if no_presubmit_check_re.search(line):
        continue
      result.append(
          output_api.PresubmitError(error_msg % (gn_file.LocalPath(),
                                                 line_number)))
  return result
583
584
def CheckCheckIncludesIsNotUsed(gn_files, input_api, output_api):
  """Checks that check_includes is not overridden without owner approval."""
  error_msg = ('check_includes overrides are not allowed since it can cause '
               'incorrect dependencies to form. It effectively means that your '
               'module can include any .h file without depending on its '
               'corresponding target. There are some exceptional cases when '
               'this is allowed: if so, get approval from a .gn owner in the '
               'root OWNERS file.\n'
               'Used in: %s (line %d).')
  no_presubmit_re = input_api.re.compile(
      r'# no-presubmit-check TODO\(bugs\.webrtc\.org/\d+\)')
  result = []
  for gn_file in gn_files:
    for line_number, line in gn_file.ChangedContents():
      # Flag any check_includes mention that has no suppression comment.
      if 'check_includes' in line and not no_presubmit_re.search(line):
        result.append(
            output_api.PresubmitError(error_msg % (gn_file.LocalPath(),
                                                   line_number)))
  return result
604
605
def CheckGnChanges(input_api, output_api):
  """Runs all GN-related checks over the affected .gn/.gni files."""
  file_filter = lambda x: (input_api.FilterSourceFile(
      x, allow_list=(r'.+\.(gn|gni)$',),
      block_list=(r'.*/presubmit_checks_lib/testdata/.*',)))

  gn_files = list(input_api.AffectedSourceFiles(file_filter))
  if not gn_files:
    return []

  result = []
  result.extend(CheckNoSourcesAbove(input_api, gn_files, output_api))
  result.extend(CheckNoMixingSources(input_api, gn_files, output_api))
  result.extend(CheckAbseilDependencies(input_api, gn_files, output_api))
  result.extend(CheckNoPackageBoundaryViolations(input_api, gn_files,
                                                 output_api))
  result.extend(CheckPublicDepsIsNotUsed(gn_files, input_api, output_api))
  result.extend(CheckCheckIncludesIsNotUsed(gn_files, input_api, output_api))
  result.extend(CheckNoWarningSuppressionFlagsAreAdded(gn_files, input_api,
                                                       output_api))
  return result
627
628
def CheckGnGen(input_api, output_api):
  """Runs `gn gen --check` with default args to detect mismatches between
  #includes and dependencies in the BUILD.gn files, as well as general build
  errors.
  """
  checks_lib = input_api.os_path.join(
      input_api.PresubmitLocalPath(), 'tools_webrtc', 'presubmit_checks_lib')
  with _AddToPath(checks_lib):
    from build_helpers import RunGnCheck
  # Only surface the first five errors to keep the message readable.
  errors = RunGnCheck(FindSrcDirPath(input_api.PresubmitLocalPath()))[:5]
  if not errors:
    return []
  return [output_api.PresubmitPromptWarning(
      'Some #includes do not match the build dependency graph. Please run:\n'
      '  gn gen --check <out_dir>',
      long_text='\n\n'.join(errors))]
644
645
def CheckUnwantedDependencies(input_api, output_api, source_file_filter):
  """Runs checkdeps on #include statements added in this
  change. Breaking - rules is an error, breaking ! rules is a
  warning.
  """
  # Copied from Chromium's src/PRESUBMIT.py.

  # We need to wait until we have an input_api object and use this
  # roundabout construct to import checkdeps because this file is
  # eval-ed and thus doesn't have __file__.
  src_path = FindSrcDirPath(input_api.PresubmitLocalPath())
  checkdeps_path = input_api.os_path.join(src_path, 'buildtools', 'checkdeps')
  if not os.path.exists(checkdeps_path):
    return [output_api.PresubmitError(
        'Cannot find checkdeps at %s\nHave you run "gclient sync" to '
        'download all the DEPS entries?' % checkdeps_path)]
  with _AddToPath(checkdeps_path):
    import checkdeps
    from cpp_checker import CppChecker
    from rules import Rule

  # Collect the changed lines of every affected C/C++ file; these are what
  # CheckAddedCppIncludes inspects for new #include statements.
  added_includes = []
  for f in input_api.AffectedFiles(file_filter=source_file_filter):
    if not CppChecker.IsCppFile(f.LocalPath()):
      continue

    changed_lines = [line for _, line in f.ChangedContents()]
    added_includes.append([f.LocalPath(), changed_lines])

  deps_checker = checkdeps.DepsChecker(input_api.PresubmitLocalPath())

  # DISALLOW ('-') rule violations are hard errors; every other violation
  # (temporarily-allowed '!' rules) only produces a warning.
  error_descriptions = []
  warning_descriptions = []
  for path, rule_type, rule_description in deps_checker.CheckAddedCppIncludes(
      added_includes):
    description_with_path = '%s\n    %s' % (path, rule_description)
    if rule_type == Rule.DISALLOW:
      error_descriptions.append(description_with_path)
    else:
      warning_descriptions.append(description_with_path)

  results = []
  if error_descriptions:
    results.append(output_api.PresubmitError(
        'You added one or more #includes that violate checkdeps rules.\n'
        'Check that the DEPS files in these locations contain valid rules.\n'
        'See https://cs.chromium.org/chromium/src/buildtools/checkdeps/ for '
        'more details about checkdeps.',
        error_descriptions))
  if warning_descriptions:
    results.append(output_api.PresubmitPromptOrNotify(
        'You added one or more #includes of files that are temporarily\n'
        'allowed but being removed. Can you avoid introducing the\n'
        '#include? See relevant DEPS file(s) for details and contacts.\n'
        'See https://cs.chromium.org/chromium/src/buildtools/checkdeps/ for '
        'more details about checkdeps.',
        warning_descriptions))
  return results
704
705
def CheckCommitMessageBugEntry(input_api, output_api):
  """Check that bug entries are well-formed in commit message."""
  bogus_bug_msg = (
      'Bogus Bug entry: %s. Please specify the issue tracker prefix and the '
      'issue number, separated by a colon, e.g. webrtc:123 or chromium:12345.')
  results = []
  for bug in input_api.change.BugsFromDescription():
    bug = bug.strip()
    if bug.lower() == 'none':
      # 'None' is the explicit way of declaring that there is no bug.
      continue
    if 'b/' in bug or ':' in bug:
      # Entries carrying a prefix must look like 'tracker:123' or 'b/123'.
      if not (re.match(r'\w+:\d+', bug) or re.match(r'b/\d+', bug)):
        results.append(bogus_bug_msg % bug)
      continue
    try:
      numeric_bug = int(bug)
    except ValueError:
      results.append(bogus_bug_msg % bug)
      continue
    # Rough indicator for current chromium bugs.
    prefix_guess = 'chromium' if numeric_bug > 100000 else 'webrtc'
    results.append('Bug entry requires issue tracker prefix, e.g. %s:%s' %
                   (prefix_guess, bug))
  return [output_api.PresubmitError(r) for r in results]
730
731
def CheckChangeHasBugField(input_api, output_api):
  """Requires that the changelist is associated with a bug.

  This check is stricter than the one in depot_tools/presubmit_canned_checks.py
  since it fails the presubmit if the bug field is missing or doesn't contain
  a bug reference.

  This supports both 'BUG=' and 'Bug:' since we are in the process of migrating
  to Gerrit and it encourages the usage of 'Bug:'.
  """
  if not input_api.change.BugsFromDescription():
    return [output_api.PresubmitError(
        'The "Bug: [bug number]" footer is mandatory. Please create a bug and '
        'reference it using either of:\n'
        ' * https://bugs.webrtc.org - reference it using Bug: webrtc:XXXX\n'
        ' * https://crbug.com - reference it using Bug: chromium:XXXXXX')]
  return []
750
751
def CheckJSONParseErrors(input_api, output_api, source_file_filter):
  """Check that JSON files do not contain syntax errors."""

  def IsJsonFile(affected_file):
    # Restrict to .json files that also pass the caller-supplied filter.
    extension = input_api.os_path.splitext(affected_file.LocalPath())[1]
    return extension == '.json' and source_file_filter(affected_file)

  def GetParseError(filename):
    # Returns the ValueError raised while parsing, or None on success.
    try:
      input_api.json.loads(input_api.ReadFile(filename))
    except ValueError as error:
      return error
    return None

  errors = []
  for affected_file in input_api.AffectedFiles(
      file_filter=IsJsonFile, include_deletes=False):
    parse_error = GetParseError(affected_file.AbsoluteLocalPath())
    if parse_error:
      errors.append(output_api.PresubmitError(
          '%s could not be parsed: %s' % (affected_file.LocalPath(),
                                          parse_error)))
  return errors
777
778
def RunPythonTests(input_api, output_api):
  """Collects and runs the Python unit tests shipped with the project."""
  def Join(*args):
    return input_api.os_path.join(input_api.PresubmitLocalPath(), *args)

  # Fixed test locations, plus every tools_webrtc subdirectory that holds
  # at least one *_test.py file.
  test_directories = [
      input_api.PresubmitLocalPath(),
      Join('rtc_tools', 'py_event_log_analyzer'),
      Join('audio', 'test', 'unittests'),
  ]
  for root, _, files in os.walk(Join('tools_webrtc')):
    if any(name.endswith('_test.py') for name in files):
      test_directories.append(root)

  tests = []
  for directory in test_directories:
    tests += input_api.canned_checks.GetUnitTestsInDirectory(
        input_api, output_api, directory, allowlist=[r'.+_test\.py$'])
  return input_api.RunTests(tests, parallel=True)
801
802
def CheckUsageOfGoogleProtobufNamespace(input_api, output_api,
                                        source_file_filter):
  """Checks that the namespace google::protobuf has not been used."""
  offending_files = []
  namespace_re = input_api.re.compile(r'google::protobuf')
  proto_utils_path = os.path.join('rtc_base', 'protobuf_utils.h')
  # The aliasing header itself and this presubmit script are allowed to
  # mention the namespace.
  allowed_paths = [proto_utils_path, 'PRESUBMIT.py']
  file_filter = lambda x: (input_api.FilterSourceFile(x)
                           and source_file_filter(x))
  for affected in input_api.AffectedSourceFiles(file_filter):
    if affected.LocalPath() in allowed_paths:
      continue
    if namespace_re.search(input_api.ReadFile(affected)):
      offending_files.append(affected)

  if offending_files:
    return [output_api.PresubmitError(
        'Please avoid to use namespace `google::protobuf` directly.\n'
        'Add a using directive in `%s` and include that header instead.'
        % proto_utils_path, offending_files)]
  return []
824
825
826def _LicenseHeader(input_api):
827  """Returns the license header regexp."""
828  # Accept any year number from 2003 to the current year
829  current_year = int(input_api.time.strftime('%Y'))
830  allowed_years = (str(s) for s in reversed(xrange(2003, current_year + 1)))
831  years_re = '(' + '|'.join(allowed_years) + ')'
832  license_header = (
833      r'.*? Copyright( \(c\))? %(year)s The WebRTC [Pp]roject [Aa]uthors\. '
834        r'All [Rr]ights [Rr]eserved\.\n'
835      r'.*?\n'
836      r'.*? Use of this source code is governed by a BSD-style license\n'
837      r'.*? that can be found in the LICENSE file in the root of the source\n'
838      r'.*? tree\. An additional intellectual property rights grant can be '
839        r'found\n'
840      r'.*? in the file PATENTS\.  All contributing project authors may\n'
841      r'.*? be found in the AUTHORS file in the root of the source tree\.\n'
842  ) % {
843      'year': years_re,
844  }
845  return license_header
846
847
def CommonChecks(input_api, output_api):
  """Checks common to both upload and commit.

  Builds a set of per-check source filters (ObjC sources, build files and
  third_party code are exempted from checks that do not apply to them), then
  runs the canned depot_tools checks plus the WebRTC-specific checks defined
  elsewhere in this file, accumulating all results.
  """
  results = []
  # Filter out files that are in objc or ios dirs from being cpplint-ed since
  # they do not follow C++ lint rules.
  exception_list = input_api.DEFAULT_BLACK_LIST + (
    r".*\bobjc[\\\/].*",
    r".*objc\.[hcm]+$",
  )
  source_file_filter = lambda x: input_api.FilterSourceFile(x, None,
                                                            exception_list)
  results.extend(CheckApprovedFilesLintClean(
      input_api, output_api, source_file_filter))
  results.extend(input_api.canned_checks.CheckLicense(
      input_api, output_api, _LicenseHeader(input_api)))
  # Pylint everything except directories that are not maintained as part of
  # this repository (checkouts, build output, vendored code).
  results.extend(input_api.canned_checks.RunPylint(input_api, output_api,
      block_list=(r'^base[\\\/].*\.py$',
                  r'^build[\\\/].*\.py$',
                  r'^buildtools[\\\/].*\.py$',
                  r'^infra[\\\/].*\.py$',
                  r'^ios[\\\/].*\.py$',
                  r'^out.*[\\\/].*\.py$',
                  r'^testing[\\\/].*\.py$',
                  r'^third_party[\\\/].*\.py$',
                  r'^tools[\\\/].*\.py$',
                  # TODO(phoglund): should arguably be checked.
                  r'^tools_webrtc[\\\/]mb[\\\/].*\.py$',
                  r'^xcodebuild.*[\\\/].*\.py$',),
      pylintrc='pylintrc'))

  # TODO(nisse): talk/ is no more, so make below checks simpler?
  # WebRTC can't use the presubmit_canned_checks.PanProjectChecks function since
  # we need to have different license checks in talk/ and webrtc/ directories.
  # Instead, hand-picked checks are included below.

  # .m and .mm files are ObjC files. For simplicity we will consider .h files in
  # ObjC subdirectories ObjC headers.
  objc_filter_list = (r'.+\.m$', r'.+\.mm$', r'.+objc\/.+\.h$')
  # Skip long-lines check for DEPS and GN files.
  build_file_filter_list = (r'.+\.gn$', r'.+\.gni$', 'DEPS')
  # Also we will skip most checks for third_party directory.
  third_party_filter_list = (r'^third_party[\\\/].+',)
  eighty_char_sources = lambda x: input_api.FilterSourceFile(x,
      block_list=build_file_filter_list + objc_filter_list +
                 third_party_filter_list)
  # ObjC sources get the wider 100-character limit below instead.
  hundred_char_sources = lambda x: input_api.FilterSourceFile(x,
      allow_list=objc_filter_list)
  non_third_party_sources = lambda x: input_api.FilterSourceFile(x,
      block_list=third_party_filter_list)

  results.extend(input_api.canned_checks.CheckLongLines(
      input_api, output_api, maxlen=80, source_file_filter=eighty_char_sources))
  results.extend(input_api.canned_checks.CheckLongLines(
      input_api, output_api, maxlen=100,
      source_file_filter=hundred_char_sources))
  results.extend(input_api.canned_checks.CheckChangeHasNoTabs(
      input_api, output_api, source_file_filter=non_third_party_sources))
  results.extend(input_api.canned_checks.CheckChangeHasNoStrayWhitespace(
      input_api, output_api, source_file_filter=non_third_party_sources))
  results.extend(input_api.canned_checks.CheckAuthorizedAuthor(
      input_api, output_api, bot_whitelist=[
          'chromium-webrtc-autoroll@webrtc-ci.iam.gserviceaccount.com'
      ]))
  results.extend(input_api.canned_checks.CheckChangeTodoHasOwner(
      input_api, output_api, source_file_filter=non_third_party_sources))
  results.extend(input_api.canned_checks.CheckPatchFormatted(
      input_api, output_api))
  # WebRTC-specific checks defined in this file.
  results.extend(CheckNativeApiHeaderChanges(input_api, output_api))
  results.extend(CheckNoIOStreamInHeaders(
      input_api, output_api, source_file_filter=non_third_party_sources))
  results.extend(CheckNoPragmaOnce(
      input_api, output_api, source_file_filter=non_third_party_sources))
  results.extend(CheckNoFRIEND_TEST(
      input_api, output_api, source_file_filter=non_third_party_sources))
  results.extend(CheckGnChanges(input_api, output_api))
  results.extend(CheckUnwantedDependencies(
      input_api, output_api, source_file_filter=non_third_party_sources))
  results.extend(CheckJSONParseErrors(
      input_api, output_api, source_file_filter=non_third_party_sources))
  results.extend(RunPythonTests(input_api, output_api))
  results.extend(CheckUsageOfGoogleProtobufNamespace(
      input_api, output_api, source_file_filter=non_third_party_sources))
  results.extend(CheckOrphanHeaders(
      input_api, output_api, source_file_filter=non_third_party_sources))
  results.extend(CheckNewlineAtTheEndOfProtoFiles(
      input_api, output_api, source_file_filter=non_third_party_sources))
  results.extend(CheckNoStreamUsageIsAdded(
      input_api, output_api, non_third_party_sources))
  results.extend(CheckNoTestCaseUsageIsAdded(
      input_api, output_api, non_third_party_sources))
  results.extend(CheckAddedDepsHaveTargetApprovals(input_api, output_api))
  results.extend(CheckApiDepsFileIsUpToDate(input_api, output_api))
  results.extend(CheckAbslMemoryInclude(
      input_api, output_api, non_third_party_sources))
  results.extend(CheckBannedAbslMakeUnique(
      input_api, output_api, non_third_party_sources))
  results.extend(CheckObjcApiSymbols(
      input_api, output_api, non_third_party_sources))
  return results
947
948
def CheckApiDepsFileIsUpToDate(input_api, output_api):
  """Check that 'include_rules' in api/DEPS is up to date.

  The file api/DEPS must be kept up to date in order to avoid including
  internal headers from WebRTC's api/ headers.

  This check is focused on ensuring that 'include_rules' contains a deny
  rule for each root level directory. More focused allow rules can be
  added to 'specific_include_rules'.
  """
  results = []
  api_deps = os.path.join(input_api.PresubmitLocalPath(), 'api', 'DEPS')
  with open(api_deps) as deps_file:
    deps_content = _ParseDeps(deps_file.read())

  include_rules = deps_content.get('include_rules', [])
  dirs_to_skip = set(['api', 'docs'])

  # Only check top level directories affected by the current CL.
  top_level_dirs = set()
  for affected_file in input_api.AffectedFiles():
    tokens = [t for t in affected_file.LocalPath().split(os.sep) if t]
    if len(tokens) <= 1:
      continue
    top_dir = tokens[0]
    if top_dir in dirs_to_skip:
      continue
    if os.path.isdir(os.path.join(input_api.PresubmitLocalPath(), top_dir)):
      top_level_dirs.add(top_dir)

  missing_include_rules = set(
      '-%s' % d for d in top_level_dirs if '-%s' % d not in include_rules)

  if missing_include_rules:
    # Build an example snippet showing the rules that must be added.
    example_lines = [
      'include_rules = [\n',
      '  ...\n',
    ]
    example_lines.extend(
        '  "%s",\n' % str(rule) for rule in sorted(missing_include_rules))
    example_lines.append('  ...\n')
    example_lines.append(']\n')

    results.append(output_api.PresubmitError(
        'New root level directory detected! WebRTC api/ headers should '
        'not #include headers from \n'
        'the new directory, so please update "include_rules" in file\n'
        '"%s". Example:\n%s\n' % (api_deps, ''.join(example_lines))))

  return results
1002
def CheckBannedAbslMakeUnique(input_api, output_api, source_file_filter):
  """Checks that no added/changed line introduces absl::make_unique.

  Only .cc/.h files accepted by `source_file_filter` are inspected, and only
  their changed lines; offending files are reported in one error.
  """
  file_filter = lambda f: (f.LocalPath().endswith(('.cc', '.h'))
                           and source_file_filter(f))

  files = []
  for f in input_api.AffectedFiles(
      include_deletes=False, file_filter=file_filter):
    for _, line in f.ChangedContents():
      if 'absl::make_unique' in line:
        files.append(f)
        break  # One offending line is enough to flag the file.

  # Idiomatic truthiness test instead of len().
  if files:
    return [output_api.PresubmitError(
        'Please use std::make_unique instead of absl::make_unique.\n'
        'Affected files:',
        files)]
  return []
1021
def CheckObjcApiSymbols(input_api, output_api, source_file_filter):
  """Checks that RTC_OBJC_EXPORT-ed declarations use RTC_OBJC_TYPE().

  Scans affected .h files under sdk/objc and flags each RTC_OBJC_EXPORT
  occurrence whose following 26 characters do not contain RTC_OBJC_TYPE
  (26 characters is enough to cover e.g. ' @interface RTC_OBJC_TYPE(').
  """
  rtc_objc_export = re.compile(r'RTC_OBJC_EXPORT(.|\n){26}',
                               re.MULTILINE | re.DOTALL)
  # Note: a previous revision defined an unused '.h'-suffix lambda here that
  # was immediately overwritten; it has been removed. The '.h'/'sdk/objc'
  # restriction is enforced inside the loop below.
  files = []
  file_filter = lambda x: (input_api.FilterSourceFile(x)
                           and source_file_filter(x))
  for f in input_api.AffectedSourceFiles(file_filter):
    if not f.LocalPath().endswith('.h') or not 'sdk/objc' in f.LocalPath():
      continue
    contents = input_api.ReadFile(f)
    for match in rtc_objc_export.finditer(contents):
      export_block = match.group(0)
      if 'RTC_OBJC_TYPE' not in export_block:
        files.append(f.LocalPath())

  if files:
    return [output_api.PresubmitError(
        'RTC_OBJC_EXPORT types must be wrapped into an RTC_OBJC_TYPE() ' +
        'macro.\n\n' +
        'For example:\n' +
        'RTC_OBJC_EXPORT @protocol RTC_OBJC_TYPE(RtcFoo)\n\n' +
        'RTC_OBJC_EXPORT @interface RTC_OBJC_TYPE(RtcFoo)\n\n' +
        'Please fix the following files:',
        files)]
  return []
1050
def CheckAbslMemoryInclude(input_api, output_api, source_file_filter):
  """Checks that files adding absl::WrapUnique include absl/memory/memory.h.

  The header may only be available transitively, so every .cc/.h file whose
  changed lines use absl::WrapUnique must include it explicitly.
  """
  pattern = input_api.re.compile(
      r'^#include\s*"absl/memory/memory.h"', input_api.re.MULTILINE)
  file_filter = lambda f: (f.LocalPath().endswith(('.cc', '.h'))
                           and source_file_filter(f))

  files = []
  for f in input_api.AffectedFiles(
      include_deletes=False, file_filter=file_filter):
    contents = input_api.ReadFile(f)
    if pattern.search(contents):
      # The include is already present; nothing to report for this file.
      continue
    for _, line in f.ChangedContents():
      if 'absl::WrapUnique' in line:
        files.append(f)
        break

  # Idiomatic truthiness test instead of len().
  if files:
    return [output_api.PresubmitError(
        'Please include "absl/memory/memory.h" header for  absl::WrapUnique.\n'
        'This header may or may not be included transitively depending on the '
        'C++ standard version.',
        files)]
  return []
1075
def CheckChangeOnUpload(input_api, output_api):
  """Presubmit entry point run when a CL is uploaded."""
  results = []
  results += CommonChecks(input_api, output_api)
  results += CheckGnGen(input_api, output_api)
  results += input_api.canned_checks.CheckGNFormatted(input_api, output_api)
  return results
1083
1084
def CheckChangeOnCommit(input_api, output_api):
  """Presubmit entry point run when a CL is sent to the commit queue."""
  results = list(CommonChecks(input_api, output_api))
  results += VerifyNativeApiHeadersListIsValid(input_api, output_api)
  results += input_api.canned_checks.CheckOwners(input_api, output_api)
  results += input_api.canned_checks.CheckChangeWasUploaded(
      input_api, output_api)
  results += input_api.canned_checks.CheckChangeHasDescription(
      input_api, output_api)
  results += CheckChangeHasBugField(input_api, output_api)
  results += CheckCommitMessageBugEntry(input_api, output_api)
  results += input_api.canned_checks.CheckTreeIsOpen(
      input_api, output_api,
      json_url='http://webrtc-status.appspot.com/current?format=json')
  return results
1100
1101
def CheckOrphanHeaders(input_api, output_api, source_file_filter):
  """Checks that affected .h files are listed in a BUILD.gn file.

  Uses helpers from tools_webrtc/presubmit_checks_lib (imported lazily, see
  the comment below) to locate the BUILD.gn responsible for each header and
  verify the header is referenced there.
  """
  # We need to wait until we have an input_api object and use this
  # roundabout construct to import presubmit_checks_lib because this file is
  # eval-ed and thus doesn't have __file__.
  error_msg = """{} should be listed in {}."""
  results = []
  # Headers under these paths are intentionally not listed in any BUILD.gn.
  exempt_paths = [
    os.path.join('tools_webrtc', 'ios', 'SDK'),
  ]
  with _AddToPath(input_api.os_path.join(
      input_api.PresubmitLocalPath(), 'tools_webrtc', 'presubmit_checks_lib')):
    from check_orphan_headers import GetBuildGnPathFromFilePath
    from check_orphan_headers import IsHeaderInBuildGn

  file_filter = lambda x: input_api.FilterSourceFile(
      x, block_list=exempt_paths) and source_file_filter(x)
  for f in input_api.AffectedSourceFiles(file_filter):
    if f.LocalPath().endswith('.h'):
      file_path = os.path.abspath(f.LocalPath())
      root_dir = os.getcwd()
      gn_file_path = GetBuildGnPathFromFilePath(file_path, os.path.exists,
                                                root_dir)
      in_build_gn = IsHeaderInBuildGn(file_path, gn_file_path)
      if not in_build_gn:
        results.append(output_api.PresubmitError(error_msg.format(
            f.LocalPath(), os.path.relpath(gn_file_path))))
  return results
1129
1130
def CheckNewlineAtTheEndOfProtoFiles(input_api, output_api, source_file_filter):
  """Checks that all .proto files are terminated with a newline."""
  error_msg = 'File {} must end with exactly one newline.'
  results = []
  file_filter = lambda x: input_api.FilterSourceFile(
      x, allow_list=(r'.+\.proto$',)) and source_file_filter(x)
  for affected_file in input_api.AffectedSourceFiles(file_filter):
    file_path = affected_file.LocalPath()
    # Read the file from disk rather than via the diff so the final byte of
    # the whole file can be examined.
    with open(file_path) as proto_file:
      lines = proto_file.readlines()
    if lines and not lines[-1].endswith('\n'):
      results.append(output_api.PresubmitError(error_msg.format(file_path)))
  return results
1144
1145
1146def _ExtractAddRulesFromParsedDeps(parsed_deps):
1147  """Extract the rules that add dependencies from a parsed DEPS file.
1148
1149  Args:
1150    parsed_deps: the locals dictionary from evaluating the DEPS file."""
1151  add_rules = set()
1152  add_rules.update([
1153      rule[1:] for rule in parsed_deps.get('include_rules', [])
1154      if rule.startswith('+') or rule.startswith('!')
1155  ])
1156  for _, rules in parsed_deps.get('specific_include_rules',
1157                                              {}).iteritems():
1158    add_rules.update([
1159        rule[1:] for rule in rules
1160        if rule.startswith('+') or rule.startswith('!')
1161    ])
1162  return add_rules
1163
1164
1165def _ParseDeps(contents):
1166  """Simple helper for parsing DEPS files."""
1167  # Stubs for handling special syntax in the root DEPS file.
1168  class VarImpl(object):
1169
1170    def __init__(self, local_scope):
1171      self._local_scope = local_scope
1172
1173    def Lookup(self, var_name):
1174      """Implements the Var syntax."""
1175      try:
1176        return self._local_scope['vars'][var_name]
1177      except KeyError:
1178        raise Exception('Var is not defined: %s' % var_name)
1179
1180  local_scope = {}
1181  global_scope = {
1182      'Var': VarImpl(local_scope).Lookup,
1183  }
1184  exec contents in global_scope, local_scope
1185  return local_scope
1186
1187
def _CalculateAddedDeps(os_path, old_contents, new_contents):
  """Helper method for CheckAddedDepsHaveTargetApprovals. Returns
  a set of DEPS entries that we should look up.

  For a directory (rather than a specific filename) we fake a path to
  a specific filename by adding /DEPS. This is chosen as a file that
  will seldom or never be subject to per-file include_rules.
  """
  # We ignore deps entries on auto-generated directories.
  auto_generated_dirs = ['grit', 'jni']

  old_rules = _ExtractAddRulesFromParsedDeps(_ParseDeps(old_contents))
  new_rules = _ExtractAddRulesFromParsedDeps(_ParseDeps(new_contents))

  results = set()
  for added_dep in new_rules - old_rules:
    if added_dep.split('/')[0] in auto_generated_dirs:
      continue
    # Assume that a rule that ends in .h is a rule for a specific file.
    if added_dep.endswith('.h'):
      results.add(added_dep)
    else:
      results.add(os_path.join(added_dep, 'DEPS'))
  return results
1214
1215
def CheckAddedDepsHaveTargetApprovals(input_api, output_api):
  """When a dependency prefixed with + is added to a DEPS file, we
  want to make sure that the change is reviewed by an OWNER of the
  target file or directory, to avoid layering violations from being
  introduced. This check verifies that this happens.

  Returns a list of presubmit results: errors at commit time when approvals
  are missing, advisory notifications at upload time, empty when no new
  '+' rules were added.
  """
  virtual_depended_on_files = set()

  # Blink-related third_party DEPS files are out of scope for this check.
  file_filter = lambda f: not input_api.re.match(
      r"^third_party[\\\/](WebKit|blink)[\\\/].*", f.LocalPath())
  for f in input_api.AffectedFiles(include_deletes=False,
                                   file_filter=file_filter):
    filename = input_api.os_path.basename(f.LocalPath())
    if filename == 'DEPS':
      # Collect the dependency targets newly added by this change.
      virtual_depended_on_files.update(_CalculateAddedDeps(
          input_api.os_path,
          '\n'.join(f.OldContents()),
          '\n'.join(f.NewContents())))

  if not virtual_depended_on_files:
    return []

  if input_api.is_committing:
    # TBR, dry runs, and changes without a change number cannot be checked
    # for approvals; bail out with a notification or an error respectively.
    if input_api.tbr:
      return [output_api.PresubmitNotifyResult(
          '--tbr was specified, skipping OWNERS check for DEPS additions')]
    if input_api.dry_run:
      return [output_api.PresubmitNotifyResult(
          'This is a dry run, skipping OWNERS check for DEPS additions')]
    if not input_api.change.issue:
      return [output_api.PresubmitError(
          "DEPS approval by OWNERS check failed: this change has "
          "no change number, so we can't check it for approvals.")]
    output = output_api.PresubmitError
  else:
    # At upload time the result is only advisory.
    output = output_api.PresubmitNotifyResult

  owners_db = input_api.owners_db
  owner_email, reviewers = (
      input_api.canned_checks.GetCodereviewOwnerAndReviewers(
        input_api,
        owners_db.email_regexp,
        approval_needed=input_api.is_committing))

  # If an owner isn't recorded yet, fall back to the change's author.
  owner_email = owner_email or input_api.change.author_email

  reviewers_plus_owner = set(reviewers)
  if owner_email:
    reviewers_plus_owner.add(owner_email)
  missing_files = owners_db.files_not_covered_by(virtual_depended_on_files,
                                                 reviewers_plus_owner)

  # We strip the /DEPS part that was added by
  # _FilesToCheckForIncomingDeps to fake a path to a file in a
  # directory.
  def StripDeps(path):
    start_deps = path.rfind('/DEPS')
    if start_deps != -1:
      return path[:start_deps]
    else:
      return path
  unapproved_dependencies = ["'+%s'," % StripDeps(path)
                             for path in missing_files]

  if unapproved_dependencies:
    output_list = [
      output('You need LGTM from owners of depends-on paths in DEPS that were '
             'modified in this CL:\n    %s' %
                 '\n    '.join(sorted(unapproved_dependencies)))]
    suggested_owners = owners_db.reviewers_for(missing_files, owner_email)
    output_list.append(output(
        'Suggested missing target path OWNERS:\n    %s' %
            '\n    '.join(suggested_owners or [])))
    return output_list

  return []
1292