#!/usr/bin/env python
#
# Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
#       notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
#       copyright notice, this list of conditions and the following
#       disclaimer in the documentation and/or other materials provided
#       with the distribution.
#     * Neither the name of Google Inc. nor the names of its
#       contributors may be used to endorse or promote products derived
#       from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

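# Presubmit checks for V8: runs checkdeps, cpplint, source-level checks
# (copyright headers, tabs, trailing whitespace, test flag conventions),
# status file validation and the Python unit tests. Intended to be run as
# tools/presubmit.py from a V8 checkout; pass --no-lint to skip cpplint.
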
try:
  import hashlib
  md5er = hashlib.md5
except ImportError, e:
  import md5
  md5er = md5.new


import json
import optparse
import os
from os.path import abspath, join, dirname, basename, exists
import pickle
import re
import sys
import subprocess
import multiprocessing
from subprocess import PIPE

from testrunner.local import statusfile
from testrunner.local import testsuite
from testrunner.local import utils

# Special LINT rules diverging from default and reason.
# build/header_guard: Our guards have the form "V8_FOO_H_", not "SRC_FOO_H_".
#   We now run our own header guard check in PRESUBMIT.py.
# build/include_what_you_use: Started giving false positives for variables
#   named "string" and "map" assuming that you needed to include STL headers.

LINT_RULES = """
-build/header_guard
-build/include_what_you_use
-readability/fn_size
-readability/multiline_comment
-runtime/references
-whitespace/comments
""".split()

LINT_OUTPUT_PATTERN = re.compile(r'^.+[:(]\d+[:)]|^Done processing')
FLAGS_LINE = re.compile("//\s*Flags:.*--([A-z0-9-])+_[A-z0-9].*\n")
ASSERT_OPTIMIZED_PATTERN = re.compile("assertOptimized")
FLAGS_ENABLE_OPT = re.compile("//\s*Flags:.*--opt[^-].*\n")
ASSERT_UNOPTIMIZED_PATTERN = re.compile("assertUnoptimized")
FLAGS_NO_ALWAYS_OPT = re.compile("//\s*Flags:.*--no-?always-opt.*\n")
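
# The patterns above are used by SourceProcessor.ProcessContents to vet test
# headers: FLAGS_LINE flags '// Flags:' lines whose flag names use '_' instead
# of '-' (for instance '// Flags: --turbo_filter'), and tests that call
# assertOptimized() or assertUnoptimized() are expected to also pass --opt or
# --no-always-opt respectively.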

TOOLS_PATH = dirname(abspath(__file__))

def CppLintWorker(command):
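  """Runs one cpplint invocation and returns the number of errors it reports."""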
  try:
    process = subprocess.Popen(command, stderr=subprocess.PIPE)
    process.wait()
    out_lines = ""
    error_count = -1
    while True:
      out_line = process.stderr.readline()
      if out_line == '' and process.poll() != None:
        if error_count == -1:
          print "Failed to process %s" % command.pop()
          return 1
        break
      m = LINT_OUTPUT_PATTERN.match(out_line)
      if m:
        out_lines += out_line
        error_count += 1
    sys.stdout.write(out_lines)
    return error_count
  except KeyboardInterrupt:
    process.kill()
  except:
    print('Error running cpplint.py. Please make sure you have depot_tools' +
          ' in your $PATH. Lint check skipped.')
    process.kill()


class FileContentsCache(object):
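  """Remembers md5 sums of file contents to skip unchanged files on re-runs."""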

  def __init__(self, sums_file_name):
    self.sums = {}
    self.sums_file_name = sums_file_name

  def Load(self):
    try:
      sums_file = None
      try:
        sums_file = open(self.sums_file_name, 'r')
        self.sums = pickle.load(sums_file)
      except:
        # Cannot parse pickle for any reason. Not much we can do about it.
        pass
    finally:
      if sums_file:
        sums_file.close()

  def Save(self):
    try:
      sums_file = open(self.sums_file_name, 'w')
      pickle.dump(self.sums, sums_file)
    except:
      # Failed to write pickle. Try to clean-up behind us.
      if sums_file:
        sums_file.close()
      try:
        os.unlink(self.sums_file_name)
      except:
        pass
    finally:
      sums_file.close()

  def FilterUnchangedFiles(self, files):
    changed_or_new = []
    for file in files:
      try:
        handle = open(file, "r")
        file_sum = md5er(handle.read()).digest()
        if not file in self.sums or self.sums[file] != file_sum:
          changed_or_new.append(file)
          self.sums[file] = file_sum
      finally:
        handle.close()
    return changed_or_new

  def RemoveFile(self, file):
    if file in self.sums:
      self.sums.pop(file)


class SourceFileProcessor(object):
  """
  Utility class that can run through a directory structure, find all relevant
  files and invoke a custom check on the files.
  """

  def RunOnPath(self, path):
    """Runs processor on all files under the given path."""

    all_files = []
    for file in self.GetPathsToSearch():
      all_files += self.FindFilesIn(join(path, file))
    return self.ProcessFiles(all_files)

  def RunOnFiles(self, files):
    """Runs processor only on affected files."""

    # Helper for getting directory pieces.
    dirs = lambda f: dirname(f).split(os.sep)

    # Path offsets where to look (to be in sync with RunOnPath).
    # Normalize '.' to check for it with str.startswith.
    search_paths = [('' if p == '.' else p) for p in self.GetPathsToSearch()]

    all_files = [
      f.AbsoluteLocalPath()
      for f in files
      if (not self.IgnoreFile(f.LocalPath()) and
          self.IsRelevant(f.LocalPath()) and
          all(not self.IgnoreDir(d) for d in dirs(f.LocalPath())) and
          any(map(f.LocalPath().startswith, search_paths)))
    ]

    return self.ProcessFiles(all_files)

  def IgnoreDir(self, name):
    return (name.startswith('.') or
            name in ('buildtools', 'data', 'gmock', 'gtest', 'kraken',
                     'octane', 'sunspider', 'traces-arm64'))

  def IgnoreFile(self, name):
    return name.startswith('.')

  def FindFilesIn(self, path):
    result = []
    for (root, dirs, files) in os.walk(path):
      for ignored in [x for x in dirs if self.IgnoreDir(x)]:
        dirs.remove(ignored)
      for file in files:
        if not self.IgnoreFile(file) and self.IsRelevant(file):
          result.append(join(root, file))
    return result


class CppLintProcessor(SourceFileProcessor):
  """
  Lint files to check that they follow the google code style.
  """

  def IsRelevant(self, name):
    return name.endswith('.cc') or name.endswith('.h')

  def IgnoreDir(self, name):
    return (super(CppLintProcessor, self).IgnoreDir(name)
              or (name == 'third_party'))

  IGNORE_LINT = ['export-template.h', 'flag-definitions.h']

  def IgnoreFile(self, name):
    return (super(CppLintProcessor, self).IgnoreFile(name)
              or (name in CppLintProcessor.IGNORE_LINT))

  def GetPathsToSearch(self):
    dirs = ['include', 'samples', 'src']
    test_dirs = ['cctest', 'common', 'fuzzer', 'inspector', 'unittests']
    return dirs + [join('test', dir) for dir in test_dirs]

  def GetCpplintScript(self, prio_path):
    for path in [prio_path] + os.environ["PATH"].split(os.pathsep):
      path = path.strip('"')
      cpplint = os.path.join(path, "cpplint.py")
      if os.path.isfile(cpplint):
        return cpplint

    return None

  def ProcessFiles(self, files):
    good_files_cache = FileContentsCache('.cpplint-cache')
    good_files_cache.Load()
    files = good_files_cache.FilterUnchangedFiles(files)
    if len(files) == 0:
      print 'No changes in files detected. Skipping cpplint check.'
      return True

    filters = ",".join([n for n in LINT_RULES])
    cpplint = self.GetCpplintScript(TOOLS_PATH)
    if cpplint is None:
      print('Could not find cpplint.py. Make sure '
            'depot_tools is installed and in the path.')
      sys.exit(1)

    command = [sys.executable, cpplint, '--filter', filters]

    commands = [command + [file] for file in files]
    count = multiprocessing.cpu_count()
    pool = multiprocessing.Pool(count)
    try:
      results = pool.map_async(CppLintWorker, commands).get(999999)
    except KeyboardInterrupt:
      print "\nCaught KeyboardInterrupt, terminating workers."
      sys.exit(1)

    for i in range(len(files)):
      if results[i] > 0:
        good_files_cache.RemoveFile(files[i])

    total_errors = sum(results)
    print "Total errors found: %d" % total_errors
    good_files_cache.Save()
    return total_errors == 0


COPYRIGHT_HEADER_PATTERN = re.compile(
    r'Copyright [\d-]*20[0-1][0-9] the V8 project authors. All rights reserved.')

class SourceProcessor(SourceFileProcessor):
  """
  Check that all files include a copyright notice and no trailing whitespaces.
  """

  RELEVANT_EXTENSIONS = ['.js', '.cc', '.h', '.py', '.c', '.status', '.tq', '.g4']

  def __init__(self):
    self.runtime_function_call_pattern = self.CreateRuntimeFunctionCallMatcher()

  def CreateRuntimeFunctionCallMatcher(self):
    runtime_h_path = join(dirname(TOOLS_PATH), 'src/runtime/runtime.h')
    pattern = re.compile(r'\s+F\(([^,]*),.*\)')
    runtime_functions = []
    with open(runtime_h_path) as f:
      for line in f.readlines():
        m = pattern.match(line)
        if m:
          runtime_functions.append(m.group(1))
    if len(runtime_functions) < 450:
      print ("Runtime functions list is suspiciously short. "
             "Consider updating the presubmit script.")
      sys.exit(1)
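    # Matches natives-syntax calls written with whitespace between '%' and the
    # runtime function name, e.g. '% DeoptimizeNow()'.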
    str = '(\%\s+(' + '|'.join(runtime_functions) + '))[\s\(]'
    return re.compile(str)

  # Overriding the one in the parent class.
  def FindFilesIn(self, path):
    if os.path.exists(path+'/.git'):
      output = subprocess.Popen('git ls-files --full-name',
                                stdout=PIPE, cwd=path, shell=True)
      result = []
      for file in output.stdout.read().split():
        for dir_part in os.path.dirname(file).replace(os.sep, '/').split('/'):
          if self.IgnoreDir(dir_part):
            break
        else:
          if (self.IsRelevant(file) and os.path.exists(file)
              and not self.IgnoreFile(file)):
            result.append(join(path, file))
      if output.wait() == 0:
        return result
    return super(SourceProcessor, self).FindFilesIn(path)

  def IsRelevant(self, name):
    for ext in SourceProcessor.RELEVANT_EXTENSIONS:
      if name.endswith(ext):
        return True
    return False

  def GetPathsToSearch(self):
    return ['.']

  def IgnoreDir(self, name):
    return (super(SourceProcessor, self).IgnoreDir(name) or
            name in ('third_party', 'out', 'obj', 'DerivedSources'))

  IGNORE_COPYRIGHTS = ['box2d.js',
                       'cpplint.py',
                       'check_injected_script_source.py',
                       'copy.js',
                       'corrections.js',
                       'crypto.js',
                       'daemon.py',
                       'debugger-script.js',
                       'earley-boyer.js',
                       'fannkuch.js',
                       'fasta.js',
                       'generate_protocol_externs.py',
                       'injected-script.cc',
                       'injected-script.h',
                       'injected-script-source.js',
                       'java-script-call-frame.cc',
                       'java-script-call-frame.h',
                       'jsmin.py',
                       'libraries.cc',
                       'libraries-empty.cc',
                       'lua_binarytrees.js',
                       'meta-123.js',
                       'memops.js',
                       'poppler.js',
                       'primes.js',
                       'raytrace.js',
                       'regexp-pcre.js',
                       'resources-123.js',
                       'rjsmin.py',
                       'sqlite.js',
                       'sqlite-change-heap.js',
                       'sqlite-pointer-masking.js',
                       'sqlite-safe-heap.js',
                       'v8-debugger-script.h',
                       'v8-function-call.cc',
                       'v8-function-call.h',
                       'v8-inspector-impl.cc',
                       'v8-inspector-impl.h',
                       'v8-runtime-agent-impl.cc',
                       'v8-runtime-agent-impl.h',
                       'gnuplot-4.6.3-emscripten.js',
                       'zlib.js']
  IGNORE_TABS = IGNORE_COPYRIGHTS + ['unicode-test.js', 'html-comments.js']

  IGNORE_COPYRIGHTS_DIRECTORY = "test/test262/local-tests"

  def EndOfDeclaration(self, line):
    return line == "}" or line == "};"

  def StartOfDeclaration(self, line):
    return line.find("//") == 0 or \
           line.find("/*") == 0 or \
           line.find(") {") != -1

  def ProcessContents(self, name, contents):
    result = True
    base = basename(name)
    if not base in SourceProcessor.IGNORE_TABS:
      if '\t' in contents:
        print "%s contains tabs" % name
        result = False
    if not base in SourceProcessor.IGNORE_COPYRIGHTS and \
        not SourceProcessor.IGNORE_COPYRIGHTS_DIRECTORY in name:
      if not COPYRIGHT_HEADER_PATTERN.search(contents):
        print "%s is missing a correct copyright header." % name
        result = False
    if ' \n' in contents or contents.endswith(' '):
      line = 0
      lines = []
      parts = contents.split(' \n')
      if not contents.endswith(' '):
        parts.pop()
      for part in parts:
        line += part.count('\n') + 1
        lines.append(str(line))
      linenumbers = ', '.join(lines)
      if len(lines) > 1:
        print "%s has trailing whitespaces in lines %s." % (name, linenumbers)
      else:
        print "%s has trailing whitespaces in line %s." % (name, linenumbers)
      result = False
    if not contents.endswith('\n') or contents.endswith('\n\n'):
      print "%s does not end with a single new line." % name
      result = False
    # Sanitize flags for fuzzer.
    if "mjsunit" in name or "debugger" in name:
      match = FLAGS_LINE.search(contents)
      if match:
        print "%s Flags should use '-' (not '_')" % name
        result = False
      if not "mjsunit/mjsunit.js" in name:
        if ASSERT_OPTIMIZED_PATTERN.search(contents) and \
            not FLAGS_ENABLE_OPT.search(contents):
          print "%s Flag --opt should be set if " \
                "assertOptimized() is used" % name
          result = False
        if ASSERT_UNOPTIMIZED_PATTERN.search(contents) and \
            not FLAGS_NO_ALWAYS_OPT.search(contents):
          print "%s Flag --no-always-opt should be set if " \
                "assertUnoptimized() is used" % name
          result = False

      match = self.runtime_function_call_pattern.search(contents)
      if match:
        print "%s has unexpected spaces in a runtime call '%s'" % (name, match.group(1))
        result = False
    return result

  def ProcessFiles(self, files):
    success = True
    violations = 0
    for file in files:
      try:
        handle = open(file)
        contents = handle.read()
        if not self.ProcessContents(file, contents):
          success = False
          violations += 1
      finally:
        handle.close()
    print "Total violating files: %s" % violations
    return success

def _CheckStatusFileForDuplicateKeys(filepath):
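  """Converts a .status file to JSON and flags keys that occur twice."""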
  comma_space_bracket = re.compile(", *]")
  lines = []
  with open(filepath) as f:
    for line in f.readlines():
      # Skip all-comment lines.
      if line.lstrip().startswith("#"): continue
      # Strip away comments at the end of the line.
      comment_start = line.find("#")
      if comment_start != -1:
        line = line[:comment_start]
      line = line.strip()
      # Strip away trailing commas within the line.
      line = comma_space_bracket.sub("]", line)
      if len(line) > 0:
        lines.append(line)

  # Strip away trailing commas at line ends. Ugh.
  for i in range(len(lines) - 1):
    if (lines[i].endswith(",") and len(lines[i + 1]) > 0 and
        lines[i + 1][0] in ("}", "]")):
      lines[i] = lines[i][:-1]

  contents = "\n".join(lines)
  # JSON wants double-quotes.
  contents = contents.replace("'", '"')
  # Fill in keywords (like PASS, SKIP).
  for key in statusfile.KEYWORDS:
    contents = re.sub(r"\b%s\b" % key, "\"%s\"" % key, contents)

  status = {"success": True}
  def check_pairs(pairs):
    keys = {}
    for key, value in pairs:
      if key in keys:
        print("%s: Error: duplicate key %s" % (filepath, key))
        status["success"] = False
      keys[key] = True

  json.loads(contents, object_pairs_hook=check_pairs)
  return status["success"]


class StatusFilesProcessor(SourceFileProcessor):
  """Checks status files for incorrect syntax and duplicate keys."""

  def IsRelevant(self, name):
    # Several changes to files under the test directories could impact status
    # files.
    return True

  def GetPathsToSearch(self):
    return ['test', 'tools/testrunner']

  def ProcessFiles(self, files):
    success = True
    for status_file_path in sorted(self._GetStatusFiles(files)):
      success &= statusfile.PresubmitCheck(status_file_path)
      success &= _CheckStatusFileForDuplicateKeys(status_file_path)
    return success

  def _GetStatusFiles(self, files):
    test_path = join(dirname(TOOLS_PATH), 'test')
    testrunner_path = join(TOOLS_PATH, 'testrunner')
    status_files = set()

    for file_path in files:
      if file_path.startswith(testrunner_path):
        for suitepath in os.listdir(test_path):
          suitename = os.path.basename(suitepath)
          status_file = os.path.join(
              test_path, suitename, suitename + ".status")
          if os.path.exists(status_file):
            status_files.add(status_file)
        return status_files

    for file_path in files:
      if file_path.startswith(test_path):
        # Strip off absolute path prefix pointing to test suites.
        pieces = file_path[len(test_path):].lstrip(os.sep).split(os.sep)
        if pieces:
          # Infer affected status file name. Only care for existing status
          # files. Some directories under "test" don't have any.
          if not os.path.isdir(join(test_path, pieces[0])):
            continue
          status_file = join(test_path, pieces[0], pieces[0] + ".status")
          if not os.path.exists(status_file):
            continue
          status_files.add(status_file)
    return status_files


def CheckDeps(workspace):
  checkdeps_py = join(workspace, 'buildtools', 'checkdeps', 'checkdeps.py')
  return subprocess.call([sys.executable, checkdeps_py, workspace]) == 0


def PyTests(workspace):
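  """Runs the stand-alone Python test scripts and returns True if all pass."""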
  result = True
  for script in [
      join(workspace, 'tools', 'release', 'test_scripts.py'),
      join(workspace, 'tools', 'unittests', 'run_tests_test.py'),
    ]:
    print 'Running ' + script
    result &= subprocess.call(
        [sys.executable, script], stdout=subprocess.PIPE) == 0
  return result


def GetOptions():
  result = optparse.OptionParser()
  result.add_option('--no-lint', help="Do not run cpplint", default=False,
                    action="store_true")
  return result


def Main():
  workspace = abspath(join(dirname(sys.argv[0]), '..'))
  parser = GetOptions()
  (options, args) = parser.parse_args()
  success = True
  print "Running checkdeps..."
  success &= CheckDeps(workspace)
  if not options.no_lint:
    print "Running C++ lint check..."
    success &= CppLintProcessor().RunOnPath(workspace)
  print "Running copyright header, trailing whitespaces and " \
        "two empty lines between declarations check..."
  success &= SourceProcessor().RunOnPath(workspace)
  print "Running status-files check..."
  success &= StatusFilesProcessor().RunOnPath(workspace)
  print "Running python tests..."
  success &= PyTests(workspace)
  if success:
    return 0
  else:
    return 1


if __name__ == '__main__':
  sys.exit(Main())