#!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

6"""Adaptor script called through build/isolate.gypi.
7
8Creates a wrapping .isolate which 'includes' the original one, that can be
9consumed by tools/swarming_client/isolate.py. Path variables are determined
10based on the current working directory. The relative_cwd in the .isolated file
11is determined based on the .isolate file that declare the 'command' variable to
12be used so the wrapping .isolate doesn't affect this value.
13
14This script loads build.ninja and processes it to determine all the executables
15referenced by the isolated target. It adds them in the wrapping .isolate file.
16
17WARNING: The target to use for build.ninja analysis is the base name of the
18.isolate file plus '_run'. For example, 'foo_test.isolate' would have the target
19'foo_test_run' analysed.
20"""

import StringIO
import glob
import logging
import os
import posixpath
import subprocess
import sys
import time

TOOLS_DIR = os.path.dirname(os.path.abspath(__file__))
SWARMING_CLIENT_DIR = os.path.join(TOOLS_DIR, 'swarming_client')
SRC_DIR = os.path.dirname(TOOLS_DIR)

sys.path.insert(0, SWARMING_CLIENT_DIR)

import isolate_format


def load_ninja_recursively(build_dir, ninja_path, build_steps):
  """Crudely extracts the subninja and build statements in ninja_path.

  In particular, it ignores rule and variable declarations. The goal is to be
  performant (well, as much as python can be performant), which is currently
  in the <200ms range for a complete chromium tree. As such the code is laid
  out for performance instead of readability.
  """
  logging.debug('Loading %s', ninja_path)
  try:
    with open(os.path.join(build_dir, ninja_path), 'rb') as f:
      line = None
      merge_line = ''
      subninja = []
      for line in f:
        line = line.rstrip()
        if not line:
          continue

        if line[-1] == '$':
          # The next line needs to be merged in.
          merge_line += line[:-1]
          continue

        if merge_line:
          line = merge_line + line
          merge_line = ''

        statement = line[:line.find(' ')]
        if statement == 'build':
          # Save the dependency list as a raw string. Only the lines needed will
          # be processed with raw_build_to_deps(). This saves a good 70ms of
          # processing time.
          build_target, dependencies = line[6:].split(': ', 1)
          # Interestingly, trying to be smart and only saving the build steps
          # with the intended extensions ('', '.stamp', '.so') slows down
          # parsing even if 90% of the build rules can be skipped.
          # On Windows, a single step may generate two targets, so split items
          # accordingly. It has only been seen for .exe/.exe.pdb combos.
          for i in build_target.strip().split():
            build_steps[i] = dependencies
        elif statement == 'subninja':
          subninja.append(line[9:])
  except IOError:
    print >> sys.stderr, 'Failed to open %s' % ninja_path
    raise

  total = 1
  for rel_path in subninja:
    try:
      # Load each of the files referenced.
      # TODO(maruel): Skip the files known to not be needed. It would save an
      # awful lot of processing time.
      total += load_ninja_recursively(build_dir, rel_path, build_steps)
    except IOError:
      print >> sys.stderr, '... as referenced by %s' % ninja_path
      raise
  return total


def load_ninja(build_dir):
  """Loads the tree of .ninja files in build_dir."""
  build_steps = {}
  total = load_ninja_recursively(build_dir, 'build.ninja', build_steps)
  logging.info('Loaded %d ninja files, %d build steps', total, len(build_steps))
  return build_steps


def using_blacklist(item):
  """Returns True if an item should be analyzed.

  Ignores many rules that are assumed to not depend on a dynamic library. If
  the assumption doesn't hold true anymore for a file format, remove it from
  this list. This is simply an optimization.
  """
  IGNORED = (
    '.a', '.cc', '.css', '.def', '.frag', '.h', '.html', '.js', '.json',
    '.manifest', '.o', '.obj', '.pak', '.png', '.pdb', '.strings', '.test',
    '.txt', '.vert',
  )
  # ninja files use native path format.
  ext = os.path.splitext(item)[1]
  if ext in IGNORED:
    return False
  # Special case Windows, keep .dll.lib but discard .lib.
  if item.endswith('.dll.lib'):
    return True
  if ext == '.lib':
    return False
  return item not in ('', '|', '||')


def raw_build_to_deps(item):
  """Converts a raw ninja build statement into the list of interesting
  dependencies.
  """
  # TODO(maruel): Use a whitelist instead? .stamp, .so.TOC, .dylib.TOC,
  # .dll.lib, .exe and empty.
  # The first item is the build rule, e.g. 'link', 'cxx', 'phony', etc.
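  # E.g. (illustrative) 'link obj/a.o lib/libfoo.so.TOC' yields
  # ['lib/libfoo.so.TOC'], since '.o' files are filtered out by the blacklist.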
  return filter(using_blacklist, item.split(' ')[1:])


def collect_deps(target, build_steps, dependencies_added, rules_seen):
  """Recursively adds all the interesting dependencies for |target|
  into |dependencies_added|.
  """
  if rules_seen is None:
    rules_seen = set()
  if target in rules_seen:
    # TODO(maruel): Figure out how it happens.
    logging.warning('Circular dependency for %s!', target)
    return
  rules_seen.add(target)
  try:
    dependencies = raw_build_to_deps(build_steps[target])
  except KeyError:
    logging.info('Failed to find a build step to generate: %s', target)
    return
  logging.debug('collect_deps(%s) -> %s', target, dependencies)
  for dependency in dependencies:
    dependencies_added.add(dependency)
    collect_deps(dependency, build_steps, dependencies_added, rules_seen)


def post_process_deps(build_dir, dependencies):
  """Processes the dependency list with OS specific rules."""
  def filter_item(i):
    if i.endswith('.so.TOC'):
      # Remove only the suffix .TOC, not the .so!
      return i[:-4]
    if i.endswith('.dylib.TOC'):
      # Remove only the suffix .TOC, not the .dylib!
      return i[:-4]
    if i.endswith('.dll.lib'):
      # Remove only the suffix .lib, not the .dll!
      return i[:-4]
    return i

  # Check for execute access. This gets rid of all the phony rules.
  return [
    i for i in map(filter_item, dependencies)
    if os.access(os.path.join(build_dir, i), os.X_OK)
  ]


def create_wrapper(args, isolate_index, isolated_index):
  """Creates a wrapper .isolate that adds dynamic libs.

  The original .isolate is not modified.
  """
  cwd = os.getcwd()
  isolate = args[isolate_index]
  # The code assumes the .isolate file is always specified without a path,
  # relative to cwd. Fix if this assumption doesn't hold true.
  assert os.path.basename(isolate) == isolate, isolate

  # This will look like ../out/Debug. It is relative to cwd. Note that this
  # must equal the value provided as PRODUCT_DIR.
  build_dir = os.path.dirname(args[isolated_index])

  # This will look like chrome/unit_tests.isolate. It is relative to SRC_DIR.
  # It's used to calculate temp_isolate.
  src_isolate = os.path.relpath(os.path.join(cwd, isolate), SRC_DIR)

  # The wrapping .isolate. This will look like
  # ../out/Debug/gen/chrome/unit_tests.isolate.
  temp_isolate = os.path.join(build_dir, 'gen', src_isolate)
  temp_isolate_dir = os.path.dirname(temp_isolate)

  # Relative path between the new and old .isolate files.
  isolate_relpath = os.path.relpath(
      '.', temp_isolate_dir).replace(os.path.sep, '/')

  # It's a big assumption here that the name of the isolate file matches the
  # name of the primary target plus '_run'. Fix accordingly if this doesn't
  # hold true, e.g. complain to maruel@.
  target = isolate[:-len('.isolate')] + '_run'
  build_steps = load_ninja(build_dir)
  binary_deps = set()
  collect_deps(target, build_steps, binary_deps, None)
  binary_deps = post_process_deps(build_dir, binary_deps)
  logging.debug(
      'Binary dependencies:%s', ''.join('\n  ' + i for i in binary_deps))

  # Now write the actual wrapping .isolate.
  isolate_dict = {
    'includes': [
      posixpath.join(isolate_relpath, isolate),
    ],
    'variables': {
      # Will look like ['<(PRODUCT_DIR)/lib/flibuser_prefs.so'].
      isolate_format.KEY_TRACKED: sorted(
          '<(PRODUCT_DIR)/%s' % i.replace(os.path.sep, '/')
          for i in binary_deps),
    },
  }
  if not os.path.isdir(temp_isolate_dir):
    os.makedirs(temp_isolate_dir)
  comment = (
      '# Warning: this file was AUTOGENERATED.\n'
      '# DO NOT EDIT.\n')
  out = StringIO.StringIO()
  isolate_format.print_all(comment, isolate_dict, out)
  isolate_content = out.getvalue()
  with open(temp_isolate, 'wb') as f:
    f.write(isolate_content)
  logging.info('Added %d dynamic libs', len(binary_deps))
  logging.debug('%s', isolate_content)
  args[isolate_index] = temp_isolate


def main():
  logging.basicConfig(level=logging.ERROR, format='%(levelname)7s %(message)s')
  args = sys.argv[1:]
  isolate = None
  isolated = None
  is_component = False
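  # Scan the arguments that will be forwarded to isolate.py: remember where the
  # --isolate and --isolated values are so --isolate can be rewritten, and
  # detect component (shared library) builds.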
  for i, arg in enumerate(args):
    if arg == '--isolate':
      isolate = i + 1
    if arg == '--isolated':
      isolated = i + 1
    if arg == 'component=shared_library':
      is_component = True
  if isolate is None or isolated is None:
    print >> sys.stderr, 'Internal failure: --isolate or --isolated not found'
    return 1

  if is_component:
    create_wrapper(args, isolate, isolated)

  swarming_client = os.path.join(SRC_DIR, 'tools', 'swarming_client')
  sys.stdout.flush()
  result = subprocess.call(
      [sys.executable, os.path.join(swarming_client, 'isolate.py')] + args)
  return result


if __name__ == '__main__':
  sys.exit(main())
