1#!/usr/bin/env python
2# Copyright (C) 2019 The Android Open Source Project
3#
4# Licensed under the Apache License, Version 2.0 (the "License");
5# you may not use this file except in compliance with the License.
6# You may obtain a copy of the License at
7#
8#      http://www.apache.org/licenses/LICENSE-2.0
9#
10# Unless required by applicable law or agreed to in writing, software
11# distributed under the License is distributed on an "AS IS" BASIS,
12# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13# See the License for the specific language governing permissions and
14# limitations under the License.
15
16# This tool uses a collection of BUILD.gn files and build targets to generate
17# an "amalgamated" C++ header and source file pair which compiles to an
18# equivalent program. The tool also outputs the necessary compiler and linker
19# flags needed to compile the resulting source code.
20
21from __future__ import print_function
22import argparse
23import os
24import re
25import shutil
26import subprocess
27import sys
28import tempfile
29
30import gn_utils
31
# Default targets to include in the result.
# TODO(primiano): change this script to recurse into target deps when generating
# headers, but only for proto targets. .pbzero.h files don't include each other
# and we need to list targets here individually, which is unmaintainable.
default_targets = [
    '//:libperfetto_client_experimental',
    '//include/perfetto/protozero:protozero',
    '//protos/perfetto/config:zero',
    '//protos/perfetto/trace:zero',
]

# Arguments for the GN output directory (unless overridden from the command
# line).
gn_args = ' '.join([
    'is_debug=false',
    'is_perfetto_build_generator=true',
    'is_perfetto_embedder=true',
    'use_custom_libcxx=false',
    'enable_perfetto_ipc=true',
])

# By default, the amalgamated .h only recurses in #includes but not in the
# target deps. In the case of protos we want to follow deps even in lieu of
# direct #includes. This is because, by design, protozero headers don't
# include each other but rely on forward declarations. The alternative would
# be adding each proto sub-target individually (e.g. //proto/trace/gpu:zero),
# but doing that is unmaintainable.
recurse_in_header_deps = '^//protos/.*zero$'

# Compiler flags which aren't filtered out.
cflag_whitelist = r'^-(W.*|fno-exceptions|fPIC|std.*|fvisibility.*)$'

# Linker flags which aren't filtered out.
# NOTE(review): '^-()$' only matches the literal string "-", so in practice
# every real ldflag is filtered out -- presumably an intentional placeholder;
# confirm before widening.
ldflag_whitelist = r'^-()$'

# Libraries which are filtered out.
lib_blacklist = r'^(c|gcc_eh)$'

# Macros which aren't filtered out.
define_whitelist = r'^(PERFETTO.*|GOOGLE_PROTOBUF.*)$'

# Includes which will be removed from the generated source.
includes_to_remove = r'^(gtest).*$'

default_cflags = [
    # Since we're expanding header files into the generated source file, some
    # constant may remain unused.
    '-Wno-unused-const-variable'
]

# Build flags to satisfy a protobuf (lite or full) dependency.
protobuf_cflags = [
    # Note that these point to the local copy of protobuf in buildtools. In
    # reality the user of the amalgamated result will have to provide a path to
    # an installed copy of the exact same version of protobuf which was used to
    # generate the amalgamated build.
    '-isystembuildtools/protobuf/src',
    '-Lbuildtools/protobuf/src/.libs',
    # We also need to disable some warnings for protobuf.
    '-Wno-missing-prototypes',
    '-Wno-missing-variable-declarations',
    '-Wno-sign-conversion',
    '-Wno-unknown-pragmas',
    '-Wno-unused-macros',
]

# A mapping of dependencies to system libraries. Libraries in this map will not
# be built statically but instead added as dependencies of the amalgamated
# project.
system_library_map = {
    '//buildtools:protobuf_full': {
        'libs': ['protobuf'],
        'cflags': protobuf_cflags,
    },
    '//buildtools:protobuf_lite': {
        'libs': ['protobuf-lite'],
        'cflags': protobuf_cflags,
    },
    '//buildtools:protoc_lib': {
        'libs': ['protoc']
    },
}

# ----------------------------------------------------------------------------
# End of configuration.
# ----------------------------------------------------------------------------
118
# Basename of this script; embedded into the banners of the generated files.
tool_name = os.path.basename(__file__)
# Repository root, assuming this script lives one directory below it.
project_root = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
# License header plus "generated file" notice, prepended to both output files.
preamble = """// Copyright (C) 2019 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// This file is automatically generated by %s. Do not edit.
""" % tool_name
137
138
def apply_blacklist(blacklist, items):
  """Return only the entries of |items| that do NOT match |blacklist|."""
  kept = []
  for entry in items:
    if not re.match(blacklist, entry):
      kept.append(entry)
  return kept
141
142
def apply_whitelist(whitelist, items):
  """Return only the entries of |items| that match the |whitelist| regex."""
  return list(filter(lambda entry: re.match(whitelist, entry), items))
145
146
def normalize_path(path):
  """Map |path| to a repo-root-relative path, dropping any out/<config>/
  prefix so that generated-file banners are stable across build dirs."""
  relative = os.path.relpath(path, project_root)
  return re.sub(r'^out/[^/]+/', '', relative)
151
152
class Error(Exception):
  """Base exception type raised by this tool."""
155
156
class DependencyNode(object):
  """A single GN target together with the set of targets it depends on."""

  def __init__(self, target_name):
    self.target_name = target_name
    self.dependencies = set()

  def add_dependency(self, target_node):
    # set.add is idempotent, so no explicit membership check is needed.
    self.dependencies.add(target_node)

  def iterate_depth_first(self):
    """Yield all transitive dependencies (post-order, sorted by name),
    followed by this node itself. Nameless (root) nodes are skipped."""
    for child in sorted(self.dependencies, key=lambda n: n.target_name):
      for descendant in child.iterate_depth_first():
        yield descendant
    if self.target_name:
      yield self
175
176
class DependencyTree(object):
  """A tree of GN build target dependencies, rooted at a nameless node."""

  def __init__(self):
    self.target_to_node_map = {}
    # The root is keyed by None and is never yielded during iteration.
    self.root = self._get_or_create_node(None)

  def _get_or_create_node(self, target_name):
    """Return the memoized node for |target_name|, creating it on demand."""
    node = self.target_to_node_map.get(target_name)
    if node is None:
      node = DependencyNode(target_name)
      self.target_to_node_map[target_name] = node
    return node

  def add_dependency(self, from_target, to_target):
    """Record that |from_target| (None for the root) depends on |to_target|."""
    source = self._get_or_create_node(from_target)
    destination = self._get_or_create_node(to_target)
    assert source is not destination
    source.add_dependency(destination)

  def iterate_depth_first(self):
    """Yield every named node, dependencies before dependents."""
    return self.root.iterate_depth_first()
200
201
class AmalgamatedProject(object):
  """In-memory representation of an amalgamated source/header pair.

  Usage: construct with a GN build description, call add_target() for each
  root target, then generate() followed by save() to emit the .h/.cc pair.
  """

  def __init__(self, desc, source_deps, compute_deps_only=False):
    """Constructor.

    Args:
        desc: JSON build description (target label -> properties dict).
        source_deps: A map of (source file, [dependency header]) which is
            used to detect which header files are included by each source
            file.
        compute_deps_only: If True, the project will only be used to compute
            dependency information. Use |get_source_files()| to retrieve
            the result.
    """
    self.desc = desc
    self.source_deps = source_deps
    self.header = []  # Lines of the amalgamated header.
    self.source = []  # Lines of the amalgamated source.
    self.source_defines = []  # Macro lines emitted at the top of the .cc.
    # Note that we don't support multi-arg flags.
    self.cflags = set(default_cflags)
    self.ldflags = set()
    self.defines = set()
    self.libs = set()
    self._dependency_tree = DependencyTree()
    self._processed_sources = set()
    self._processed_headers = set()
    self._processed_header_deps = set()
    self._processed_source_headers = set()  # Header files included from .cc
    self._include_re = re.compile(r'#include "(.*)"')
    self._compute_deps_only = compute_deps_only

  def add_target(self, target_name):
    """Include |target_name| in the amalgamated result."""
    self._dependency_tree.add_dependency(None, target_name)
    self._add_target_dependencies(target_name)
    self._add_target_flags(target_name)
    self._add_target_headers(target_name)

    # Recurse into target deps, but only for protos. This generates headers
    # for all the .pbzero.h files, even if they don't #include each other.
    for _, dep in self._iterate_dep_edges(target_name):
      if (dep not in self._processed_header_deps and
          re.match(recurse_in_header_deps, dep)):
        self._processed_header_deps.add(dep)
        self.add_target(dep)

  def _iterate_dep_edges(self, target_name):
    """Yield (parent, dep) edges of the transitive dep graph, children first."""
    target = self.desc[target_name]
    for dep in target.get('deps', []):
      # Ignore system libraries since they will be added as build-time
      # dependencies.
      if dep in system_library_map:
        continue
      # Don't descend into build action dependencies.
      if self.desc[dep]['type'] == 'action':
        continue
      for sub_target, sub_dep in self._iterate_dep_edges(dep):
        yield sub_target, sub_dep
      yield target_name, dep

  def _iterate_target_and_deps(self, target_name):
    """Yield |target_name| itself followed by all its transitive deps."""
    yield target_name
    for _, dep in self._iterate_dep_edges(target_name):
      yield dep

  def _add_target_dependencies(self, target_name):
    """Record dep edges and absorb flags/libs from system-library deps."""
    for target, dep in self._iterate_dep_edges(target_name):
      self._dependency_tree.add_dependency(target, dep)

    def process_dep(dep):
      # Returns True iff |dep| is satisfied by a system library (in which
      # case we must not descend into it).
      if dep in system_library_map:
        self.libs.update(system_library_map[dep].get('libs', []))
        self.cflags.update(system_library_map[dep].get('cflags', []))
        self.defines.update(system_library_map[dep].get('defines', []))
        return True

    def walk_all_deps(target_name):
      target = self.desc[target_name]
      for dep in target.get('deps', []):
        if process_dep(dep):
          # NOTE(review): this stops scanning this target's remaining deps
          # once a system library is found -- confirm this is intentional
          # (as opposed to a 'continue').
          return
        walk_all_deps(dep)

    walk_all_deps(target_name)

  def _filter_cflags(self, cflags):
    """Normalize and whitelist compiler flags.

    Since we want to deduplicate flags, combine two-part switches (e.g.,
    "-foo bar") into one value ("-foobar") so we can store the result as
    a set.
    """
    result = []
    for flag in cflags:
      if flag.startswith('-'):
        result.append(flag)
      else:
        result[-1] += flag
    return apply_whitelist(cflag_whitelist, result)

  def _add_target_flags(self, target_name):
    """Accumulate whitelisted cflags/ldflags/libs/defines of target + deps."""
    for name in self._iterate_target_and_deps(target_name):
      target = self.desc[name]
      self.cflags.update(self._filter_cflags(target.get('cflags', [])))
      self.cflags.update(self._filter_cflags(target.get('cflags_cc', [])))
      self.ldflags.update(
          apply_whitelist(ldflag_whitelist, target.get('ldflags', [])))
      self.libs.update(apply_blacklist(lib_blacklist, target.get('libs', [])))
      self.defines.update(
          apply_whitelist(define_whitelist, target.get('defines', [])))

  def _add_target_headers(self, target_name):
    """Expand all .h sources of |target_name| into the amalgamated header."""
    target = self.desc[target_name]
    if 'sources' not in target:
      return
    headers = [
        gn_utils.label_to_path(s) for s in target['sources'] if s.endswith('.h')
    ]
    for header in headers:
      self._add_header(target_name, header)

  def _get_include_dirs(self, target_name):
    """Return the union of include dirs of |target_name| and all its deps."""
    include_dirs = set()
    for name in self._iterate_target_and_deps(target_name):
      target = self.desc[name]
      if 'include_dirs' in target:
        include_dirs.update(
            [gn_utils.label_to_path(d) for d in target['include_dirs']])
    return include_dirs

  def _add_source_included_header(self, include_dirs, allowed_files,
                                  header_name):
    """Inline a header included from a .cc into the amalgamated source.

    Only headers listed in |allowed_files| (the known build dependencies of
    the including source) are expanded; other existing headers are skipped
    silently.
    """
    if header_name in self._processed_source_headers:
      return
    self._processed_source_headers.add(header_name)
    for include_dir in include_dirs:
      rel_path = os.path.join(include_dir, header_name)
      full_path = os.path.join(gn_utils.repo_root(), rel_path)
      if os.path.exists(full_path):
        if rel_path not in allowed_files:
          return
        with open(full_path) as f:
          self.source.append(
              '// %s begin header: %s' % (tool_name, normalize_path(full_path)))
          self.source.extend(
              self._process_source_includes(include_dirs, allowed_files, f))
        return
    if self._compute_deps_only:
      return
    msg = 'Looked in %s' % ', '.join('"%s"' % d for d in include_dirs)
    raise Error('Header file %s not found. %s' % (header_name, msg))

  def _add_source(self, target_name, source_name):
    """Expand a .cc file (and its local includes) into the amalgamated .cc."""
    if source_name in self._processed_sources:
      return
    self._processed_sources.add(source_name)
    include_dirs = self._get_include_dirs(target_name)
    deps = self.source_deps[source_name]
    full_path = os.path.join(gn_utils.repo_root(), source_name)
    if not os.path.exists(full_path):
      raise Error('Source file %s not found' % source_name)
    with open(full_path) as f:
      self.source.append(
          '// %s begin source: %s' % (tool_name, normalize_path(full_path)))
      try:
        self.source.extend(
            self._patch_source(
                source_name, self._process_source_includes(
                    include_dirs, deps, f)))
      except Error as e:
        # Fix: Exception.message does not exist in Python 3; str(e) works in
        # both Python 2 and 3.
        raise Error('Failed adding source %s: %s' % (source_name, str(e)))

  def _add_header_included_header(self, include_dirs, header_name):
    """Inline a header included from another header into the amalgamated .h."""
    if header_name in self._processed_headers:
      return
    self._processed_headers.add(header_name)
    for include_dir in include_dirs:
      full_path = os.path.join(gn_utils.repo_root(), include_dir, header_name)
      if os.path.exists(full_path):
        with open(full_path) as f:
          self.header.append(
              '// %s begin header: %s' % (tool_name, normalize_path(full_path)))
          self.header.extend(self._process_header_includes(include_dirs, f))
        return
    if self._compute_deps_only:
      return
    msg = 'Looked in %s' % ', '.join('"%s"' % d for d in include_dirs)
    raise Error('Header file %s not found. %s' % (header_name, msg))

  def _add_header(self, target_name, header_name):
    """Expand a target's own .h file into the amalgamated header."""
    if header_name in self._processed_headers:
      return
    self._processed_headers.add(header_name)
    include_dirs = self._get_include_dirs(target_name)
    full_path = os.path.join(gn_utils.repo_root(), header_name)
    if not os.path.exists(full_path):
      if self._compute_deps_only:
        return
      raise Error('Header file %s not found' % header_name)
    with open(full_path) as f:
      self.header.append(
          '// %s begin header: %s' % (tool_name, normalize_path(full_path)))
      try:
        self.header.extend(self._process_header_includes(include_dirs, f))
      except Error as e:
        # Fix: Exception.message does not exist in Python 3; str(e) works in
        # both Python 2 and 3.
        raise Error('Failed adding header %s: %s' % (header_name, str(e)))

  def _patch_source(self, source_name, lines):
    """Apply source-level workarounds needed for single-translation-unit
    compilation."""
    result = []
    namespace = re.sub(r'[^a-z]', '_',
                       os.path.splitext(os.path.basename(source_name))[0])
    for line in lines:
      # Protobuf generates an identical anonymous function into each
      # message description. Rename all but the first occurrence to avoid
      # duplicate symbol definitions.
      line = line.replace('MergeFromFail', '%s_MergeFromFail' % namespace)
      result.append(line)
    return result

  def _process_source_includes(self, include_dirs, allowed_files, file_obj):
    """Rewrite #include lines of a source file, expanding local headers
    in place and dropping blacklisted ones."""
    result = []
    for line in file_obj:
      line = line.rstrip('\n')
      m = self._include_re.match(line)
      if not m:
        result.append(line)
        continue
      elif re.match(includes_to_remove, m.group(1)):
        result.append('// %s removed: %s' % (tool_name, line))
      else:
        result.append('// %s expanded: %s' % (tool_name, line))
        self._add_source_included_header(include_dirs, allowed_files,
                                         m.group(1))
    return result

  def _process_header_includes(self, include_dirs, file_obj):
    """Rewrite #include lines of a header, expanding local headers in place
    and dropping blacklisted ones."""
    result = []
    for line in file_obj:
      line = line.rstrip('\n')
      m = self._include_re.match(line)
      if not m:
        result.append(line)
        continue
      elif re.match(includes_to_remove, m.group(1)):
        result.append('// %s removed: %s' % (tool_name, line))
      else:
        result.append('// %s expanded: %s' % (tool_name, line))
        self._add_header_included_header(include_dirs, m.group(1))
    return result

  def generate(self):
    """Prepares the output for this amalgamated project.

    Call save() to persist the result.
    """
    assert not self._compute_deps_only
    self.source_defines.append('// %s: predefined macros' % tool_name)

    def add_define(name):
      # Valued macros aren't supported for now.
      assert '=' not in name
      self.source_defines.append('#if !defined(%s)' % name)
      self.source_defines.append('#define %s' % name)
      self.source_defines.append('#endif')

    for name in self.defines:
      add_define(name)
    for target_name, source_name in self.get_source_files():
      self._add_source(target_name, source_name)

  def get_source_files(self):
    """Return a list of (target, source file) pairs describing every .cc file
    pulled in by each target which is a dependency of this project."""
    source_files = []
    for node in self._dependency_tree.iterate_depth_first():
      target = self.desc[node.target_name]
      if 'sources' not in target:
        continue
      sources = [(node.target_name, gn_utils.label_to_path(s))
                 for s in target['sources']
                 if s.endswith('.cc')]
      source_files.extend(sources)
    return source_files

  def _get_nice_path(self, prefix, fmt):
    """Build an output path from |prefix|'s directory and |fmt| % basename."""
    basename = os.path.basename(prefix)
    return os.path.join(
        os.path.relpath(os.path.dirname(prefix)), fmt % basename)

  def _make_directories(self, directory):
    """mkdir -p equivalent: create |directory| unless it already exists."""
    if not os.path.isdir(directory):
      os.makedirs(directory)

  def save(self, output_prefix):
    """Save the generated header and source file pair.

    Returns a message describing the output with build instructions.
    """
    header_file = self._get_nice_path(output_prefix, '%s.h')
    source_file = self._get_nice_path(output_prefix, '%s.cc')
    self._make_directories(os.path.dirname(header_file))
    self._make_directories(os.path.dirname(source_file))
    with open(header_file, 'w') as f:
      f.write('\n'.join([preamble] + self.header + ['\n']))
    with open(source_file, 'w') as f:
      # The generated .cc must include the generated .h by basename, since
      # both files are expected to live side by side.
      include_stmt = '#include "%s"' % os.path.basename(header_file)
      f.write('\n'.join([preamble] + self.source_defines + [include_stmt] +
                        self.source + ['\n']))
    build_cmd = self.get_build_command(output_prefix)

    return """Amalgamated project written to %s and %s.

Build settings:
 - cflags:    %s
 - ldflags:   %s
 - libs:      %s

Example build command:

%s
""" % (header_file, source_file, ' '.join(self.cflags), ' '.join(self.ldflags),
       ' '.join(self.libs), ' '.join(build_cmd))

  def get_build_command(self, output_prefix):
    """Returns an example command line for building the output source."""
    source = self._get_nice_path(output_prefix, '%s.cc')
    library = self._get_nice_path(output_prefix, 'lib%s.so')
    build_cmd = ['clang++', source, '-o', library, '-shared'] + \
        sorted(self.cflags) + sorted(self.ldflags)
    for lib in sorted(self.libs):
      build_cmd.append('-l%s' % lib)
    return build_cmd
533
534
def main():
  """Command-line entry point: prepares a GN out dir, builds the requested
  targets, and writes (or just lists the deps of) the amalgamated pair."""
  parser = argparse.ArgumentParser(
      description='Generate an amalgamated header/source pair from a GN '
      'build description.')
  parser.add_argument(
      '--out',
      help='The name of the temporary build folder in \'out\'',
      default='tmp.gen_amalgamated.%u' % os.getpid())
  parser.add_argument(
      '--output',
      help='Base name of files to create. A .cc/.h extension will be added',
      default=os.path.join(gn_utils.repo_root(), 'out/amalgamated/perfetto'))
  parser.add_argument(
      '--gn_args',
      help='GN arguments used to prepare the output directory',
      default=gn_args)
  parser.add_argument(
      '--keep',
      help='Don\'t delete the GN output directory at exit',
      action='store_true')
  parser.add_argument(
      '--build', help='Also compile the generated files', action='store_true')
  parser.add_argument(
      '--check', help='Don\'t keep the generated files', action='store_true')
  parser.add_argument('--quiet', help='Only report errors', action='store_true')
  parser.add_argument(
      '--dump-deps',
      help='List all source files that the amalgamated output depends on',
      action='store_true')
  parser.add_argument(
      'targets',
      nargs=argparse.REMAINDER,
      help='Targets to include in the output (e.g., "//:libperfetto")')
  args = parser.parse_args()
  targets = args.targets or default_targets

  # Under --check the output goes into a throwaway temp dir, removed in the
  # finally block below.
  output = args.output
  if args.check:
    output = os.path.join(tempfile.mkdtemp(), 'perfetto_amalgamated')

  out = gn_utils.prepare_out_directory(args.gn_args, args.out)
  if not args.quiet:
    print('Building project...')
  try:
    desc = gn_utils.load_build_description(out)

    # We need to build everything first so that the necessary header
    # dependencies get generated. However if we are just dumping dependency
    # information this can be skipped, allowing cross-platform operation.
    if not args.dump_deps:
      gn_utils.build_targets(out, targets)
    source_deps = gn_utils.compute_source_dependencies(out)
    project = AmalgamatedProject(
        desc, source_deps, compute_deps_only=args.dump_deps)

    for target in targets:
      project.add_target(target)

    # --dump-deps: print the sorted, de-duplicated source list and stop.
    if args.dump_deps:
      source_files = [
          source_file for _, source_file in project.get_source_files()
      ]
      print('\n'.join(sorted(set(source_files))))
      return

    project.generate()
    result = project.save(output)
    if not args.quiet:
      print(result)
    if args.build:
      if not args.quiet:
        sys.stdout.write('Building amalgamated project...')
        sys.stdout.flush()
      # Compile the amalgamated output as a smoke test of the result.
      subprocess.check_call(project.get_build_command(output))
      if not args.quiet:
        print('done')
  finally:
    # Clean up the GN scratch dir (unless --keep) and, under --check, the
    # temporary output dir as well.
    if not args.keep:
      shutil.rmtree(out)
    if args.check:
      shutil.rmtree(os.path.dirname(output))
616
617
if __name__ == '__main__':
  # main() returns None on success, so the process exits with status 0.
  sys.exit(main())
620