# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Provides distutils command classes for the GRPC Python setup process."""

from __future__ import print_function

import distutils
import glob
import os
import os.path
import platform
import re
import shutil
import subprocess
import sys
import traceback

import setuptools
from setuptools.command import build_ext
from setuptools.command import build_py
from setuptools.command import easy_install
from setuptools.command import install
from setuptools.command import test

import support

PYTHON_STEM = os.path.dirname(os.path.abspath(__file__))
GRPC_STEM = os.path.abspath(PYTHON_STEM + '../../../../')
PROTO_STEM = os.path.join(GRPC_STEM, 'src', 'proto')
PROTO_GEN_STEM = os.path.join(GRPC_STEM, 'src', 'python', 'gens')
CYTHON_STEM = os.path.join(PYTHON_STEM, 'grpc', '_cython')


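# The command classes below are only useful once they are registered with
# setuptools. A minimal sketch of that wiring is shown here as a comment; the
# exact mapping and command names used by the real grpcio setup.py may differ,
# so treat the names below as assumptions for illustration only:
#
#   import commands
#   import setuptools
#
#   setuptools.setup(
#       name='grpcio',
#       # ... other arguments elided ...
#       cmdclass={
#           'doc': commands.SphinxDocumentation,
#           'build_project_metadata': commands.BuildProjectMetadata,
#           'build_py': commands.BuildPy,
#           'build_ext': commands.BuildExt,
#           'gather': commands.Gather,
#           'clean': commands.Clean,
#       },
#   )
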
class CommandError(Exception):
    """Simple exception class for GRPC custom commands."""


# TODO(atash): Remove this once PyPI has better Linux bdist support. See
# https://bitbucket.org/pypa/pypi/issues/120/binary-wheels-for-linux-are-not-supported
def _get_grpc_custom_bdist(decorated_basename, target_bdist_basename):
    """Returns a string path to a bdist file for Linux to install.

    If we can retrieve a pre-compiled bdist from online, uses it. Else, emits a
    warning and builds from source.
    """
    # TODO(atash): somehow the name that's returned from `wheel` is different
    # between different versions of 'wheel' (but from a compatibility standpoint,
    # the names are compatible); we should have some way of determining name
    # compatibility in the same way `wheel` does to avoid having to rename all of
    # the custom wheels that we build/upload to GCS.

    # Break import style to ensure that setup.py has had a chance to install the
    # relevant package.
    from six.moves.urllib import request
    decorated_path = decorated_basename + GRPC_CUSTOM_BDIST_EXT
    try:
        url = BINARIES_REPOSITORY + '/{target}'.format(target=decorated_path)
        bdist_data = request.urlopen(url).read()
    except IOError as error:
        raise CommandError('{}\n\nCould not find the bdist {}: {}'.format(
            traceback.format_exc(), decorated_path, error))
    # Our chosen local bdist path.
    bdist_path = target_bdist_basename + GRPC_CUSTOM_BDIST_EXT
    try:
        # The downloaded bdist is raw bytes, so write it in binary mode.
        with open(bdist_path, 'wb') as bdist_file:
            bdist_file.write(bdist_data)
    except IOError as error:
        raise CommandError('{}\n\nCould not write grpcio bdist: {}'.format(
            traceback.format_exc(), error))
    return bdist_path


class SphinxDocumentation(setuptools.Command):
    """Command to generate documentation via sphinx."""

    description = 'generate sphinx documentation'
    user_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        # We import here to ensure that setup.py has had a chance to install the
        # relevant package eggs first.
        import sphinx.cmd.build
        source_dir = os.path.join(GRPC_STEM, 'doc', 'python', 'sphinx')
        target_dir = os.path.join(GRPC_STEM, 'doc', 'build')
        exit_code = sphinx.cmd.build.build_main(
            ['-b', 'html', '-W', '--keep-going', source_dir, target_dir])
        if exit_code != 0:
            raise CommandError(
                "Documentation generation has warnings or errors")


class BuildProjectMetadata(setuptools.Command):
    """Command to generate project metadata in a module."""

    description = 'build grpcio project metadata files'
    user_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        with open(os.path.join(PYTHON_STEM, 'grpc/_grpcio_metadata.py'),
                  'w') as module_file:
            module_file.write('__version__ = """{}"""'.format(
                self.distribution.get_version()))


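# For reference, the grpc/_grpcio_metadata.py file written above contains a
# single assignment; the version comes from the distribution, so the value
# shown here is only an example:
#
#   __version__ = """1.0.0"""
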
class BuildPy(build_py.build_py):
    """Custom project build command."""

    def run(self):
        self.run_command('build_project_metadata')
        build_py.build_py.run(self)


def _poison_extensions(extensions, message):
    """Includes a file that will always fail to compile in all extensions."""
    poison_filename = os.path.join(PYTHON_STEM, 'poison.c')
    with open(poison_filename, 'w') as poison:
        poison.write('#error {}'.format(message))
    for extension in extensions:
        extension.sources = [poison_filename]


def check_and_update_cythonization(extensions):
    """Replace .pyx sources with their generated .c/.cpp counterparts.

    Returns True if generated files were found for every .pyx source (so
    cythonization does not need to run), and False otherwise.
    """
    for extension in extensions:
        generated_pyx_sources = []
        other_sources = []
        for source in extension.sources:
            base, file_ext = os.path.splitext(source)
            if file_ext == '.pyx':
                generated_pyx_source = next(
                    (base + gen_ext for gen_ext in ('.c', '.cpp')
                     if os.path.isfile(base + gen_ext)), None)
                if generated_pyx_source:
                    generated_pyx_sources.append(generated_pyx_source)
                else:
                    sys.stderr.write('Cython-generated files are missing...\n')
                    return False
            else:
                other_sources.append(source)
        extension.sources = generated_pyx_sources + other_sources
    sys.stderr.write('Found cython-generated files...\n')
    return True


def try_cythonize(extensions, linetracing=False, mandatory=True):
    """Attempt to cythonize the extensions.

    Args:
      extensions: A list of `distutils.extension.Extension`.
      linetracing: A bool indicating whether or not to enable linetracing.
      mandatory: Whether or not having Cython-generated files is mandatory. If
        it is, extensions will be poisoned when they can't be fully generated.
    """
    try:
        # Break import style to ensure we have access to Cython post-setup_requires
        import Cython.Build
    except ImportError:
        if mandatory:
            sys.stderr.write(
                "This package needs to generate C files with Cython but it cannot. "
                "Poisoning extension sources to disallow extension commands...\n")
            _poison_extensions(
                extensions,
                "Extensions have been poisoned due to missing Cython-generated code."
            )
        return extensions
    cython_compiler_directives = {}
    if linetracing:
        additional_define_macros = [('CYTHON_TRACE_NOGIL', '1')]
        cython_compiler_directives['linetrace'] = True
        # Propagate the tracing macro to the C compilation step so that the
        # linetrace directive actually has an effect.
        for extension in extensions:
            extension.define_macros.extend(additional_define_macros)
    return Cython.Build.cythonize(
        extensions,
        include_path=[
            include_dir for extension in extensions
            for include_dir in extension.include_dirs
        ] + [CYTHON_STEM],
        compiler_directives=cython_compiler_directives)


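# A minimal sketch of how `try_cythonize` could be driven directly, kept as a
# comment so it does not run at import time. The extension name and .pyx path
# are assumptions for illustration, not something this module defines:
#
#   from distutils.extension import Extension
#
#   cygrpc_extension = Extension(
#       name='grpc._cython.cygrpc',
#       sources=[os.path.join(CYTHON_STEM, 'cygrpc.pyx')],
#       include_dirs=[CYTHON_STEM],
#   )
#   extensions = try_cythonize([cygrpc_extension], mandatory=False)
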
class BuildExt(build_ext.build_ext):
    """Custom build_ext command to enable compiler-specific flags."""

    C_OPTIONS = {
        'unix': ('-pthread',),
        'msvc': (),
    }
    LINK_OPTIONS = {}

    def build_extensions(self):

        def compiler_ok_with_extra_std():
            """Test if the default compiler is okay with specifying a C++
            version when invoked in C mode. GCC is okay with this, while
            clang is not.
            """
            if platform.system() == 'Windows':
                # MSVC is used on Windows; there is no `cc` binary to probe.
                return False
            # TODO(lidiz) Remove the generated a.out for success tests.
            cc_test = subprocess.Popen(['cc', '-x', 'c', '-std=c++11', '-'],
                                       stdin=subprocess.PIPE,
                                       stdout=subprocess.PIPE,
                                       stderr=subprocess.PIPE)
            _, cc_err = cc_test.communicate(input=b'int main(){return 0;}')
            return 'invalid argument' not in str(cc_err)

        # This special handling is needed because of a difference in compiler
        #   behavior between gcc and clang: clang rejects the -std=c++11 flag
        #   when compiling C sources, while gcc only warns. Since setuptools
        #   only supports compiling all sources as either C or C++, mixing C
        #   and C++ would otherwise crash the build.
        #   *By default*, macOS and FreeBSD use clang and Linux uses gcc.
        #
        #   If we are not using a permissive compiler that's OK with being
        #   passed wrong std flags, swap out the compile function by adding a
        #   filter for it.
        if not compiler_ok_with_extra_std():
            old_compile = self.compiler._compile

            def new_compile(obj, src, ext, cc_args, extra_postargs, pp_opts):
                if src[-2:] == '.c':
                    extra_postargs = [
                        arg for arg in extra_postargs if '-std=c++' not in arg
                    ]
                return old_compile(obj, src, ext, cc_args, extra_postargs,
                                   pp_opts)

            self.compiler._compile = new_compile

        compiler = self.compiler.compiler_type
        if compiler in BuildExt.C_OPTIONS:
            for extension in self.extensions:
                extension.extra_compile_args += list(
                    BuildExt.C_OPTIONS[compiler])
        if compiler in BuildExt.LINK_OPTIONS:
            for extension in self.extensions:
                extension.extra_link_args += list(
                    BuildExt.LINK_OPTIONS[compiler])
        if not check_and_update_cythonization(self.extensions):
            self.extensions = try_cythonize(self.extensions)
        try:
            build_ext.build_ext.build_extensions(self)
        except Exception as error:
            formatted_exception = traceback.format_exc()
            support.diagnose_build_ext_error(self, error, formatted_exception)
            raise CommandError(
                "Failed `build_ext` step:\n{}".format(formatted_exception))


class Gather(setuptools.Command):
    """Command to gather project dependencies."""

    description = 'gather dependencies for grpcio'
    user_options = [
        ('test', 't', 'flag indicating to gather test dependencies'),
        ('install', 'i', 'flag indicating to gather install dependencies')
    ]

    def initialize_options(self):
        self.test = False
        self.install = False

    def finalize_options(self):
        # distutils requires this override.
        pass

    def run(self):
        if self.install and self.distribution.install_requires:
            self.distribution.fetch_build_eggs(
                self.distribution.install_requires)
        if self.test and self.distribution.tests_require:
            self.distribution.fetch_build_eggs(self.distribution.tests_require)


class Clean(setuptools.Command):
    """Command to clean build artifacts."""

    description = 'Clean build artifacts.'
    user_options = [
        ('all', 'a', 'a phony flag to allow our script to continue'),
    ]

    _FILE_PATTERNS = (
        'python_build',
        'src/python/grpcio/__pycache__/',
        'src/python/grpcio/grpc/_cython/cygrpc.cpp',
        'src/python/grpcio/grpc/_cython/*.so',
        'src/python/grpcio/grpcio.egg-info/',
    )
    _CURRENT_DIRECTORY = os.path.normpath(
        os.path.join(os.path.dirname(os.path.realpath(__file__)), "../../.."))

    def initialize_options(self):
        self.all = False

    def finalize_options(self):
        pass

    def run(self):
        for path_spec in self._FILE_PATTERNS:
            this_glob = os.path.normpath(
                os.path.join(Clean._CURRENT_DIRECTORY, path_spec))
            abs_paths = glob.glob(this_glob)
            for path in abs_paths:
                if not str(path).startswith(Clean._CURRENT_DIRECTORY):
                    raise ValueError(
                        "Cowardly refusing to delete {}.".format(path))
                print("Removing {}".format(os.path.relpath(path)))
                if os.path.isfile(path):
                    os.remove(str(path))
                else:
                    shutil.rmtree(str(path))

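# A sketch of how these commands might be invoked once they are registered in
# a setup.py `cmdclass` mapping (the command names here are assumptions about
# that mapping, except for 'build_project_metadata', which is the name that
# BuildPy runs above):
#
#   python setup.py build_project_metadata build_py
#   python setup.py build_ext
#   python setup.py gather --test
#   python setup.py clean --all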