# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Provides distutils command classes for the GRPC Python setup process."""

from __future__ import print_function

import distutils
import glob
import os
import os.path
import platform
import re
import shutil
import subprocess
import sys
import traceback

import setuptools
from setuptools.command import build_ext
from setuptools.command import build_py
from setuptools.command import easy_install
from setuptools.command import install
from setuptools.command import test

import support

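# Filesystem locations used by the commands below, all derived from this
# file's location in the repository (src/python/grpcio/):
#   PYTHON_STEM    - the directory containing this file
#   GRPC_STEM      - the repository root
#   PROTO_STEM     - <repo>/src/proto
#   PROTO_GEN_STEM - <repo>/src/python/gens
#   CYTHON_STEM    - <repo>/src/python/grpcio/grpc/_cython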
PYTHON_STEM = os.path.dirname(os.path.abspath(__file__))
GRPC_STEM = os.path.abspath(os.path.join(PYTHON_STEM, '..', '..', '..'))
PROTO_STEM = os.path.join(GRPC_STEM, 'src', 'proto')
PROTO_GEN_STEM = os.path.join(GRPC_STEM, 'src', 'python', 'gens')
CYTHON_STEM = os.path.join(PYTHON_STEM, 'grpc', '_cython')


class CommandError(Exception):
    """Simple exception class for GRPC custom commands."""


# TODO(atash): Remove this once PyPI has better Linux bdist support. See
# https://bitbucket.org/pypa/pypi/issues/120/binary-wheels-for-linux-are-not-supported
def _get_grpc_custom_bdist(decorated_basename, target_bdist_basename):
    """Returns a string path to a bdist file for Linux to install.

    If we can retrieve a pre-compiled bdist from online, use it. Otherwise,
    emit a warning and build from source.
    """
    # TODO(atash): somehow the name that's returned from `wheel` is different
    # between different versions of 'wheel' (but from a compatibility standpoint,
    # the names are compatible); we should have some way of determining name
    # compatibility in the same way `wheel` does to avoid having to rename all of
    # the custom wheels that we build/upload to GCS.

    # Break import style to ensure that setup.py has had a chance to install the
    # relevant package.
    from six.moves.urllib import request
    decorated_path = decorated_basename + GRPC_CUSTOM_BDIST_EXT
    try:
        url = BINARIES_REPOSITORY + '/{target}'.format(target=decorated_path)
        bdist_data = request.urlopen(url).read()
    except IOError as error:
        raise CommandError('{}\n\nCould not find the bdist {}: {}'.format(
            traceback.format_exc(), decorated_path, error))
    # Our chosen local bdist path.
    bdist_path = target_bdist_basename + GRPC_CUSTOM_BDIST_EXT
    try:
        # The downloaded bdist is raw bytes, so write it in binary mode.
        with open(bdist_path, 'wb') as bdist_file:
            bdist_file.write(bdist_data)
    except IOError as error:
        raise CommandError('{}\n\nCould not write grpcio bdist: {}'.format(
            traceback.format_exc(), error))
    return bdist_path


class SphinxDocumentation(setuptools.Command):
    """Command to generate documentation via sphinx."""

    description = 'generate sphinx documentation'
    user_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        # We import here to ensure that setup.py has had a chance to install the
        # relevant package eggs first.
        import sphinx.cmd.build
        source_dir = os.path.join(GRPC_STEM, 'doc', 'python', 'sphinx')
        target_dir = os.path.join(GRPC_STEM, 'doc', 'build')
        exit_code = sphinx.cmd.build.build_main(
            ['-b', 'html', '-W', '--keep-going', source_dir, target_dir])
        if exit_code != 0:
            raise CommandError(
                "Documentation generation has warnings or errors")


class BuildProjectMetadata(setuptools.Command):
    """Command to generate project metadata in a module."""

    description = 'build grpcio project metadata files'
    user_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        # Record the distribution version as __version__ in
        # grpc/_grpcio_metadata.py.
        with open(os.path.join(PYTHON_STEM, 'grpc', '_grpcio_metadata.py'),
                  'w') as module_file:
            module_file.write('__version__ = """{}"""'.format(
                self.distribution.get_version()))


class BuildPy(build_py.build_py):
    """Custom project build command."""

    def run(self):
        self.run_command('build_project_metadata')
        build_py.build_py.run(self)

def _poison_extensions(extensions, message):
    """Includes a file that will always fail to compile in all extensions."""
    poison_filename = os.path.join(PYTHON_STEM, 'poison.c')
    with open(poison_filename, 'w') as poison:
        poison.write('#error {}'.format(message))
    for extension in extensions:
        extension.sources = [poison_filename]

def check_and_update_cythonization(extensions):
    """Replace .pyx sources with their generated counterparts and return
    whether the generated files were found (i.e., whether cythonization can
    be skipped)."""
    for extension in extensions:
        generated_pyx_sources = []
        other_sources = []
        for source in extension.sources:
            base, file_ext = os.path.splitext(source)
            if file_ext == '.pyx':
                generated_pyx_source = next((base + gen_ext for gen_ext in (
                    '.c',
                    '.cpp',
                ) if os.path.isfile(base + gen_ext)), None)
                if generated_pyx_source:
                    generated_pyx_sources.append(generated_pyx_source)
                else:
                    sys.stderr.write('Cython-generated files are missing...\n')
                    return False
            else:
                other_sources.append(source)
        extension.sources = generated_pyx_sources + other_sources
    sys.stderr.write('Found cython-generated files...\n')
    return True


def try_cythonize(extensions, linetracing=False, mandatory=True):
    """Attempt to cythonize the extensions.

    Args:
      extensions: A list of `distutils.extension.Extension`.
      linetracing: A bool indicating whether or not to enable linetracing.
      mandatory: Whether or not having Cython-generated files is mandatory. If
        it is, extensions will be poisoned when they can't be fully generated.
    """
    try:
        # Break import style to ensure we have access to Cython post-setup_requires
        import Cython.Build
    except ImportError:
        if mandatory:
            sys.stderr.write(
                "This package needs to generate C files with Cython but it cannot. "
                "Poisoning extension sources to disallow extension commands...\n")
            _poison_extensions(
                extensions,
                "Extensions have been poisoned due to missing Cython-generated code."
            )
        return extensions
    cython_compiler_directives = {}
    if linetracing:
        additional_define_macros = [('CYTHON_TRACE_NOGIL', '1')]
        cython_compiler_directives['linetrace'] = True
        # Line tracing also requires the macro to be defined at compile time.
        for extension in extensions:
            extension.define_macros.extend(additional_define_macros)
    return Cython.Build.cythonize(
        extensions,
        include_path=[
            include_dir for extension in extensions
            for include_dir in extension.include_dirs
        ] + [CYTHON_STEM],
        compiler_directives=cython_compiler_directives)


class BuildExt(build_ext.build_ext):
    """Custom build_ext command to enable compiler-specific flags."""

    C_OPTIONS = {
        'unix': ('-pthread',),
        'msvc': (),
    }
    LINK_OPTIONS = {}

    def build_extensions(self):

        def compiler_ok_with_extra_std():
            """Test if the default compiler is okay with specifying a C++
            version when invoked in C mode. GCC is okay with this, while
            clang is not.
            """
            try:
                # TODO(lidiz) Remove the generated a.out for success tests.
                cc_test = subprocess.Popen(['cc', '-x', 'c', '-std=c++11', '-'],
                                           stdin=subprocess.PIPE,
                                           stdout=subprocess.PIPE,
                                           stderr=subprocess.PIPE)
                _, cc_err = cc_test.communicate(input=b'int main(){return 0;}')
                return 'invalid argument' not in str(cc_err)
            except Exception:
                sys.stderr.write('Non-fatal exception:' +
                                 traceback.format_exc() + '\n')
                return False

        # This special conditioning is here due to a difference in compiler
        #   behavior between gcc and clang. clang does not accept a -std=c++11
        #   flag when compiling C, while gcc does. Since Python's setuptools
        #   only supports compiling all sources as C or all as C++, mixing C
        #   and C++ sources would otherwise fail. *By default*, macOS and
        #   FreeBSD use clang while Linux uses gcc.
        #
        #   If we are not using a permissive compiler that's OK with being
        #   passed wrong std flags, swap out the compile function by adding a
        #   filter for it.
        if not compiler_ok_with_extra_std():
            old_compile = self.compiler._compile

            def new_compile(obj, src, ext, cc_args, extra_postargs, pp_opts):
                if src.endswith('.c'):
                    extra_postargs = [
                        arg for arg in extra_postargs if '-std=c++' not in arg
                    ]
                return old_compile(obj, src, ext, cc_args, extra_postargs,
                                   pp_opts)

            self.compiler._compile = new_compile

        compiler = self.compiler.compiler_type
        if compiler in BuildExt.C_OPTIONS:
            for extension in self.extensions:
                extension.extra_compile_args += list(
                    BuildExt.C_OPTIONS[compiler])
        if compiler in BuildExt.LINK_OPTIONS:
            for extension in self.extensions:
                extension.extra_link_args += list(
                    BuildExt.LINK_OPTIONS[compiler])
        if not check_and_update_cythonization(self.extensions):
            self.extensions = try_cythonize(self.extensions)
        try:
            build_ext.build_ext.build_extensions(self)
        except Exception as error:
            formatted_exception = traceback.format_exc()
            support.diagnose_build_ext_error(self, error, formatted_exception)
            raise CommandError(
                "Failed `build_ext` step:\n{}".format(formatted_exception))


class Gather(setuptools.Command):
    """Command to gather project dependencies."""

    description = 'gather dependencies for grpcio'
    user_options = [
        ('test', 't', 'flag indicating to gather test dependencies'),
        ('install', 'i', 'flag indicating to gather install dependencies')
    ]

    def initialize_options(self):
        self.test = False
        self.install = False

    def finalize_options(self):
        # distutils requires this override.
        pass

    def run(self):
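        # fetch_build_eggs() resolves and downloads the given requirements
        # into the build environment (the same mechanism setuptools uses for
        # setup_requires) rather than installing them system-wide.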
        if self.install and self.distribution.install_requires:
            self.distribution.fetch_build_eggs(
                self.distribution.install_requires)
        if self.test and self.distribution.tests_require:
            self.distribution.fetch_build_eggs(self.distribution.tests_require)


class Clean(setuptools.Command):
    """Command to clean build artifacts."""

    description = 'Clean build artifacts.'
    user_options = [
        ('all', 'a', 'a phony flag to allow our script to continue'),
    ]

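    # Build artifacts to remove, given as glob patterns relative to the
    # repository root (_CURRENT_DIRECTORY below).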
    _FILE_PATTERNS = (
        'python_build',
        'src/python/grpcio/__pycache__/',
        'src/python/grpcio/grpc/_cython/cygrpc.cpp',
        'src/python/grpcio/grpc/_cython/*.so',
        'src/python/grpcio/grpcio.egg-info/',
    )
    _CURRENT_DIRECTORY = os.path.normpath(
        os.path.join(os.path.dirname(os.path.realpath(__file__)), "../../.."))

    def initialize_options(self):
        self.all = False

    def finalize_options(self):
        pass

    def run(self):
        for path_spec in self._FILE_PATTERNS:
            this_glob = os.path.normpath(
                os.path.join(Clean._CURRENT_DIRECTORY, path_spec))
            abs_paths = glob.glob(this_glob)
            for path in abs_paths:
                # Refuse to delete anything that does not resolve to a path
                # inside the repository tree.
                if not str(path).startswith(Clean._CURRENT_DIRECTORY):
                    raise ValueError(
                        "Cowardly refusing to delete {}.".format(path))
                print("Removing {}".format(os.path.relpath(path)))
                if os.path.isfile(path):
                    os.remove(str(path))
                else:
                    shutil.rmtree(str(path))