# Copyright 2020 The Pigweed Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
14"""Preconfigured checks for Python code.
15
16These checks assume that they are running in a preconfigured Python environment.
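
These checks are typically registered in a presubmit program and can be run
individually, for example:

  pw presubmit --step gn_python_lint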
17"""
18
import difflib
import json
import logging
from pathlib import Path
import platform
import shutil
import sys
from tempfile import TemporaryDirectory

from pw_env_setup import python_packages

from pw_presubmit.presubmit import (
    call,
    Check,
    filter_paths,
)
from pw_presubmit.presubmit_context import (
    PresubmitContext,
    PresubmitFailure,
)
from pw_presubmit import build
from pw_presubmit.tools import log_run, colorize_diff_line

_LOG = logging.getLogger(__name__)

_PYTHON_EXTENSIONS = ('.py', '.gn', '.gni')

_PYTHON_PACKAGE_EXTENSIONS = (
    'setup.cfg',
    'constraint.list',
    'requirements.txt',
)

_PYTHON_IS_3_9_OR_HIGHER = sys.version_info >= (3, 9)


@filter_paths(endswith=_PYTHON_EXTENSIONS)
def gn_python_check(ctx: PresubmitContext):
    """Run Python unit tests and lint through the GN build."""
    build.gn_gen(ctx)
    build.ninja(ctx, 'python.tests', 'python.lint')


def _transform_lcov_file_paths(lcov_file: Path, repo_root: Path) -> str:
    """Modify file paths in an lcov file to be relative to the repo root.

    See `man geninfo` for info on the lcov format."""

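    # For example, a generated-package path such as (hypothetical):
    #
    #   SF:python/gen/pw_foo/py/py.generated_python_package/pw_foo/bar.py
    #
    # is rewritten to point at the corresponding in-tree source file:
    #
    #   SF:pw_foo/py/pw_foo/bar.py
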
    lcov_input = lcov_file.read_text()
    lcov_output = ''

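    # Path.is_relative_to() (used below) was added in Python 3.9; on older
    # interpreters, return the lcov data unmodified.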
    if not _PYTHON_IS_3_9_OR_HIGHER:
        return lcov_input

    for line in lcov_input.splitlines():
        if not line.startswith('SF:'):
            lcov_output += line + '\n'
            continue

        # Get the file path after SF:
        file_string = line[3:].rstrip()
        source_file_path = Path(file_string)

        # Attempt to map a generated Python package source file to the root
        # source tree.
        # pylint: disable=no-member
        if not source_file_path.is_relative_to(
            repo_root  # type: ignore[attr-defined]
        ):
            # pylint: enable=no-member
            source_file_path = repo_root / str(source_file_path).replace(
                'python/gen/', ''
            ).replace('py.generated_python_package/', '')

        # If mapping fails, don't modify this line.
        # pylint: disable=no-member
        if not source_file_path.is_relative_to(
            repo_root  # type: ignore[attr-defined]
        ):
            # pylint: enable=no-member
            lcov_output += line + '\n'
            continue

        source_file_path = source_file_path.relative_to(repo_root)
        lcov_output += f'SF:{source_file_path}\n'

    return lcov_output


@filter_paths(endswith=_PYTHON_EXTENSIONS)
def gn_python_test_coverage(ctx: PresubmitContext):
    """Run Python tests with coverage and create reports."""
    build.gn_gen(ctx, pw_build_PYTHON_TEST_COVERAGE=True)
    build.ninja(ctx, 'python.tests')

    # Find coverage data files
    coverage_data_files = list(ctx.output_dir.glob('**/*.coverage'))
    if not coverage_data_files:
        return

    # Merge coverage data files to out/.coverage
    call(
        'coverage',
        'combine',
        # Leave existing coverage files in place; by default they are deleted.
        '--keep',
        *coverage_data_files,
        cwd=ctx.output_dir,
    )
    combined_data_file = ctx.output_dir / '.coverage'
    _LOG.info('Coverage data saved to: %s', combined_data_file.resolve())

    # Always ignore generated protobuf Python (*_pb2.py) and setup.py files.
    coverage_omit_patterns = '--omit=*_pb2.py,*/setup.py'

    # Output a coverage percentage summary for changed files to the terminal.
    changed_python_files = [
        str(p) for p in ctx.paths if str(p).endswith('.py')
    ]
    report_args = [
        'coverage',
        'report',
        '--ignore-errors',
        coverage_omit_patterns,
    ]
    report_args += changed_python_files
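    # check=False: the summary is informational; a non-zero exit (e.g. when
    # no coverage data exists for the listed files) should not fail the step.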
    log_run(report_args, check=False, cwd=ctx.output_dir)

    # Generate an lcov report
    call('coverage', 'lcov', coverage_omit_patterns, cwd=ctx.output_dir)
    lcov_data_file = ctx.output_dir / 'coverage.lcov'
    lcov_data_file.write_text(
        _transform_lcov_file_paths(lcov_data_file, repo_root=ctx.root)
    )
    _LOG.info('Coverage lcov saved to: %s', lcov_data_file.resolve())

    # Generate an html report
    call('coverage', 'html', coverage_omit_patterns, cwd=ctx.output_dir)
    html_report = ctx.output_dir / 'htmlcov' / 'index.html'
    _LOG.info('Coverage html report saved to: %s', html_report.resolve())


@filter_paths(endswith=_PYTHON_PACKAGE_EXTENSIONS)
def vendor_python_wheels(ctx: PresubmitContext) -> None:
    """Download Python packages locally for the current platform."""
    build.gn_gen(ctx)
    build.ninja(ctx, 'pip_vendor_wheels')

    download_log = (
        ctx.output_dir
        / 'python/gen/pw_env_setup/pigweed_build_venv.vendor_wheels'
        / 'pip_download_log.txt'
    )
    _LOG.info('Python package download log: %s', download_log)

    wheel_output = (
        ctx.output_dir
        / 'python/gen/pw_env_setup'
        / 'pigweed_build_venv.vendor_wheels/wheels/'
    )
    wheel_destination = ctx.output_dir / 'python_wheels'
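    # Remove any stale wheels from a previous run before copying.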
    shutil.rmtree(wheel_destination, ignore_errors=True)
    shutil.copytree(wheel_output, wheel_destination, dirs_exist_ok=True)

    _LOG.info('Python packages downloaded to: %s', wheel_destination)


def _generate_constraint_with_hashes(
    ctx: PresubmitContext, input_file: Path, output_file: Path
) -> None:
    """Pin packages with hashes using pip-compile, then clean the output."""
    assert input_file.is_file()

    call(
        "pip-compile",
        input_file,
        "--generate-hashes",
        "--reuse-hashes",
        "--resolver=backtracking",
        "--strip-extras",
        # Force pinning pip and setuptools
        "--allow-unsafe",
        "-o",
        output_file,
    )
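
    # pip-compile writes one pinned entry per package into output_file, e.g.
    # (hypothetical entry, hash value elided):
    #
    #   six==1.16.0 \
    #       --hash=sha256:...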

    # Remove absolute paths from comments
    output_text = output_file.read_text()
    output_text = output_text.replace(str(ctx.output_dir), '')
    output_text = output_text.replace(str(ctx.root), '')
    output_text = output_text.replace(str(output_file.parent), '')

    final_output_text = ''
    for line in output_text.splitlines(keepends=True):
        # Remove --find-links lines
        if line.startswith('--find-links'):
            continue
        # Remove blank lines
        if line == '\n':
            continue
        final_output_text += line

    output_file.write_text(final_output_text)


def _update_upstream_python_constraints(
    ctx: PresubmitContext,
    update_files: bool = False,
) -> None:
    """Regenerate platform-specific Python constraint files with hashes."""
    with TemporaryDirectory() as tmpdirname:
        out_dir = Path(tmpdirname)
        build.gn_gen(
            ctx,
            pw_build_PIP_REQUIREMENTS=[],
            # Use the constraint file without hashes as the input. This is where
            # new packages are added by developers.
            pw_build_PIP_CONSTRAINTS=[
                '//pw_env_setup/py/pw_env_setup/virtualenv_setup/'
                'constraint.list',
            ],
            # This should always be false when regenerating constraints.
            pw_build_PYTHON_PIP_INSTALL_REQUIRE_HASHES=False,
        )
        build.ninja(ctx, 'pip_constraint_update')

        # Either: darwin, linux or windows
        platform_name = platform.system().lower()

        constraint_hashes_filename = f'constraint_hashes_{platform_name}.list'
        constraint_hashes_original = (
            ctx.root
            / 'pw_env_setup/py/pw_env_setup/virtualenv_setup'
            / constraint_hashes_filename
        )
        constraint_hashes_tmp_out = out_dir / constraint_hashes_filename
        _generate_constraint_with_hashes(
            ctx,
            input_file=(
                ctx.output_dir
                / 'python/gen/pw_env_setup/pigweed_build_venv'
                / 'compiled_requirements.txt'
            ),
            output_file=constraint_hashes_tmp_out,
        )

        build.gn_gen(
            ctx,
            # This should always be false when regenerating constraints.
            pw_build_PYTHON_PIP_INSTALL_REQUIRE_HASHES=False,
        )
        build.ninja(ctx, 'pip_constraint_update')

        upstream_requirements_lock_filename = (
            f'upstream_requirements_{platform_name}_lock.txt'
        )
        upstream_requirements_lock_original = (
            ctx.root
            / 'pw_env_setup/py/pw_env_setup/virtualenv_setup'
            / upstream_requirements_lock_filename
        )
        upstream_requirements_lock_tmp_out = (
            out_dir / upstream_requirements_lock_filename
        )
        _generate_constraint_with_hashes(
            ctx,
            input_file=(
                ctx.output_dir
                / 'python/gen/pw_env_setup/pigweed_build_venv'
                / 'compiled_requirements.txt'
            ),
            output_file=upstream_requirements_lock_tmp_out,
        )

        if update_files:
            constraint_hashes_original.write_text(
                constraint_hashes_tmp_out.read_text()
            )
            _LOG.info('Updated: %s', constraint_hashes_original)
            upstream_requirements_lock_original.write_text(
                upstream_requirements_lock_tmp_out.read_text()
            )
            _LOG.info('Updated: %s', upstream_requirements_lock_original)
            return

        # Make a diff of required changes
        constraint_hashes_diff = list(
            difflib.unified_diff(
                constraint_hashes_original.read_text(
                    'utf-8', errors='replace'
                ).splitlines(),
                constraint_hashes_tmp_out.read_text(
                    'utf-8', errors='replace'
                ).splitlines(),
                fromfile=str(constraint_hashes_original) + ' (original)',
                tofile=str(constraint_hashes_original) + ' (updated)',
                lineterm='',
                n=1,
            )
        )
        upstream_requirements_lock_diff = list(
            difflib.unified_diff(
                upstream_requirements_lock_original.read_text(
                    'utf-8', errors='replace'
                ).splitlines(),
                upstream_requirements_lock_tmp_out.read_text(
                    'utf-8', errors='replace'
                ).splitlines(),
                fromfile=str(upstream_requirements_lock_original)
                + ' (original)',
                tofile=str(upstream_requirements_lock_original) + ' (updated)',
                lineterm='',
                n=1,
            )
        )
        if constraint_hashes_diff:
            for line in constraint_hashes_diff:
                print(colorize_diff_line(line))
        if upstream_requirements_lock_diff:
            for line in upstream_requirements_lock_diff:
                print(colorize_diff_line(line))
        if constraint_hashes_diff or upstream_requirements_lock_diff:
            raise PresubmitFailure(
                'Please run:\n'
                '\n'
                '  pw presubmit --step update_upstream_python_constraints'
            )


@filter_paths(endswith=_PYTHON_PACKAGE_EXTENSIONS)
def check_upstream_python_constraints(ctx: PresubmitContext) -> None:
    """Check that the checked-in Python constraint files are up to date."""
    _update_upstream_python_constraints(ctx, update_files=False)


@filter_paths(endswith=_PYTHON_PACKAGE_EXTENSIONS)
def update_upstream_python_constraints(ctx: PresubmitContext) -> None:
    """Regenerate the Python constraint files in the source tree."""
    _update_upstream_python_constraints(ctx, update_files=True)


@filter_paths(endswith=_PYTHON_EXTENSIONS + ('.pylintrc',))
def gn_python_lint(ctx: PresubmitContext) -> None:
    """Run Python lint through the GN build."""
    build.gn_gen(ctx)
    build.ninja(ctx, 'python.lint')


@Check
def check_python_versions(ctx: PresubmitContext):
    """Checks that the list of installed packages is as expected."""

    build.gn_gen(ctx)
    constraint_file: str | None = None
    requirement_file: str | None = None
    try:
        for arg in build.get_gn_args(ctx.output_dir):
            if arg['name'] == 'pw_build_PIP_CONSTRAINTS':
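                # GN labels look like '//path/to/file'; strip the leading
                # slashes so the value can be joined with ctx.root below.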
                constraint_file = json.loads(arg['current']['value'])[0].strip(
                    '/'
                )
            if arg['name'] == 'pw_build_PIP_REQUIREMENTS':
                requirement_file = json.loads(arg['current']['value'])[0].strip(
                    '/'
                )
    except json.JSONDecodeError:
        _LOG.warning('failed to parse GN args json')
        return

    if not constraint_file:
        _LOG.warning('could not find pw_build_PIP_CONSTRAINTS GN arg')
        return
    ignored_requirements_arg = None
    if requirement_file:
        ignored_requirements_arg = [(ctx.root / requirement_file)]

    if (
        python_packages.diff(
            expected=(ctx.root / constraint_file),
            ignore_requirements_file=ignored_requirements_arg,
        )
        != 0
    ):
        raise PresubmitFailure
401