#!/usr/bin/env python3

# Copyright 2019 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Verifies that all gRPC Python artifacts have been successfully published.

This script is intended to be run from a directory containing the artifacts
that have been uploaded and only the artifacts that have been uploaded. We use
PyPI's JSON API to verify that the proper filenames and checksums are present.

Note that PyPI may take several minutes to update its metadata. Don't have a
heart attack immediately.

This sanity check is a good first step, but ideally, we would automate the
entire release process.
"""

import argparse
import collections
import hashlib
import os
import sys

import requests

# Packages checked when none are given on the command line.
_DEFAULT_PACKAGES = [
    "grpcio",
    "grpcio-tools",
    "grpcio-status",
    "grpcio-health-checking",
    "grpcio-reflection",
    "grpcio-channelz",
    "grpcio-testing",
]

# Upper bound on each PyPI metadata request so a stalled connection cannot
# hang the script indefinitely.
_REQUEST_TIMEOUT_SECONDS = 30

# A single published file: its basename and its md5 hex digest.
Artifact = collections.namedtuple("Artifact", ("filename", "checksum"))


def _get_md5_checksum(filename):
    """Calculate the md5sum for a file.

    Args:
      filename: Path to the file to hash.

    Returns:
      The md5 hex digest of the file's contents. md5 is used here only
      because it is what PyPI's metadata exposes, not for security.
    """
    hash_md5 = hashlib.md5()
    with open(filename, 'rb') as f:
        # Read in fixed-size chunks so arbitrarily large wheels do not need
        # to fit in memory at once.
        for chunk in iter(lambda: f.read(4096), b""):
            hash_md5.update(chunk)
    return hash_md5.hexdigest()


def _get_local_artifacts():
    """Get a set of artifacts representing all files in the cwd.

    The script's contract is that the cwd contains *only* the uploaded
    artifacts; anything else here (including directories) is an error.
    """
    return set(
        Artifact(f, _get_md5_checksum(f)) for f in os.listdir(os.getcwd()))


def _get_remote_artifacts_for_package(package, version):
    """Get a set of artifacts based on PyPI's JSON metadata.

    Note that this data will not be updated immediately after upload. In my
    experience, it has taken a minute on average to be fresh.

    Args:
      package: The PyPI package name, e.g. "grpcio".
      version: The release version string, e.g. "1.24.0".

    Returns:
      A set of Artifact tuples published on PyPI for this package/version.

    Raises:
      requests.HTTPError: If the package/version does not exist on PyPI
        (or any other non-2xx response), rather than a confusing KeyError
        from a missing 'releases' entry later on.
    """
    artifacts = set()
    response = requests.get(
        "https://pypi.org/pypi/{}/{}/json".format(package, version),
        timeout=_REQUEST_TIMEOUT_SECONDS)
    response.raise_for_status()
    payload = response.json()
    for download_info in payload['releases'][version]:
        artifacts.add(
            Artifact(download_info['filename'], download_info['md5_digest']))
    return artifacts


def _get_remote_artifacts_for_packages(packages, version):
    """Union of the remote artifact sets for every package at this version."""
    artifacts = set()
    for package in packages:
        artifacts |= _get_remote_artifacts_for_package(package, version)
    return artifacts


def _verify_release(version, packages):
    """Compare the local artifacts to the packages uploaded to PyPI.

    Prints any discrepancies (files present only locally or only remotely)
    and exits with status 1 on mismatch.
    """
    local_artifacts = _get_local_artifacts()
    remote_artifacts = _get_remote_artifacts_for_packages(packages, version)
    if local_artifacts != remote_artifacts:
        local_but_not_remote = local_artifacts - remote_artifacts
        remote_but_not_local = remote_artifacts - local_artifacts
        if local_but_not_remote:
            print("The following artifacts exist locally but not remotely.")
            for artifact in local_but_not_remote:
                print(artifact)
        if remote_but_not_local:
            print("The following artifacts exist remotely but not locally.")
            for artifact in remote_but_not_local:
                print(artifact)
        sys.exit(1)
    print("Release verified successfully.")


if __name__ == "__main__":
    # NOTE: the help text must be passed as description=; the original code
    # passed it positionally, which argparse interprets as the program name.
    parser = argparse.ArgumentParser(
        description="Verify a release. Run this from a directory containing "
        "only the artifacts to be uploaded. Note that PyPI may take several "
        "minutes after the upload to reflect the proper metadata.")
    parser.add_argument("version")
    parser.add_argument("packages",
                        nargs='*',
                        type=str,
                        default=_DEFAULT_PACKAGES)
    args = parser.parse_args()
    _verify_release(args.version, args.packages)