# Copyright 2021 The Pigweed Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""Upload update bundles to a GCS bucket for signing.

This module does not implement any actual signing logic; that is left up to the
remote signing service. This module takes care of uploading bundles to GCS for
signing, waiting for the signed version to appear, and downloading the signed
bundle from the output bucket. It can be used either as an entry point by
invoking it as a runnable module and providing all the necessary arguments (run
the tool with --help for details), or as a library by instantiating
RemoteSignClient and calling its sign() method.

The expected API for the remote signing service consists of the following:

  - A pair of GCS buckets. One bucket to serve as a queue of update bundles
    to be signed, and the other bucket to serve as the output area where
    signed bundles are deposited.

  - Three artifacts should be placed into the input queue bucket:
      1. The update bundle to be signed
      2. A signing request file whose name ends with "signing_request.json"
      3. A public builder key named "<signing_request_name>.public_key.pem"

Builder keys are used to generate intermediate signatures for the signing
request. Specifically, a private builder key is used to generate an
intermediate signature for both the update bundle to be signed, and the signing
request file.
These signatures are then added to the GCS blob metadata for
their respective blobs. The corresponding public builder key is uploaded
alongside the signing request.

The signing service should be set up to trigger whenever a new signing request
file is added anywhere inside the input queue bucket. The signing request file
is a JSON file with the following fields:

  remote_signing_key_names: A list of strings, each of which should
      correspond to the name of a signing key known to the remote signing
      service.

  bundle_path: The path (relative to GCS input bucket root) to an update
      bundle to be signed by the remote signing service.

  bundle_public_key_path: The path (relative to GCS input bucket root) to a
      public builder key .pem file corresponding to the private builder key
      that was used to sign the update bundle and signing request file.

  output_bucket: Name of the output GCS bucket into which the remote signing
      service should place signed artifacts.

  output_path: The path (relative to the GCS output bucket root) at which the
      signed update bundle should be deposited by the remote signing service.

On the remote side, the signing service is expected to check the public builder
key against its list of allowed builder keys. Provided the key is found in the
allow list, the signing service should use it to verify the intermediate
signatures of both the update bundle to be signed and the signing request file.
If the key is not found in the allow list, or if the signatures on the update
bundle or signing request file do not match the public builder key, the signing
service should reject the signing request.

If the builder public key is found in the allow list and the intermediate
signatures are verified, the signing service should produce a signed version of
the update bundle, and place it in the GCS output bucket at the specified path.
73 74In order to authenticate to GCS, the "Application Default Credentials" will be 75used. This assumes remote_sign.py is running in an environment that provides 76such credentials. See the Google cloud platform documentation for details: 77 78 https://cloud.google.com/docs/authentication/production 79 80For development purposes, it is possible to provide an alternate method of 81authenticating to GCS. The alternate authentication method should be a Python 82module that is importable in the running Python environment. The module should 83define a 'get_credentials()' function that takes no arguments and returns an 84instance of google.auth.credentials.Credentials. 85""" 86import argparse 87import base64 88import importlib 89import json 90from pathlib import Path 91import time 92 93from cryptography.hazmat import backends 94from cryptography.hazmat.primitives import hashes, serialization 95from cryptography.hazmat.primitives.asymmetric import ed25519, padding, rsa 96from google.auth.credentials import Credentials # type: ignore 97from google.cloud import storage # type: ignore 98from google.cloud.storage.bucket import Bucket # type: ignore 99 100DEFAULT_TIMEOUT_S = 600 101 102 103def _parse_args(): 104 """Parse CLI aguments.""" 105 parser = argparse.ArgumentParser(description=__doc__) 106 parser.add_argument( 107 '--project', help='GCP project that owns storage buckets.' 
108 ) 109 parser.add_argument( 110 '--input-bucket', help='GCS bucket used as a signing queue' 111 ) 112 parser.add_argument( 113 '--output-bucket', help='GCS bucket to watch for signed bundles' 114 ) 115 parser.add_argument( 116 '--bundle', type=Path, help='Update bundle to upload for signing' 117 ) 118 parser.add_argument( 119 '--out', type=Path, help='Path to which to download signed bundle' 120 ) 121 parser.add_argument( 122 '--signing-key-name', 123 help='Name of signing key remote signing service should use', 124 ) 125 parser.add_argument( 126 '--builder-key', 127 type=Path, 128 help='Path to builder private key for intermediate signatures', 129 ) 130 parser.add_argument( 131 '--builder-public-key', type=Path, help='Path to builder public key' 132 ) 133 parser.add_argument( 134 '--bundle-blob-name', 135 default=None, 136 help='Path in the input bucket at which to upload bundle', 137 ) 138 parser.add_argument( 139 '--request-blob-name', 140 default=None, 141 help='Path in the input bucket at which to put signing request', 142 ) 143 parser.add_argument( 144 '--signed-bundle-blob-name', 145 default=None, 146 help='Path in the output bucket for the signed bundle', 147 ) 148 parser.add_argument( 149 '--dev-gcs-auth-module-override', 150 default=None, 151 help='Developer use only; custom auth module to use with GCS.', 152 ) 153 parser.add_argument( 154 '--timeout', 155 type=int, 156 default=DEFAULT_TIMEOUT_S, 157 help='Seconds to wait for signed bundle to appeaer before giving up.', 158 ) 159 160 return parser.parse_args() 161 162 163class BlobExistsError(Exception): 164 """Raised if the blob to be uploaded already exists in the input bucket.""" 165 166 167class RemoteSignClient: 168 """GCS client for use in remote signing.""" 169 170 def __init__(self, input_bucket: Bucket, output_bucket: Bucket): 171 # "Application Default Credentials" are used implicitly when None is 172 # passed to Client() as credentials. 
See the cloud docs for details: 173 # https://cloud.google.com/docs/authentication/production 174 self._input_bucket = input_bucket 175 self._output_bucket = output_bucket 176 177 @classmethod 178 def from_names( 179 cls, 180 project_name: str, 181 input_bucket_name: str, 182 output_bucket_name: str, 183 gcs_credentials: Credentials | None = None, 184 ): 185 storage_client = storage.Client( 186 project=project_name, credentials=gcs_credentials 187 ) 188 return cls( 189 input_bucket=storage_client.bucket(input_bucket_name), 190 output_bucket=storage_client.bucket(output_bucket_name), 191 ) 192 193 def sign( 194 self, 195 bundle: Path, 196 signing_key_name: str, 197 builder_key: Path, 198 builder_public_key: Path, 199 bundle_blob_name: str | None = None, 200 request_blob_name: str | None = None, 201 signed_bundle_blob_name: str | None = None, 202 request_overrides: dict | None = None, 203 timeout_s: int = DEFAULT_TIMEOUT_S, 204 ) -> bytes: 205 """Upload file to GCS and download signed counterpart when ready. 206 207 Args: 208 bundle: Path object for an UpdateBundle to upload for signing. 209 signing_key_name: Name of remote signing key to use for signing. 210 builder_key: Path to builder private key for intermediate signature. 211 builder_public_key: Path to corresponding builder public key. 212 bundle_blob_name: GCS path at which to upload bundle to sign. 213 request_blob_name: GCS path at which to upload request file. 214 signed_bundle_blob_name: GCS path in output bucket to request. 215 request_overrides: dict of signing request JSON keys and values to 216 add to the signing requests. If this dict contains any keys whose 217 values are already in the signing request, the existing values 218 will be overwritten by the ones passed in here. 219 timeout_s: Maximum seconds to wait for output before failing. 
220 """ 221 if bundle_blob_name is None: 222 bundle_blob_name = bundle.name 223 224 if request_blob_name is None: 225 request_blob_name = f't{time.time()}_signing_request.json' 226 227 if not request_blob_name.endswith('signing_request.json'): 228 raise ValueError( 229 f'Signing request blob name {request_blob_name}' 230 ' does not end with "signing_request.json".' 231 ) 232 233 request_name = request_blob_name[:-5] # strip the ".json" 234 builder_public_key_blob_name = f'{request_name}.publickey.pem' 235 236 if signed_bundle_blob_name is None: 237 signed_bundle_blob_name = f'{bundle.name}.signed' 238 239 signing_request = { 240 'remote_signing_key_names': [signing_key_name], 241 'bundle_path': bundle_blob_name, 242 'bundle_public_key_path': builder_public_key_blob_name, 243 'output_bucket': self._output_bucket.name, 244 'output_path': signed_bundle_blob_name, 245 } 246 247 if request_overrides is not None: 248 signing_request.update(request_overrides) 249 250 builder_public_key_blob = self._input_bucket.blob( 251 builder_public_key_blob_name 252 ) 253 bundle_blob = self._input_bucket.blob(bundle_blob_name) 254 request_blob = self._input_bucket.blob(request_blob_name) 255 256 for blob in (builder_public_key_blob, bundle_blob, request_blob): 257 if blob.exists(): 258 raise BlobExistsError( 259 f'A blob named "{blob}" already exists in the input bucket.' 260 ' A unique blob name is required for uploading.' 
261 ) 262 263 builder_public_key_blob.upload_from_filename(str(builder_public_key)) 264 265 bundle_blob.metadata = { 266 'signature': self._get_builder_signature( 267 bundle, builder_key 268 ).decode('ascii') 269 } 270 bundle_blob.upload_from_filename(str(bundle)) 271 272 encoded_json = bytes(json.dumps(signing_request), 'utf-8') 273 request_blob.metadata = { 274 'signature': self._get_builder_signature( 275 encoded_json, builder_key 276 ).decode('ascii') 277 } 278 279 # Despite its name, the upload_from_string() method can take either a 280 # str or bytes object; here we already pre-encoded the string in utf-8. 281 request_blob.upload_from_string(encoded_json) 282 283 return self._wait_for_blob(signed_bundle_blob_name, timeout_s=timeout_s) 284 285 def _wait_for_blob( 286 self, 287 blob_name, 288 interval: int = 1, 289 max_tries: int | None = None, 290 timeout_s: int = DEFAULT_TIMEOUT_S, 291 ) -> storage.Blob: 292 """Wait for a specific blob to appear in the output bucket. 293 294 Args: 295 blob_name: Name of the blob to wait for. 296 interval: Time (seconds) to wait between checks for blob's existence. 297 max_tries: Number of times to check for the blob before failing. 298 timeout_s: Maximum seconds to keep watching before failing. 299 """ 300 blob = self._output_bucket.blob(blob_name) 301 end_time = time.time() + timeout_s 302 tries = 0 303 while max_tries is None or tries < max_tries: 304 if time.time() > end_time: 305 raise FileNotFoundError( 306 'Timed out while waiting for signed blob.' 307 ) 308 if blob.exists(): 309 return blob 310 tries += 1 311 time.sleep(interval) 312 313 raise FileNotFoundError( 314 'Too many retries while waiting for signed blob.' 315 ) 316 317 @staticmethod 318 def _get_builder_signature(data: Path | bytes, key: Path) -> bytes: 319 """Generate a base64-encided builder signature for file. 
320 321 In order for the remote signing system to have some level of trust in 322 the artifacts it's signing, an intermediate signature is used to verify 323 that the artifacts came from an approved builder. 324 """ 325 if isinstance(data, Path): 326 data = data.read_bytes() 327 328 private_key = serialization.load_pem_private_key( 329 key.read_bytes(), None, backends.default_backend() 330 ) 331 332 if isinstance(private_key, ed25519.Ed25519PrivateKey): 333 signature = private_key.sign(data) 334 335 elif isinstance(private_key, rsa.RSAPrivateKey): 336 signature = private_key.sign( 337 data, # type: ignore 338 padding=padding.PSS( 339 mgf=padding.MGF1(hashes.SHA256()), 340 salt_length=padding.PSS.MAX_LENGTH, 341 ), 342 algorithm=hashes.SHA256(), 343 ) 344 345 else: 346 raise TypeError( 347 f'Key {private_key} has unsupported type' 348 f' ({type(private_key)}). Valid types are' 349 ' Ed25519PrivateKey and RSAPrivateKey.' 350 ) 351 352 return base64.b64encode(signature) 353 354 355def _credentials_from_module(module_name: str) -> Credentials: 356 """Return GCS Credential from the named auth module. 357 358 The module name should correspond to a module that's importable in the 359 running Python environment. It must define a get_credentials() function 360 that takes no args and returns a Credentials instance. 361 """ 362 auth_module = importlib.import_module(module_name) 363 return auth_module.get_credentials() # type: ignore 364 365 366def main( # pylint: disable=too-many-arguments 367 project: str, 368 input_bucket: str, 369 output_bucket: str, 370 bundle: Path, 371 out: Path, 372 signing_key_name: str, 373 builder_key: Path, 374 builder_public_key: Path, 375 bundle_blob_name: str, 376 request_blob_name: str, 377 signed_bundle_blob_name: str, 378 dev_gcs_auth_module_override: str, 379 timeout: int, 380) -> None: 381 """Send bundle for remote signing and write signed counterpart to disk. 
382 383 Args: 384 project: Project name for GCS project containing signing bucket pair. 385 input_bucket: Name of GCS bucket to deposit to-be-signed artifacts in. 386 output_bucket: Name of GCS bucket to watch for signed artifacts. 387 bundle: Update bundle to be signed. 388 out: Path to which to download signed version of bundle. 389 signing_key_name: Name of key the remote signing service should use. 390 bundle_blob_name: Path in input bucket to upload bundle to. 391 request_blob_name: Path in input bucket to upload signing request to. 392 signed_bundle_blob_name: Output bucket path for signed bundle. 393 dev_gcs_auth_module_override: For developer use only; optional module 394 to use to generate GCS client credentials. Must be importable in 395 the running Python environment, and must define a get_credentials() 396 function that takes no args and returns a Credentials instance. 397 timeout: Seconds to wait for signed bundle before giving up. 398 """ 399 credentials = None 400 if dev_gcs_auth_module_override is not None: 401 credentials = _credentials_from_module(dev_gcs_auth_module_override) 402 403 remote_sign_client = RemoteSignClient.from_names( 404 project_name=project, 405 input_bucket_name=input_bucket, 406 output_bucket_name=output_bucket, 407 gcs_credentials=credentials, 408 ) 409 410 signed_bundle = remote_sign_client.sign( 411 bundle, 412 signing_key_name, 413 builder_key, 414 builder_public_key, 415 bundle_blob_name, 416 request_blob_name, 417 signed_bundle_blob_name, 418 timeout_s=timeout, 419 ) 420 421 out.write_bytes(signed_bundle.download_as_bytes()) 422 423 424if __name__ == '__main__': 425 main(**vars(_parse_args())) 426