/* ====================================================================
 * Copyright (c) 2001-2011 The OpenSSL Project. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 *
 * 3. All advertising materials mentioning features or use of this
 *    software must display the following acknowledgment:
 *    "This product includes software developed by the OpenSSL Project
 *    for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
 *
 * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
 *    endorse or promote products derived from this software without
 *    prior written permission. For written permission, please contact
 *    openssl-core@openssl.org.
 *
 * 5. Products derived from this software may not be called "OpenSSL"
 *    nor may "OpenSSL" appear in their names without prior written
 *    permission of the OpenSSL Project.
 *
 * 6. Redistributions of any form whatsoever must retain the following
 *    acknowledgment:
 *    "This product includes software developed by the OpenSSL Project
 *    for use in the OpenSSL Toolkit (http://www.openssl.org/)"
 *
 * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
 * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR
 * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
 * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
 * OF THE POSSIBILITY OF SUCH DAMAGE.
 * ==================================================================== */

#include <assert.h>
#include <limits.h>
#include <string.h>

#include <openssl/aead.h>
#include <openssl/aes.h>
#include <openssl/cipher.h>
#include <openssl/err.h>
#include <openssl/mem.h>
#include <openssl/nid.h>
#include <openssl/rand.h>

#include "internal.h"
#include "../../internal.h"
#include "../aes/internal.h"
#include "../modes/internal.h"
#include "../service_indicator/internal.h"
#include "../delocate.h"


OPENSSL_MSVC_PRAGMA(warning(push))
OPENSSL_MSVC_PRAGMA(warning(disable: 4702))  // Unreachable code.

#define AES_GCM_NONCE_LENGTH 12

#if defined(BSAES)
static void vpaes_ctr32_encrypt_blocks_with_bsaes(const uint8_t *in,
                                                  uint8_t *out, size_t blocks,
                                                  const AES_KEY *key,
                                                  const uint8_t ivec[16]) {
  // |bsaes_ctr32_encrypt_blocks| is faster than |vpaes_ctr32_encrypt_blocks|,
  // but it takes at least one full 8-block batch to amortize the conversion.
  if (blocks < 8) {
    vpaes_ctr32_encrypt_blocks(in, out, blocks, key, ivec);
    return;
  }

  size_t bsaes_blocks = blocks;
  if (bsaes_blocks % 8 < 6) {
    // |bsaes_ctr32_encrypt_blocks| internally works in 8-block batches. If the
    // final batch is too small (under six blocks), it is faster to loop over
    // |vpaes_encrypt|. Round |bsaes_blocks| down to a multiple of 8.
    bsaes_blocks -= bsaes_blocks % 8;
  }

  AES_KEY bsaes;
  vpaes_encrypt_key_to_bsaes(&bsaes, key);
  bsaes_ctr32_encrypt_blocks(in, out, bsaes_blocks, &bsaes, ivec);
  OPENSSL_cleanse(&bsaes, sizeof(bsaes));

  in += 16 * bsaes_blocks;
  out += 16 * bsaes_blocks;
  blocks -= bsaes_blocks;

  uint8_t new_ivec[16];
  memcpy(new_ivec, ivec, 12);
  uint32_t ctr = CRYPTO_load_u32_be(ivec + 12) + bsaes_blocks;
  CRYPTO_store_u32_be(new_ivec + 12, ctr);

  // Finish any remaining blocks with |vpaes_ctr32_encrypt_blocks|.
  vpaes_ctr32_encrypt_blocks(in, out, blocks, key, new_ivec);
}
#endif  // BSAES
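
// Illustrative sketch (comments only, not part of the build): the counter
// advance above works on the final 32 bits of the IV, big-endian, which is
// the CTR32 convention shared by all |ctr128_f| implementations here. For
// example, if the low counter word of |ivec| is 0x000000fe and
// |bsaes_blocks| is 5:
//
//   uint32_t ctr = CRYPTO_load_u32_be(ivec + 12);  // 0x000000fe
//   ctr += 5;                                      // 0x00000103
//   CRYPTO_store_u32_be(new_ivec + 12, ctr);       // bytes 00 00 01 03
//
// The first 12 bytes of |new_ivec| are copied unchanged, so the trailing
// |vpaes_ctr32_encrypt_blocks| call continues the same keystream.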

typedef struct {
  union {
    double align;
    AES_KEY ks;
  } ks;
  block128_f block;
  union {
    cbc128_f cbc;
    ctr128_f ctr;
  } stream;
} EVP_AES_KEY;

typedef struct {
  GCM128_CONTEXT gcm;
  union {
    double align;
    AES_KEY ks;
  } ks;         // AES key schedule to use
  int key_set;  // Set if key initialised
  int iv_set;   // Set if an iv is set
  uint8_t *iv;  // Temporary IV store
  int ivlen;    // IV length
  int taglen;
  int iv_gen;   // It is OK to generate IVs
  ctr128_f ctr;
} EVP_AES_GCM_CTX;

static int aes_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
                        const uint8_t *iv, int enc) {
  int ret;
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;
  const int mode = ctx->cipher->flags & EVP_CIPH_MODE_MASK;

  if (mode == EVP_CIPH_CTR_MODE) {
    switch (ctx->key_len) {
      case 16:
        boringssl_fips_inc_counter(fips_counter_evp_aes_128_ctr);
        break;

      case 32:
        boringssl_fips_inc_counter(fips_counter_evp_aes_256_ctr);
        break;
    }
  }

  if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE) && !enc) {
    if (hwaes_capable()) {
      ret = aes_hw_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      dat->block = aes_hw_decrypt;
      dat->stream.cbc = NULL;
      if (mode == EVP_CIPH_CBC_MODE) {
        dat->stream.cbc = aes_hw_cbc_encrypt;
      }
    } else if (bsaes_capable() && mode == EVP_CIPH_CBC_MODE) {
      assert(vpaes_capable());
      ret = vpaes_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      if (ret == 0) {
        vpaes_decrypt_key_to_bsaes(&dat->ks.ks, &dat->ks.ks);
      }
      // If |dat->stream.cbc| is provided, |dat->block| is never used.
      dat->block = NULL;
      dat->stream.cbc = bsaes_cbc_encrypt;
    } else if (vpaes_capable()) {
      ret = vpaes_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      dat->block = vpaes_decrypt;
      dat->stream.cbc = NULL;
#if defined(VPAES_CBC)
      if (mode == EVP_CIPH_CBC_MODE) {
        dat->stream.cbc = vpaes_cbc_encrypt;
      }
#endif
    } else {
      ret = aes_nohw_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      dat->block = aes_nohw_decrypt;
      dat->stream.cbc = NULL;
      if (mode == EVP_CIPH_CBC_MODE) {
        dat->stream.cbc = aes_nohw_cbc_encrypt;
      }
    }
  } else if (hwaes_capable()) {
    ret = aes_hw_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
    dat->block = aes_hw_encrypt;
    dat->stream.cbc = NULL;
    if (mode == EVP_CIPH_CBC_MODE) {
      dat->stream.cbc = aes_hw_cbc_encrypt;
    } else if (mode == EVP_CIPH_CTR_MODE) {
      dat->stream.ctr = aes_hw_ctr32_encrypt_blocks;
    }
  } else if (vpaes_capable()) {
    ret = vpaes_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
    dat->block = vpaes_encrypt;
    dat->stream.cbc = NULL;
#if defined(VPAES_CBC)
    if (mode == EVP_CIPH_CBC_MODE) {
      dat->stream.cbc = vpaes_cbc_encrypt;
    }
#endif
    if (mode == EVP_CIPH_CTR_MODE) {
#if defined(BSAES)
      assert(bsaes_capable());
      dat->stream.ctr = vpaes_ctr32_encrypt_blocks_with_bsaes;
#elif defined(VPAES_CTR32)
      dat->stream.ctr = vpaes_ctr32_encrypt_blocks;
#endif
    }
  } else {
    ret = aes_nohw_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
    dat->block = aes_nohw_encrypt;
    dat->stream.cbc = NULL;
    if (mode == EVP_CIPH_CBC_MODE) {
      dat->stream.cbc = aes_nohw_cbc_encrypt;
    }
  }

  if (ret < 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_AES_KEY_SETUP_FAILED);
    return 0;
  }

  return 1;
}

static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  if (dat->stream.cbc) {
    (*dat->stream.cbc)(in, out, len, &dat->ks.ks, ctx->iv, ctx->encrypt);
  } else if (ctx->encrypt) {
    CRYPTO_cbc128_encrypt(in, out, len, &dat->ks.ks, ctx->iv, dat->block);
  } else {
    CRYPTO_cbc128_decrypt(in, out, len, &dat->ks.ks, ctx->iv, dat->block);
  }

  return 1;
}

static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  size_t bl = ctx->cipher->block_size;
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  if (len < bl) {
    return 1;
  }

  len -= bl;
  for (size_t i = 0; i <= len; i += bl) {
    (*dat->block)(in + i, out + i, &dat->ks.ks);
  }

  return 1;
}
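
// Worked example of the loop bounds above (illustrative only): with
// |len| = 48 and a 16-byte block size, |len| is first reduced to 32 and the
// loop runs with i = 0, 16, and 32 (the condition is i <= len), encrypting
// exactly three full blocks, i.e. bytes 0..47. Any trailing partial block is
// ignored here; the EVP layer is expected to only pass whole blocks for ECB.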

static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  if (dat->stream.ctr) {
    CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks.ks, ctx->iv, ctx->buf,
                                &ctx->num, dat->stream.ctr);
  } else {
    CRYPTO_ctr128_encrypt(in, out, len, &dat->ks.ks, ctx->iv, ctx->buf,
                          &ctx->num, dat->block);
  }
  return 1;
}

static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  CRYPTO_ofb128_encrypt(in, out, len, &dat->ks.ks, ctx->iv, &ctx->num,
                        dat->block);
  return 1;
}

ctr128_f aes_ctr_set_key(AES_KEY *aes_key, GCM128_KEY *gcm_key,
                         block128_f *out_block, const uint8_t *key,
                         size_t key_bytes) {
  // This function assumes the key length was previously validated.
  assert(key_bytes == 128 / 8 || key_bytes == 192 / 8 || key_bytes == 256 / 8);
  if (hwaes_capable()) {
    aes_hw_set_encrypt_key(key, (int)key_bytes * 8, aes_key);
    if (gcm_key != NULL) {
      CRYPTO_gcm128_init_key(gcm_key, aes_key, aes_hw_encrypt, 1);
    }
    if (out_block) {
      *out_block = aes_hw_encrypt;
    }
    return aes_hw_ctr32_encrypt_blocks;
  }

  if (vpaes_capable()) {
    vpaes_set_encrypt_key(key, (int)key_bytes * 8, aes_key);
    if (out_block) {
      *out_block = vpaes_encrypt;
    }
    if (gcm_key != NULL) {
      CRYPTO_gcm128_init_key(gcm_key, aes_key, vpaes_encrypt, 0);
    }
#if defined(BSAES)
    assert(bsaes_capable());
    return vpaes_ctr32_encrypt_blocks_with_bsaes;
#elif defined(VPAES_CTR32)
    return vpaes_ctr32_encrypt_blocks;
#else
    return NULL;
#endif
  }

  aes_nohw_set_encrypt_key(key, (int)key_bytes * 8, aes_key);
  if (gcm_key != NULL) {
    CRYPTO_gcm128_init_key(gcm_key, aes_key, aes_nohw_encrypt, 0);
  }
  if (out_block) {
    *out_block = aes_nohw_encrypt;
  }
  return aes_nohw_ctr32_encrypt_blocks;
}

#if defined(OPENSSL_32_BIT)
#define EVP_AES_GCM_CTX_PADDING (4 + 8)
#else
#define EVP_AES_GCM_CTX_PADDING 8
#endif

static EVP_AES_GCM_CTX *aes_gcm_from_cipher_ctx(EVP_CIPHER_CTX *ctx) {
  static_assert(
      alignof(EVP_AES_GCM_CTX) <= 16,
      "EVP_AES_GCM_CTX needs more alignment than this function provides");

  // |malloc| guarantees up to 4-byte alignment on 32-bit and 8-byte alignment
  // on 64-bit systems, so we need to adjust to reach 16-byte alignment.
  assert(ctx->cipher->ctx_size ==
         sizeof(EVP_AES_GCM_CTX) + EVP_AES_GCM_CTX_PADDING);

  char *ptr = ctx->cipher_data;
#if defined(OPENSSL_32_BIT)
  assert((uintptr_t)ptr % 4 == 0);
  ptr += (uintptr_t)ptr & 4;
#endif
  assert((uintptr_t)ptr % 8 == 0);
  ptr += (uintptr_t)ptr & 8;
  return (EVP_AES_GCM_CTX *)ptr;
}
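
// Worked example of the alignment fix-up above (illustrative only): suppose
// |ctx->cipher_data| is 8-byte but not 16-byte aligned, say 0x...1008. Then
//
//   (uintptr_t)ptr & 8 == 8, so ptr += 8 yields 0x...1010,
//
// which is 16-byte aligned. If the pointer were already 16-byte aligned (low
// four bits zero), the masked value would be 0 and |ptr| is unchanged. The
// extra EVP_AES_GCM_CTX_PADDING bytes in |ctx_size| guarantee the adjusted
// pointer still leaves room for a full EVP_AES_GCM_CTX.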

static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
                            const uint8_t *iv, int enc) {
  EVP_AES_GCM_CTX *gctx = aes_gcm_from_cipher_ctx(ctx);
  if (!iv && !key) {
    return 1;
  }

  switch (ctx->key_len) {
    case 16:
      boringssl_fips_inc_counter(fips_counter_evp_aes_128_gcm);
      break;

    case 32:
      boringssl_fips_inc_counter(fips_counter_evp_aes_256_gcm);
      break;
  }

  if (key) {
    OPENSSL_memset(&gctx->gcm, 0, sizeof(gctx->gcm));
    gctx->ctr = aes_ctr_set_key(&gctx->ks.ks, &gctx->gcm.gcm_key, NULL, key,
                                ctx->key_len);
    // If we have an IV, set it directly; otherwise fall back to the saved IV.
    if (iv == NULL && gctx->iv_set) {
      iv = gctx->iv;
    }
    if (iv) {
      CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, iv, gctx->ivlen);
      gctx->iv_set = 1;
    }
    gctx->key_set = 1;
  } else {
    // If the key is already set, apply the IV now; otherwise save a copy for
    // later.
    if (gctx->key_set) {
      CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, iv, gctx->ivlen);
    } else {
      OPENSSL_memcpy(gctx->iv, iv, gctx->ivlen);
    }
    gctx->iv_set = 1;
    gctx->iv_gen = 0;
  }
  return 1;
}

static void aes_gcm_cleanup(EVP_CIPHER_CTX *c) {
  EVP_AES_GCM_CTX *gctx = aes_gcm_from_cipher_ctx(c);
  OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));
  if (gctx->iv != c->iv) {
    OPENSSL_free(gctx->iv);
  }
}

// Increment the last 8 bytes of |counter|, treated as a 64-bit big-endian
// integer, by one.
static void ctr64_inc(uint8_t *counter) {
  int n = 8;
  uint8_t c;

  do {
    --n;
    c = counter[n];
    ++c;
    counter[n] = c;
    if (c) {
      return;
    }
  } while (n);
}
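
// Illustrative example: because the counter is big-endian, incrementing
// {0x00, ..., 0x00, 0xff} yields {0x00, ..., 0x01, 0x00} (the carry
// propagates left until a byte does not wrap), and an all-0xff counter wraps
// to all zeros after touching every byte.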

static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr) {
  EVP_AES_GCM_CTX *gctx = aes_gcm_from_cipher_ctx(c);
  switch (type) {
    case EVP_CTRL_INIT:
      gctx->key_set = 0;
      gctx->iv_set = 0;
      gctx->ivlen = c->cipher->iv_len;
      gctx->iv = c->iv;
      gctx->taglen = -1;
      gctx->iv_gen = 0;
      return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
      if (arg <= 0) {
        return 0;
      }

      // Allocate memory for IV if needed
      if (arg > EVP_MAX_IV_LENGTH && arg > gctx->ivlen) {
        if (gctx->iv != c->iv) {
          OPENSSL_free(gctx->iv);
        }
        gctx->iv = OPENSSL_malloc(arg);
        if (!gctx->iv) {
          return 0;
        }
      }
      gctx->ivlen = arg;
      return 1;

    case EVP_CTRL_AEAD_SET_TAG:
      if (arg <= 0 || arg > 16 || c->encrypt) {
        return 0;
      }
      OPENSSL_memcpy(c->buf, ptr, arg);
      gctx->taglen = arg;
      return 1;

    case EVP_CTRL_AEAD_GET_TAG:
      if (arg <= 0 || arg > 16 || !c->encrypt || gctx->taglen < 0) {
        return 0;
      }
      OPENSSL_memcpy(ptr, c->buf, arg);
      return 1;

    case EVP_CTRL_AEAD_SET_IV_FIXED:
      // Special case: -1 length restores whole IV
      if (arg == -1) {
        OPENSSL_memcpy(gctx->iv, ptr, gctx->ivlen);
        gctx->iv_gen = 1;
        return 1;
      }
      // Fixed field must be at least 4 bytes and invocation field
      // at least 8.
      if (arg < 4 || (gctx->ivlen - arg) < 8) {
        return 0;
      }
      if (arg) {
        OPENSSL_memcpy(gctx->iv, ptr, arg);
      }
      if (c->encrypt) {
        // |RAND_bytes| calls within the fipsmodule should be wrapped with
        // state lock functions to avoid updating the service indicator with
        // the DRBG functions.
        FIPS_service_indicator_lock_state();
        RAND_bytes(gctx->iv + arg, gctx->ivlen - arg);
        FIPS_service_indicator_unlock_state();
      }
      gctx->iv_gen = 1;
      return 1;

    case EVP_CTRL_GCM_IV_GEN:
      if (gctx->iv_gen == 0 || gctx->key_set == 0) {
        return 0;
      }
      CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, gctx->iv, gctx->ivlen);
      if (arg <= 0 || arg > gctx->ivlen) {
        arg = gctx->ivlen;
      }
      OPENSSL_memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
      // Invocation field will be at least 8 bytes in size and
      // so no need to check wrap around or increment more than
      // last 8 bytes.
      ctr64_inc(gctx->iv + gctx->ivlen - 8);
      gctx->iv_set = 1;
      return 1;

    case EVP_CTRL_GCM_SET_IV_INV:
      if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt) {
        return 0;
      }
      OPENSSL_memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
      CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, gctx->iv, gctx->ivlen);
      gctx->iv_set = 1;
      return 1;

    case EVP_CTRL_COPY: {
      EVP_CIPHER_CTX *out = ptr;
      EVP_AES_GCM_CTX *gctx_out = aes_gcm_from_cipher_ctx(out);
      // |EVP_CIPHER_CTX_copy| copies this generically, but we must redo it in
      // case |out->cipher_data| and |in->cipher_data| are differently aligned.
      OPENSSL_memcpy(gctx_out, gctx, sizeof(EVP_AES_GCM_CTX));
      if (gctx->iv == c->iv) {
        gctx_out->iv = out->iv;
      } else {
        gctx_out->iv = OPENSSL_memdup(gctx->iv, gctx->ivlen);
        if (!gctx_out->iv) {
          return 0;
        }
      }
      return 1;
    }

    default:
      return -1;
  }
}

static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  EVP_AES_GCM_CTX *gctx = aes_gcm_from_cipher_ctx(ctx);

  // If not set up, return error
  if (!gctx->key_set) {
    return -1;
  }
  if (!gctx->iv_set) {
    return -1;
  }

  if (len > INT_MAX) {
    // This function signature can only express up to |INT_MAX| bytes
    // encrypted.
    //
    // TODO(https://crbug.com/boringssl/494): Make the internal |EVP_CIPHER|
    // calling convention |size_t|-clean.
    return -1;
  }

  if (in) {
    if (out == NULL) {
      if (!CRYPTO_gcm128_aad(&gctx->gcm, in, len)) {
        return -1;
      }
    } else if (ctx->encrypt) {
      if (gctx->ctr) {
        if (!CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm, &gctx->ks.ks, in, out, len,
                                         gctx->ctr)) {
          return -1;
        }
      } else {
        if (!CRYPTO_gcm128_encrypt(&gctx->gcm, &gctx->ks.ks, in, out, len)) {
          return -1;
        }
      }
    } else {
      if (gctx->ctr) {
        if (!CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm, &gctx->ks.ks, in, out, len,
                                         gctx->ctr)) {
          return -1;
        }
      } else {
        if (!CRYPTO_gcm128_decrypt(&gctx->gcm, &gctx->ks.ks, in, out, len)) {
          return -1;
        }
      }
    }
    return (int)len;
  } else {
    if (!ctx->encrypt) {
      if (gctx->taglen < 0 ||
          !CRYPTO_gcm128_finish(&gctx->gcm, ctx->buf, gctx->taglen)) {
        return -1;
      }
      gctx->iv_set = 0;
      return 0;
    }
    CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16);
    gctx->taglen = 16;
    // Don't reuse the IV
    gctx->iv_set = 0;
    return 0;
  }
}
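
// A minimal usage sketch (comments only, illustrative) of driving this cipher
// through the public EVP interface, which routes into |aes_gcm_init_key|,
// |aes_gcm_cipher|, and |aes_gcm_ctrl| above. The buffer names are
// placeholders and error checking is omitted:
//
//   EVP_CIPHER_CTX ctx;
//   EVP_CIPHER_CTX_init(&ctx);
//   EVP_EncryptInit_ex(&ctx, EVP_aes_128_gcm(), NULL, key, nonce);
//   int len;
//   EVP_EncryptUpdate(&ctx, NULL, &len, aad, aad_len);  // AAD: |out| == NULL
//   EVP_EncryptUpdate(&ctx, out, &len, in, in_len);     // ciphertext
//   EVP_EncryptFinal_ex(&ctx, out + len, &len);         // finalizes the tag
//   EVP_CIPHER_CTX_ctrl(&ctx, EVP_CTRL_AEAD_GET_TAG, 16, tag);
//   EVP_CIPHER_CTX_cleanup(&ctx);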

DEFINE_METHOD_FUNCTION(EVP_CIPHER, EVP_aes_128_cbc) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_128_cbc;
  out->block_size = 16;
  out->key_len = 16;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_CBC_MODE;
  out->init = aes_init_key;
  out->cipher = aes_cbc_cipher;
}

DEFINE_METHOD_FUNCTION(EVP_CIPHER, EVP_aes_128_ctr) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_128_ctr;
  out->block_size = 1;
  out->key_len = 16;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_CTR_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ctr_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_128_ecb_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_128_ecb;
  out->block_size = 16;
  out->key_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_ECB_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ecb_cipher;
}

DEFINE_METHOD_FUNCTION(EVP_CIPHER, EVP_aes_128_ofb) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_128_ofb128;
  out->block_size = 1;
  out->key_len = 16;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_OFB_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ofb_cipher;
}

DEFINE_METHOD_FUNCTION(EVP_CIPHER, EVP_aes_128_gcm) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_128_gcm;
  out->block_size = 1;
  out->key_len = 16;
  out->iv_len = AES_GCM_NONCE_LENGTH;
  out->ctx_size = sizeof(EVP_AES_GCM_CTX) + EVP_AES_GCM_CTX_PADDING;
  out->flags = EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_CUSTOM_COPY |
               EVP_CIPH_FLAG_CUSTOM_CIPHER | EVP_CIPH_ALWAYS_CALL_INIT |
               EVP_CIPH_CTRL_INIT | EVP_CIPH_FLAG_AEAD_CIPHER;
  out->init = aes_gcm_init_key;
  out->cipher = aes_gcm_cipher;
  out->cleanup = aes_gcm_cleanup;
  out->ctrl = aes_gcm_ctrl;
}

DEFINE_METHOD_FUNCTION(EVP_CIPHER, EVP_aes_192_cbc) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_192_cbc;
  out->block_size = 16;
  out->key_len = 24;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_CBC_MODE;
  out->init = aes_init_key;
  out->cipher = aes_cbc_cipher;
}

DEFINE_METHOD_FUNCTION(EVP_CIPHER, EVP_aes_192_ctr) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_192_ctr;
  out->block_size = 1;
  out->key_len = 24;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_CTR_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ctr_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_192_ecb_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_192_ecb;
  out->block_size = 16;
  out->key_len = 24;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_ECB_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ecb_cipher;
}

DEFINE_METHOD_FUNCTION(EVP_CIPHER, EVP_aes_192_ofb) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_192_ofb128;
  out->block_size = 1;
  out->key_len = 24;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_OFB_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ofb_cipher;
}

DEFINE_METHOD_FUNCTION(EVP_CIPHER, EVP_aes_192_gcm) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_192_gcm;
  out->block_size = 1;
  out->key_len = 24;
  out->iv_len = AES_GCM_NONCE_LENGTH;
  out->ctx_size = sizeof(EVP_AES_GCM_CTX) + EVP_AES_GCM_CTX_PADDING;
  out->flags = EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_CUSTOM_COPY |
               EVP_CIPH_FLAG_CUSTOM_CIPHER | EVP_CIPH_ALWAYS_CALL_INIT |
               EVP_CIPH_CTRL_INIT | EVP_CIPH_FLAG_AEAD_CIPHER;
  out->init = aes_gcm_init_key;
  out->cipher = aes_gcm_cipher;
  out->cleanup = aes_gcm_cleanup;
  out->ctrl = aes_gcm_ctrl;
}

DEFINE_METHOD_FUNCTION(EVP_CIPHER, EVP_aes_256_cbc) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_256_cbc;
  out->block_size = 16;
  out->key_len = 32;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_CBC_MODE;
  out->init = aes_init_key;
  out->cipher = aes_cbc_cipher;
}

DEFINE_METHOD_FUNCTION(EVP_CIPHER, EVP_aes_256_ctr) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_256_ctr;
  out->block_size = 1;
  out->key_len = 32;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_CTR_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ctr_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_256_ecb_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_256_ecb;
  out->block_size = 16;
  out->key_len = 32;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_ECB_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ecb_cipher;
}

DEFINE_METHOD_FUNCTION(EVP_CIPHER, EVP_aes_256_ofb) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_256_ofb128;
  out->block_size = 1;
  out->key_len = 32;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_OFB_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ofb_cipher;
}

DEFINE_METHOD_FUNCTION(EVP_CIPHER, EVP_aes_256_gcm) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_256_gcm;
  out->block_size = 1;
  out->key_len = 32;
  out->iv_len = AES_GCM_NONCE_LENGTH;
  out->ctx_size = sizeof(EVP_AES_GCM_CTX) + EVP_AES_GCM_CTX_PADDING;
  out->flags = EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_CUSTOM_COPY |
               EVP_CIPH_FLAG_CUSTOM_CIPHER | EVP_CIPH_ALWAYS_CALL_INIT |
               EVP_CIPH_CTRL_INIT | EVP_CIPH_FLAG_AEAD_CIPHER;
  out->init = aes_gcm_init_key;
  out->cipher = aes_gcm_cipher;
  out->cleanup = aes_gcm_cleanup;
  out->ctrl = aes_gcm_ctrl;
}

#if defined(HWAES_ECB)

static int aes_hw_ecb_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out,
                             const uint8_t *in, size_t len) {
  size_t bl = ctx->cipher->block_size;

  if (len < bl) {
    return 1;
  }

  aes_hw_ecb_encrypt(in, out, len, ctx->cipher_data, ctx->encrypt);

  return 1;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_hw_128_ecb) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_128_ecb;
  out->block_size = 16;
  out->key_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_ECB_MODE;
  out->init = aes_init_key;
  out->cipher = aes_hw_ecb_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_hw_192_ecb) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_192_ecb;
  out->block_size = 16;
  out->key_len = 24;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_ECB_MODE;
  out->init = aes_init_key;
  out->cipher = aes_hw_ecb_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_hw_256_ecb) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_256_ecb;
  out->block_size = 16;
  out->key_len = 32;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_ECB_MODE;
  out->init = aes_init_key;
  out->cipher = aes_hw_ecb_cipher;
}

#define EVP_ECB_CIPHER_FUNCTION(keybits)            \
  const EVP_CIPHER *EVP_aes_##keybits##_ecb(void) { \
    if (hwaes_capable()) {                          \
      return aes_hw_##keybits##_ecb();              \
    }                                               \
    return aes_##keybits##_ecb_generic();           \
  }

#else

#define EVP_ECB_CIPHER_FUNCTION(keybits)            \
  const EVP_CIPHER *EVP_aes_##keybits##_ecb(void) { \
    return aes_##keybits##_ecb_generic();           \
  }

#endif  // HWAES_ECB

EVP_ECB_CIPHER_FUNCTION(128)
EVP_ECB_CIPHER_FUNCTION(192)
EVP_ECB_CIPHER_FUNCTION(256)
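
// For reference, EVP_ECB_CIPHER_FUNCTION(128) expands (in the HWAES_ECB case)
// to the following, dispatching to the hardware table when available:
//
//   const EVP_CIPHER *EVP_aes_128_ecb(void) {
//     if (hwaes_capable()) {
//       return aes_hw_128_ecb();
//     }
//     return aes_128_ecb_generic();
//   }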


#define EVP_AEAD_AES_GCM_TAG_LEN 16

struct aead_aes_gcm_ctx {
  union {
    double align;
    AES_KEY ks;
  } ks;
  GCM128_KEY gcm_key;
  ctr128_f ctr;
};

static int aead_aes_gcm_init_impl(struct aead_aes_gcm_ctx *gcm_ctx,
                                  size_t *out_tag_len, const uint8_t *key,
                                  size_t key_len, size_t tag_len) {
  const size_t key_bits = key_len * 8;

  switch (key_bits) {
    case 128:
      boringssl_fips_inc_counter(fips_counter_evp_aes_128_gcm);
      break;

    case 256:
      boringssl_fips_inc_counter(fips_counter_evp_aes_256_gcm);
      break;
  }

  if (key_bits != 128 && key_bits != 192 && key_bits != 256) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_KEY_LENGTH);
    return 0;  // EVP_AEAD_CTX_init should catch this.
  }

  if (tag_len == EVP_AEAD_DEFAULT_TAG_LENGTH) {
    tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  }

  if (tag_len > EVP_AEAD_AES_GCM_TAG_LEN) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TAG_TOO_LARGE);
    return 0;
  }

  gcm_ctx->ctr =
      aes_ctr_set_key(&gcm_ctx->ks.ks, &gcm_ctx->gcm_key, NULL, key, key_len);
  *out_tag_len = tag_len;
  return 1;
}

static_assert(sizeof(((EVP_AEAD_CTX *)NULL)->state) >=
                  sizeof(struct aead_aes_gcm_ctx),
              "AEAD state is too small");
static_assert(alignof(union evp_aead_ctx_st_state) >=
                  alignof(struct aead_aes_gcm_ctx),
              "AEAD state has insufficient alignment");

static int aead_aes_gcm_init(EVP_AEAD_CTX *ctx, const uint8_t *key,
                             size_t key_len, size_t requested_tag_len) {
  struct aead_aes_gcm_ctx *gcm_ctx = (struct aead_aes_gcm_ctx *)&ctx->state;

  size_t actual_tag_len;
  if (!aead_aes_gcm_init_impl(gcm_ctx, &actual_tag_len, key, key_len,
                              requested_tag_len)) {
    return 0;
  }

  ctx->tag_len = actual_tag_len;
  return 1;
}

static void aead_aes_gcm_cleanup(EVP_AEAD_CTX *ctx) {}

static int aead_aes_gcm_seal_scatter_impl(
    const struct aead_aes_gcm_ctx *gcm_ctx, uint8_t *out, uint8_t *out_tag,
    size_t *out_tag_len, size_t max_out_tag_len, const uint8_t *nonce,
    size_t nonce_len, const uint8_t *in, size_t in_len,
    const uint8_t *extra_in, size_t extra_in_len, const uint8_t *ad,
    size_t ad_len, size_t tag_len) {
  if (extra_in_len + tag_len < tag_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TOO_LARGE);
    return 0;
  }
  if (max_out_tag_len < extra_in_len + tag_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BUFFER_TOO_SMALL);
    return 0;
  }
  if (nonce_len == 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_INVALID_NONCE_SIZE);
    return 0;
  }

  const AES_KEY *key = &gcm_ctx->ks.ks;

  GCM128_CONTEXT gcm;
  OPENSSL_memset(&gcm, 0, sizeof(gcm));
  OPENSSL_memcpy(&gcm.gcm_key, &gcm_ctx->gcm_key, sizeof(gcm.gcm_key));
  CRYPTO_gcm128_setiv(&gcm, key, nonce, nonce_len);

  if (ad_len > 0 && !CRYPTO_gcm128_aad(&gcm, ad, ad_len)) {
    return 0;
  }

  if (gcm_ctx->ctr) {
    if (!CRYPTO_gcm128_encrypt_ctr32(&gcm, key, in, out, in_len,
                                     gcm_ctx->ctr)) {
      return 0;
    }
  } else {
    if (!CRYPTO_gcm128_encrypt(&gcm, key, in, out, in_len)) {
      return 0;
    }
  }

  if (extra_in_len) {
    if (gcm_ctx->ctr) {
      if (!CRYPTO_gcm128_encrypt_ctr32(&gcm, key, extra_in, out_tag,
                                       extra_in_len, gcm_ctx->ctr)) {
        return 0;
      }
    } else {
      if (!CRYPTO_gcm128_encrypt(&gcm, key, extra_in, out_tag, extra_in_len)) {
        return 0;
      }
    }
  }

  CRYPTO_gcm128_tag(&gcm, out_tag + extra_in_len, tag_len);
  *out_tag_len = tag_len + extra_in_len;

  return 1;
}

static int aead_aes_gcm_seal_scatter(const EVP_AEAD_CTX *ctx, uint8_t *out,
                                     uint8_t *out_tag, size_t *out_tag_len,
                                     size_t max_out_tag_len,
                                     const uint8_t *nonce, size_t nonce_len,
                                     const uint8_t *in, size_t in_len,
                                     const uint8_t *extra_in,
                                     size_t extra_in_len,
                                     const uint8_t *ad, size_t ad_len) {
  const struct aead_aes_gcm_ctx *gcm_ctx =
      (const struct aead_aes_gcm_ctx *)&ctx->state;
  return aead_aes_gcm_seal_scatter_impl(
      gcm_ctx, out, out_tag, out_tag_len, max_out_tag_len, nonce, nonce_len,
      in, in_len, extra_in, extra_in_len, ad, ad_len, ctx->tag_len);
}

static int aead_aes_gcm_open_gather_impl(
    const struct aead_aes_gcm_ctx *gcm_ctx, uint8_t *out, const uint8_t *nonce,
    size_t nonce_len, const uint8_t *in, size_t in_len, const uint8_t *in_tag,
    size_t in_tag_len, const uint8_t *ad, size_t ad_len, size_t tag_len) {
  uint8_t tag[EVP_AEAD_AES_GCM_TAG_LEN];

  if (nonce_len == 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_INVALID_NONCE_SIZE);
    return 0;
  }

  if (in_tag_len != tag_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
    return 0;
  }

  const AES_KEY *key = &gcm_ctx->ks.ks;

  GCM128_CONTEXT gcm;
  OPENSSL_memset(&gcm, 0, sizeof(gcm));
  OPENSSL_memcpy(&gcm.gcm_key, &gcm_ctx->gcm_key, sizeof(gcm.gcm_key));
  CRYPTO_gcm128_setiv(&gcm, key, nonce, nonce_len);

  if (!CRYPTO_gcm128_aad(&gcm, ad, ad_len)) {
    return 0;
  }

  if (gcm_ctx->ctr) {
    if (!CRYPTO_gcm128_decrypt_ctr32(&gcm, key, in, out, in_len,
                                     gcm_ctx->ctr)) {
      return 0;
    }
  } else {
    if (!CRYPTO_gcm128_decrypt(&gcm, key, in, out, in_len)) {
      return 0;
    }
  }

  CRYPTO_gcm128_tag(&gcm, tag, tag_len);
  if (CRYPTO_memcmp(tag, in_tag, tag_len) != 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
    return 0;
  }

  return 1;
}

static int aead_aes_gcm_open_gather(const EVP_AEAD_CTX *ctx, uint8_t *out,
                                    const uint8_t *nonce, size_t nonce_len,
                                    const uint8_t *in, size_t in_len,
                                    const uint8_t *in_tag, size_t in_tag_len,
                                    const uint8_t *ad, size_t ad_len) {
  const struct aead_aes_gcm_ctx *gcm_ctx =
      (const struct aead_aes_gcm_ctx *)&ctx->state;
  if (!aead_aes_gcm_open_gather_impl(gcm_ctx, out, nonce, nonce_len, in,
                                     in_len, in_tag, in_tag_len, ad, ad_len,
                                     ctx->tag_len)) {
    return 0;
  }

  AEAD_GCM_verify_service_indicator(ctx);
  return 1;
}

DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_128_gcm) {
  memset(out, 0, sizeof(EVP_AEAD));

  out->key_len = 16;
  out->nonce_len = AES_GCM_NONCE_LENGTH;
  out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
  out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  out->seal_scatter_supports_extra_in = 1;

  out->init = aead_aes_gcm_init;
  out->cleanup = aead_aes_gcm_cleanup;
  out->seal_scatter = aead_aes_gcm_seal_scatter;
  out->open_gather = aead_aes_gcm_open_gather;
}

DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_192_gcm) {
  memset(out, 0, sizeof(EVP_AEAD));

  out->key_len = 24;
  out->nonce_len = AES_GCM_NONCE_LENGTH;
  out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
  out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  out->seal_scatter_supports_extra_in = 1;

  out->init = aead_aes_gcm_init;
  out->cleanup = aead_aes_gcm_cleanup;
  out->seal_scatter = aead_aes_gcm_seal_scatter;
  out->open_gather = aead_aes_gcm_open_gather;
}

DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_256_gcm) {
  memset(out, 0, sizeof(EVP_AEAD));

  out->key_len = 32;
  out->nonce_len = AES_GCM_NONCE_LENGTH;
  out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
  out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  out->seal_scatter_supports_extra_in = 1;

  out->init = aead_aes_gcm_init;
  out->cleanup = aead_aes_gcm_cleanup;
  out->seal_scatter = aead_aes_gcm_seal_scatter;
  out->open_gather = aead_aes_gcm_open_gather;
}
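
// A minimal usage sketch (comments only, illustrative) of these AEADs through
// the public |EVP_AEAD_CTX| interface. |key|, |nonce|, |plaintext|, and |ad|
// are placeholders and error checking is omitted:
//
//   EVP_AEAD_CTX aead;
//   EVP_AEAD_CTX_init(&aead, EVP_aead_aes_128_gcm(), key, 16,
//                     EVP_AEAD_DEFAULT_TAG_LENGTH, NULL);
//   uint8_t out[sizeof(plaintext) + EVP_AEAD_AES_GCM_TAG_LEN];
//   size_t out_len;
//   EVP_AEAD_CTX_seal(&aead, out, &out_len, sizeof(out), nonce, 12,
//                     plaintext, sizeof(plaintext), ad, ad_len);
//   EVP_AEAD_CTX_cleanup(&aead);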

static int aead_aes_gcm_init_randnonce(EVP_AEAD_CTX *ctx, const uint8_t *key,
                                       size_t key_len,
                                       size_t requested_tag_len) {
  if (requested_tag_len != EVP_AEAD_DEFAULT_TAG_LENGTH) {
    if (requested_tag_len < AES_GCM_NONCE_LENGTH) {
      OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BUFFER_TOO_SMALL);
      return 0;
    }
    requested_tag_len -= AES_GCM_NONCE_LENGTH;
  }

  if (!aead_aes_gcm_init(ctx, key, key_len, requested_tag_len)) {
    return 0;
  }

  ctx->tag_len += AES_GCM_NONCE_LENGTH;
  return 1;
}

static int aead_aes_gcm_seal_scatter_randnonce(
    const EVP_AEAD_CTX *ctx, uint8_t *out, uint8_t *out_tag,
    size_t *out_tag_len, size_t max_out_tag_len, const uint8_t *external_nonce,
    size_t external_nonce_len, const uint8_t *in, size_t in_len,
    const uint8_t *extra_in, size_t extra_in_len, const uint8_t *ad,
    size_t ad_len) {
  if (external_nonce_len != 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_INVALID_NONCE_SIZE);
    return 0;
  }

  uint8_t nonce[AES_GCM_NONCE_LENGTH];
  if (max_out_tag_len < sizeof(nonce)) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BUFFER_TOO_SMALL);
    return 0;
  }

  // |RAND_bytes| calls within the fipsmodule should be wrapped with state lock
  // functions to avoid updating the service indicator with the DRBG functions.
  FIPS_service_indicator_lock_state();
  RAND_bytes(nonce, sizeof(nonce));
  FIPS_service_indicator_unlock_state();

  const struct aead_aes_gcm_ctx *gcm_ctx =
      (const struct aead_aes_gcm_ctx *)&ctx->state;
  if (!aead_aes_gcm_seal_scatter_impl(gcm_ctx, out, out_tag, out_tag_len,
                                      max_out_tag_len - AES_GCM_NONCE_LENGTH,
                                      nonce, sizeof(nonce), in, in_len,
                                      extra_in, extra_in_len, ad, ad_len,
                                      ctx->tag_len - AES_GCM_NONCE_LENGTH)) {
    return 0;
  }

  assert(*out_tag_len + sizeof(nonce) <= max_out_tag_len);
  memcpy(out_tag + *out_tag_len, nonce, sizeof(nonce));
  *out_tag_len += sizeof(nonce);

  AEAD_GCM_verify_service_indicator(ctx);
  return 1;
}

static int aead_aes_gcm_open_gather_randnonce(
    const EVP_AEAD_CTX *ctx, uint8_t *out, const uint8_t *external_nonce,
    size_t external_nonce_len, const uint8_t *in, size_t in_len,
    const uint8_t *in_tag, size_t in_tag_len, const uint8_t *ad,
    size_t ad_len) {
  if (external_nonce_len != 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_INVALID_NONCE_SIZE);
    return 0;
  }

  if (in_tag_len < AES_GCM_NONCE_LENGTH) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
    return 0;
  }
  const uint8_t *nonce = in_tag + in_tag_len - AES_GCM_NONCE_LENGTH;

  const struct aead_aes_gcm_ctx *gcm_ctx =
      (const struct aead_aes_gcm_ctx *)&ctx->state;
  if (!aead_aes_gcm_open_gather_impl(
          gcm_ctx, out, nonce, AES_GCM_NONCE_LENGTH, in, in_len, in_tag,
          in_tag_len - AES_GCM_NONCE_LENGTH, ad, ad_len,
          ctx->tag_len - AES_GCM_NONCE_LENGTH)) {
    return 0;
  }

  AEAD_GCM_verify_service_indicator(ctx);
  return 1;
}

DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_128_gcm_randnonce) {
  memset(out, 0, sizeof(EVP_AEAD));

  out->key_len = 16;
  out->nonce_len = 0;
  out->overhead = EVP_AEAD_AES_GCM_TAG_LEN + AES_GCM_NONCE_LENGTH;
  out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN + AES_GCM_NONCE_LENGTH;
  out->seal_scatter_supports_extra_in = 1;

  out->init = aead_aes_gcm_init_randnonce;
  out->cleanup = aead_aes_gcm_cleanup;
  out->seal_scatter = aead_aes_gcm_seal_scatter_randnonce;
  out->open_gather = aead_aes_gcm_open_gather_randnonce;
}

DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_256_gcm_randnonce) {
  memset(out, 0, sizeof(EVP_AEAD));

  out->key_len = 32;
  out->nonce_len = 0;
  out->overhead = EVP_AEAD_AES_GCM_TAG_LEN + AES_GCM_NONCE_LENGTH;
  out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN + AES_GCM_NONCE_LENGTH;
  out->seal_scatter_supports_extra_in = 1;

  out->init = aead_aes_gcm_init_randnonce;
  out->cleanup = aead_aes_gcm_cleanup;
  out->seal_scatter = aead_aes_gcm_seal_scatter_randnonce;
  out->open_gather = aead_aes_gcm_open_gather_randnonce;
}
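
// Illustrative "tag" layout for the randnonce AEADs above: with no
// |extra_in|, sealing |in_len| bytes produces
//
//   out     = ciphertext (in_len bytes)
//   out_tag = GCM tag (ctx->tag_len - 12 bytes) || random nonce (12 bytes)
//
// so the caller-visible nonce length is zero and the 12-byte random nonce
// travels as a suffix of the tag; |open_gather| strips it back off and uses
// it to verify.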

struct aead_aes_gcm_tls12_ctx {
  struct aead_aes_gcm_ctx gcm_ctx;
  uint64_t min_next_nonce;
};

static_assert(sizeof(((EVP_AEAD_CTX *)NULL)->state) >=
                  sizeof(struct aead_aes_gcm_tls12_ctx),
              "AEAD state is too small");
static_assert(alignof(union evp_aead_ctx_st_state) >=
                  alignof(struct aead_aes_gcm_tls12_ctx),
              "AEAD state has insufficient alignment");

static int aead_aes_gcm_tls12_init(EVP_AEAD_CTX *ctx, const uint8_t *key,
                                   size_t key_len, size_t requested_tag_len) {
  struct aead_aes_gcm_tls12_ctx *gcm_ctx =
      (struct aead_aes_gcm_tls12_ctx *)&ctx->state;

  gcm_ctx->min_next_nonce = 0;

  size_t actual_tag_len;
  if (!aead_aes_gcm_init_impl(&gcm_ctx->gcm_ctx, &actual_tag_len, key, key_len,
                              requested_tag_len)) {
    return 0;
  }

  ctx->tag_len = actual_tag_len;
  return 1;
}

static int aead_aes_gcm_tls12_seal_scatter(
    const EVP_AEAD_CTX *ctx, uint8_t *out, uint8_t *out_tag,
    size_t *out_tag_len, size_t max_out_tag_len, const uint8_t *nonce,
    size_t nonce_len, const uint8_t *in, size_t in_len, const uint8_t *extra_in,
    size_t extra_in_len, const uint8_t *ad, size_t ad_len) {
  struct aead_aes_gcm_tls12_ctx *gcm_ctx =
      (struct aead_aes_gcm_tls12_ctx *)&ctx->state;

  if (nonce_len != AES_GCM_NONCE_LENGTH) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_NONCE_SIZE);
    return 0;
  }

  // The given nonces must be strictly monotonically increasing.
  uint64_t given_counter =
      CRYPTO_load_u64_be(nonce + nonce_len - sizeof(uint64_t));
  if (given_counter == UINT64_MAX || given_counter < gcm_ctx->min_next_nonce) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_INVALID_NONCE);
    return 0;
  }

  gcm_ctx->min_next_nonce = given_counter + 1;

  if (!aead_aes_gcm_seal_scatter(ctx, out, out_tag, out_tag_len,
                                 max_out_tag_len, nonce, nonce_len, in, in_len,
                                 extra_in, extra_in_len, ad, ad_len)) {
    return 0;
  }

  AEAD_GCM_verify_service_indicator(ctx);
  return 1;
}

DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_128_gcm_tls12) {
  memset(out, 0, sizeof(EVP_AEAD));

  out->key_len = 16;
  out->nonce_len = AES_GCM_NONCE_LENGTH;
  out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
  out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  out->seal_scatter_supports_extra_in = 1;

  out->init = aead_aes_gcm_tls12_init;
  out->cleanup = aead_aes_gcm_cleanup;
  out->seal_scatter = aead_aes_gcm_tls12_seal_scatter;
  out->open_gather = aead_aes_gcm_open_gather;
}

DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_256_gcm_tls12) {
  memset(out, 0, sizeof(EVP_AEAD));

  out->key_len = 32;
  out->nonce_len = AES_GCM_NONCE_LENGTH;
  out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
  out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  out->seal_scatter_supports_extra_in = 1;

  out->init = aead_aes_gcm_tls12_init;
  out->cleanup = aead_aes_gcm_cleanup;
  out->seal_scatter = aead_aes_gcm_tls12_seal_scatter;
  out->open_gather = aead_aes_gcm_open_gather;
}

struct aead_aes_gcm_tls13_ctx {
  struct aead_aes_gcm_ctx gcm_ctx;
  uint64_t min_next_nonce;
  uint64_t mask;
  uint8_t first;
};

static_assert(sizeof(((EVP_AEAD_CTX *)NULL)->state) >=
                  sizeof(struct aead_aes_gcm_tls13_ctx),
              "AEAD state is too small");
static_assert(alignof(union evp_aead_ctx_st_state) >=
                  alignof(struct aead_aes_gcm_tls13_ctx),
              "AEAD state has insufficient alignment");

static int aead_aes_gcm_tls13_init(EVP_AEAD_CTX *ctx, const uint8_t *key,
                                   size_t key_len, size_t requested_tag_len) {
  struct aead_aes_gcm_tls13_ctx *gcm_ctx =
      (struct aead_aes_gcm_tls13_ctx *)&ctx->state;

  gcm_ctx->min_next_nonce = 0;
  gcm_ctx->first = 1;

  size_t actual_tag_len;
  if (!aead_aes_gcm_init_impl(&gcm_ctx->gcm_ctx, &actual_tag_len, key, key_len,
                              requested_tag_len)) {
    return 0;
  }

  ctx->tag_len = actual_tag_len;
  return 1;
}

static int aead_aes_gcm_tls13_seal_scatter(
    const EVP_AEAD_CTX *ctx, uint8_t *out, uint8_t *out_tag,
    size_t *out_tag_len, size_t max_out_tag_len, const uint8_t *nonce,
    size_t nonce_len, const uint8_t *in, size_t in_len, const uint8_t *extra_in,
    size_t extra_in_len, const uint8_t *ad, size_t ad_len) {
  struct aead_aes_gcm_tls13_ctx *gcm_ctx =
      (struct aead_aes_gcm_tls13_ctx *)&ctx->state;

  if (nonce_len != AES_GCM_NONCE_LENGTH) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_NONCE_SIZE);
    return 0;
  }

  // The given nonces must be strictly monotonically increasing. See
  // https://tools.ietf.org/html/rfc8446#section-5.3 for details of the TLS 1.3
  // nonce construction.
  uint64_t given_counter =
      CRYPTO_load_u64_be(nonce + nonce_len - sizeof(uint64_t));

  if (gcm_ctx->first) {
    // In the first call the sequence number will be zero and therefore the
    // given nonce will be 0 ^ mask = mask.
    gcm_ctx->mask = given_counter;
    gcm_ctx->first = 0;
  }
  given_counter ^= gcm_ctx->mask;

  if (given_counter == UINT64_MAX ||
      given_counter < gcm_ctx->min_next_nonce) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_INVALID_NONCE);
    return 0;
  }

  gcm_ctx->min_next_nonce = given_counter + 1;

  if (!aead_aes_gcm_seal_scatter(ctx, out, out_tag, out_tag_len,
                                 max_out_tag_len, nonce, nonce_len, in, in_len,
                                 extra_in, extra_in_len, ad, ad_len)) {
    return 0;
  }

  AEAD_GCM_verify_service_indicator(ctx);
  return 1;
}
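
// Worked example of the nonce check above (illustrative only): TLS 1.3
// derives each per-record nonce as record_iv XOR pad64(seq), per RFC 8446,
// section 5.3. On the first seal the sequence number is zero, so the last
// eight nonce bytes equal the corresponding IV bytes and are captured as
// |mask|. A later record with seq = 5 arrives as mask ^ 5; XORing with
// |mask| recovers 5, which must be at least |min_next_nonce| for the seal to
// proceed, enforcing strictly increasing sequence numbers.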

DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_128_gcm_tls13) {
  memset(out, 0, sizeof(EVP_AEAD));

  out->key_len = 16;
  out->nonce_len = AES_GCM_NONCE_LENGTH;
  out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
  out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  out->seal_scatter_supports_extra_in = 1;

  out->init = aead_aes_gcm_tls13_init;
  out->cleanup = aead_aes_gcm_cleanup;
  out->seal_scatter = aead_aes_gcm_tls13_seal_scatter;
  out->open_gather = aead_aes_gcm_open_gather;
}

DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_256_gcm_tls13) {
  memset(out, 0, sizeof(EVP_AEAD));

  out->key_len = 32;
  out->nonce_len = AES_GCM_NONCE_LENGTH;
  out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
  out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  out->seal_scatter_supports_extra_in = 1;

  out->init = aead_aes_gcm_tls13_init;
  out->cleanup = aead_aes_gcm_cleanup;
  out->seal_scatter = aead_aes_gcm_tls13_seal_scatter;
  out->open_gather = aead_aes_gcm_open_gather;
}

int EVP_has_aes_hardware(void) {
#if defined(OPENSSL_X86) || defined(OPENSSL_X86_64)
  return hwaes_capable() && crypto_gcm_clmul_enabled();
#elif defined(OPENSSL_ARM) || defined(OPENSSL_AARCH64)
  return hwaes_capable() && CRYPTO_is_ARMv8_PMULL_capable();
#else
  return 0;
#endif
}

OPENSSL_MSVC_PRAGMA(warning(pop))