/* ====================================================================
 * Copyright (c) 2001-2011 The OpenSSL Project.  All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 *
 * 3. All advertising materials mentioning features or use of this
 *    software must display the following acknowledgment:
 *    "This product includes software developed by the OpenSSL Project
 *    for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
 *
 * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
 *    endorse or promote products derived from this software without
 *    prior written permission. For written permission, please contact
 *    openssl-core@openssl.org.
 *
 * 5. Products derived from this software may not be called "OpenSSL"
 *    nor may "OpenSSL" appear in their names without prior written
 *    permission of the OpenSSL Project.
 *
 * 6. Redistributions of any form whatsoever must retain the following
 *    acknowledgment:
 *    "This product includes software developed by the OpenSSL Project
 *    for use in the OpenSSL Toolkit (http://www.openssl.org/)"
 *
 * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
 * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE OpenSSL PROJECT OR
 * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
 * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
 * OF THE POSSIBILITY OF SUCH DAMAGE.
 * ==================================================================== */

#include <assert.h>
#include <string.h>

#include <openssl/aead.h>
#include <openssl/aes.h>
#include <openssl/cipher.h>
#include <openssl/cpu.h>
#include <openssl/err.h>
#include <openssl/mem.h>
#include <openssl/nid.h>
#include <openssl/rand.h>

#include "internal.h"
#include "../../internal.h"
#include "../aes/internal.h"
#include "../modes/internal.h"
#include "../delocate.h"


OPENSSL_MSVC_PRAGMA(warning(push))
OPENSSL_MSVC_PRAGMA(warning(disable: 4702))  // Unreachable code.

#define AES_GCM_NONCE_LENGTH 12

#if defined(BSAES)
static void vpaes_ctr32_encrypt_blocks_with_bsaes(const uint8_t *in,
                                                  uint8_t *out, size_t blocks,
                                                  const AES_KEY *key,
                                                  const uint8_t ivec[16]) {
  // |bsaes_ctr32_encrypt_blocks| is faster than |vpaes_ctr32_encrypt_blocks|,
  // but it takes at least one full 8-block batch to amortize the conversion.
  if (blocks < 8) {
    vpaes_ctr32_encrypt_blocks(in, out, blocks, key, ivec);
    return;
  }

  size_t bsaes_blocks = blocks;
  if (bsaes_blocks % 8 < 6) {
    // |bsaes_ctr32_encrypt_blocks| internally works in 8-block batches. If the
    // final batch is too small (under six blocks), it is faster to loop over
    // |vpaes_encrypt|. Round |bsaes_blocks| down to a multiple of 8.
    bsaes_blocks -= bsaes_blocks % 8;
  }
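  // For example, with |blocks| = 21: 21 % 8 == 5, which is under six, so
  // |bsaes_blocks| rounds down to 16 and the remaining five blocks are
  // handled by |vpaes_ctr32_encrypt_blocks| below.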

  AES_KEY bsaes;
  vpaes_encrypt_key_to_bsaes(&bsaes, key);
  bsaes_ctr32_encrypt_blocks(in, out, bsaes_blocks, &bsaes, ivec);
  OPENSSL_cleanse(&bsaes, sizeof(bsaes));

  in += 16 * bsaes_blocks;
  out += 16 * bsaes_blocks;
  blocks -= bsaes_blocks;

  union {
    uint32_t u32[4];
    uint8_t u8[16];
  } new_ivec;
  memcpy(new_ivec.u8, ivec, 16);
  uint32_t ctr = CRYPTO_bswap4(new_ivec.u32[3]) + bsaes_blocks;
  new_ivec.u32[3] = CRYPTO_bswap4(ctr);

  // Finish any remaining blocks with |vpaes_ctr32_encrypt_blocks|.
  vpaes_ctr32_encrypt_blocks(in, out, blocks, key, new_ivec.u8);
}
#endif  // BSAES

typedef struct {
  union {
    double align;
    AES_KEY ks;
  } ks;
  block128_f block;
  union {
    cbc128_f cbc;
    ctr128_f ctr;
  } stream;
} EVP_AES_KEY;

typedef struct {
  GCM128_CONTEXT gcm;
  union {
    double align;
    AES_KEY ks;
  } ks;         // AES key schedule to use
  int key_set;  // Set if key initialised
  int iv_set;   // Set if an iv is set
  uint8_t *iv;  // Temporary IV store
  int ivlen;    // IV length
  int taglen;
  int iv_gen;   // It is OK to generate IVs
  ctr128_f ctr;
} EVP_AES_GCM_CTX;

static int aes_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
                        const uint8_t *iv, int enc) {
  int ret;
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;
  const int mode = ctx->cipher->flags & EVP_CIPH_MODE_MASK;

  if (mode == EVP_CIPH_CTR_MODE) {
    switch (ctx->key_len) {
      case 16:
        boringssl_fips_inc_counter(fips_counter_evp_aes_128_ctr);
        break;

      case 32:
        boringssl_fips_inc_counter(fips_counter_evp_aes_256_ctr);
        break;
    }
  }

  if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE) && !enc) {
    if (hwaes_capable()) {
      ret = aes_hw_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      dat->block = aes_hw_decrypt;
      dat->stream.cbc = NULL;
      if (mode == EVP_CIPH_CBC_MODE) {
        dat->stream.cbc = aes_hw_cbc_encrypt;
      }
    } else if (bsaes_capable() && mode == EVP_CIPH_CBC_MODE) {
      assert(vpaes_capable());
      ret = vpaes_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      if (ret == 0) {
        vpaes_decrypt_key_to_bsaes(&dat->ks.ks, &dat->ks.ks);
      }
      // If |dat->stream.cbc| is provided, |dat->block| is never used.
      dat->block = NULL;
      dat->stream.cbc = bsaes_cbc_encrypt;
    } else if (vpaes_capable()) {
      ret = vpaes_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      dat->block = vpaes_decrypt;
      dat->stream.cbc = NULL;
#if defined(VPAES_CBC)
      if (mode == EVP_CIPH_CBC_MODE) {
        dat->stream.cbc = vpaes_cbc_encrypt;
      }
#endif
    } else {
      ret = aes_nohw_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      dat->block = aes_nohw_decrypt;
      dat->stream.cbc = NULL;
      if (mode == EVP_CIPH_CBC_MODE) {
        dat->stream.cbc = aes_nohw_cbc_encrypt;
      }
    }
  } else if (hwaes_capable()) {
    ret = aes_hw_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
    dat->block = aes_hw_encrypt;
    dat->stream.cbc = NULL;
    if (mode == EVP_CIPH_CBC_MODE) {
      dat->stream.cbc = aes_hw_cbc_encrypt;
    } else if (mode == EVP_CIPH_CTR_MODE) {
      dat->stream.ctr = aes_hw_ctr32_encrypt_blocks;
    }
  } else if (vpaes_capable()) {
    ret = vpaes_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
    dat->block = vpaes_encrypt;
    dat->stream.cbc = NULL;
#if defined(VPAES_CBC)
    if (mode == EVP_CIPH_CBC_MODE) {
      dat->stream.cbc = vpaes_cbc_encrypt;
    }
#endif
    if (mode == EVP_CIPH_CTR_MODE) {
#if defined(BSAES)
      assert(bsaes_capable());
      dat->stream.ctr = vpaes_ctr32_encrypt_blocks_with_bsaes;
#elif defined(VPAES_CTR32)
      dat->stream.ctr = vpaes_ctr32_encrypt_blocks;
#endif
    }
  } else {
    ret = aes_nohw_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
    dat->block = aes_nohw_encrypt;
    dat->stream.cbc = NULL;
    if (mode == EVP_CIPH_CBC_MODE) {
      dat->stream.cbc = aes_nohw_cbc_encrypt;
    }
  }

  if (ret < 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_AES_KEY_SETUP_FAILED);
    return 0;
  }

  return 1;
}

static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  if (dat->stream.cbc) {
    (*dat->stream.cbc)(in, out, len, &dat->ks.ks, ctx->iv, ctx->encrypt);
  } else if (ctx->encrypt) {
    CRYPTO_cbc128_encrypt(in, out, len, &dat->ks.ks, ctx->iv, dat->block);
  } else {
    CRYPTO_cbc128_decrypt(in, out, len, &dat->ks.ks, ctx->iv, dat->block);
  }

  return 1;
}

static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  size_t bl = ctx->cipher->block_size;
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  if (len < bl) {
    return 1;
  }

  len -= bl;
  for (size_t i = 0; i <= len; i += bl) {
    (*dat->block)(in + i, out + i, &dat->ks.ks);
  }

  return 1;
}

static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  if (dat->stream.ctr) {
    CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks.ks, ctx->iv, ctx->buf,
                                &ctx->num, dat->stream.ctr);
  } else {
    CRYPTO_ctr128_encrypt(in, out, len, &dat->ks.ks, ctx->iv, ctx->buf,
                          &ctx->num, dat->block);
  }
  return 1;
}

static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  CRYPTO_ofb128_encrypt(in, out, len, &dat->ks.ks, ctx->iv, &ctx->num,
                        dat->block);
  return 1;
}

ctr128_f aes_ctr_set_key(AES_KEY *aes_key, GCM128_KEY *gcm_key,
                         block128_f *out_block, const uint8_t *key,
                         size_t key_bytes) {
  if (hwaes_capable()) {
    aes_hw_set_encrypt_key(key, key_bytes * 8, aes_key);
    if (gcm_key != NULL) {
      CRYPTO_gcm128_init_key(gcm_key, aes_key, aes_hw_encrypt, 1);
    }
    if (out_block) {
      *out_block = aes_hw_encrypt;
    }
    return aes_hw_ctr32_encrypt_blocks;
  }

  if (vpaes_capable()) {
    vpaes_set_encrypt_key(key, key_bytes * 8, aes_key);
    if (out_block) {
      *out_block = vpaes_encrypt;
    }
    if (gcm_key != NULL) {
      CRYPTO_gcm128_init_key(gcm_key, aes_key, vpaes_encrypt, 0);
    }
#if defined(BSAES)
    assert(bsaes_capable());
    return vpaes_ctr32_encrypt_blocks_with_bsaes;
#elif defined(VPAES_CTR32)
    return vpaes_ctr32_encrypt_blocks;
#else
    return NULL;
#endif
  }

  aes_nohw_set_encrypt_key(key, key_bytes * 8, aes_key);
  if (gcm_key != NULL) {
    CRYPTO_gcm128_init_key(gcm_key, aes_key, aes_nohw_encrypt, 0);
  }
  if (out_block) {
    *out_block = aes_nohw_encrypt;
  }
  return aes_nohw_ctr32_encrypt_blocks;
}

#if defined(OPENSSL_32_BIT)
#define EVP_AES_GCM_CTX_PADDING (4+8)
#else
#define EVP_AES_GCM_CTX_PADDING 8
#endif

static EVP_AES_GCM_CTX *aes_gcm_from_cipher_ctx(EVP_CIPHER_CTX *ctx) {
#if defined(__GNUC__) || defined(__clang__)
  OPENSSL_STATIC_ASSERT(
      alignof(EVP_AES_GCM_CTX) <= 16,
      "EVP_AES_GCM_CTX needs more alignment than this function provides");
#endif

  // |malloc| guarantees up to 4-byte alignment on 32-bit and 8-byte alignment
  // on 64-bit systems, so we need to adjust to reach 16-byte alignment.
  assert(ctx->cipher->ctx_size ==
         sizeof(EVP_AES_GCM_CTX) + EVP_AES_GCM_CTX_PADDING);

  char *ptr = ctx->cipher_data;
#if defined(OPENSSL_32_BIT)
  assert((uintptr_t)ptr % 4 == 0);
  ptr += (uintptr_t)ptr & 4;
#endif
  assert((uintptr_t)ptr % 8 == 0);
  ptr += (uintptr_t)ptr & 8;
  return (EVP_AES_GCM_CTX *)ptr;
}
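
// As an illustration of the adjustment above: on a 64-bit system, if
// |cipher_data| sits at an address congruent to 8 mod 16, then
// ((uintptr_t)ptr & 8) == 8 and the pointer is bumped to the next 16-byte
// boundary; if the address is already 16-byte aligned, the adjustment is zero.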

static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
                            const uint8_t *iv, int enc) {
  EVP_AES_GCM_CTX *gctx = aes_gcm_from_cipher_ctx(ctx);
  if (!iv && !key) {
    return 1;
  }

  switch (ctx->key_len) {
    case 16:
      boringssl_fips_inc_counter(fips_counter_evp_aes_128_gcm);
      break;

    case 32:
      boringssl_fips_inc_counter(fips_counter_evp_aes_256_gcm);
      break;
  }

  if (key) {
    OPENSSL_memset(&gctx->gcm, 0, sizeof(gctx->gcm));
    gctx->ctr = aes_ctr_set_key(&gctx->ks.ks, &gctx->gcm.gcm_key, NULL, key,
                                ctx->key_len);
    // If we have an IV, we can set it directly; otherwise use the saved IV.
    if (iv == NULL && gctx->iv_set) {
      iv = gctx->iv;
    }
    if (iv) {
      CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, iv, gctx->ivlen);
      gctx->iv_set = 1;
    }
    gctx->key_set = 1;
  } else {
    // If the key is already set, use the IV now; otherwise just save a copy.
    if (gctx->key_set) {
      CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, iv, gctx->ivlen);
    } else {
      OPENSSL_memcpy(gctx->iv, iv, gctx->ivlen);
    }
    gctx->iv_set = 1;
    gctx->iv_gen = 0;
  }
  return 1;
}

static void aes_gcm_cleanup(EVP_CIPHER_CTX *c) {
  EVP_AES_GCM_CTX *gctx = aes_gcm_from_cipher_ctx(c);
  OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));
  if (gctx->iv != c->iv) {
    OPENSSL_free(gctx->iv);
  }
}

// Increment the counter (a big-endian 64-bit integer) by one.
static void ctr64_inc(uint8_t *counter) {
  int n = 8;
  uint8_t c;

  do {
    --n;
    c = counter[n];
    ++c;
    counter[n] = c;
    if (c) {
      return;
    }
  } while (n);
}
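
// As a worked example of |ctr64_inc|: the eight-byte counter
// 00 00 00 00 00 00 01 ff increments to 00 00 00 00 00 00 02 00; the final
// byte wraps to zero and the carry stops at the first byte that does not
// overflow.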

static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr) {
  EVP_AES_GCM_CTX *gctx = aes_gcm_from_cipher_ctx(c);
  switch (type) {
    case EVP_CTRL_INIT:
      gctx->key_set = 0;
      gctx->iv_set = 0;
      gctx->ivlen = c->cipher->iv_len;
      gctx->iv = c->iv;
      gctx->taglen = -1;
      gctx->iv_gen = 0;
      return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
      if (arg <= 0) {
        return 0;
      }

      // Allocate memory for the IV if needed.
      if (arg > EVP_MAX_IV_LENGTH && arg > gctx->ivlen) {
        if (gctx->iv != c->iv) {
          OPENSSL_free(gctx->iv);
        }
        gctx->iv = OPENSSL_malloc(arg);
        if (!gctx->iv) {
          return 0;
        }
      }
      gctx->ivlen = arg;
      return 1;

    case EVP_CTRL_AEAD_SET_TAG:
      if (arg <= 0 || arg > 16 || c->encrypt) {
        return 0;
      }
      OPENSSL_memcpy(c->buf, ptr, arg);
      gctx->taglen = arg;
      return 1;

    case EVP_CTRL_AEAD_GET_TAG:
      if (arg <= 0 || arg > 16 || !c->encrypt || gctx->taglen < 0) {
        return 0;
      }
      OPENSSL_memcpy(ptr, c->buf, arg);
      return 1;

    case EVP_CTRL_AEAD_SET_IV_FIXED:
      // Special case: -1 length restores the whole IV.
      if (arg == -1) {
        OPENSSL_memcpy(gctx->iv, ptr, gctx->ivlen);
        gctx->iv_gen = 1;
        return 1;
      }
      // The fixed field must be at least 4 bytes and the invocation field
      // at least 8.
      if (arg < 4 || (gctx->ivlen - arg) < 8) {
        return 0;
      }
      if (arg) {
        OPENSSL_memcpy(gctx->iv, ptr, arg);
      }
      if (c->encrypt && !RAND_bytes(gctx->iv + arg, gctx->ivlen - arg)) {
        return 0;
      }
      gctx->iv_gen = 1;
      return 1;

    case EVP_CTRL_GCM_IV_GEN:
      if (gctx->iv_gen == 0 || gctx->key_set == 0) {
        return 0;
      }
      CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, gctx->iv, gctx->ivlen);
      if (arg <= 0 || arg > gctx->ivlen) {
        arg = gctx->ivlen;
      }
      OPENSSL_memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
      // The invocation field is at least 8 bytes in size, so there is no need
      // to check for wraparound or to increment more than the last 8 bytes.
      ctr64_inc(gctx->iv + gctx->ivlen - 8);
      gctx->iv_set = 1;
      return 1;

    case EVP_CTRL_GCM_SET_IV_INV:
      if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt) {
        return 0;
      }
      OPENSSL_memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
      CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, gctx->iv, gctx->ivlen);
      gctx->iv_set = 1;
      return 1;

    case EVP_CTRL_COPY: {
      EVP_CIPHER_CTX *out = ptr;
      EVP_AES_GCM_CTX *gctx_out = aes_gcm_from_cipher_ctx(out);
      // |EVP_CIPHER_CTX_copy| copies this generically, but we must redo it in
      // case |out->cipher_data| and |in->cipher_data| are differently aligned.
      OPENSSL_memcpy(gctx_out, gctx, sizeof(EVP_AES_GCM_CTX));
      if (gctx->iv == c->iv) {
        gctx_out->iv = out->iv;
      } else {
        gctx_out->iv = OPENSSL_malloc(gctx->ivlen);
        if (!gctx_out->iv) {
          return 0;
        }
        OPENSSL_memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
      }
      return 1;
    }

    default:
      return -1;
  }
}
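
// A minimal sketch (not part of the original file) of driving the ctrl
// handler above through the public |EVP_CIPHER_CTX_ctrl| wrapper; error
// checks are elided. After encryption completes, the tag is read back out:
//
//   uint8_t tag[16];
//   EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_IVLEN, 12, NULL);
//   /* ... EVP_EncryptUpdate / EVP_EncryptFinal_ex ... */
//   EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_GET_TAG, sizeof(tag), tag);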

static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  EVP_AES_GCM_CTX *gctx = aes_gcm_from_cipher_ctx(ctx);

  // If not set up, return an error.
  if (!gctx->key_set) {
    return -1;
  }
  if (!gctx->iv_set) {
    return -1;
  }

  if (in) {
    if (out == NULL) {
      if (!CRYPTO_gcm128_aad(&gctx->gcm, in, len)) {
        return -1;
      }
    } else if (ctx->encrypt) {
      if (gctx->ctr) {
        if (!CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm, &gctx->ks.ks, in, out, len,
                                         gctx->ctr)) {
          return -1;
        }
      } else {
        if (!CRYPTO_gcm128_encrypt(&gctx->gcm, &gctx->ks.ks, in, out, len)) {
          return -1;
        }
      }
    } else {
      if (gctx->ctr) {
        if (!CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm, &gctx->ks.ks, in, out, len,
                                         gctx->ctr)) {
          return -1;
        }
      } else {
        if (!CRYPTO_gcm128_decrypt(&gctx->gcm, &gctx->ks.ks, in, out, len)) {
          return -1;
        }
      }
    }
    return len;
  } else {
    if (!ctx->encrypt) {
      if (gctx->taglen < 0 ||
          !CRYPTO_gcm128_finish(&gctx->gcm, ctx->buf, gctx->taglen)) {
        return -1;
      }
      gctx->iv_set = 0;
      return 0;
    }
    CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16);
    gctx->taglen = 16;
    // Don't reuse the IV.
    gctx->iv_set = 0;
    return 0;
  }
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_128_cbc_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_128_cbc;
  out->block_size = 16;
  out->key_len = 16;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_CBC_MODE;
  out->init = aes_init_key;
  out->cipher = aes_cbc_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_128_ctr_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_128_ctr;
  out->block_size = 1;
  out->key_len = 16;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_CTR_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ctr_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_128_ecb_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_128_ecb;
  out->block_size = 16;
  out->key_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_ECB_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ecb_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_128_ofb_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_128_ofb128;
  out->block_size = 1;
  out->key_len = 16;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_OFB_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ofb_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_128_gcm_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_128_gcm;
  out->block_size = 1;
  out->key_len = 16;
  out->iv_len = AES_GCM_NONCE_LENGTH;
  out->ctx_size = sizeof(EVP_AES_GCM_CTX) + EVP_AES_GCM_CTX_PADDING;
  out->flags = EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_CUSTOM_COPY |
               EVP_CIPH_FLAG_CUSTOM_CIPHER | EVP_CIPH_ALWAYS_CALL_INIT |
               EVP_CIPH_CTRL_INIT | EVP_CIPH_FLAG_AEAD_CIPHER;
  out->init = aes_gcm_init_key;
  out->cipher = aes_gcm_cipher;
  out->cleanup = aes_gcm_cleanup;
  out->ctrl = aes_gcm_ctrl;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_192_cbc_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_192_cbc;
  out->block_size = 16;
  out->key_len = 24;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_CBC_MODE;
  out->init = aes_init_key;
  out->cipher = aes_cbc_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_192_ctr_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_192_ctr;
  out->block_size = 1;
  out->key_len = 24;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_CTR_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ctr_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_192_ecb_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_192_ecb;
  out->block_size = 16;
  out->key_len = 24;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_ECB_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ecb_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_192_ofb_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_192_ofb128;
  out->block_size = 1;
  out->key_len = 24;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_OFB_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ofb_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_192_gcm_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_192_gcm;
  out->block_size = 1;
  out->key_len = 24;
  out->iv_len = AES_GCM_NONCE_LENGTH;
  out->ctx_size = sizeof(EVP_AES_GCM_CTX) + EVP_AES_GCM_CTX_PADDING;
  out->flags = EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_CUSTOM_COPY |
               EVP_CIPH_FLAG_CUSTOM_CIPHER | EVP_CIPH_ALWAYS_CALL_INIT |
               EVP_CIPH_CTRL_INIT | EVP_CIPH_FLAG_AEAD_CIPHER;
  out->init = aes_gcm_init_key;
  out->cipher = aes_gcm_cipher;
  out->cleanup = aes_gcm_cleanup;
  out->ctrl = aes_gcm_ctrl;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_256_cbc_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_256_cbc;
  out->block_size = 16;
  out->key_len = 32;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_CBC_MODE;
  out->init = aes_init_key;
  out->cipher = aes_cbc_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_256_ctr_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_256_ctr;
  out->block_size = 1;
  out->key_len = 32;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_CTR_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ctr_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_256_ecb_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_256_ecb;
  out->block_size = 16;
  out->key_len = 32;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_ECB_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ecb_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_256_ofb_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_256_ofb128;
  out->block_size = 1;
  out->key_len = 32;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_OFB_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ofb_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_256_gcm_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_256_gcm;
  out->block_size = 1;
  out->key_len = 32;
  out->iv_len = AES_GCM_NONCE_LENGTH;
  out->ctx_size = sizeof(EVP_AES_GCM_CTX) + EVP_AES_GCM_CTX_PADDING;
  out->flags = EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_CUSTOM_COPY |
               EVP_CIPH_FLAG_CUSTOM_CIPHER | EVP_CIPH_ALWAYS_CALL_INIT |
               EVP_CIPH_CTRL_INIT | EVP_CIPH_FLAG_AEAD_CIPHER;
  out->init = aes_gcm_init_key;
  out->cipher = aes_gcm_cipher;
  out->cleanup = aes_gcm_cleanup;
  out->ctrl = aes_gcm_ctrl;
}

#if defined(HWAES_ECB)

static int aes_hw_ecb_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out,
                             const uint8_t *in, size_t len) {
  size_t bl = ctx->cipher->block_size;

  if (len < bl) {
    return 1;
  }

  aes_hw_ecb_encrypt(in, out, len, ctx->cipher_data, ctx->encrypt);

  return 1;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_hw_128_ecb) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_128_ecb;
  out->block_size = 16;
  out->key_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_ECB_MODE;
  out->init = aes_init_key;
  out->cipher = aes_hw_ecb_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_hw_192_ecb) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_192_ecb;
  out->block_size = 16;
  out->key_len = 24;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_ECB_MODE;
  out->init = aes_init_key;
  out->cipher = aes_hw_ecb_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_hw_256_ecb) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_256_ecb;
  out->block_size = 16;
  out->key_len = 32;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_ECB_MODE;
  out->init = aes_init_key;
  out->cipher = aes_hw_ecb_cipher;
}

#define EVP_ECB_CIPHER_FUNCTION(keybits)            \
  const EVP_CIPHER *EVP_aes_##keybits##_ecb(void) { \
    if (hwaes_capable()) {                          \
      return aes_hw_##keybits##_ecb();              \
    }                                               \
    return aes_##keybits##_ecb_generic();           \
  }

#else

#define EVP_ECB_CIPHER_FUNCTION(keybits)            \
  const EVP_CIPHER *EVP_aes_##keybits##_ecb(void) { \
    return aes_##keybits##_ecb_generic();           \
  }

#endif  // HWAES_ECB

#define EVP_CIPHER_FUNCTION(keybits, mode)             \
  const EVP_CIPHER *EVP_aes_##keybits##_##mode(void) { \
    return aes_##keybits##_##mode##_generic();         \
  }

EVP_CIPHER_FUNCTION(128, cbc)
EVP_CIPHER_FUNCTION(128, ctr)
EVP_CIPHER_FUNCTION(128, ofb)
EVP_CIPHER_FUNCTION(128, gcm)

EVP_CIPHER_FUNCTION(192, cbc)
EVP_CIPHER_FUNCTION(192, ctr)
EVP_CIPHER_FUNCTION(192, ofb)
EVP_CIPHER_FUNCTION(192, gcm)

EVP_CIPHER_FUNCTION(256, cbc)
EVP_CIPHER_FUNCTION(256, ctr)
EVP_CIPHER_FUNCTION(256, ofb)
EVP_CIPHER_FUNCTION(256, gcm)

EVP_ECB_CIPHER_FUNCTION(128)
EVP_ECB_CIPHER_FUNCTION(192)
EVP_ECB_CIPHER_FUNCTION(256)


#define EVP_AEAD_AES_GCM_TAG_LEN 16

struct aead_aes_gcm_ctx {
  union {
    double align;
    AES_KEY ks;
  } ks;
  GCM128_KEY gcm_key;
  ctr128_f ctr;
};

static int aead_aes_gcm_init_impl(struct aead_aes_gcm_ctx *gcm_ctx,
                                  size_t *out_tag_len, const uint8_t *key,
                                  size_t key_len, size_t tag_len) {
  const size_t key_bits = key_len * 8;

  if (key_bits != 128 && key_bits != 192 && key_bits != 256) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_KEY_LENGTH);
    return 0;  // EVP_AEAD_CTX_init should catch this.
  }

  if (tag_len == EVP_AEAD_DEFAULT_TAG_LENGTH) {
    tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  }

  if (tag_len > EVP_AEAD_AES_GCM_TAG_LEN) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TAG_TOO_LARGE);
    return 0;
  }

  gcm_ctx->ctr =
      aes_ctr_set_key(&gcm_ctx->ks.ks, &gcm_ctx->gcm_key, NULL, key, key_len);
  *out_tag_len = tag_len;
  return 1;
}

OPENSSL_STATIC_ASSERT(sizeof(((EVP_AEAD_CTX *)NULL)->state) >=
                          sizeof(struct aead_aes_gcm_ctx),
                      "AEAD state is too small");
#if defined(__GNUC__) || defined(__clang__)
OPENSSL_STATIC_ASSERT(alignof(union evp_aead_ctx_st_state) >=
                          alignof(struct aead_aes_gcm_ctx),
                      "AEAD state has insufficient alignment");
#endif

static int aead_aes_gcm_init(EVP_AEAD_CTX *ctx, const uint8_t *key,
                             size_t key_len, size_t requested_tag_len) {
  struct aead_aes_gcm_ctx *gcm_ctx = (struct aead_aes_gcm_ctx *)&ctx->state;

  size_t actual_tag_len;
  if (!aead_aes_gcm_init_impl(gcm_ctx, &actual_tag_len, key, key_len,
                              requested_tag_len)) {
    return 0;
  }

  ctx->tag_len = actual_tag_len;
  return 1;
}

static void aead_aes_gcm_cleanup(EVP_AEAD_CTX *ctx) {}

static int aead_aes_gcm_seal_scatter_impl(
    const struct aead_aes_gcm_ctx *gcm_ctx,
    uint8_t *out, uint8_t *out_tag, size_t *out_tag_len, size_t max_out_tag_len,
    const uint8_t *nonce, size_t nonce_len,
    const uint8_t *in, size_t in_len,
    const uint8_t *extra_in, size_t extra_in_len,
    const uint8_t *ad, size_t ad_len,
    size_t tag_len) {
  if (extra_in_len + tag_len < tag_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TOO_LARGE);
    return 0;
  }
  if (max_out_tag_len < extra_in_len + tag_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BUFFER_TOO_SMALL);
    return 0;
  }
  if (nonce_len == 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_INVALID_NONCE_SIZE);
    return 0;
  }

  const AES_KEY *key = &gcm_ctx->ks.ks;

  GCM128_CONTEXT gcm;
  OPENSSL_memset(&gcm, 0, sizeof(gcm));
  OPENSSL_memcpy(&gcm.gcm_key, &gcm_ctx->gcm_key, sizeof(gcm.gcm_key));
  CRYPTO_gcm128_setiv(&gcm, key, nonce, nonce_len);

  if (ad_len > 0 && !CRYPTO_gcm128_aad(&gcm, ad, ad_len)) {
    return 0;
  }

  if (gcm_ctx->ctr) {
    if (!CRYPTO_gcm128_encrypt_ctr32(&gcm, key, in, out, in_len,
                                     gcm_ctx->ctr)) {
      return 0;
    }
  } else {
    if (!CRYPTO_gcm128_encrypt(&gcm, key, in, out, in_len)) {
      return 0;
    }
  }

  if (extra_in_len) {
    if (gcm_ctx->ctr) {
      if (!CRYPTO_gcm128_encrypt_ctr32(&gcm, key, extra_in, out_tag,
                                       extra_in_len, gcm_ctx->ctr)) {
        return 0;
      }
    } else {
      if (!CRYPTO_gcm128_encrypt(&gcm, key, extra_in, out_tag, extra_in_len)) {
        return 0;
      }
    }
  }

  CRYPTO_gcm128_tag(&gcm, out_tag + extra_in_len, tag_len);
  *out_tag_len = tag_len + extra_in_len;

  return 1;
}

static int aead_aes_gcm_seal_scatter(const EVP_AEAD_CTX *ctx, uint8_t *out,
                                     uint8_t *out_tag, size_t *out_tag_len,
                                     size_t max_out_tag_len,
                                     const uint8_t *nonce, size_t nonce_len,
                                     const uint8_t *in, size_t in_len,
                                     const uint8_t *extra_in,
                                     size_t extra_in_len,
                                     const uint8_t *ad, size_t ad_len) {
  const struct aead_aes_gcm_ctx *gcm_ctx =
      (const struct aead_aes_gcm_ctx *)&ctx->state;
  return aead_aes_gcm_seal_scatter_impl(
      gcm_ctx, out, out_tag, out_tag_len, max_out_tag_len, nonce, nonce_len, in,
      in_len, extra_in, extra_in_len, ad, ad_len, ctx->tag_len);
}

static int aead_aes_gcm_open_gather_impl(const struct aead_aes_gcm_ctx *gcm_ctx,
                                         uint8_t *out,
                                         const uint8_t *nonce, size_t nonce_len,
                                         const uint8_t *in, size_t in_len,
                                         const uint8_t *in_tag,
                                         size_t in_tag_len,
                                         const uint8_t *ad, size_t ad_len,
                                         size_t tag_len) {
  uint8_t tag[EVP_AEAD_AES_GCM_TAG_LEN];

  if (nonce_len == 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_INVALID_NONCE_SIZE);
    return 0;
  }

  if (in_tag_len != tag_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
    return 0;
  }

  const AES_KEY *key = &gcm_ctx->ks.ks;

  GCM128_CONTEXT gcm;
  OPENSSL_memset(&gcm, 0, sizeof(gcm));
  OPENSSL_memcpy(&gcm.gcm_key, &gcm_ctx->gcm_key, sizeof(gcm.gcm_key));
  CRYPTO_gcm128_setiv(&gcm, key, nonce, nonce_len);

  if (!CRYPTO_gcm128_aad(&gcm, ad, ad_len)) {
    return 0;
  }

  if (gcm_ctx->ctr) {
    if (!CRYPTO_gcm128_decrypt_ctr32(&gcm, key, in, out, in_len,
                                     gcm_ctx->ctr)) {
      return 0;
    }
  } else {
    if (!CRYPTO_gcm128_decrypt(&gcm, key, in, out, in_len)) {
      return 0;
    }
  }

  CRYPTO_gcm128_tag(&gcm, tag, tag_len);
  if (CRYPTO_memcmp(tag, in_tag, tag_len) != 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
    return 0;
  }

  return 1;
}

static int aead_aes_gcm_open_gather(const EVP_AEAD_CTX *ctx, uint8_t *out,
                                    const uint8_t *nonce, size_t nonce_len,
                                    const uint8_t *in, size_t in_len,
                                    const uint8_t *in_tag, size_t in_tag_len,
                                    const uint8_t *ad, size_t ad_len) {
  struct aead_aes_gcm_ctx *gcm_ctx = (struct aead_aes_gcm_ctx *)&ctx->state;
  return aead_aes_gcm_open_gather_impl(gcm_ctx, out, nonce, nonce_len, in,
                                       in_len, in_tag, in_tag_len, ad, ad_len,
                                       ctx->tag_len);
}

DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_128_gcm) {
  memset(out, 0, sizeof(EVP_AEAD));

  out->key_len = 16;
  out->nonce_len = AES_GCM_NONCE_LENGTH;
  out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
  out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  out->seal_scatter_supports_extra_in = 1;

  out->init = aead_aes_gcm_init;
  out->cleanup = aead_aes_gcm_cleanup;
  out->seal_scatter = aead_aes_gcm_seal_scatter;
  out->open_gather = aead_aes_gcm_open_gather;
}

DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_192_gcm) {
  memset(out, 0, sizeof(EVP_AEAD));

  out->key_len = 24;
  out->nonce_len = AES_GCM_NONCE_LENGTH;
  out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
  out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  out->seal_scatter_supports_extra_in = 1;

  out->init = aead_aes_gcm_init;
  out->cleanup = aead_aes_gcm_cleanup;
  out->seal_scatter = aead_aes_gcm_seal_scatter;
  out->open_gather = aead_aes_gcm_open_gather;
}

DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_256_gcm) {
  memset(out, 0, sizeof(EVP_AEAD));

  out->key_len = 32;
  out->nonce_len = AES_GCM_NONCE_LENGTH;
  out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
  out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  out->seal_scatter_supports_extra_in = 1;

  out->init = aead_aes_gcm_init;
  out->cleanup = aead_aes_gcm_cleanup;
  out->seal_scatter = aead_aes_gcm_seal_scatter;
  out->open_gather = aead_aes_gcm_open_gather;
}
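
// A minimal usage sketch for the AEADs defined above (illustrative, not part
// of the original file): seal |plaintext| with AES-128-GCM. |key| is assumed
// to be 16 bytes and |nonce| 12 bytes; error handling is elided.
//
//   EVP_AEAD_CTX aead;
//   if (EVP_AEAD_CTX_init(&aead, EVP_aead_aes_128_gcm(), key, 16,
//                         EVP_AEAD_DEFAULT_TAG_LENGTH, NULL)) {
//     uint8_t sealed[1024];
//     size_t sealed_len;
//     EVP_AEAD_CTX_seal(&aead, sealed, &sealed_len, sizeof(sealed),
//                       nonce, 12, plaintext, plaintext_len, ad, ad_len);
//     EVP_AEAD_CTX_cleanup(&aead);
//   }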

static int aead_aes_gcm_init_randnonce(EVP_AEAD_CTX *ctx, const uint8_t *key,
                                       size_t key_len,
                                       size_t requested_tag_len) {
  if (requested_tag_len != EVP_AEAD_DEFAULT_TAG_LENGTH) {
    if (requested_tag_len < AES_GCM_NONCE_LENGTH) {
      OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BUFFER_TOO_SMALL);
      return 0;
    }
    requested_tag_len -= AES_GCM_NONCE_LENGTH;
  }

  if (!aead_aes_gcm_init(ctx, key, key_len, requested_tag_len)) {
    return 0;
  }

  ctx->tag_len += AES_GCM_NONCE_LENGTH;
  return 1;
}

static int aead_aes_gcm_seal_scatter_randnonce(
    const EVP_AEAD_CTX *ctx,
    uint8_t *out, uint8_t *out_tag, size_t *out_tag_len, size_t max_out_tag_len,
    const uint8_t *external_nonce, size_t external_nonce_len,
    const uint8_t *in, size_t in_len,
    const uint8_t *extra_in, size_t extra_in_len,
    const uint8_t *ad, size_t ad_len) {
  if (external_nonce_len != 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_INVALID_NONCE_SIZE);
    return 0;
  }

  uint8_t nonce[AES_GCM_NONCE_LENGTH];
  if (max_out_tag_len < sizeof(nonce)) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BUFFER_TOO_SMALL);
    return 0;
  }

  RAND_bytes(nonce, sizeof(nonce));
  const struct aead_aes_gcm_ctx *gcm_ctx =
      (const struct aead_aes_gcm_ctx *)&ctx->state;
  if (!aead_aes_gcm_seal_scatter_impl(gcm_ctx, out, out_tag, out_tag_len,
                                      max_out_tag_len - AES_GCM_NONCE_LENGTH,
                                      nonce, sizeof(nonce), in, in_len,
                                      extra_in, extra_in_len, ad, ad_len,
                                      ctx->tag_len - AES_GCM_NONCE_LENGTH)) {
    return 0;
  }

  assert(*out_tag_len + sizeof(nonce) <= max_out_tag_len);
  memcpy(out_tag + *out_tag_len, nonce, sizeof(nonce));
  *out_tag_len += sizeof(nonce);

  return 1;
}

static int aead_aes_gcm_open_gather_randnonce(
    const EVP_AEAD_CTX *ctx, uint8_t *out,
    const uint8_t *external_nonce, size_t external_nonce_len,
    const uint8_t *in, size_t in_len,
    const uint8_t *in_tag, size_t in_tag_len,
    const uint8_t *ad, size_t ad_len) {
  if (external_nonce_len != 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_INVALID_NONCE_SIZE);
    return 0;
  }

  if (in_tag_len < AES_GCM_NONCE_LENGTH) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
    return 0;
  }
  const uint8_t *nonce = in_tag + in_tag_len - AES_GCM_NONCE_LENGTH;

  const struct aead_aes_gcm_ctx *gcm_ctx =
      (const struct aead_aes_gcm_ctx *)&ctx->state;
  return aead_aes_gcm_open_gather_impl(
      gcm_ctx, out, nonce, AES_GCM_NONCE_LENGTH, in, in_len, in_tag,
      in_tag_len - AES_GCM_NONCE_LENGTH, ad, ad_len,
      ctx->tag_len - AES_GCM_NONCE_LENGTH);
}

DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_128_gcm_randnonce) {
  memset(out, 0, sizeof(EVP_AEAD));

  out->key_len = 16;
  out->nonce_len = 0;
  out->overhead = EVP_AEAD_AES_GCM_TAG_LEN + AES_GCM_NONCE_LENGTH;
  out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN + AES_GCM_NONCE_LENGTH;
  out->seal_scatter_supports_extra_in = 1;

  out->init = aead_aes_gcm_init_randnonce;
  out->cleanup = aead_aes_gcm_cleanup;
  out->seal_scatter = aead_aes_gcm_seal_scatter_randnonce;
  out->open_gather = aead_aes_gcm_open_gather_randnonce;
}

DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_256_gcm_randnonce) {
  memset(out, 0, sizeof(EVP_AEAD));

  out->key_len = 32;
  out->nonce_len = 0;
  out->overhead = EVP_AEAD_AES_GCM_TAG_LEN + AES_GCM_NONCE_LENGTH;
  out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN + AES_GCM_NONCE_LENGTH;
  out->seal_scatter_supports_extra_in = 1;

  out->init = aead_aes_gcm_init_randnonce;
  out->cleanup = aead_aes_gcm_cleanup;
  out->seal_scatter = aead_aes_gcm_seal_scatter_randnonce;
  out->open_gather = aead_aes_gcm_open_gather_randnonce;
}

struct aead_aes_gcm_tls12_ctx {
  struct aead_aes_gcm_ctx gcm_ctx;
  uint64_t min_next_nonce;
};

OPENSSL_STATIC_ASSERT(sizeof(((EVP_AEAD_CTX *)NULL)->state) >=
                          sizeof(struct aead_aes_gcm_tls12_ctx),
                      "AEAD state is too small");
#if defined(__GNUC__) || defined(__clang__)
OPENSSL_STATIC_ASSERT(alignof(union evp_aead_ctx_st_state) >=
                          alignof(struct aead_aes_gcm_tls12_ctx),
                      "AEAD state has insufficient alignment");
#endif

static int aead_aes_gcm_tls12_init(EVP_AEAD_CTX *ctx, const uint8_t *key,
                                   size_t key_len, size_t requested_tag_len) {
  struct aead_aes_gcm_tls12_ctx *gcm_ctx =
      (struct aead_aes_gcm_tls12_ctx *)&ctx->state;

  gcm_ctx->min_next_nonce = 0;

  size_t actual_tag_len;
  if (!aead_aes_gcm_init_impl(&gcm_ctx->gcm_ctx, &actual_tag_len, key, key_len,
                              requested_tag_len)) {
    return 0;
  }

  ctx->tag_len = actual_tag_len;
  return 1;
}

static int aead_aes_gcm_tls12_seal_scatter(
    const EVP_AEAD_CTX *ctx, uint8_t *out, uint8_t *out_tag,
    size_t *out_tag_len, size_t max_out_tag_len, const uint8_t *nonce,
    size_t nonce_len, const uint8_t *in, size_t in_len, const uint8_t *extra_in,
    size_t extra_in_len, const uint8_t *ad, size_t ad_len) {
  struct aead_aes_gcm_tls12_ctx *gcm_ctx =
      (struct aead_aes_gcm_tls12_ctx *)&ctx->state;

  if (nonce_len != AES_GCM_NONCE_LENGTH) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_NONCE_SIZE);
    return 0;
  }

  // The given nonces must be strictly monotonically increasing.
  uint64_t given_counter;
  OPENSSL_memcpy(&given_counter, nonce + nonce_len - sizeof(given_counter),
                 sizeof(given_counter));
  given_counter = CRYPTO_bswap8(given_counter);
  if (given_counter == UINT64_MAX ||
      given_counter < gcm_ctx->min_next_nonce) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_INVALID_NONCE);
    return 0;
  }

  gcm_ctx->min_next_nonce = given_counter + 1;

  return aead_aes_gcm_seal_scatter(ctx, out, out_tag, out_tag_len,
                                   max_out_tag_len, nonce, nonce_len, in,
                                   in_len, extra_in, extra_in_len, ad, ad_len);
}

DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_128_gcm_tls12) {
  memset(out, 0, sizeof(EVP_AEAD));

  out->key_len = 16;
  out->nonce_len = AES_GCM_NONCE_LENGTH;
  out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
  out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  out->seal_scatter_supports_extra_in = 1;

  out->init = aead_aes_gcm_tls12_init;
  out->cleanup = aead_aes_gcm_cleanup;
  out->seal_scatter = aead_aes_gcm_tls12_seal_scatter;
  out->open_gather = aead_aes_gcm_open_gather;
}

DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_256_gcm_tls12) {
  memset(out, 0, sizeof(EVP_AEAD));

  out->key_len = 32;
  out->nonce_len = AES_GCM_NONCE_LENGTH;
  out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
  out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  out->seal_scatter_supports_extra_in = 1;

  out->init = aead_aes_gcm_tls12_init;
  out->cleanup = aead_aes_gcm_cleanup;
  out->seal_scatter = aead_aes_gcm_tls12_seal_scatter;
  out->open_gather = aead_aes_gcm_open_gather;
}

struct aead_aes_gcm_tls13_ctx {
  struct aead_aes_gcm_ctx gcm_ctx;
  uint64_t min_next_nonce;
  uint64_t mask;
  uint8_t first;
};

OPENSSL_STATIC_ASSERT(sizeof(((EVP_AEAD_CTX *)NULL)->state) >=
                          sizeof(struct aead_aes_gcm_tls13_ctx),
                      "AEAD state is too small");
#if defined(__GNUC__) || defined(__clang__)
OPENSSL_STATIC_ASSERT(alignof(union evp_aead_ctx_st_state) >=
                          alignof(struct aead_aes_gcm_tls13_ctx),
                      "AEAD state has insufficient alignment");
#endif

static int aead_aes_gcm_tls13_init(EVP_AEAD_CTX *ctx, const uint8_t *key,
                                   size_t key_len, size_t requested_tag_len) {
  struct aead_aes_gcm_tls13_ctx *gcm_ctx =
      (struct aead_aes_gcm_tls13_ctx *)&ctx->state;

  gcm_ctx->min_next_nonce = 0;
  gcm_ctx->first = 1;

  size_t actual_tag_len;
  if (!aead_aes_gcm_init_impl(&gcm_ctx->gcm_ctx, &actual_tag_len, key, key_len,
                              requested_tag_len)) {
    return 0;
  }

  ctx->tag_len = actual_tag_len;
  return 1;
}

static int aead_aes_gcm_tls13_seal_scatter(
    const EVP_AEAD_CTX *ctx, uint8_t *out, uint8_t *out_tag,
    size_t *out_tag_len, size_t max_out_tag_len, const uint8_t *nonce,
    size_t nonce_len, const uint8_t *in, size_t in_len, const uint8_t *extra_in,
    size_t extra_in_len, const uint8_t *ad, size_t ad_len) {
  struct aead_aes_gcm_tls13_ctx *gcm_ctx =
      (struct aead_aes_gcm_tls13_ctx *)&ctx->state;

  if (nonce_len != AES_GCM_NONCE_LENGTH) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_NONCE_SIZE);
    return 0;
  }

  // The given nonces must be strictly monotonically increasing. See
  // https://tools.ietf.org/html/rfc8446#section-5.3 for details of the TLS 1.3
  // nonce construction.
  uint64_t given_counter;
  OPENSSL_memcpy(&given_counter, nonce + nonce_len - sizeof(given_counter),
                 sizeof(given_counter));
  given_counter = CRYPTO_bswap8(given_counter);

  if (gcm_ctx->first) {
    // In the first call the sequence number will be zero and therefore the
    // given nonce will be 0 ^ mask = mask.
    gcm_ctx->mask = given_counter;
    gcm_ctx->first = 0;
  }
  given_counter ^= gcm_ctx->mask;

  if (given_counter == UINT64_MAX ||
      given_counter < gcm_ctx->min_next_nonce) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_INVALID_NONCE);
    return 0;
  }

  gcm_ctx->min_next_nonce = given_counter + 1;

  return aead_aes_gcm_seal_scatter(ctx, out, out_tag, out_tag_len,
                                   max_out_tag_len, nonce, nonce_len, in,
                                   in_len, extra_in, extra_in_len, ad, ad_len);
}

DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_128_gcm_tls13) {
  memset(out, 0, sizeof(EVP_AEAD));

  out->key_len = 16;
  out->nonce_len = AES_GCM_NONCE_LENGTH;
  out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
  out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  out->seal_scatter_supports_extra_in = 1;

  out->init = aead_aes_gcm_tls13_init;
  out->cleanup = aead_aes_gcm_cleanup;
  out->seal_scatter = aead_aes_gcm_tls13_seal_scatter;
  out->open_gather = aead_aes_gcm_open_gather;
}

DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_256_gcm_tls13) {
  memset(out, 0, sizeof(EVP_AEAD));

  out->key_len = 32;
  out->nonce_len = AES_GCM_NONCE_LENGTH;
  out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
  out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  out->seal_scatter_supports_extra_in = 1;

  out->init = aead_aes_gcm_tls13_init;
  out->cleanup = aead_aes_gcm_cleanup;
  out->seal_scatter = aead_aes_gcm_tls13_seal_scatter;
  out->open_gather = aead_aes_gcm_open_gather;
}

int EVP_has_aes_hardware(void) {
#if defined(OPENSSL_X86) || defined(OPENSSL_X86_64)
  return hwaes_capable() && crypto_gcm_clmul_enabled();
#elif defined(OPENSSL_ARM) || defined(OPENSSL_AARCH64)
  return hwaes_capable() && CRYPTO_is_ARMv8_PMULL_capable();
#elif defined(OPENSSL_PPC64LE)
  return CRYPTO_is_PPC64LE_vcrypto_capable();
#else
  return 0;
#endif
}

OPENSSL_MSVC_PRAGMA(warning(pop))