/* ====================================================================
 * Copyright (c) 2001-2011 The OpenSSL Project.  All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 *
 * 3. All advertising materials mentioning features or use of this
 *    software must display the following acknowledgment:
 *    "This product includes software developed by the OpenSSL Project
 *    for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
 *
 * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
 *    endorse or promote products derived from this software without
 *    prior written permission. For written permission, please contact
 *    openssl-core@openssl.org.
 *
 * 5. Products derived from this software may not be called "OpenSSL"
 *    nor may "OpenSSL" appear in their names without prior written
 *    permission of the OpenSSL Project.
 *
 * 6. Redistributions of any form whatsoever must retain the following
 *    acknowledgment:
 *    "This product includes software developed by the OpenSSL Project
 *    for use in the OpenSSL Toolkit (http://www.openssl.org/)"
 *
 * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
 * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE OpenSSL PROJECT OR
 * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
 * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
 * OF THE POSSIBILITY OF SUCH DAMAGE.
 * ==================================================================== */

#include <assert.h>
#include <string.h>

#include <openssl/aead.h>
#include <openssl/aes.h>
#include <openssl/cipher.h>
#include <openssl/cpu.h>
#include <openssl/err.h>
#include <openssl/mem.h>
#include <openssl/nid.h>
#include <openssl/rand.h>

#include "internal.h"
#include "../../internal.h"
#include "../aes/internal.h"
#include "../modes/internal.h"
#include "../delocate.h"


OPENSSL_MSVC_PRAGMA(warning(push))
OPENSSL_MSVC_PRAGMA(warning(disable: 4702))  // Unreachable code.

#if defined(BSAES)
static void vpaes_ctr32_encrypt_blocks_with_bsaes(const uint8_t *in,
                                                  uint8_t *out, size_t blocks,
                                                  const AES_KEY *key,
                                                  const uint8_t ivec[16]) {
  // |bsaes_ctr32_encrypt_blocks| is faster than |vpaes_ctr32_encrypt_blocks|,
  // but it takes at least one full 8-block batch to amortize the conversion.
  if (blocks < 8) {
    vpaes_ctr32_encrypt_blocks(in, out, blocks, key, ivec);
    return;
  }

  size_t bsaes_blocks = blocks;
  if (bsaes_blocks % 8 < 6) {
    // |bsaes_ctr32_encrypt_blocks| internally works in 8-block batches. If the
    // final batch is too small (under six blocks), it is faster to loop over
    // |vpaes_encrypt|. Round |bsaes_blocks| down to a multiple of 8.
    bsaes_blocks -= bsaes_blocks % 8;
  }

  AES_KEY bsaes;
  vpaes_encrypt_key_to_bsaes(&bsaes, key);
  bsaes_ctr32_encrypt_blocks(in, out, bsaes_blocks, &bsaes, ivec);
  OPENSSL_cleanse(&bsaes, sizeof(bsaes));

  in += 16 * bsaes_blocks;
  out += 16 * bsaes_blocks;
  blocks -= bsaes_blocks;

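  // The counter lives big-endian in the final four bytes of |ivec|. Advance
  // it past the blocks that |bsaes_ctr32_encrypt_blocks| already consumed.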
  union {
    uint32_t u32[4];
    uint8_t u8[16];
  } new_ivec;
  memcpy(new_ivec.u8, ivec, 16);
  uint32_t ctr = CRYPTO_bswap4(new_ivec.u32[3]) + bsaes_blocks;
  new_ivec.u32[3] = CRYPTO_bswap4(ctr);

  // Finish any remaining blocks with |vpaes_ctr32_encrypt_blocks|.
  vpaes_ctr32_encrypt_blocks(in, out, blocks, key, new_ivec.u8);
}
#endif  // BSAES

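// EVP_AES_KEY holds an expanded AES key schedule together with the basic
// block function and, when one is available, an accelerated CBC or CTR
// stream function.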
typedef struct {
  union {
    double align;
    AES_KEY ks;
  } ks;
  block128_f block;
  union {
    cbc128_f cbc;
    ctr128_f ctr;
  } stream;
} EVP_AES_KEY;

typedef struct {
  GCM128_CONTEXT gcm;
  union {
    double align;
    AES_KEY ks;
  } ks;         // AES key schedule to use
  int key_set;  // Set if key initialised
  int iv_set;   // Set if an iv is set
  uint8_t *iv;  // Temporary IV store
  int ivlen;    // IV length
  int taglen;
  int iv_gen;   // It is OK to generate IVs
  ctr128_f ctr;
} EVP_AES_GCM_CTX;

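// aes_init_key expands |key| into |ctx->cipher_data|, picking the fastest
// implementation available: hardware AES first, then bit-sliced AES (bsaes,
// CBC decryption only), then vector-permutation AES (vpaes), and finally the
// constant-time fallback.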
static int aes_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
                        const uint8_t *iv, int enc) {
  int ret, mode;
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  mode = ctx->cipher->flags & EVP_CIPH_MODE_MASK;
  if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE) && !enc) {
    if (hwaes_capable()) {
      ret = aes_hw_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      dat->block = aes_hw_decrypt;
      dat->stream.cbc = NULL;
      if (mode == EVP_CIPH_CBC_MODE) {
        dat->stream.cbc = aes_hw_cbc_encrypt;
      }
    } else if (bsaes_capable() && mode == EVP_CIPH_CBC_MODE) {
      assert(vpaes_capable());
      ret = vpaes_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      if (ret == 0) {
        vpaes_decrypt_key_to_bsaes(&dat->ks.ks, &dat->ks.ks);
      }
      // If |dat->stream.cbc| is provided, |dat->block| is never used.
      dat->block = NULL;
      dat->stream.cbc = bsaes_cbc_encrypt;
    } else if (vpaes_capable()) {
      ret = vpaes_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      dat->block = vpaes_decrypt;
      dat->stream.cbc = NULL;
#if defined(VPAES_CBC)
      if (mode == EVP_CIPH_CBC_MODE) {
        dat->stream.cbc = vpaes_cbc_encrypt;
      }
#endif
    } else {
      ret = aes_nohw_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      dat->block = aes_nohw_decrypt;
      dat->stream.cbc = NULL;
#if defined(AES_NOHW_CBC)
      if (mode == EVP_CIPH_CBC_MODE) {
        dat->stream.cbc = aes_nohw_cbc_encrypt;
      }
#endif
    }
  } else if (hwaes_capable()) {
    ret = aes_hw_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
    dat->block = aes_hw_encrypt;
    dat->stream.cbc = NULL;
    if (mode == EVP_CIPH_CBC_MODE) {
      dat->stream.cbc = aes_hw_cbc_encrypt;
    } else if (mode == EVP_CIPH_CTR_MODE) {
      dat->stream.ctr = aes_hw_ctr32_encrypt_blocks;
    }
  } else if (vpaes_capable()) {
    ret = vpaes_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
    dat->block = vpaes_encrypt;
    dat->stream.cbc = NULL;
#if defined(VPAES_CBC)
    if (mode == EVP_CIPH_CBC_MODE) {
      dat->stream.cbc = vpaes_cbc_encrypt;
    }
#endif
    if (mode == EVP_CIPH_CTR_MODE) {
#if defined(BSAES)
      assert(bsaes_capable());
      dat->stream.ctr = vpaes_ctr32_encrypt_blocks_with_bsaes;
#elif defined(VPAES_CTR32)
      dat->stream.ctr = vpaes_ctr32_encrypt_blocks;
#endif
    }
  } else {
    ret = aes_nohw_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
    dat->block = aes_nohw_encrypt;
    dat->stream.cbc = NULL;
#if defined(AES_NOHW_CBC)
    if (mode == EVP_CIPH_CBC_MODE) {
      dat->stream.cbc = aes_nohw_cbc_encrypt;
    }
#endif
  }

  if (ret < 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_AES_KEY_SETUP_FAILED);
    return 0;
  }

  return 1;
}

static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  if (dat->stream.cbc) {
    (*dat->stream.cbc)(in, out, len, &dat->ks.ks, ctx->iv, ctx->encrypt);
  } else if (ctx->encrypt) {
    CRYPTO_cbc128_encrypt(in, out, len, &dat->ks.ks, ctx->iv, dat->block);
  } else {
    CRYPTO_cbc128_decrypt(in, out, len, &dat->ks.ks, ctx->iv, dat->block);
  }

  return 1;
}

static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  size_t bl = ctx->cipher->block_size;
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  if (len < bl) {
    return 1;
  }

  len -= bl;
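  // |len| was reduced by one block above, so |i <= len| visits exactly the
  // complete blocks of the input; any trailing partial block is ignored.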
  for (size_t i = 0; i <= len; i += bl) {
    (*dat->block)(in + i, out + i, &dat->ks.ks);
  }

  return 1;
}

static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

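  // Prefer the backend's 32-bit-counter stream function when it exists;
  // otherwise fall back to generic block-at-a-time CTR.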
  if (dat->stream.ctr) {
    CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks.ks, ctx->iv, ctx->buf,
                                &ctx->num, dat->stream.ctr);
  } else {
    CRYPTO_ctr128_encrypt(in, out, len, &dat->ks.ks, ctx->iv, ctx->buf,
                          &ctx->num, dat->block);
  }
  return 1;
}

static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  CRYPTO_ofb128_encrypt(in, out, len, &dat->ks.ks, ctx->iv, &ctx->num,
                        dat->block);
  return 1;
}

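// aes_ctr_set_key expands |key| into |aes_key| with the best implementation
// available, optionally initializing |gcm_key| and |out_block| when they are
// non-NULL. It returns a ctr128_f for CTR mode, or NULL if the chosen
// implementation provides none.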
ctr128_f aes_ctr_set_key(AES_KEY *aes_key, GCM128_KEY *gcm_key,
                         block128_f *out_block, const uint8_t *key,
                         size_t key_bytes) {
  if (hwaes_capable()) {
    aes_hw_set_encrypt_key(key, key_bytes * 8, aes_key);
    if (gcm_key != NULL) {
      CRYPTO_gcm128_init_key(gcm_key, aes_key, aes_hw_encrypt, 1);
    }
    if (out_block) {
      *out_block = aes_hw_encrypt;
    }
    return aes_hw_ctr32_encrypt_blocks;
  }

  if (vpaes_capable()) {
    vpaes_set_encrypt_key(key, key_bytes * 8, aes_key);
    if (out_block) {
      *out_block = vpaes_encrypt;
    }
    if (gcm_key != NULL) {
      CRYPTO_gcm128_init_key(gcm_key, aes_key, vpaes_encrypt, 0);
    }
#if defined(BSAES)
    assert(bsaes_capable());
    return vpaes_ctr32_encrypt_blocks_with_bsaes;
#elif defined(VPAES_CTR32)
    return vpaes_ctr32_encrypt_blocks;
#else
    return NULL;
#endif
  }

  aes_nohw_set_encrypt_key(key, key_bytes * 8, aes_key);
  if (gcm_key != NULL) {
    CRYPTO_gcm128_init_key(gcm_key, aes_key, aes_nohw_encrypt, 0);
  }
  if (out_block) {
    *out_block = aes_nohw_encrypt;
  }
  return NULL;
}

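// EVP_AES_GCM_CTX_PADDING is the extra space reserved in |cipher_data| so
// that |aes_gcm_from_cipher_ctx| can realign the context to 16 bytes.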
#if defined(OPENSSL_32_BIT)
#define EVP_AES_GCM_CTX_PADDING (4+8)
#else
#define EVP_AES_GCM_CTX_PADDING 8
#endif

static EVP_AES_GCM_CTX *aes_gcm_from_cipher_ctx(EVP_CIPHER_CTX *ctx) {
#if defined(__GNUC__) || defined(__clang__)
  OPENSSL_STATIC_ASSERT(
      alignof(EVP_AES_GCM_CTX) <= 16,
      "EVP_AES_GCM_CTX needs more alignment than this function provides");
#endif

  // |malloc| guarantees up to 4-byte alignment on 32-bit and 8-byte alignment
  // on 64-bit systems, so we need to adjust to reach 16-byte alignment.
  assert(ctx->cipher->ctx_size ==
         sizeof(EVP_AES_GCM_CTX) + EVP_AES_GCM_CTX_PADDING);

  char *ptr = ctx->cipher_data;
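  // If |ptr| is misaligned by 4 (or 8) bytes, adding that offending bit bumps
  // it to the next 8- (or 16-) byte boundary.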
#if defined(OPENSSL_32_BIT)
  assert((uintptr_t)ptr % 4 == 0);
  ptr += (uintptr_t)ptr & 4;
#endif
  assert((uintptr_t)ptr % 8 == 0);
  ptr += (uintptr_t)ptr & 8;
  return (EVP_AES_GCM_CTX *)ptr;
}

static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
                            const uint8_t *iv, int enc) {
  EVP_AES_GCM_CTX *gctx = aes_gcm_from_cipher_ctx(ctx);
  if (!iv && !key) {
    return 1;
  }
  if (key) {
    OPENSSL_memset(&gctx->gcm, 0, sizeof(gctx->gcm));
    gctx->ctr = aes_ctr_set_key(&gctx->ks.ks, &gctx->gcm.gcm_key, NULL, key,
                                ctx->key_len);
    // If an IV was supplied, set it directly; otherwise fall back to any
    // saved IV.
    if (iv == NULL && gctx->iv_set) {
      iv = gctx->iv;
    }
    if (iv) {
      CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, iv, gctx->ivlen);
      gctx->iv_set = 1;
    }
    gctx->key_set = 1;
  } else {
    // If the key is already set, apply the IV now; otherwise save a copy for
    // later.
    if (gctx->key_set) {
      CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, iv, gctx->ivlen);
    } else {
      OPENSSL_memcpy(gctx->iv, iv, gctx->ivlen);
    }
    gctx->iv_set = 1;
    gctx->iv_gen = 0;
  }
  return 1;
}

static void aes_gcm_cleanup(EVP_CIPHER_CTX *c) {
  EVP_AES_GCM_CTX *gctx = aes_gcm_from_cipher_ctx(c);
  OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));
  if (gctx->iv != c->iv) {
    OPENSSL_free(gctx->iv);
  }
}

// ctr64_inc increments the big-endian 64-bit counter in |counter| by one.
static void ctr64_inc(uint8_t *counter) {
  int n = 8;
  uint8_t c;

  do {
    --n;
    c = counter[n];
    ++c;
    counter[n] = c;
    if (c) {
      return;
    }
  } while (n);
}

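// aes_gcm_ctrl handles the GCM-specific |EVP_CIPHER_CTX_ctrl| operations:
// IV and tag length configuration, fixed-IV setup, IV generation, and
// context copying.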
static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr) {
  EVP_AES_GCM_CTX *gctx = aes_gcm_from_cipher_ctx(c);
  switch (type) {
    case EVP_CTRL_INIT:
      gctx->key_set = 0;
      gctx->iv_set = 0;
      gctx->ivlen = c->cipher->iv_len;
      gctx->iv = c->iv;
      gctx->taglen = -1;
      gctx->iv_gen = 0;
      return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
      if (arg <= 0) {
        return 0;
      }

      // Allocate memory for IV if needed
      if (arg > EVP_MAX_IV_LENGTH && arg > gctx->ivlen) {
        if (gctx->iv != c->iv) {
          OPENSSL_free(gctx->iv);
        }
        gctx->iv = OPENSSL_malloc(arg);
        if (!gctx->iv) {
          return 0;
        }
      }
      gctx->ivlen = arg;
      return 1;

    case EVP_CTRL_AEAD_SET_TAG:
      if (arg <= 0 || arg > 16 || c->encrypt) {
        return 0;
      }
      OPENSSL_memcpy(c->buf, ptr, arg);
      gctx->taglen = arg;
      return 1;

    case EVP_CTRL_AEAD_GET_TAG:
      if (arg <= 0 || arg > 16 || !c->encrypt || gctx->taglen < 0) {
        return 0;
      }
      OPENSSL_memcpy(ptr, c->buf, arg);
      return 1;

    case EVP_CTRL_AEAD_SET_IV_FIXED:
      // Special case: -1 length restores whole IV
      if (arg == -1) {
        OPENSSL_memcpy(gctx->iv, ptr, gctx->ivlen);
        gctx->iv_gen = 1;
        return 1;
      }
      // Fixed field must be at least 4 bytes and invocation field
      // at least 8.
      if (arg < 4 || (gctx->ivlen - arg) < 8) {
        return 0;
      }
      if (arg) {
        OPENSSL_memcpy(gctx->iv, ptr, arg);
      }
      if (c->encrypt && !RAND_bytes(gctx->iv + arg, gctx->ivlen - arg)) {
        return 0;
      }
      gctx->iv_gen = 1;
      return 1;

    case EVP_CTRL_GCM_IV_GEN:
      if (gctx->iv_gen == 0 || gctx->key_set == 0) {
        return 0;
      }
      CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, gctx->iv, gctx->ivlen);
      if (arg <= 0 || arg > gctx->ivlen) {
        arg = gctx->ivlen;
      }
      OPENSSL_memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
      // The invocation field is at least 8 bytes, so incrementing only the
      // last 8 bytes suffices and cannot wrap into the fixed field.
      ctr64_inc(gctx->iv + gctx->ivlen - 8);
      gctx->iv_set = 1;
      return 1;

    case EVP_CTRL_GCM_SET_IV_INV:
      if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt) {
        return 0;
      }
      OPENSSL_memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
      CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, gctx->iv, gctx->ivlen);
      gctx->iv_set = 1;
      return 1;

    case EVP_CTRL_COPY: {
      EVP_CIPHER_CTX *out = ptr;
      EVP_AES_GCM_CTX *gctx_out = aes_gcm_from_cipher_ctx(out);
      // |EVP_CIPHER_CTX_copy| copies this generically, but we must redo it in
      // case |out->cipher_data| and |in->cipher_data| are differently aligned.
      OPENSSL_memcpy(gctx_out, gctx, sizeof(EVP_AES_GCM_CTX));
      if (gctx->iv == c->iv) {
        gctx_out->iv = out->iv;
      } else {
        gctx_out->iv = OPENSSL_malloc(gctx->ivlen);
        if (!gctx_out->iv) {
          return 0;
        }
        OPENSSL_memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
      }
      return 1;
    }

    default:
      return -1;
  }
}

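// aes_gcm_cipher follows the |EVP_CIPH_FLAG_CUSTOM_CIPHER| convention: it
// returns the number of bytes written, zero when finalizing, or -1 on error.
// A NULL |out| feeds |in| as additional authenticated data.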
static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  EVP_AES_GCM_CTX *gctx = aes_gcm_from_cipher_ctx(ctx);

  // If not set up, return error
  if (!gctx->key_set) {
    return -1;
  }
  if (!gctx->iv_set) {
    return -1;
  }

  if (in) {
    if (out == NULL) {
      if (!CRYPTO_gcm128_aad(&gctx->gcm, in, len)) {
        return -1;
      }
    } else if (ctx->encrypt) {
      if (gctx->ctr) {
        if (!CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm, &gctx->ks.ks, in, out, len,
                                         gctx->ctr)) {
          return -1;
        }
      } else {
        if (!CRYPTO_gcm128_encrypt(&gctx->gcm, &gctx->ks.ks, in, out, len)) {
          return -1;
        }
      }
    } else {
      if (gctx->ctr) {
        if (!CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm, &gctx->ks.ks, in, out, len,
                                         gctx->ctr)) {
          return -1;
        }
      } else {
        if (!CRYPTO_gcm128_decrypt(&gctx->gcm, &gctx->ks.ks, in, out, len)) {
          return -1;
        }
      }
    }
    return len;
  } else {
    if (!ctx->encrypt) {
      if (gctx->taglen < 0 ||
          !CRYPTO_gcm128_finish(&gctx->gcm, ctx->buf, gctx->taglen)) {
        return -1;
      }
      gctx->iv_set = 0;
      return 0;
    }
    CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16);
    gctx->taglen = 16;
    // Don't reuse the IV
    gctx->iv_set = 0;
    return 0;
  }
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_128_cbc_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_128_cbc;
  out->block_size = 16;
  out->key_len = 16;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_CBC_MODE;
  out->init = aes_init_key;
  out->cipher = aes_cbc_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_128_ctr_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_128_ctr;
  out->block_size = 1;
  out->key_len = 16;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_CTR_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ctr_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_128_ecb_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_128_ecb;
  out->block_size = 16;
  out->key_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_ECB_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ecb_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_128_ofb_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_128_ofb128;
  out->block_size = 1;
  out->key_len = 16;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_OFB_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ofb_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_128_gcm_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_128_gcm;
  out->block_size = 1;
  out->key_len = 16;
  out->iv_len = 12;
  out->ctx_size = sizeof(EVP_AES_GCM_CTX) + EVP_AES_GCM_CTX_PADDING;
  out->flags = EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_CUSTOM_COPY |
               EVP_CIPH_FLAG_CUSTOM_CIPHER | EVP_CIPH_ALWAYS_CALL_INIT |
               EVP_CIPH_CTRL_INIT | EVP_CIPH_FLAG_AEAD_CIPHER;
  out->init = aes_gcm_init_key;
  out->cipher = aes_gcm_cipher;
  out->cleanup = aes_gcm_cleanup;
  out->ctrl = aes_gcm_ctrl;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_192_cbc_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_192_cbc;
  out->block_size = 16;
  out->key_len = 24;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_CBC_MODE;
  out->init = aes_init_key;
  out->cipher = aes_cbc_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_192_ctr_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_192_ctr;
  out->block_size = 1;
  out->key_len = 24;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_CTR_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ctr_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_192_ecb_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_192_ecb;
  out->block_size = 16;
  out->key_len = 24;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_ECB_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ecb_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_192_ofb_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_192_ofb128;
  out->block_size = 1;
  out->key_len = 24;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_OFB_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ofb_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_192_gcm_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_192_gcm;
  out->block_size = 1;
  out->key_len = 24;
  out->iv_len = 12;
  out->ctx_size = sizeof(EVP_AES_GCM_CTX) + EVP_AES_GCM_CTX_PADDING;
  out->flags = EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_CUSTOM_COPY |
               EVP_CIPH_FLAG_CUSTOM_CIPHER | EVP_CIPH_ALWAYS_CALL_INIT |
               EVP_CIPH_CTRL_INIT | EVP_CIPH_FLAG_AEAD_CIPHER;
  out->init = aes_gcm_init_key;
  out->cipher = aes_gcm_cipher;
  out->cleanup = aes_gcm_cleanup;
  out->ctrl = aes_gcm_ctrl;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_256_cbc_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_256_cbc;
  out->block_size = 16;
  out->key_len = 32;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_CBC_MODE;
  out->init = aes_init_key;
  out->cipher = aes_cbc_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_256_ctr_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_256_ctr;
  out->block_size = 1;
  out->key_len = 32;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_CTR_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ctr_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_256_ecb_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_256_ecb;
  out->block_size = 16;
  out->key_len = 32;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_ECB_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ecb_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_256_ofb_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_256_ofb128;
  out->block_size = 1;
  out->key_len = 32;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_OFB_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ofb_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_256_gcm_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_256_gcm;
  out->block_size = 1;
  out->key_len = 32;
  out->iv_len = 12;
  out->ctx_size = sizeof(EVP_AES_GCM_CTX) + EVP_AES_GCM_CTX_PADDING;
  out->flags = EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_CUSTOM_COPY |
               EVP_CIPH_FLAG_CUSTOM_CIPHER | EVP_CIPH_ALWAYS_CALL_INIT |
               EVP_CIPH_CTRL_INIT | EVP_CIPH_FLAG_AEAD_CIPHER;
  out->init = aes_gcm_init_key;
  out->cipher = aes_gcm_cipher;
  out->cleanup = aes_gcm_cleanup;
  out->ctrl = aes_gcm_ctrl;
}

#if defined(HWAES_ECB)

static int aes_hw_ecb_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out,
                             const uint8_t *in, size_t len) {
  size_t bl = ctx->cipher->block_size;

  if (len < bl) {
    return 1;
  }

  aes_hw_ecb_encrypt(in, out, len, ctx->cipher_data, ctx->encrypt);

  return 1;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_hw_128_ecb) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_128_ecb;
  out->block_size = 16;
  out->key_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_ECB_MODE;
  out->init = aes_init_key;
  out->cipher = aes_hw_ecb_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_hw_192_ecb) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_192_ecb;
  out->block_size = 16;
  out->key_len = 24;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_ECB_MODE;
  out->init = aes_init_key;
  out->cipher = aes_hw_ecb_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_hw_256_ecb) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_256_ecb;
  out->block_size = 16;
  out->key_len = 32;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_ECB_MODE;
  out->init = aes_init_key;
  out->cipher = aes_hw_ecb_cipher;
}

#define EVP_ECB_CIPHER_FUNCTION(keybits)            \
  const EVP_CIPHER *EVP_aes_##keybits##_ecb(void) { \
    if (hwaes_capable()) {                          \
      return aes_hw_##keybits##_ecb();              \
    }                                               \
    return aes_##keybits##_ecb_generic();           \
  }

#else

#define EVP_ECB_CIPHER_FUNCTION(keybits)            \
  const EVP_CIPHER *EVP_aes_##keybits##_ecb(void) { \
    return aes_##keybits##_ecb_generic();           \
  }

#endif  // HWAES_ECB

#define EVP_CIPHER_FUNCTION(keybits, mode)             \
  const EVP_CIPHER *EVP_aes_##keybits##_##mode(void) { \
    return aes_##keybits##_##mode##_generic();         \
  }

EVP_CIPHER_FUNCTION(128, cbc)
EVP_CIPHER_FUNCTION(128, ctr)
EVP_CIPHER_FUNCTION(128, ofb)
EVP_CIPHER_FUNCTION(128, gcm)

EVP_CIPHER_FUNCTION(192, cbc)
EVP_CIPHER_FUNCTION(192, ctr)
EVP_CIPHER_FUNCTION(192, ofb)
EVP_CIPHER_FUNCTION(192, gcm)

EVP_CIPHER_FUNCTION(256, cbc)
EVP_CIPHER_FUNCTION(256, ctr)
EVP_CIPHER_FUNCTION(256, ofb)
EVP_CIPHER_FUNCTION(256, gcm)

EVP_ECB_CIPHER_FUNCTION(128)
EVP_ECB_CIPHER_FUNCTION(192)
EVP_ECB_CIPHER_FUNCTION(256)


#define EVP_AEAD_AES_GCM_TAG_LEN 16

struct aead_aes_gcm_ctx {
  union {
    double align;
    AES_KEY ks;
  } ks;
  GCM128_KEY gcm_key;
  ctr128_f ctr;
};

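// aead_aes_gcm_init_impl performs the setup shared by all the GCM AEADs: it
// validates the key and tag lengths, expands the key, and reports the tag
// length actually in use via |out_tag_len|.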
static int aead_aes_gcm_init_impl(struct aead_aes_gcm_ctx *gcm_ctx,
                                  size_t *out_tag_len, const uint8_t *key,
                                  size_t key_len, size_t tag_len) {
  const size_t key_bits = key_len * 8;

  if (key_bits != 128 && key_bits != 192 && key_bits != 256) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_KEY_LENGTH);
    return 0;  // EVP_AEAD_CTX_init should catch this.
  }

  if (tag_len == EVP_AEAD_DEFAULT_TAG_LENGTH) {
    tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  }

  if (tag_len > EVP_AEAD_AES_GCM_TAG_LEN) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TAG_TOO_LARGE);
    return 0;
  }

  gcm_ctx->ctr =
      aes_ctr_set_key(&gcm_ctx->ks.ks, &gcm_ctx->gcm_key, NULL, key, key_len);
  *out_tag_len = tag_len;
  return 1;
}

OPENSSL_STATIC_ASSERT(sizeof(((EVP_AEAD_CTX *)NULL)->state) >=
                          sizeof(struct aead_aes_gcm_ctx),
                      "AEAD state is too small");
#if defined(__GNUC__) || defined(__clang__)
OPENSSL_STATIC_ASSERT(alignof(union evp_aead_ctx_st_state) >=
                          alignof(struct aead_aes_gcm_ctx),
                      "AEAD state has insufficient alignment");
#endif

static int aead_aes_gcm_init(EVP_AEAD_CTX *ctx, const uint8_t *key,
                             size_t key_len, size_t requested_tag_len) {
  struct aead_aes_gcm_ctx *gcm_ctx = (struct aead_aes_gcm_ctx *) &ctx->state;

  size_t actual_tag_len;
  if (!aead_aes_gcm_init_impl(gcm_ctx, &actual_tag_len, key, key_len,
                              requested_tag_len)) {
    return 0;
  }

  ctx->tag_len = actual_tag_len;
  return 1;
}

static void aead_aes_gcm_cleanup(EVP_AEAD_CTX *ctx) {}

static int aead_aes_gcm_seal_scatter(const EVP_AEAD_CTX *ctx, uint8_t *out,
                                     uint8_t *out_tag, size_t *out_tag_len,
                                     size_t max_out_tag_len,
                                     const uint8_t *nonce, size_t nonce_len,
                                     const uint8_t *in, size_t in_len,
                                     const uint8_t *extra_in,
                                     size_t extra_in_len,
                                     const uint8_t *ad, size_t ad_len) {
  struct aead_aes_gcm_ctx *gcm_ctx = (struct aead_aes_gcm_ctx *) &ctx->state;

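  // Reject |extra_in_len| values where |extra_in_len + ctx->tag_len| would
  // overflow.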
  if (extra_in_len + ctx->tag_len < ctx->tag_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TOO_LARGE);
    return 0;
  }
  if (max_out_tag_len < extra_in_len + ctx->tag_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BUFFER_TOO_SMALL);
    return 0;
  }
  if (nonce_len == 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_INVALID_NONCE_SIZE);
    return 0;
  }

  const AES_KEY *key = &gcm_ctx->ks.ks;

  GCM128_CONTEXT gcm;
  OPENSSL_memset(&gcm, 0, sizeof(gcm));
  OPENSSL_memcpy(&gcm.gcm_key, &gcm_ctx->gcm_key, sizeof(gcm.gcm_key));
  CRYPTO_gcm128_setiv(&gcm, key, nonce, nonce_len);

  if (ad_len > 0 && !CRYPTO_gcm128_aad(&gcm, ad, ad_len)) {
    return 0;
  }

  if (gcm_ctx->ctr) {
    if (!CRYPTO_gcm128_encrypt_ctr32(&gcm, key, in, out, in_len,
                                     gcm_ctx->ctr)) {
      return 0;
    }
  } else {
    if (!CRYPTO_gcm128_encrypt(&gcm, key, in, out, in_len)) {
      return 0;
    }
  }

  if (extra_in_len) {
    if (gcm_ctx->ctr) {
      if (!CRYPTO_gcm128_encrypt_ctr32(&gcm, key, extra_in, out_tag,
                                       extra_in_len, gcm_ctx->ctr)) {
        return 0;
      }
    } else {
      if (!CRYPTO_gcm128_encrypt(&gcm, key, extra_in, out_tag, extra_in_len)) {
        return 0;
      }
    }
  }

  CRYPTO_gcm128_tag(&gcm, out_tag + extra_in_len, ctx->tag_len);
  *out_tag_len = ctx->tag_len + extra_in_len;

  return 1;
}

static int aead_aes_gcm_open_gather(const EVP_AEAD_CTX *ctx, uint8_t *out,
                                    const uint8_t *nonce, size_t nonce_len,
                                    const uint8_t *in, size_t in_len,
                                    const uint8_t *in_tag, size_t in_tag_len,
                                    const uint8_t *ad, size_t ad_len) {
  struct aead_aes_gcm_ctx *gcm_ctx = (struct aead_aes_gcm_ctx *) &ctx->state;
  uint8_t tag[EVP_AEAD_AES_GCM_TAG_LEN];

  if (nonce_len == 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_INVALID_NONCE_SIZE);
    return 0;
  }

  if (in_tag_len != ctx->tag_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
    return 0;
  }

  const AES_KEY *key = &gcm_ctx->ks.ks;

  GCM128_CONTEXT gcm;
  OPENSSL_memset(&gcm, 0, sizeof(gcm));
  OPENSSL_memcpy(&gcm.gcm_key, &gcm_ctx->gcm_key, sizeof(gcm.gcm_key));
  CRYPTO_gcm128_setiv(&gcm, key, nonce, nonce_len);

  if (!CRYPTO_gcm128_aad(&gcm, ad, ad_len)) {
    return 0;
  }

  if (gcm_ctx->ctr) {
    if (!CRYPTO_gcm128_decrypt_ctr32(&gcm, key, in, out, in_len,
                                     gcm_ctx->ctr)) {
      return 0;
    }
  } else {
    if (!CRYPTO_gcm128_decrypt(&gcm, key, in, out, in_len)) {
      return 0;
    }
  }

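  // Compare tags with |CRYPTO_memcmp| so the comparison runs in constant
  // time and does not leak the position of a mismatch.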
  CRYPTO_gcm128_tag(&gcm, tag, ctx->tag_len);
  if (CRYPTO_memcmp(tag, in_tag, ctx->tag_len) != 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
    return 0;
  }

  return 1;
}

DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_128_gcm) {
  memset(out, 0, sizeof(EVP_AEAD));

  out->key_len = 16;
  out->nonce_len = 12;
  out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
  out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  out->seal_scatter_supports_extra_in = 1;

  out->init = aead_aes_gcm_init;
  out->cleanup = aead_aes_gcm_cleanup;
  out->seal_scatter = aead_aes_gcm_seal_scatter;
  out->open_gather = aead_aes_gcm_open_gather;
}

DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_192_gcm) {
  memset(out, 0, sizeof(EVP_AEAD));

  out->key_len = 24;
  out->nonce_len = 12;
  out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
  out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  out->seal_scatter_supports_extra_in = 1;

  out->init = aead_aes_gcm_init;
  out->cleanup = aead_aes_gcm_cleanup;
  out->seal_scatter = aead_aes_gcm_seal_scatter;
  out->open_gather = aead_aes_gcm_open_gather;
}

DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_256_gcm) {
  memset(out, 0, sizeof(EVP_AEAD));

  out->key_len = 32;
  out->nonce_len = 12;
  out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
  out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  out->seal_scatter_supports_extra_in = 1;

  out->init = aead_aes_gcm_init;
  out->cleanup = aead_aes_gcm_cleanup;
  out->seal_scatter = aead_aes_gcm_seal_scatter;
  out->open_gather = aead_aes_gcm_open_gather;
}

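// aead_aes_gcm_tls12_ctx pairs the GCM state with the smallest explicit nonce
// the next seal operation may use, so TLS 1.2 record nonces are enforced to
// be strictly increasing.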
struct aead_aes_gcm_tls12_ctx {
  struct aead_aes_gcm_ctx gcm_ctx;
  uint64_t min_next_nonce;
};

OPENSSL_STATIC_ASSERT(sizeof(((EVP_AEAD_CTX *)NULL)->state) >=
                          sizeof(struct aead_aes_gcm_tls12_ctx),
                      "AEAD state is too small");
#if defined(__GNUC__) || defined(__clang__)
OPENSSL_STATIC_ASSERT(alignof(union evp_aead_ctx_st_state) >=
                          alignof(struct aead_aes_gcm_tls12_ctx),
                      "AEAD state has insufficient alignment");
#endif

static int aead_aes_gcm_tls12_init(EVP_AEAD_CTX *ctx, const uint8_t *key,
                                   size_t key_len, size_t requested_tag_len) {
  struct aead_aes_gcm_tls12_ctx *gcm_ctx =
      (struct aead_aes_gcm_tls12_ctx *) &ctx->state;

  gcm_ctx->min_next_nonce = 0;

  size_t actual_tag_len;
  if (!aead_aes_gcm_init_impl(&gcm_ctx->gcm_ctx, &actual_tag_len, key, key_len,
                              requested_tag_len)) {
    return 0;
  }

  ctx->tag_len = actual_tag_len;
  return 1;
}

static int aead_aes_gcm_tls12_seal_scatter(
    const EVP_AEAD_CTX *ctx, uint8_t *out, uint8_t *out_tag,
    size_t *out_tag_len, size_t max_out_tag_len, const uint8_t *nonce,
    size_t nonce_len, const uint8_t *in, size_t in_len, const uint8_t *extra_in,
    size_t extra_in_len, const uint8_t *ad, size_t ad_len) {
  struct aead_aes_gcm_tls12_ctx *gcm_ctx =
      (struct aead_aes_gcm_tls12_ctx *) &ctx->state;

  if (nonce_len != 12) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_NONCE_SIZE);
    return 0;
  }

  // The given nonces must be strictly monotonically increasing.
  uint64_t given_counter;
  OPENSSL_memcpy(&given_counter, nonce + nonce_len - sizeof(given_counter),
                 sizeof(given_counter));
  given_counter = CRYPTO_bswap8(given_counter);
  if (given_counter == UINT64_MAX ||
      given_counter < gcm_ctx->min_next_nonce) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_INVALID_NONCE);
    return 0;
  }

  gcm_ctx->min_next_nonce = given_counter + 1;

  return aead_aes_gcm_seal_scatter(ctx, out, out_tag, out_tag_len,
                                   max_out_tag_len, nonce, nonce_len, in,
                                   in_len, extra_in, extra_in_len, ad, ad_len);
}

DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_128_gcm_tls12) {
  memset(out, 0, sizeof(EVP_AEAD));

  out->key_len = 16;
  out->nonce_len = 12;
  out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
  out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  out->seal_scatter_supports_extra_in = 1;

  out->init = aead_aes_gcm_tls12_init;
  out->cleanup = aead_aes_gcm_cleanup;
  out->seal_scatter = aead_aes_gcm_tls12_seal_scatter;
  out->open_gather = aead_aes_gcm_open_gather;
}

DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_256_gcm_tls12) {
  memset(out, 0, sizeof(EVP_AEAD));

  out->key_len = 32;
  out->nonce_len = 12;
  out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
  out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  out->seal_scatter_supports_extra_in = 1;

  out->init = aead_aes_gcm_tls12_init;
  out->cleanup = aead_aes_gcm_cleanup;
  out->seal_scatter = aead_aes_gcm_tls12_seal_scatter;
  out->open_gather = aead_aes_gcm_open_gather;
}

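// In TLS 1.3 the record nonce is the 64-bit sequence number XORed into the
// end of a per-connection IV. |mask| caches that IV-derived mask, which is
// captured on the first seal (where the sequence number is zero), and
// |first| records whether it has been captured yet.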
struct aead_aes_gcm_tls13_ctx {
  struct aead_aes_gcm_ctx gcm_ctx;
  uint64_t min_next_nonce;
  uint64_t mask;
  uint8_t first;
};

OPENSSL_STATIC_ASSERT(sizeof(((EVP_AEAD_CTX *)NULL)->state) >=
                          sizeof(struct aead_aes_gcm_tls13_ctx),
                      "AEAD state is too small");
#if defined(__GNUC__) || defined(__clang__)
OPENSSL_STATIC_ASSERT(alignof(union evp_aead_ctx_st_state) >=
                          alignof(struct aead_aes_gcm_tls13_ctx),
                      "AEAD state has insufficient alignment");
#endif

static int aead_aes_gcm_tls13_init(EVP_AEAD_CTX *ctx, const uint8_t *key,
                                   size_t key_len, size_t requested_tag_len) {
  struct aead_aes_gcm_tls13_ctx *gcm_ctx =
      (struct aead_aes_gcm_tls13_ctx *) &ctx->state;

  gcm_ctx->min_next_nonce = 0;
  gcm_ctx->first = 1;

  size_t actual_tag_len;
  if (!aead_aes_gcm_init_impl(&gcm_ctx->gcm_ctx, &actual_tag_len, key, key_len,
                              requested_tag_len)) {
    return 0;
  }

  ctx->tag_len = actual_tag_len;
  return 1;
}

static int aead_aes_gcm_tls13_seal_scatter(
    const EVP_AEAD_CTX *ctx, uint8_t *out, uint8_t *out_tag,
    size_t *out_tag_len, size_t max_out_tag_len, const uint8_t *nonce,
    size_t nonce_len, const uint8_t *in, size_t in_len, const uint8_t *extra_in,
    size_t extra_in_len, const uint8_t *ad, size_t ad_len) {
  struct aead_aes_gcm_tls13_ctx *gcm_ctx =
      (struct aead_aes_gcm_tls13_ctx *) &ctx->state;

  if (nonce_len != 12) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_NONCE_SIZE);
    return 0;
  }

  // The given nonces must be strictly monotonically increasing. See
  // https://tools.ietf.org/html/rfc8446#section-5.3 for details of the TLS 1.3
  // nonce construction.
  uint64_t given_counter;
  OPENSSL_memcpy(&given_counter, nonce + nonce_len - sizeof(given_counter),
                 sizeof(given_counter));
  given_counter = CRYPTO_bswap8(given_counter);

  if (gcm_ctx->first) {
    // In the first call the sequence number will be zero and therefore the
    // given nonce will be 0 ^ mask = mask.
    gcm_ctx->mask = given_counter;
    gcm_ctx->first = 0;
  }
  given_counter ^= gcm_ctx->mask;

  if (given_counter == UINT64_MAX ||
      given_counter < gcm_ctx->min_next_nonce) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_INVALID_NONCE);
    return 0;
  }

  gcm_ctx->min_next_nonce = given_counter + 1;

  return aead_aes_gcm_seal_scatter(ctx, out, out_tag, out_tag_len,
                                   max_out_tag_len, nonce, nonce_len, in,
                                   in_len, extra_in, extra_in_len, ad, ad_len);
}

DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_128_gcm_tls13) {
  memset(out, 0, sizeof(EVP_AEAD));

  out->key_len = 16;
  out->nonce_len = 12;
  out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
  out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  out->seal_scatter_supports_extra_in = 1;

  out->init = aead_aes_gcm_tls13_init;
  out->cleanup = aead_aes_gcm_cleanup;
  out->seal_scatter = aead_aes_gcm_tls13_seal_scatter;
  out->open_gather = aead_aes_gcm_open_gather;
}

DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_256_gcm_tls13) {
  memset(out, 0, sizeof(EVP_AEAD));

  out->key_len = 32;
  out->nonce_len = 12;
  out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
  out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  out->seal_scatter_supports_extra_in = 1;

  out->init = aead_aes_gcm_tls13_init;
  out->cleanup = aead_aes_gcm_cleanup;
  out->seal_scatter = aead_aes_gcm_tls13_seal_scatter;
  out->open_gather = aead_aes_gcm_open_gather;
}

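// EVP_has_aes_hardware returns one if both AES and the polynomial
// multiplication used by GHASH have hardware support, and zero otherwise.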
int EVP_has_aes_hardware(void) {
#if defined(OPENSSL_X86) || defined(OPENSSL_X86_64)
  return hwaes_capable() && crypto_gcm_clmul_enabled();
#elif defined(OPENSSL_ARM) || defined(OPENSSL_AARCH64)
  return hwaes_capable() && CRYPTO_is_ARMv8_PMULL_capable();
#else
  return 0;
#endif
}

OPENSSL_MSVC_PRAGMA(warning(pop))