/* ====================================================================
 * Copyright (c) 2001-2011 The OpenSSL Project.  All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 *
 * 3. All advertising materials mentioning features or use of this
 *    software must display the following acknowledgment:
 *    "This product includes software developed by the OpenSSL Project
 *    for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
 *
 * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
 *    endorse or promote products derived from this software without
 *    prior written permission. For written permission, please contact
 *    openssl-core@openssl.org.
 *
 * 5. Products derived from this software may not be called "OpenSSL"
 *    nor may "OpenSSL" appear in their names without prior written
 *    permission of the OpenSSL Project.
 *
 * 6. Redistributions of any form whatsoever must retain the following
 *    acknowledgment:
 *    "This product includes software developed by the OpenSSL Project
 *    for use in the OpenSSL Toolkit (http://www.openssl.org/)"
 *
 * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
 * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE OpenSSL PROJECT OR
 * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
 * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
 * OF THE POSSIBILITY OF SUCH DAMAGE.
 * ==================================================================== */

#include <assert.h>
#include <string.h>

#include <openssl/aead.h>
#include <openssl/aes.h>
#include <openssl/cipher.h>
#include <openssl/cpu.h>
#include <openssl/err.h>
#include <openssl/mem.h>
#include <openssl/nid.h>
#include <openssl/rand.h>

#include "internal.h"
#include "../../internal.h"
#include "../aes/internal.h"
#include "../modes/internal.h"
#include "../delocate.h"


OPENSSL_MSVC_PRAGMA(warning(push))
OPENSSL_MSVC_PRAGMA(warning(disable: 4702))  // Unreachable code.

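// In the key-schedule unions below, the |double| member exists only to force
// the embedded AES_KEY to at least 8-byte alignment.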
typedef struct {
  union {
    double align;
    AES_KEY ks;
  } ks;
  block128_f block;
  union {
    cbc128_f cbc;
    ctr128_f ctr;
  } stream;
} EVP_AES_KEY;

typedef struct {
  GCM128_CONTEXT gcm;
  union {
    double align;
    AES_KEY ks;
  } ks;         // AES key schedule to use
  int key_set;  // Set if key initialised
  int iv_set;   // Set if an iv is set
  uint8_t *iv;  // Temporary IV store
  int ivlen;    // IV length
  int taglen;
  int iv_gen;   // It is OK to generate IVs
  ctr128_f ctr;
} EVP_AES_GCM_CTX;

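// aes_init_key selects the fastest AES implementation available at runtime:
// hardware AES (|aes_hw_*|), bit-sliced AES (|bsaes_*|), vector-permute AES
// (|vpaes_*|), or the portable fallback (|aes_nohw_*|). A decryption key
// schedule is only needed for ECB and CBC decryption; every other mode here
// uses the encryption schedule.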
static int aes_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
                        const uint8_t *iv, int enc) {
  int ret, mode;
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  mode = ctx->cipher->flags & EVP_CIPH_MODE_MASK;
  if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE) && !enc) {
    if (hwaes_capable()) {
      ret = aes_hw_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      dat->block = aes_hw_decrypt;
      dat->stream.cbc = NULL;
      if (mode == EVP_CIPH_CBC_MODE) {
        dat->stream.cbc = aes_hw_cbc_encrypt;
      }
    } else if (bsaes_capable() && mode == EVP_CIPH_CBC_MODE) {
      ret = aes_nohw_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      // If |dat->stream.cbc| is provided, |dat->block| is never used.
      dat->block = NULL;
      dat->stream.cbc = bsaes_cbc_encrypt;
    } else if (vpaes_capable()) {
      ret = vpaes_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      dat->block = vpaes_decrypt;
      dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ? vpaes_cbc_encrypt : NULL;
    } else {
      ret = aes_nohw_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      dat->block = aes_nohw_decrypt;
      dat->stream.cbc = NULL;
#if defined(AES_NOHW_CBC)
      if (mode == EVP_CIPH_CBC_MODE) {
        dat->stream.cbc = aes_nohw_cbc_encrypt;
      }
#endif
    }
  } else if (hwaes_capable()) {
    ret = aes_hw_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
    dat->block = aes_hw_encrypt;
    dat->stream.cbc = NULL;
    if (mode == EVP_CIPH_CBC_MODE) {
      dat->stream.cbc = aes_hw_cbc_encrypt;
    } else if (mode == EVP_CIPH_CTR_MODE) {
      dat->stream.ctr = aes_hw_ctr32_encrypt_blocks;
    }
  } else if (bsaes_capable() && mode == EVP_CIPH_CTR_MODE) {
    ret = aes_nohw_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
    // If |dat->stream.ctr| is provided, |dat->block| is never used.
    dat->block = NULL;
    dat->stream.ctr = bsaes_ctr32_encrypt_blocks;
  } else if (vpaes_capable()) {
    ret = vpaes_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
    dat->block = vpaes_encrypt;
    dat->stream.cbc = NULL;
    if (mode == EVP_CIPH_CBC_MODE) {
      dat->stream.cbc = vpaes_cbc_encrypt;
    }
#if defined(VPAES_CTR32)
    if (mode == EVP_CIPH_CTR_MODE) {
      dat->stream.ctr = vpaes_ctr32_encrypt_blocks;
    }
#endif
  } else {
    ret = aes_nohw_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
    dat->block = aes_nohw_encrypt;
    dat->stream.cbc = NULL;
#if defined(AES_NOHW_CBC)
    if (mode == EVP_CIPH_CBC_MODE) {
      dat->stream.cbc = aes_nohw_cbc_encrypt;
    }
#endif
  }

  if (ret < 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_AES_KEY_SETUP_FAILED);
    return 0;
  }

  return 1;
}

static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  if (dat->stream.cbc) {
    (*dat->stream.cbc)(in, out, len, &dat->ks.ks, ctx->iv, ctx->encrypt);
  } else if (ctx->encrypt) {
    CRYPTO_cbc128_encrypt(in, out, len, &dat->ks.ks, ctx->iv, dat->block);
  } else {
    CRYPTO_cbc128_decrypt(in, out, len, &dat->ks.ks, ctx->iv, dat->block);
  }

  return 1;
}

static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  size_t bl = ctx->cipher->block_size;
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  if (len < bl) {
    return 1;
  }

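  // |len| is reduced by one block below, so the loop bound |i <= len| still
  // covers the final full block while ignoring any trailing partial block.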
  len -= bl;
  for (size_t i = 0; i <= len; i += bl) {
    (*dat->block)(in + i, out + i, &dat->ks.ks);
  }

  return 1;
}

static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  if (dat->stream.ctr) {
    CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks.ks, ctx->iv, ctx->buf,
                                &ctx->num, dat->stream.ctr);
  } else {
    CRYPTO_ctr128_encrypt(in, out, len, &dat->ks.ks, ctx->iv, ctx->buf,
                          &ctx->num, dat->block);
  }
  return 1;
}

static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  CRYPTO_ofb128_encrypt(in, out, len, &dat->ks.ks, ctx->iv, &ctx->num,
                        dat->block);
  return 1;
}

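// aes_ctr_set_key initialises |aes_key| with an encryption schedule for |key|,
// optionally fills in |gcm_key| and |*out_block| for the chosen
// implementation, and returns a ctr32 function, or NULL if callers should fall
// back to the block function.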
ctr128_f aes_ctr_set_key(AES_KEY *aes_key, GCM128_KEY *gcm_key,
                         block128_f *out_block, const uint8_t *key,
                         size_t key_bytes) {
  if (hwaes_capable()) {
    aes_hw_set_encrypt_key(key, key_bytes * 8, aes_key);
    if (gcm_key != NULL) {
      CRYPTO_gcm128_init_key(gcm_key, aes_key, aes_hw_encrypt, 1);
    }
    if (out_block) {
      *out_block = aes_hw_encrypt;
    }
    return aes_hw_ctr32_encrypt_blocks;
  }

  if (bsaes_capable()) {
    aes_nohw_set_encrypt_key(key, key_bytes * 8, aes_key);
    if (gcm_key != NULL) {
      CRYPTO_gcm128_init_key(gcm_key, aes_key, aes_nohw_encrypt, 0);
    }
    if (out_block) {
      *out_block = aes_nohw_encrypt;
    }
    return bsaes_ctr32_encrypt_blocks;
  }

  if (vpaes_capable()) {
    vpaes_set_encrypt_key(key, key_bytes * 8, aes_key);
    if (out_block) {
      *out_block = vpaes_encrypt;
    }
    if (gcm_key != NULL) {
      CRYPTO_gcm128_init_key(gcm_key, aes_key, vpaes_encrypt, 0);
    }
#if defined(VPAES_CTR32)
    return vpaes_ctr32_encrypt_blocks;
#else
    return NULL;
#endif
  }

  aes_nohw_set_encrypt_key(key, key_bytes * 8, aes_key);
  if (gcm_key != NULL) {
    CRYPTO_gcm128_init_key(gcm_key, aes_key, aes_nohw_encrypt, 0);
  }
  if (out_block) {
    *out_block = aes_nohw_encrypt;
  }
  return NULL;
}

#if defined(OPENSSL_32_BIT)
#define EVP_AES_GCM_CTX_PADDING (4+8)
#else
#define EVP_AES_GCM_CTX_PADDING 8
#endif

static EVP_AES_GCM_CTX *aes_gcm_from_cipher_ctx(EVP_CIPHER_CTX *ctx) {
#if defined(__GNUC__) || defined(__clang__)
  OPENSSL_STATIC_ASSERT(
      alignof(EVP_AES_GCM_CTX) <= 16,
      "EVP_AES_GCM_CTX needs more alignment than this function provides");
#endif

  // |malloc| guarantees up to 4-byte alignment on 32-bit and 8-byte alignment
  // on 64-bit systems, so we need to adjust to reach 16-byte alignment.
  assert(ctx->cipher->ctx_size ==
         sizeof(EVP_AES_GCM_CTX) + EVP_AES_GCM_CTX_PADDING);

  char *ptr = ctx->cipher_data;
#if defined(OPENSSL_32_BIT)
  assert((uintptr_t)ptr % 4 == 0);
  ptr += (uintptr_t)ptr & 4;
#endif
  assert((uintptr_t)ptr % 8 == 0);
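  // If |ptr| is 8 mod 16, this adds 8 and reaches 16-byte alignment; if it is
  // already 16-byte aligned, it adds nothing.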
  ptr += (uintptr_t)ptr & 8;
  return (EVP_AES_GCM_CTX *)ptr;
}

static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
                            const uint8_t *iv, int enc) {
  EVP_AES_GCM_CTX *gctx = aes_gcm_from_cipher_ctx(ctx);
  if (!iv && !key) {
    return 1;
  }
  if (key) {
    OPENSSL_memset(&gctx->gcm, 0, sizeof(gctx->gcm));
    gctx->ctr = aes_ctr_set_key(&gctx->ks.ks, &gctx->gcm.gcm_key, NULL, key,
                                ctx->key_len);
    // If we have an IV, set it directly; otherwise use the saved IV, if any.
    if (iv == NULL && gctx->iv_set) {
      iv = gctx->iv;
    }
    if (iv) {
      CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, iv, gctx->ivlen);
      gctx->iv_set = 1;
    }
    gctx->key_set = 1;
  } else {
    // If the key is already set, use the IV now; otherwise save a copy.
    if (gctx->key_set) {
      CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, iv, gctx->ivlen);
    } else {
      OPENSSL_memcpy(gctx->iv, iv, gctx->ivlen);
    }
    gctx->iv_set = 1;
    gctx->iv_gen = 0;
  }
  return 1;
}

static void aes_gcm_cleanup(EVP_CIPHER_CTX *c) {
  EVP_AES_GCM_CTX *gctx = aes_gcm_from_cipher_ctx(c);
  OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));
  if (gctx->iv != c->iv) {
    OPENSSL_free(gctx->iv);
  }
}

// ctr64_inc increments the 64-bit big-endian counter |counter| by one,
// propagating carries from the least significant byte.
static void ctr64_inc(uint8_t *counter) {
  int n = 8;
  uint8_t c;

  do {
    --n;
    c = counter[n];
    ++c;
    counter[n] = c;
    if (c) {
      return;
    }
  } while (n);
}

static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr) {
  EVP_AES_GCM_CTX *gctx = aes_gcm_from_cipher_ctx(c);
  switch (type) {
    case EVP_CTRL_INIT:
      gctx->key_set = 0;
      gctx->iv_set = 0;
      gctx->ivlen = c->cipher->iv_len;
      gctx->iv = c->iv;
      gctx->taglen = -1;
      gctx->iv_gen = 0;
      return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
      if (arg <= 0) {
        return 0;
      }

      // Allocate memory for IV if needed
      if (arg > EVP_MAX_IV_LENGTH && arg > gctx->ivlen) {
        if (gctx->iv != c->iv) {
          OPENSSL_free(gctx->iv);
        }
        gctx->iv = OPENSSL_malloc(arg);
        if (!gctx->iv) {
          return 0;
        }
      }
      gctx->ivlen = arg;
      return 1;

    case EVP_CTRL_AEAD_SET_TAG:
      if (arg <= 0 || arg > 16 || c->encrypt) {
        return 0;
      }
      OPENSSL_memcpy(c->buf, ptr, arg);
      gctx->taglen = arg;
      return 1;

    case EVP_CTRL_AEAD_GET_TAG:
      if (arg <= 0 || arg > 16 || !c->encrypt || gctx->taglen < 0) {
        return 0;
      }
      OPENSSL_memcpy(ptr, c->buf, arg);
      return 1;

    case EVP_CTRL_AEAD_SET_IV_FIXED:
      // Special case: -1 length restores whole IV
      if (arg == -1) {
        OPENSSL_memcpy(gctx->iv, ptr, gctx->ivlen);
        gctx->iv_gen = 1;
        return 1;
      }
      // Fixed field must be at least 4 bytes and invocation field
      // at least 8.
      if (arg < 4 || (gctx->ivlen - arg) < 8) {
        return 0;
      }
      if (arg) {
        OPENSSL_memcpy(gctx->iv, ptr, arg);
      }
      if (c->encrypt && !RAND_bytes(gctx->iv + arg, gctx->ivlen - arg)) {
        return 0;
      }
      gctx->iv_gen = 1;
      return 1;

    case EVP_CTRL_GCM_IV_GEN:
      if (gctx->iv_gen == 0 || gctx->key_set == 0) {
        return 0;
      }
      CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, gctx->iv, gctx->ivlen);
      if (arg <= 0 || arg > gctx->ivlen) {
        arg = gctx->ivlen;
      }
      OPENSSL_memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
      // Invocation field will be at least 8 bytes in size and
      // so no need to check wrap around or increment more than
      // last 8 bytes.
      ctr64_inc(gctx->iv + gctx->ivlen - 8);
      gctx->iv_set = 1;
      return 1;

    case EVP_CTRL_GCM_SET_IV_INV:
      if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt) {
        return 0;
      }
      OPENSSL_memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
      CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, gctx->iv, gctx->ivlen);
      gctx->iv_set = 1;
      return 1;

    case EVP_CTRL_COPY: {
      EVP_CIPHER_CTX *out = ptr;
      EVP_AES_GCM_CTX *gctx_out = aes_gcm_from_cipher_ctx(out);
      if (gctx->iv == c->iv) {
        gctx_out->iv = out->iv;
      } else {
        gctx_out->iv = OPENSSL_malloc(gctx->ivlen);
        if (!gctx_out->iv) {
          return 0;
        }
        OPENSSL_memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
      }
      return 1;
    }

    default:
      return -1;
  }
}

static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  EVP_AES_GCM_CTX *gctx = aes_gcm_from_cipher_ctx(ctx);

  // If not set up, return error
  if (!gctx->key_set) {
    return -1;
  }
  if (!gctx->iv_set) {
    return -1;
  }

  if (in) {
    if (out == NULL) {
      if (!CRYPTO_gcm128_aad(&gctx->gcm, in, len)) {
        return -1;
      }
    } else if (ctx->encrypt) {
      if (gctx->ctr) {
        if (!CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm, &gctx->ks.ks, in, out, len,
                                         gctx->ctr)) {
          return -1;
        }
      } else {
        if (!CRYPTO_gcm128_encrypt(&gctx->gcm, &gctx->ks.ks, in, out, len)) {
          return -1;
        }
      }
    } else {
      if (gctx->ctr) {
        if (!CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm, &gctx->ks.ks, in, out, len,
                                         gctx->ctr)) {
          return -1;
        }
      } else {
        if (!CRYPTO_gcm128_decrypt(&gctx->gcm, &gctx->ks.ks, in, out, len)) {
          return -1;
        }
      }
    }
    return len;
  } else {
    if (!ctx->encrypt) {
      if (gctx->taglen < 0 ||
          !CRYPTO_gcm128_finish(&gctx->gcm, ctx->buf, gctx->taglen)) {
        return -1;
      }
      gctx->iv_set = 0;
      return 0;
    }
    CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16);
    gctx->taglen = 16;
    // Don't reuse the IV
    gctx->iv_set = 0;
    return 0;
  }
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_128_cbc_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_128_cbc;
  out->block_size = 16;
  out->key_len = 16;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_CBC_MODE;
  out->init = aes_init_key;
  out->cipher = aes_cbc_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_128_ctr_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_128_ctr;
  out->block_size = 1;
  out->key_len = 16;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_CTR_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ctr_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_128_ecb_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_128_ecb;
  out->block_size = 16;
  out->key_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_ECB_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ecb_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_128_ofb_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_128_ofb128;
  out->block_size = 1;
  out->key_len = 16;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_OFB_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ofb_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_128_gcm_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_128_gcm;
  out->block_size = 1;
  out->key_len = 16;
  out->iv_len = 12;
  out->ctx_size = sizeof(EVP_AES_GCM_CTX) + EVP_AES_GCM_CTX_PADDING;
  out->flags = EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV |
               EVP_CIPH_FLAG_CUSTOM_CIPHER | EVP_CIPH_ALWAYS_CALL_INIT |
               EVP_CIPH_CTRL_INIT | EVP_CIPH_FLAG_AEAD_CIPHER;
  out->init = aes_gcm_init_key;
  out->cipher = aes_gcm_cipher;
  out->cleanup = aes_gcm_cleanup;
  out->ctrl = aes_gcm_ctrl;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_192_cbc_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_192_cbc;
  out->block_size = 16;
  out->key_len = 24;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_CBC_MODE;
  out->init = aes_init_key;
  out->cipher = aes_cbc_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_192_ctr_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_192_ctr;
  out->block_size = 1;
  out->key_len = 24;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_CTR_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ctr_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_192_ecb_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_192_ecb;
  out->block_size = 16;
  out->key_len = 24;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_ECB_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ecb_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_192_ofb_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_192_ofb128;
  out->block_size = 1;
  out->key_len = 24;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_OFB_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ofb_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_192_gcm_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_192_gcm;
  out->block_size = 1;
  out->key_len = 24;
  out->iv_len = 12;
  out->ctx_size = sizeof(EVP_AES_GCM_CTX) + EVP_AES_GCM_CTX_PADDING;
  out->flags = EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV |
               EVP_CIPH_FLAG_CUSTOM_CIPHER | EVP_CIPH_ALWAYS_CALL_INIT |
               EVP_CIPH_CTRL_INIT | EVP_CIPH_FLAG_AEAD_CIPHER;
  out->init = aes_gcm_init_key;
  out->cipher = aes_gcm_cipher;
  out->cleanup = aes_gcm_cleanup;
  out->ctrl = aes_gcm_ctrl;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_256_cbc_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_256_cbc;
  out->block_size = 16;
  out->key_len = 32;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_CBC_MODE;
  out->init = aes_init_key;
  out->cipher = aes_cbc_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_256_ctr_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_256_ctr;
  out->block_size = 1;
  out->key_len = 32;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_CTR_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ctr_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_256_ecb_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_256_ecb;
  out->block_size = 16;
  out->key_len = 32;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_ECB_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ecb_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_256_ofb_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_256_ofb128;
  out->block_size = 1;
  out->key_len = 32;
  out->iv_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_OFB_MODE;
  out->init = aes_init_key;
  out->cipher = aes_ofb_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_256_gcm_generic) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_256_gcm;
  out->block_size = 1;
  out->key_len = 32;
  out->iv_len = 12;
  out->ctx_size = sizeof(EVP_AES_GCM_CTX) + EVP_AES_GCM_CTX_PADDING;
  out->flags = EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV |
               EVP_CIPH_FLAG_CUSTOM_CIPHER | EVP_CIPH_ALWAYS_CALL_INIT |
               EVP_CIPH_CTRL_INIT | EVP_CIPH_FLAG_AEAD_CIPHER;
  out->init = aes_gcm_init_key;
  out->cipher = aes_gcm_cipher;
  out->cleanup = aes_gcm_cleanup;
  out->ctrl = aes_gcm_ctrl;
}

#if defined(HWAES_ECB)

static int aes_hw_ecb_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out,
                             const uint8_t *in, size_t len) {
  size_t bl = ctx->cipher->block_size;

  if (len < bl) {
    return 1;
  }

  aes_hw_ecb_encrypt(in, out, len, ctx->cipher_data, ctx->encrypt);

  return 1;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_hw_128_ecb) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_128_ecb;
  out->block_size = 16;
  out->key_len = 16;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_ECB_MODE;
  out->init = aes_init_key;
  out->cipher = aes_hw_ecb_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_hw_192_ecb) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_192_ecb;
  out->block_size = 16;
  out->key_len = 24;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_ECB_MODE;
  out->init = aes_init_key;
  out->cipher = aes_hw_ecb_cipher;
}

DEFINE_LOCAL_DATA(EVP_CIPHER, aes_hw_256_ecb) {
  memset(out, 0, sizeof(EVP_CIPHER));

  out->nid = NID_aes_256_ecb;
  out->block_size = 16;
  out->key_len = 32;
  out->ctx_size = sizeof(EVP_AES_KEY);
  out->flags = EVP_CIPH_ECB_MODE;
  out->init = aes_init_key;
  out->cipher = aes_hw_ecb_cipher;
}

#define EVP_ECB_CIPHER_FUNCTION(keybits)            \
  const EVP_CIPHER *EVP_aes_##keybits##_ecb(void) { \
    if (hwaes_capable()) {                          \
      return aes_hw_##keybits##_ecb();              \
    }                                               \
    return aes_##keybits##_ecb_generic();           \
  }

#else

#define EVP_ECB_CIPHER_FUNCTION(keybits)            \
  const EVP_CIPHER *EVP_aes_##keybits##_ecb(void) { \
    return aes_##keybits##_ecb_generic();           \
  }

#endif  // HWAES_ECB

#define EVP_CIPHER_FUNCTION(keybits, mode)             \
  const EVP_CIPHER *EVP_aes_##keybits##_##mode(void) { \
    return aes_##keybits##_##mode##_generic();         \
  }

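// For example, EVP_CIPHER_FUNCTION(128, cbc) expands to a function
// EVP_aes_128_cbc that returns the generic AES-128-CBC table defined above.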
EVP_CIPHER_FUNCTION(128, cbc)
EVP_CIPHER_FUNCTION(128, ctr)
EVP_CIPHER_FUNCTION(128, ofb)
EVP_CIPHER_FUNCTION(128, gcm)

EVP_CIPHER_FUNCTION(192, cbc)
EVP_CIPHER_FUNCTION(192, ctr)
EVP_CIPHER_FUNCTION(192, ofb)
EVP_CIPHER_FUNCTION(192, gcm)

EVP_CIPHER_FUNCTION(256, cbc)
EVP_CIPHER_FUNCTION(256, ctr)
EVP_CIPHER_FUNCTION(256, ofb)
EVP_CIPHER_FUNCTION(256, gcm)

EVP_ECB_CIPHER_FUNCTION(128)
EVP_ECB_CIPHER_FUNCTION(192)
EVP_ECB_CIPHER_FUNCTION(256)


#define EVP_AEAD_AES_GCM_TAG_LEN 16

struct aead_aes_gcm_ctx {
  union {
    double align;
    AES_KEY ks;
  } ks;
  GCM128_KEY gcm_key;
  ctr128_f ctr;
};

static int aead_aes_gcm_init_impl(struct aead_aes_gcm_ctx *gcm_ctx,
                                  size_t *out_tag_len, const uint8_t *key,
                                  size_t key_len, size_t tag_len) {
  const size_t key_bits = key_len * 8;

  if (key_bits != 128 && key_bits != 256) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_KEY_LENGTH);
    return 0;  // EVP_AEAD_CTX_init should catch this.
  }

  if (tag_len == EVP_AEAD_DEFAULT_TAG_LENGTH) {
    tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  }

  if (tag_len > EVP_AEAD_AES_GCM_TAG_LEN) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TAG_TOO_LARGE);
    return 0;
  }

  gcm_ctx->ctr =
      aes_ctr_set_key(&gcm_ctx->ks.ks, &gcm_ctx->gcm_key, NULL, key, key_len);
  *out_tag_len = tag_len;
  return 1;
}

OPENSSL_STATIC_ASSERT(sizeof(((EVP_AEAD_CTX *)NULL)->state) >=
                          sizeof(struct aead_aes_gcm_ctx),
                      "AEAD state is too small");
#if defined(__GNUC__) || defined(__clang__)
OPENSSL_STATIC_ASSERT(alignof(union evp_aead_ctx_st_state) >=
                          alignof(struct aead_aes_gcm_ctx),
                      "AEAD state has insufficient alignment");
#endif

static int aead_aes_gcm_init(EVP_AEAD_CTX *ctx, const uint8_t *key,
                             size_t key_len, size_t requested_tag_len) {
  struct aead_aes_gcm_ctx *gcm_ctx = (struct aead_aes_gcm_ctx *) &ctx->state;

  size_t actual_tag_len;
  if (!aead_aes_gcm_init_impl(gcm_ctx, &actual_tag_len, key, key_len,
                              requested_tag_len)) {
    return 0;
  }

  ctx->tag_len = actual_tag_len;
  return 1;
}

static void aead_aes_gcm_cleanup(EVP_AEAD_CTX *ctx) {}

static int aead_aes_gcm_seal_scatter(const EVP_AEAD_CTX *ctx, uint8_t *out,
                                     uint8_t *out_tag, size_t *out_tag_len,
                                     size_t max_out_tag_len,
                                     const uint8_t *nonce, size_t nonce_len,
                                     const uint8_t *in, size_t in_len,
                                     const uint8_t *extra_in,
                                     size_t extra_in_len,
                                     const uint8_t *ad, size_t ad_len) {
  struct aead_aes_gcm_ctx *gcm_ctx = (struct aead_aes_gcm_ctx *) &ctx->state;

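  // An |extra_in_len + ctx->tag_len| sum that wraps around |size_t| would
  // defeat the buffer-size check below, so reject it first.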
  if (extra_in_len + ctx->tag_len < ctx->tag_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TOO_LARGE);
    return 0;
  }
  if (max_out_tag_len < extra_in_len + ctx->tag_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BUFFER_TOO_SMALL);
    return 0;
  }
  if (nonce_len == 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_INVALID_NONCE_SIZE);
    return 0;
  }

  const AES_KEY *key = &gcm_ctx->ks.ks;

  GCM128_CONTEXT gcm;
  OPENSSL_memset(&gcm, 0, sizeof(gcm));
  OPENSSL_memcpy(&gcm.gcm_key, &gcm_ctx->gcm_key, sizeof(gcm.gcm_key));
  CRYPTO_gcm128_setiv(&gcm, key, nonce, nonce_len);

  if (ad_len > 0 && !CRYPTO_gcm128_aad(&gcm, ad, ad_len)) {
    return 0;
  }

  if (gcm_ctx->ctr) {
    if (!CRYPTO_gcm128_encrypt_ctr32(&gcm, key, in, out, in_len,
                                     gcm_ctx->ctr)) {
      return 0;
    }
  } else {
    if (!CRYPTO_gcm128_encrypt(&gcm, key, in, out, in_len)) {
      return 0;
    }
  }

  if (extra_in_len) {
    if (gcm_ctx->ctr) {
      if (!CRYPTO_gcm128_encrypt_ctr32(&gcm, key, extra_in, out_tag,
                                       extra_in_len, gcm_ctx->ctr)) {
        return 0;
      }
    } else {
      if (!CRYPTO_gcm128_encrypt(&gcm, key, extra_in, out_tag, extra_in_len)) {
        return 0;
      }
    }
  }

  CRYPTO_gcm128_tag(&gcm, out_tag + extra_in_len, ctx->tag_len);
  *out_tag_len = ctx->tag_len + extra_in_len;

  return 1;
}

static int aead_aes_gcm_open_gather(const EVP_AEAD_CTX *ctx, uint8_t *out,
                                    const uint8_t *nonce, size_t nonce_len,
                                    const uint8_t *in, size_t in_len,
                                    const uint8_t *in_tag, size_t in_tag_len,
                                    const uint8_t *ad, size_t ad_len) {
  struct aead_aes_gcm_ctx *gcm_ctx = (struct aead_aes_gcm_ctx *) &ctx->state;
  uint8_t tag[EVP_AEAD_AES_GCM_TAG_LEN];

  if (nonce_len == 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_INVALID_NONCE_SIZE);
    return 0;
  }

  if (in_tag_len != ctx->tag_len) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
    return 0;
  }

  const AES_KEY *key = &gcm_ctx->ks.ks;

  GCM128_CONTEXT gcm;
  OPENSSL_memset(&gcm, 0, sizeof(gcm));
  OPENSSL_memcpy(&gcm.gcm_key, &gcm_ctx->gcm_key, sizeof(gcm.gcm_key));
  CRYPTO_gcm128_setiv(&gcm, key, nonce, nonce_len);

  if (!CRYPTO_gcm128_aad(&gcm, ad, ad_len)) {
    return 0;
  }

  if (gcm_ctx->ctr) {
    if (!CRYPTO_gcm128_decrypt_ctr32(&gcm, key, in, out, in_len,
                                     gcm_ctx->ctr)) {
      return 0;
    }
  } else {
    if (!CRYPTO_gcm128_decrypt(&gcm, key, in, out, in_len)) {
      return 0;
    }
  }

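  // Compare the computed tag against the received one with CRYPTO_memcmp, a
  // constant-time comparison, so timing does not leak tag bytes.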
  CRYPTO_gcm128_tag(&gcm, tag, ctx->tag_len);
  if (CRYPTO_memcmp(tag, in_tag, ctx->tag_len) != 0) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
    return 0;
  }

  return 1;
}

DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_128_gcm) {
  memset(out, 0, sizeof(EVP_AEAD));

  out->key_len = 16;
  out->nonce_len = 12;
  out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
  out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  out->seal_scatter_supports_extra_in = 1;

  out->init = aead_aes_gcm_init;
  out->cleanup = aead_aes_gcm_cleanup;
  out->seal_scatter = aead_aes_gcm_seal_scatter;
  out->open_gather = aead_aes_gcm_open_gather;
}

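// A minimal usage sketch of the AEAD above via the public API in
// <openssl/aead.h>. The key, nonce, and buffer names are assumptions for
// illustration, not part of this file:
//
//   EVP_AEAD_CTX aead;
//   if (EVP_AEAD_CTX_init(&aead, EVP_aead_aes_128_gcm(), key, 16,
//                         EVP_AEAD_DEFAULT_TAG_LENGTH, NULL)) {
//     uint8_t sealed[64 + EVP_AEAD_AES_GCM_TAG_LEN];
//     size_t sealed_len;
//     if (EVP_AEAD_CTX_seal(&aead, sealed, &sealed_len, sizeof(sealed),
//                           nonce, 12, plaintext, plaintext_len, ad, ad_len)) {
//       // |sealed| now holds the ciphertext followed by the 16-byte tag.
//     }
//     EVP_AEAD_CTX_cleanup(&aead);
//   }
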
DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_256_gcm) {
  memset(out, 0, sizeof(EVP_AEAD));

  out->key_len = 32;
  out->nonce_len = 12;
  out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
  out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  out->seal_scatter_supports_extra_in = 1;

  out->init = aead_aes_gcm_init;
  out->cleanup = aead_aes_gcm_cleanup;
  out->seal_scatter = aead_aes_gcm_seal_scatter;
  out->open_gather = aead_aes_gcm_open_gather;
}

struct aead_aes_gcm_tls12_ctx {
  struct aead_aes_gcm_ctx gcm_ctx;
  uint64_t min_next_nonce;
};

OPENSSL_STATIC_ASSERT(sizeof(((EVP_AEAD_CTX *)NULL)->state) >=
                          sizeof(struct aead_aes_gcm_tls12_ctx),
                      "AEAD state is too small");
#if defined(__GNUC__) || defined(__clang__)
OPENSSL_STATIC_ASSERT(alignof(union evp_aead_ctx_st_state) >=
                          alignof(struct aead_aes_gcm_tls12_ctx),
                      "AEAD state has insufficient alignment");
#endif

static int aead_aes_gcm_tls12_init(EVP_AEAD_CTX *ctx, const uint8_t *key,
                                   size_t key_len, size_t requested_tag_len) {
  struct aead_aes_gcm_tls12_ctx *gcm_ctx =
      (struct aead_aes_gcm_tls12_ctx *) &ctx->state;

  gcm_ctx->min_next_nonce = 0;

  size_t actual_tag_len;
  if (!aead_aes_gcm_init_impl(&gcm_ctx->gcm_ctx, &actual_tag_len, key, key_len,
                              requested_tag_len)) {
    return 0;
  }

  ctx->tag_len = actual_tag_len;
  return 1;
}

static int aead_aes_gcm_tls12_seal_scatter(
    const EVP_AEAD_CTX *ctx, uint8_t *out, uint8_t *out_tag,
    size_t *out_tag_len, size_t max_out_tag_len, const uint8_t *nonce,
    size_t nonce_len, const uint8_t *in, size_t in_len, const uint8_t *extra_in,
    size_t extra_in_len, const uint8_t *ad, size_t ad_len) {
  struct aead_aes_gcm_tls12_ctx *gcm_ctx =
      (struct aead_aes_gcm_tls12_ctx *) &ctx->state;

  if (nonce_len != 12) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_NONCE_SIZE);
    return 0;
  }

  // The given nonces must be strictly monotonically increasing.
  uint64_t given_counter;
  OPENSSL_memcpy(&given_counter, nonce + nonce_len - sizeof(given_counter),
                 sizeof(given_counter));
  given_counter = CRYPTO_bswap8(given_counter);
  if (given_counter == UINT64_MAX ||
      given_counter < gcm_ctx->min_next_nonce) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_INVALID_NONCE);
    return 0;
  }

  gcm_ctx->min_next_nonce = given_counter + 1;

  return aead_aes_gcm_seal_scatter(ctx, out, out_tag, out_tag_len,
                                   max_out_tag_len, nonce, nonce_len, in,
                                   in_len, extra_in, extra_in_len, ad, ad_len);
}

DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_128_gcm_tls12) {
  memset(out, 0, sizeof(EVP_AEAD));

  out->key_len = 16;
  out->nonce_len = 12;
  out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
  out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  out->seal_scatter_supports_extra_in = 1;

  out->init = aead_aes_gcm_tls12_init;
  out->cleanup = aead_aes_gcm_cleanup;
  out->seal_scatter = aead_aes_gcm_tls12_seal_scatter;
  out->open_gather = aead_aes_gcm_open_gather;
}

DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_256_gcm_tls12) {
  memset(out, 0, sizeof(EVP_AEAD));

  out->key_len = 32;
  out->nonce_len = 12;
  out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
  out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  out->seal_scatter_supports_extra_in = 1;

  out->init = aead_aes_gcm_tls12_init;
  out->cleanup = aead_aes_gcm_cleanup;
  out->seal_scatter = aead_aes_gcm_tls12_seal_scatter;
  out->open_gather = aead_aes_gcm_open_gather;
}

struct aead_aes_gcm_tls13_ctx {
  struct aead_aes_gcm_ctx gcm_ctx;
  uint64_t min_next_nonce;
  uint64_t mask;
  uint8_t first;
};

OPENSSL_STATIC_ASSERT(sizeof(((EVP_AEAD_CTX *)NULL)->state) >=
                          sizeof(struct aead_aes_gcm_tls13_ctx),
                      "AEAD state is too small");
#if defined(__GNUC__) || defined(__clang__)
OPENSSL_STATIC_ASSERT(alignof(union evp_aead_ctx_st_state) >=
                          alignof(struct aead_aes_gcm_tls13_ctx),
                      "AEAD state has insufficient alignment");
#endif

static int aead_aes_gcm_tls13_init(EVP_AEAD_CTX *ctx, const uint8_t *key,
                                   size_t key_len, size_t requested_tag_len) {
  struct aead_aes_gcm_tls13_ctx *gcm_ctx =
      (struct aead_aes_gcm_tls13_ctx *) &ctx->state;

  gcm_ctx->min_next_nonce = 0;
  gcm_ctx->first = 1;

  size_t actual_tag_len;
  if (!aead_aes_gcm_init_impl(&gcm_ctx->gcm_ctx, &actual_tag_len, key, key_len,
                              requested_tag_len)) {
    return 0;
  }

  ctx->tag_len = actual_tag_len;
  return 1;
}

static int aead_aes_gcm_tls13_seal_scatter(
    const EVP_AEAD_CTX *ctx, uint8_t *out, uint8_t *out_tag,
    size_t *out_tag_len, size_t max_out_tag_len, const uint8_t *nonce,
    size_t nonce_len, const uint8_t *in, size_t in_len, const uint8_t *extra_in,
    size_t extra_in_len, const uint8_t *ad, size_t ad_len) {
  struct aead_aes_gcm_tls13_ctx *gcm_ctx =
      (struct aead_aes_gcm_tls13_ctx *) &ctx->state;

  if (nonce_len != 12) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_NONCE_SIZE);
    return 0;
  }

  // The given nonces must be strictly monotonically increasing. See
  // https://tools.ietf.org/html/rfc8446#section-5.3 for details of the TLS 1.3
  // nonce construction.
  uint64_t given_counter;
  OPENSSL_memcpy(&given_counter, nonce + nonce_len - sizeof(given_counter),
                 sizeof(given_counter));
  given_counter = CRYPTO_bswap8(given_counter);

  if (gcm_ctx->first) {
    // In the first call the sequence number will be zero and therefore the
    // given nonce will be 0 ^ mask = mask.
    gcm_ctx->mask = given_counter;
    gcm_ctx->first = 0;
  }
  given_counter ^= gcm_ctx->mask;
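  // With the per-connection mask removed, |given_counter| is the record
  // sequence number from the RFC 8446 nonce construction.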

  if (given_counter == UINT64_MAX ||
      given_counter < gcm_ctx->min_next_nonce) {
    OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_INVALID_NONCE);
    return 0;
  }

  gcm_ctx->min_next_nonce = given_counter + 1;

  return aead_aes_gcm_seal_scatter(ctx, out, out_tag, out_tag_len,
                                   max_out_tag_len, nonce, nonce_len, in,
                                   in_len, extra_in, extra_in_len, ad, ad_len);
}

DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_128_gcm_tls13) {
  memset(out, 0, sizeof(EVP_AEAD));

  out->key_len = 16;
  out->nonce_len = 12;
  out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
  out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  out->seal_scatter_supports_extra_in = 1;

  out->init = aead_aes_gcm_tls13_init;
  out->cleanup = aead_aes_gcm_cleanup;
  out->seal_scatter = aead_aes_gcm_tls13_seal_scatter;
  out->open_gather = aead_aes_gcm_open_gather;
}

DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_256_gcm_tls13) {
  memset(out, 0, sizeof(EVP_AEAD));

  out->key_len = 32;
  out->nonce_len = 12;
  out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
  out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  out->seal_scatter_supports_extra_in = 1;

  out->init = aead_aes_gcm_tls13_init;
  out->cleanup = aead_aes_gcm_cleanup;
  out->seal_scatter = aead_aes_gcm_tls13_seal_scatter;
  out->open_gather = aead_aes_gcm_open_gather;
}

int EVP_has_aes_hardware(void) {
#if defined(OPENSSL_X86) || defined(OPENSSL_X86_64)
  return hwaes_capable() && crypto_gcm_clmul_enabled();
#elif defined(OPENSSL_ARM) || defined(OPENSSL_AARCH64)
  return hwaes_capable() && CRYPTO_is_ARMv8_PMULL_capable();
#else
  return 0;
#endif
}

OPENSSL_MSVC_PRAGMA(warning(pop))