1 /* ====================================================================
2  * Copyright (c) 2001-2011 The OpenSSL Project.  All rights reserved.
3  *
4  * Redistribution and use in source and binary forms, with or without
5  * modification, are permitted provided that the following conditions
6  * are met:
7  *
8  * 1. Redistributions of source code must retain the above copyright
9  *    notice, this list of conditions and the following disclaimer.
10  *
11  * 2. Redistributions in binary form must reproduce the above copyright
12  *    notice, this list of conditions and the following disclaimer in
13  *    the documentation and/or other materials provided with the
14  *    distribution.
15  *
16  * 3. All advertising materials mentioning features or use of this
17  *    software must display the following acknowledgment:
18  *    "This product includes software developed by the OpenSSL Project
19  *    for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
20  *
21  * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
22  *    endorse or promote products derived from this software without
23  *    prior written permission. For written permission, please contact
24  *    openssl-core@openssl.org.
25  *
26  * 5. Products derived from this software may not be called "OpenSSL"
27  *    nor may "OpenSSL" appear in their names without prior written
28  *    permission of the OpenSSL Project.
29  *
30  * 6. Redistributions of any form whatsoever must retain the following
31  *    acknowledgment:
32  *    "This product includes software developed by the OpenSSL Project
33  *    for use in the OpenSSL Toolkit (http://www.openssl.org/)"
34  *
35  * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
36  * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
37  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
38  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE OpenSSL PROJECT OR
39  * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
40  * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
41  * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
42  * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
43  * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
44  * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
45  * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
46  * OF THE POSSIBILITY OF SUCH DAMAGE.
47  * ==================================================================== */
48 
49 #include <string.h>
50 
51 #include <openssl/aead.h>
52 #include <openssl/aes.h>
53 #include <openssl/cipher.h>
54 #include <openssl/cpu.h>
55 #include <openssl/err.h>
56 #include <openssl/mem.h>
57 #include <openssl/nid.h>
58 #include <openssl/rand.h>
59 
60 #include "internal.h"
61 #include "../../internal.h"
62 #include "../aes/internal.h"
63 #include "../modes/internal.h"
64 #include "../delocate.h"
65 
66 #if defined(OPENSSL_ARM) || defined(OPENSSL_AARCH64)
67 #include <openssl/arm_arch.h>
68 #endif
69 
70 
71 OPENSSL_MSVC_PRAGMA(warning(disable: 4702)) /* Unreachable code. */
72 
73 typedef struct {
74   union {
75     double align;
76     AES_KEY ks;
77   } ks;
78   block128_f block;
79   union {
80     cbc128_f cbc;
81     ctr128_f ctr;
82   } stream;
83 } EVP_AES_KEY;
84 
85 typedef struct {
86   union {
87     double align;
88     AES_KEY ks;
89   } ks;        /* AES key schedule to use */
90   int key_set; /* Set if key initialised */
91   int iv_set;  /* Set if an iv is set */
92   GCM128_CONTEXT gcm;
93   uint8_t *iv; /* Temporary IV store */
94   int ivlen;         /* IV length */
95   int taglen;
96   int iv_gen;      /* It is OK to generate IVs */
97   ctr128_f ctr;
98 } EVP_AES_GCM_CTX;
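/* Note on the two contexts above (summary of the fields as used below):
 * EVP_AES_KEY pairs the expanded key schedule with a one-block function and an
 * optional mode-specific fast path (CBC or CTR), chosen in aes_init_key.
 * EVP_AES_GCM_CTX additionally carries the GCM state plus the IV/tag
 * bookkeeping driven by the EVP_CTRL_GCM_* hooks in aes_gcm_ctrl. */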
99 
100 #if !defined(OPENSSL_NO_ASM) && \
101     (defined(OPENSSL_X86_64) || defined(OPENSSL_X86))
102 #define VPAES
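/* The check below reads OPENSSL_ia32cap_P[1], which mirrors CPUID.1:ECX; bit 41
 * of the combined feature vector (ECX bit 9) is the SSSE3 flag that the
 * vector-permutation AES (vpaes) assembly requires. */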
103 static char vpaes_capable(void) {
104   return (OPENSSL_ia32cap_P[1] & (1 << (41 - 32))) != 0;
105 }
106 
107 #if defined(OPENSSL_X86_64)
108 #define BSAES
109 static char bsaes_capable(void) {
110   return vpaes_capable();
111 }
112 #endif
113 
114 #elif !defined(OPENSSL_NO_ASM) && \
115     (defined(OPENSSL_ARM) || defined(OPENSSL_AARCH64))
116 
117 #if defined(OPENSSL_ARM) && __ARM_MAX_ARCH__ >= 7
118 #define BSAES
119 static char bsaes_capable(void) {
120   return CRYPTO_is_NEON_capable();
121 }
122 #endif
123 
124 #endif
125 
126 
127 #if defined(BSAES)
128 /* On platforms where BSAES is defined (just above), these functions are
129  * provided by assembly. */
130 void bsaes_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
131                        const AES_KEY *key, uint8_t ivec[16], int enc);
132 void bsaes_ctr32_encrypt_blocks(const uint8_t *in, uint8_t *out, size_t len,
133                                 const AES_KEY *key, const uint8_t ivec[16]);
134 #else
135 static char bsaes_capable(void) {
136   return 0;
137 }
138 
139 /* On other platforms, bsaes_capable() will always return false and so the
140  * following will never be called. */
141 static void bsaes_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
142                               const AES_KEY *key, uint8_t ivec[16], int enc) {
143   abort();
144 }
145 
146 static void bsaes_ctr32_encrypt_blocks(const uint8_t *in, uint8_t *out,
147                                        size_t len, const AES_KEY *key,
148                                        const uint8_t ivec[16]) {
149   abort();
150 }
151 #endif
152 
153 #if defined(VPAES)
154 /* On platforms where VPAES is defined (just above), these functions are
155  * provided by assembly. */
156 int vpaes_set_encrypt_key(const uint8_t *userKey, int bits, AES_KEY *key);
157 int vpaes_set_decrypt_key(const uint8_t *userKey, int bits, AES_KEY *key);
158 
159 void vpaes_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);
160 void vpaes_decrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);
161 
162 void vpaes_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
163                        const AES_KEY *key, uint8_t *ivec, int enc);
164 #else
165 static char vpaes_capable(void) {
166   return 0;
167 }
168 
169 /* On other platforms, vpaes_capable() will always return false and so the
170  * following will never be called. */
171 static int vpaes_set_encrypt_key(const uint8_t *userKey, int bits,
172                                  AES_KEY *key) {
173   abort();
174 }
175 static int vpaes_set_decrypt_key(const uint8_t *userKey, int bits,
176                                  AES_KEY *key) {
177   abort();
178 }
179 static void vpaes_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key) {
180   abort();
181 }
182 static void vpaes_decrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key) {
183   abort();
184 }
185 static void vpaes_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
186                               const AES_KEY *key, uint8_t *ivec, int enc) {
187   abort();
188 }
189 #endif
190 
191 #if !defined(OPENSSL_NO_ASM) && \
192     (defined(OPENSSL_X86_64) || defined(OPENSSL_X86))
193 int aesni_set_encrypt_key(const uint8_t *userKey, int bits, AES_KEY *key);
194 int aesni_set_decrypt_key(const uint8_t *userKey, int bits, AES_KEY *key);
195 
196 void aesni_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);
197 void aesni_decrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);
198 
199 void aesni_ecb_encrypt(const uint8_t *in, uint8_t *out, size_t length,
200                        const AES_KEY *key, int enc);
201 void aesni_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
202                        const AES_KEY *key, uint8_t *ivec, int enc);
203 
204 #else
205 
206 /* On other platforms, aesni_capable() will always return false and so the
207  * following will never be called. */
208 static void aesni_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key) {
209   abort();
210 }
211 static int aesni_set_encrypt_key(const uint8_t *userKey, int bits,
212                                  AES_KEY *key) {
213   abort();
214 }
215 static void aesni_ctr32_encrypt_blocks(const uint8_t *in, uint8_t *out,
216                                        size_t blocks, const void *key,
217                                        const uint8_t *ivec) {
218   abort();
219 }
220 
221 #endif
222 
223 static int aes_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
224                         const uint8_t *iv, int enc) {
225   int ret, mode;
226   EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;
227 
228   mode = ctx->cipher->flags & EVP_CIPH_MODE_MASK;
229   if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE) && !enc) {
230     if (hwaes_capable()) {
231       ret = aes_hw_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
232       dat->block = (block128_f)aes_hw_decrypt;
233       dat->stream.cbc = NULL;
234       if (mode == EVP_CIPH_CBC_MODE) {
235         dat->stream.cbc = (cbc128_f)aes_hw_cbc_encrypt;
236       }
237     } else if (bsaes_capable() && mode == EVP_CIPH_CBC_MODE) {
238       ret = AES_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
239       dat->block = (block128_f)AES_decrypt;
240       dat->stream.cbc = (cbc128_f)bsaes_cbc_encrypt;
241     } else if (vpaes_capable()) {
242       ret = vpaes_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
243       dat->block = (block128_f)vpaes_decrypt;
244       dat->stream.cbc =
245           mode == EVP_CIPH_CBC_MODE ? (cbc128_f)vpaes_cbc_encrypt : NULL;
246     } else {
247       ret = AES_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
248       dat->block = (block128_f)AES_decrypt;
249       dat->stream.cbc =
250           mode == EVP_CIPH_CBC_MODE ? (cbc128_f)AES_cbc_encrypt : NULL;
251     }
252   } else if (hwaes_capable()) {
253     ret = aes_hw_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
254     dat->block = (block128_f)aes_hw_encrypt;
255     dat->stream.cbc = NULL;
256     if (mode == EVP_CIPH_CBC_MODE) {
257       dat->stream.cbc = (cbc128_f)aes_hw_cbc_encrypt;
258     } else if (mode == EVP_CIPH_CTR_MODE) {
259       dat->stream.ctr = (ctr128_f)aes_hw_ctr32_encrypt_blocks;
260     }
261   } else if (bsaes_capable() && mode == EVP_CIPH_CTR_MODE) {
262     ret = AES_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
263     dat->block = (block128_f)AES_encrypt;
264     dat->stream.ctr = (ctr128_f)bsaes_ctr32_encrypt_blocks;
265   } else if (vpaes_capable()) {
266     ret = vpaes_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
267     dat->block = (block128_f)vpaes_encrypt;
268     dat->stream.cbc =
269         mode == EVP_CIPH_CBC_MODE ? (cbc128_f)vpaes_cbc_encrypt : NULL;
270   } else {
271     ret = AES_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
272     dat->block = (block128_f)AES_encrypt;
273     dat->stream.cbc =
274         mode == EVP_CIPH_CBC_MODE ? (cbc128_f)AES_cbc_encrypt : NULL;
275   }
276 
277   if (ret < 0) {
278     OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_AES_KEY_SETUP_FAILED);
279     return 0;
280   }
281 
282   return 1;
283 }
284 
285 static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
286                           size_t len) {
287   EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;
288 
289   if (dat->stream.cbc) {
290     (*dat->stream.cbc)(in, out, len, &dat->ks, ctx->iv, ctx->encrypt);
291   } else if (ctx->encrypt) {
292     CRYPTO_cbc128_encrypt(in, out, len, &dat->ks, ctx->iv, dat->block);
293   } else {
294     CRYPTO_cbc128_decrypt(in, out, len, &dat->ks, ctx->iv, dat->block);
295   }
296 
297   return 1;
298 }
299 
300 static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
301                           size_t len) {
302   size_t bl = ctx->cipher->block_size;
303   EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;
304 
305   if (len < bl) {
306     return 1;
307   }
308 
309   len -= bl;
310   for (size_t i = 0; i <= len; i += bl) {
311     (*dat->block)(in + i, out + i, &dat->ks);
312   }
313 
314   return 1;
315 }
316 
317 static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
318                           size_t len) {
319   EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;
320 
321   if (dat->stream.ctr) {
322     CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks, ctx->iv, ctx->buf,
323                                 &ctx->num, dat->stream.ctr);
324   } else {
325     CRYPTO_ctr128_encrypt(in, out, len, &dat->ks, ctx->iv, ctx->buf, &ctx->num,
326                           dat->block);
327   }
328   return 1;
329 }
330 
331 static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
332                           size_t len) {
333   EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;
334 
335   CRYPTO_ofb128_encrypt(in, out, len, &dat->ks, ctx->iv, &ctx->num, dat->block);
336   return 1;
337 }
338 
339 static char aesni_capable(void);
340 
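/* aes_ctr_set_key (below) picks the fastest available backend in order:
 * AES-NI, then other hardware AES, then bit-sliced AES (bsaes), then
 * vector-permutation AES (vpaes), then the generic C implementation. It
 * returns a ctr128_f when the chosen backend has a dedicated CTR routine and
 * NULL otherwise, in which case callers fall back to the plain block
 * function. */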
341 ctr128_f aes_ctr_set_key(AES_KEY *aes_key, GCM128_CONTEXT *gcm_ctx,
342                          block128_f *out_block, const uint8_t *key,
343                          size_t key_bytes) {
344   if (aesni_capable()) {
345     aesni_set_encrypt_key(key, key_bytes * 8, aes_key);
346     if (gcm_ctx != NULL) {
347       CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)aesni_encrypt, 1);
348     }
349     if (out_block) {
350       *out_block = (block128_f) aesni_encrypt;
351     }
352     return (ctr128_f)aesni_ctr32_encrypt_blocks;
353   }
354 
355   if (hwaes_capable()) {
356     aes_hw_set_encrypt_key(key, key_bytes * 8, aes_key);
357     if (gcm_ctx != NULL) {
358       CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)aes_hw_encrypt, 0);
359     }
360     if (out_block) {
361       *out_block = (block128_f) aes_hw_encrypt;
362     }
363     return (ctr128_f)aes_hw_ctr32_encrypt_blocks;
364   }
365 
366   if (bsaes_capable()) {
367     AES_set_encrypt_key(key, key_bytes * 8, aes_key);
368     if (gcm_ctx != NULL) {
369       CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)AES_encrypt, 0);
370     }
371     if (out_block) {
372       *out_block = (block128_f) AES_encrypt;
373     }
374     return (ctr128_f)bsaes_ctr32_encrypt_blocks;
375   }
376 
377   if (vpaes_capable()) {
378     vpaes_set_encrypt_key(key, key_bytes * 8, aes_key);
379     if (out_block) {
380       *out_block = (block128_f) vpaes_encrypt;
381     }
382     if (gcm_ctx != NULL) {
383       CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)vpaes_encrypt, 0);
384     }
385     return NULL;
386   }
387 
388   AES_set_encrypt_key(key, key_bytes * 8, aes_key);
389   if (gcm_ctx != NULL) {
390     CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)AES_encrypt, 0);
391   }
392   if (out_block) {
393     *out_block = (block128_f) AES_encrypt;
394   }
395   return NULL;
396 }
397 
398 static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
399                             const uint8_t *iv, int enc) {
400   EVP_AES_GCM_CTX *gctx = ctx->cipher_data;
401   if (!iv && !key) {
402     return 1;
403   }
404   if (key) {
405     gctx->ctr =
406         aes_ctr_set_key(&gctx->ks.ks, &gctx->gcm, NULL, key, ctx->key_len);
407     /* If we have an IV, set it directly; otherwise use the saved IV. */
408     if (iv == NULL && gctx->iv_set) {
409       iv = gctx->iv;
410     }
411     if (iv) {
412       CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, iv, gctx->ivlen);
413       gctx->iv_set = 1;
414     }
415     gctx->key_set = 1;
416   } else {
417     /* If the key is set, use the IV; otherwise save a copy of it. */
418     if (gctx->key_set) {
419       CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, iv, gctx->ivlen);
420     } else {
421       OPENSSL_memcpy(gctx->iv, iv, gctx->ivlen);
422     }
423     gctx->iv_set = 1;
424     gctx->iv_gen = 0;
425   }
426   return 1;
427 }
428 
429 static void aes_gcm_cleanup(EVP_CIPHER_CTX *c) {
430   EVP_AES_GCM_CTX *gctx = c->cipher_data;
431   OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));
432   if (gctx->iv != c->iv) {
433     OPENSSL_free(gctx->iv);
434   }
435 }
436 
437 /* Increment the counter (a 64-bit, big-endian integer) by one. */
438 static void ctr64_inc(uint8_t *counter) {
439   int n = 8;
440   uint8_t c;
441 
442   do {
443     --n;
444     c = counter[n];
445     ++c;
446     counter[n] = c;
447     if (c) {
448       return;
449     }
450   } while (n);
451 }
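/* For example, with the last eight bytes of the counter equal to
 * 00 00 00 00 00 00 00 ff, one call to ctr64_inc yields
 * 00 00 00 00 00 00 01 00: the carry ripple stops at the first byte that does
 * not wrap to zero. */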
452 
453 static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr) {
454   EVP_AES_GCM_CTX *gctx = c->cipher_data;
455   switch (type) {
456     case EVP_CTRL_INIT:
457       gctx->key_set = 0;
458       gctx->iv_set = 0;
459       gctx->ivlen = c->cipher->iv_len;
460       gctx->iv = c->iv;
461       gctx->taglen = -1;
462       gctx->iv_gen = 0;
463       return 1;
464 
465     case EVP_CTRL_GCM_SET_IVLEN:
466       if (arg <= 0) {
467         return 0;
468       }
469 
470       /* Allocate memory for IV if needed */
471       if (arg > EVP_MAX_IV_LENGTH && arg > gctx->ivlen) {
472         if (gctx->iv != c->iv) {
473           OPENSSL_free(gctx->iv);
474         }
475         gctx->iv = OPENSSL_malloc(arg);
476         if (!gctx->iv) {
477           return 0;
478         }
479       }
480       gctx->ivlen = arg;
481       return 1;
482 
483     case EVP_CTRL_GCM_SET_TAG:
484       if (arg <= 0 || arg > 16 || c->encrypt) {
485         return 0;
486       }
487       OPENSSL_memcpy(c->buf, ptr, arg);
488       gctx->taglen = arg;
489       return 1;
490 
491     case EVP_CTRL_GCM_GET_TAG:
492       if (arg <= 0 || arg > 16 || !c->encrypt || gctx->taglen < 0) {
493         return 0;
494       }
495       OPENSSL_memcpy(ptr, c->buf, arg);
496       return 1;
497 
498     case EVP_CTRL_GCM_SET_IV_FIXED:
499       /* Special case: -1 length restores whole IV */
500       if (arg == -1) {
501         OPENSSL_memcpy(gctx->iv, ptr, gctx->ivlen);
502         gctx->iv_gen = 1;
503         return 1;
504       }
505       /* Fixed field must be at least 4 bytes and invocation field
506        * at least 8. */
507       if (arg < 4 || (gctx->ivlen - arg) < 8) {
508         return 0;
509       }
510       if (arg) {
511         OPENSSL_memcpy(gctx->iv, ptr, arg);
512       }
513       if (c->encrypt && !RAND_bytes(gctx->iv + arg, gctx->ivlen - arg)) {
514         return 0;
515       }
516       gctx->iv_gen = 1;
517       return 1;
518 
519     case EVP_CTRL_GCM_IV_GEN:
520       if (gctx->iv_gen == 0 || gctx->key_set == 0) {
521         return 0;
522       }
523       CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, gctx->iv, gctx->ivlen);
524       if (arg <= 0 || arg > gctx->ivlen) {
525         arg = gctx->ivlen;
526       }
527       OPENSSL_memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
528       /* Invocation field will be at least 8 bytes in size and
529        * so no need to check wrap around or increment more than
530        * last 8 bytes. */
531       ctr64_inc(gctx->iv + gctx->ivlen - 8);
532       gctx->iv_set = 1;
533       return 1;
534 
535     case EVP_CTRL_GCM_SET_IV_INV:
536       if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt) {
537         return 0;
538       }
539       OPENSSL_memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
540       CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, gctx->iv, gctx->ivlen);
541       gctx->iv_set = 1;
542       return 1;
543 
544     case EVP_CTRL_COPY: {
545       EVP_CIPHER_CTX *out = ptr;
546       EVP_AES_GCM_CTX *gctx_out = out->cipher_data;
547       if (gctx->iv == c->iv) {
548         gctx_out->iv = out->iv;
549       } else {
550         gctx_out->iv = OPENSSL_malloc(gctx->ivlen);
551         if (!gctx_out->iv) {
552           return 0;
553         }
554         OPENSSL_memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
555       }
556       return 1;
557     }
558 
559     default:
560       return -1;
561   }
562 }
563 
564 static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
565                           size_t len) {
566   EVP_AES_GCM_CTX *gctx = ctx->cipher_data;
567 
568   /* If not set up, return error */
569   if (!gctx->key_set) {
570     return -1;
571   }
572   if (!gctx->iv_set) {
573     return -1;
574   }
575 
576   if (in) {
577     if (out == NULL) {
578       if (!CRYPTO_gcm128_aad(&gctx->gcm, in, len)) {
579         return -1;
580       }
581     } else if (ctx->encrypt) {
582       if (gctx->ctr) {
583         if (!CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm, &gctx->ks.ks, in, out, len,
584                                          gctx->ctr)) {
585           return -1;
586         }
587       } else {
588         if (!CRYPTO_gcm128_encrypt(&gctx->gcm, &gctx->ks.ks, in, out, len)) {
589           return -1;
590         }
591       }
592     } else {
593       if (gctx->ctr) {
594         if (!CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm, &gctx->ks.ks, in, out, len,
595                                          gctx->ctr)) {
596           return -1;
597         }
598       } else {
599         if (!CRYPTO_gcm128_decrypt(&gctx->gcm, &gctx->ks.ks, in, out, len)) {
600           return -1;
601         }
602       }
603     }
604     return len;
605   } else {
606     if (!ctx->encrypt) {
607       if (gctx->taglen < 0 ||
608           !CRYPTO_gcm128_finish(&gctx->gcm, ctx->buf, gctx->taglen)) {
609         return -1;
610       }
611       gctx->iv_set = 0;
612       return 0;
613     }
614     CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16);
615     gctx->taglen = 16;
616     /* Don't reuse the IV */
617     gctx->iv_set = 0;
618     return 0;
619   }
620 }
621 
622 DEFINE_LOCAL_DATA(EVP_CIPHER, aes_128_cbc_generic) {
623   memset(out, 0, sizeof(EVP_CIPHER));
624 
625   out->nid = NID_aes_128_cbc;
626   out->block_size = 16;
627   out->key_len = 16;
628   out->iv_len = 16;
629   out->ctx_size = sizeof(EVP_AES_KEY);
630   out->flags = EVP_CIPH_CBC_MODE;
631   out->init = aes_init_key;
632   out->cipher = aes_cbc_cipher;
633 }
634 
635 DEFINE_LOCAL_DATA(EVP_CIPHER, aes_128_ctr_generic) {
636   memset(out, 0, sizeof(EVP_CIPHER));
637 
638   out->nid = NID_aes_128_ctr;
639   out->block_size = 1;
640   out->key_len = 16;
641   out->iv_len = 16;
642   out->ctx_size = sizeof(EVP_AES_KEY);
643   out->flags = EVP_CIPH_CTR_MODE;
644   out->init = aes_init_key;
645   out->cipher = aes_ctr_cipher;
646 }
647 
648 DEFINE_LOCAL_DATA(EVP_CIPHER, aes_128_ecb_generic) {
649   memset(out, 0, sizeof(EVP_CIPHER));
650 
651   out->nid = NID_aes_128_ecb;
652   out->block_size = 16;
653   out->key_len = 16;
654   out->ctx_size = sizeof(EVP_AES_KEY);
655   out->flags = EVP_CIPH_ECB_MODE;
656   out->init = aes_init_key;
657   out->cipher = aes_ecb_cipher;
658 }
659 
660 DEFINE_LOCAL_DATA(EVP_CIPHER, aes_128_ofb_generic) {
661   memset(out, 0, sizeof(EVP_CIPHER));
662 
663   out->nid = NID_aes_128_ofb128;
664   out->block_size = 1;
665   out->key_len = 16;
666   out->iv_len = 16;
667   out->ctx_size = sizeof(EVP_AES_KEY);
668   out->flags = EVP_CIPH_OFB_MODE;
669   out->init = aes_init_key;
670   out->cipher = aes_ofb_cipher;
671 }
672 
673 DEFINE_LOCAL_DATA(EVP_CIPHER, aes_128_gcm_generic) {
674   memset(out, 0, sizeof(EVP_CIPHER));
675 
676   out->nid = NID_aes_128_gcm;
677   out->block_size = 1;
678   out->key_len = 16;
679   out->iv_len = 12;
680   out->ctx_size = sizeof(EVP_AES_GCM_CTX);
681   out->flags = EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV |
682                EVP_CIPH_FLAG_CUSTOM_CIPHER | EVP_CIPH_ALWAYS_CALL_INIT |
683                EVP_CIPH_CTRL_INIT | EVP_CIPH_FLAG_AEAD_CIPHER;
684   out->init = aes_gcm_init_key;
685   out->cipher = aes_gcm_cipher;
686   out->cleanup = aes_gcm_cleanup;
687   out->ctrl = aes_gcm_ctrl;
688 }
689 
690 DEFINE_LOCAL_DATA(EVP_CIPHER, aes_192_cbc_generic) {
691   memset(out, 0, sizeof(EVP_CIPHER));
692 
693   out->nid = NID_aes_192_cbc;
694   out->block_size = 16;
695   out->key_len = 24;
696   out->iv_len = 16;
697   out->ctx_size = sizeof(EVP_AES_KEY);
698   out->flags = EVP_CIPH_CBC_MODE;
699   out->init = aes_init_key;
700   out->cipher = aes_cbc_cipher;
701 }
702 
703 DEFINE_LOCAL_DATA(EVP_CIPHER, aes_192_ctr_generic) {
704   memset(out, 0, sizeof(EVP_CIPHER));
705 
706   out->nid = NID_aes_192_ctr;
707   out->block_size = 1;
708   out->key_len = 24;
709   out->iv_len = 16;
710   out->ctx_size = sizeof(EVP_AES_KEY);
711   out->flags = EVP_CIPH_CTR_MODE;
712   out->init = aes_init_key;
713   out->cipher = aes_ctr_cipher;
714 }
715 
716 DEFINE_LOCAL_DATA(EVP_CIPHER, aes_192_ecb_generic) {
717   memset(out, 0, sizeof(EVP_CIPHER));
718 
719   out->nid = NID_aes_192_ecb;
720   out->block_size = 16;
721   out->key_len = 24;
722   out->ctx_size = sizeof(EVP_AES_KEY);
723   out->flags = EVP_CIPH_ECB_MODE;
724   out->init = aes_init_key;
725   out->cipher = aes_ecb_cipher;
726 }
727 
728 DEFINE_LOCAL_DATA(EVP_CIPHER, aes_192_gcm_generic) {
729   memset(out, 0, sizeof(EVP_CIPHER));
730 
731   out->nid = NID_aes_192_gcm;
732   out->block_size = 1;
733   out->key_len = 24;
734   out->iv_len = 12;
735   out->ctx_size = sizeof(EVP_AES_GCM_CTX);
736   out->flags = EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV |
737                EVP_CIPH_FLAG_CUSTOM_CIPHER | EVP_CIPH_ALWAYS_CALL_INIT |
738                EVP_CIPH_CTRL_INIT | EVP_CIPH_FLAG_AEAD_CIPHER;
739   out->init = aes_gcm_init_key;
740   out->cipher = aes_gcm_cipher;
741   out->cleanup = aes_gcm_cleanup;
742   out->ctrl = aes_gcm_ctrl;
743 }
744 
745 DEFINE_LOCAL_DATA(EVP_CIPHER, aes_256_cbc_generic) {
746   memset(out, 0, sizeof(EVP_CIPHER));
747 
748   out->nid = NID_aes_256_cbc;
749   out->block_size = 16;
750   out->key_len = 32;
751   out->iv_len = 16;
752   out->ctx_size = sizeof(EVP_AES_KEY);
753   out->flags = EVP_CIPH_CBC_MODE;
754   out->init = aes_init_key;
755   out->cipher = aes_cbc_cipher;
756 }
757 
758 DEFINE_LOCAL_DATA(EVP_CIPHER, aes_256_ctr_generic) {
759   memset(out, 0, sizeof(EVP_CIPHER));
760 
761   out->nid = NID_aes_256_ctr;
762   out->block_size = 1;
763   out->key_len = 32;
764   out->iv_len = 16;
765   out->ctx_size = sizeof(EVP_AES_KEY);
766   out->flags = EVP_CIPH_CTR_MODE;
767   out->init = aes_init_key;
768   out->cipher = aes_ctr_cipher;
769 }
770 
771 DEFINE_LOCAL_DATA(EVP_CIPHER, aes_256_ecb_generic) {
772   memset(out, 0, sizeof(EVP_CIPHER));
773 
774   out->nid = NID_aes_256_ecb;
775   out->block_size = 16;
776   out->key_len = 32;
777   out->ctx_size = sizeof(EVP_AES_KEY);
778   out->flags = EVP_CIPH_ECB_MODE;
779   out->init = aes_init_key;
780   out->cipher = aes_ecb_cipher;
781 }
782 
783 DEFINE_LOCAL_DATA(EVP_CIPHER, aes_256_ofb_generic) {
784   memset(out, 0, sizeof(EVP_CIPHER));
785 
786   out->nid = NID_aes_256_ofb128;
787   out->block_size = 1;
788   out->key_len = 32;
789   out->iv_len = 16;
790   out->ctx_size = sizeof(EVP_AES_KEY);
791   out->flags = EVP_CIPH_OFB_MODE;
792   out->init = aes_init_key;
793   out->cipher = aes_ofb_cipher;
794 }
795 
796 DEFINE_LOCAL_DATA(EVP_CIPHER, aes_256_gcm_generic) {
797   memset(out, 0, sizeof(EVP_CIPHER));
798 
799   out->nid = NID_aes_256_gcm;
800   out->block_size = 1;
801   out->key_len = 32;
802   out->iv_len = 12;
803   out->ctx_size = sizeof(EVP_AES_GCM_CTX);
804   out->flags = EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV |
805                EVP_CIPH_FLAG_CUSTOM_CIPHER | EVP_CIPH_ALWAYS_CALL_INIT |
806                EVP_CIPH_CTRL_INIT | EVP_CIPH_FLAG_AEAD_CIPHER;
807   out->init = aes_gcm_init_key;
808   out->cipher = aes_gcm_cipher;
809   out->cleanup = aes_gcm_cleanup;
810   out->ctrl = aes_gcm_ctrl;
811 }
812 
813 #if !defined(OPENSSL_NO_ASM) && \
814     (defined(OPENSSL_X86_64) || defined(OPENSSL_X86))
815 
816 /* AES-NI section. */
817 
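/* aesni_capable (below) reads OPENSSL_ia32cap_P[1]; bit 57 of the combined
 * feature vector is CPUID.1:ECX bit 25, the AES-NI feature flag. */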
818 static char aesni_capable(void) {
819   return (OPENSSL_ia32cap_P[1] & (1 << (57 - 32))) != 0;
820 }
821 
822 static int aesni_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
823                           const uint8_t *iv, int enc) {
824   int ret, mode;
825   EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;
826 
827   mode = ctx->cipher->flags & EVP_CIPH_MODE_MASK;
828   if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE) && !enc) {
829     ret = aesni_set_decrypt_key(key, ctx->key_len * 8, ctx->cipher_data);
830     dat->block = (block128_f)aesni_decrypt;
831     dat->stream.cbc =
832         mode == EVP_CIPH_CBC_MODE ? (cbc128_f)aesni_cbc_encrypt : NULL;
833   } else {
834     ret = aesni_set_encrypt_key(key, ctx->key_len * 8, ctx->cipher_data);
835     dat->block = (block128_f)aesni_encrypt;
836     if (mode == EVP_CIPH_CBC_MODE) {
837       dat->stream.cbc = (cbc128_f)aesni_cbc_encrypt;
838     } else if (mode == EVP_CIPH_CTR_MODE) {
839       dat->stream.ctr = (ctr128_f)aesni_ctr32_encrypt_blocks;
840     } else {
841       dat->stream.cbc = NULL;
842     }
843   }
844 
845   if (ret < 0) {
846     OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_AES_KEY_SETUP_FAILED);
847     return 0;
848   }
849 
850   return 1;
851 }
852 
853 static int aesni_cbc_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out,
854                             const uint8_t *in, size_t len) {
855   aesni_cbc_encrypt(in, out, len, ctx->cipher_data, ctx->iv, ctx->encrypt);
856 
857   return 1;
858 }
859 
860 static int aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out,
861                             const uint8_t *in, size_t len) {
862   size_t bl = ctx->cipher->block_size;
863 
864   if (len < bl) {
865     return 1;
866   }
867 
868   aesni_ecb_encrypt(in, out, len, ctx->cipher_data, ctx->encrypt);
869 
870   return 1;
871 }
872 
873 static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
874                               const uint8_t *iv, int enc) {
875   EVP_AES_GCM_CTX *gctx = ctx->cipher_data;
876   if (!iv && !key) {
877     return 1;
878   }
879   if (key) {
880     aesni_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
881     CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f)aesni_encrypt, 1);
882     gctx->ctr = (ctr128_f)aesni_ctr32_encrypt_blocks;
883     /* If we have an IV, set it directly; otherwise use
884      * the saved IV. */
885     if (iv == NULL && gctx->iv_set) {
886       iv = gctx->iv;
887     }
888     if (iv) {
889       CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, iv, gctx->ivlen);
890       gctx->iv_set = 1;
891     }
892     gctx->key_set = 1;
893   } else {
894     /* If the key is set, use the IV; otherwise save a copy of it. */
895     if (gctx->key_set) {
896       CRYPTO_gcm128_setiv(&gctx->gcm, &gctx->ks.ks, iv, gctx->ivlen);
897     } else {
898       OPENSSL_memcpy(gctx->iv, iv, gctx->ivlen);
899     }
900     gctx->iv_set = 1;
901     gctx->iv_gen = 0;
902   }
903   return 1;
904 }
905 
906 DEFINE_LOCAL_DATA(EVP_CIPHER, aesni_128_cbc) {
907   memset(out, 0, sizeof(EVP_CIPHER));
908 
909   out->nid = NID_aes_128_cbc;
910   out->block_size = 16;
911   out->key_len = 16;
912   out->iv_len = 16;
913   out->ctx_size = sizeof(EVP_AES_KEY);
914   out->flags = EVP_CIPH_CBC_MODE;
915   out->init = aesni_init_key;
916   out->cipher = aesni_cbc_cipher;
917 }
918 
919 DEFINE_LOCAL_DATA(EVP_CIPHER, aesni_128_ctr) {
920   memset(out, 0, sizeof(EVP_CIPHER));
921 
922   out->nid = NID_aes_128_ctr;
923   out->block_size = 1;
924   out->key_len = 16;
925   out->iv_len = 16;
926   out->ctx_size = sizeof(EVP_AES_KEY);
927   out->flags = EVP_CIPH_CTR_MODE;
928   out->init = aesni_init_key;
929   out->cipher = aes_ctr_cipher;
930 }
931 
932 DEFINE_LOCAL_DATA(EVP_CIPHER, aesni_128_ecb) {
933   memset(out, 0, sizeof(EVP_CIPHER));
934 
935   out->nid = NID_aes_128_ecb;
936   out->block_size = 16;
937   out->key_len = 16;
938   out->ctx_size = sizeof(EVP_AES_KEY);
939   out->flags = EVP_CIPH_ECB_MODE;
940   out->init = aesni_init_key;
941   out->cipher = aesni_ecb_cipher;
942 }
943 
944 DEFINE_LOCAL_DATA(EVP_CIPHER, aesni_128_ofb) {
945   memset(out, 0, sizeof(EVP_CIPHER));
946 
947   out->nid = NID_aes_128_ofb128;
948   out->block_size = 1;
949   out->key_len = 16;
950   out->iv_len = 16;
951   out->ctx_size = sizeof(EVP_AES_KEY);
952   out->flags = EVP_CIPH_OFB_MODE;
953   out->init = aesni_init_key;
954   out->cipher = aes_ofb_cipher;
955 }
956 
957 DEFINE_LOCAL_DATA(EVP_CIPHER, aesni_128_gcm) {
958   memset(out, 0, sizeof(EVP_CIPHER));
959 
960   out->nid = NID_aes_128_gcm;
961   out->block_size = 1;
962   out->key_len = 16;
963   out->iv_len = 12;
964   out->ctx_size = sizeof(EVP_AES_GCM_CTX);
965   out->flags = EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV |
966                EVP_CIPH_FLAG_CUSTOM_CIPHER | EVP_CIPH_ALWAYS_CALL_INIT |
967                EVP_CIPH_CTRL_INIT | EVP_CIPH_FLAG_AEAD_CIPHER;
968   out->init = aesni_gcm_init_key;
969   out->cipher = aes_gcm_cipher;
970   out->cleanup = aes_gcm_cleanup;
971   out->ctrl = aes_gcm_ctrl;
972 }
973 
974 DEFINE_LOCAL_DATA(EVP_CIPHER, aesni_192_cbc) {
975   memset(out, 0, sizeof(EVP_CIPHER));
976 
977   out->nid = NID_aes_192_cbc;
978   out->block_size = 16;
979   out->key_len = 24;
980   out->iv_len = 16;
981   out->ctx_size = sizeof(EVP_AES_KEY);
982   out->flags = EVP_CIPH_CBC_MODE;
983   out->init = aesni_init_key;
984   out->cipher = aesni_cbc_cipher;
985 }
986 
987 DEFINE_LOCAL_DATA(EVP_CIPHER, aesni_192_ctr) {
988   memset(out, 0, sizeof(EVP_CIPHER));
989 
990   out->nid = NID_aes_192_ctr;
991   out->block_size = 1;
992   out->key_len = 24;
993   out->iv_len = 16;
994   out->ctx_size = sizeof(EVP_AES_KEY);
995   out->flags = EVP_CIPH_CTR_MODE;
996   out->init = aesni_init_key;
997   out->cipher = aes_ctr_cipher;
998 }
999 
1000 DEFINE_LOCAL_DATA(EVP_CIPHER, aesni_192_ecb) {
1001   memset(out, 0, sizeof(EVP_CIPHER));
1002 
1003   out->nid = NID_aes_192_ecb;
1004   out->block_size = 16;
1005   out->key_len = 24;
1006   out->ctx_size = sizeof(EVP_AES_KEY);
1007   out->flags = EVP_CIPH_ECB_MODE;
1008   out->init = aesni_init_key;
1009   out->cipher = aesni_ecb_cipher;
1010 }
1011 
1012 DEFINE_LOCAL_DATA(EVP_CIPHER, aesni_192_gcm) {
1013   memset(out, 0, sizeof(EVP_CIPHER));
1014 
1015   out->nid = NID_aes_192_gcm;
1016   out->block_size = 1;
1017   out->key_len = 24;
1018   out->iv_len = 12;
1019   out->ctx_size = sizeof(EVP_AES_GCM_CTX);
1020   out->flags = EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV |
1021                EVP_CIPH_FLAG_CUSTOM_CIPHER | EVP_CIPH_ALWAYS_CALL_INIT |
1022                EVP_CIPH_CTRL_INIT | EVP_CIPH_FLAG_AEAD_CIPHER;
1023   out->init = aesni_gcm_init_key;
1024   out->cipher = aes_gcm_cipher;
1025   out->cleanup = aes_gcm_cleanup;
1026   out->ctrl = aes_gcm_ctrl;
1027 }
1028 
1029 DEFINE_LOCAL_DATA(EVP_CIPHER, aesni_256_cbc) {
1030   memset(out, 0, sizeof(EVP_CIPHER));
1031 
1032   out->nid = NID_aes_256_cbc;
1033   out->block_size = 16;
1034   out->key_len = 32;
1035   out->iv_len = 16;
1036   out->ctx_size = sizeof(EVP_AES_KEY);
1037   out->flags = EVP_CIPH_CBC_MODE;
1038   out->init = aesni_init_key;
1039   out->cipher = aesni_cbc_cipher;
1040 }
1041 
1042 DEFINE_LOCAL_DATA(EVP_CIPHER, aesni_256_ctr) {
1043   memset(out, 0, sizeof(EVP_CIPHER));
1044 
1045   out->nid = NID_aes_256_ctr;
1046   out->block_size = 1;
1047   out->key_len = 32;
1048   out->iv_len = 16;
1049   out->ctx_size = sizeof(EVP_AES_KEY);
1050   out->flags = EVP_CIPH_CTR_MODE;
1051   out->init = aesni_init_key;
1052   out->cipher = aes_ctr_cipher;
1053 }
1054 
1055 DEFINE_LOCAL_DATA(EVP_CIPHER, aesni_256_ecb) {
1056   memset(out, 0, sizeof(EVP_CIPHER));
1057 
1058   out->nid = NID_aes_256_ecb;
1059   out->block_size = 16;
1060   out->key_len = 32;
1061   out->ctx_size = sizeof(EVP_AES_KEY);
1062   out->flags = EVP_CIPH_ECB_MODE;
1063   out->init = aesni_init_key;
1064   out->cipher = aesni_ecb_cipher;
1065 }
1066 
1067 DEFINE_LOCAL_DATA(EVP_CIPHER, aesni_256_ofb) {
1068   memset(out, 0, sizeof(EVP_CIPHER));
1069 
1070   out->nid = NID_aes_256_ofb128;
1071   out->block_size = 1;
1072   out->key_len = 32;
1073   out->iv_len = 16;
1074   out->ctx_size = sizeof(EVP_AES_KEY);
1075   out->flags = EVP_CIPH_OFB_MODE;
1076   out->init = aesni_init_key;
1077   out->cipher = aes_ofb_cipher;
1078 }
1079 
1080 DEFINE_LOCAL_DATA(EVP_CIPHER, aesni_256_gcm) {
1081   memset(out, 0, sizeof(EVP_CIPHER));
1082 
1083   out->nid = NID_aes_256_gcm;
1084   out->block_size = 1;
1085   out->key_len = 32;
1086   out->iv_len = 12;
1087   out->ctx_size = sizeof(EVP_AES_GCM_CTX);
1088   out->flags = EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV |
1089                EVP_CIPH_FLAG_CUSTOM_CIPHER | EVP_CIPH_ALWAYS_CALL_INIT |
1090                EVP_CIPH_CTRL_INIT | EVP_CIPH_CUSTOM_COPY |
1091                EVP_CIPH_FLAG_AEAD_CIPHER;
1092   out->init = aesni_gcm_init_key;
1093   out->cipher = aes_gcm_cipher;
1094   out->cleanup = aes_gcm_cleanup;
1095   out->ctrl = aes_gcm_ctrl;
1096 }
1097 
1098 #define EVP_CIPHER_FUNCTION(keybits, mode)             \
1099   const EVP_CIPHER *EVP_aes_##keybits##_##mode(void) { \
1100     if (aesni_capable()) {                             \
1101       return aesni_##keybits##_##mode();               \
1102     } else {                                           \
1103       return aes_##keybits##_##mode##_generic();       \
1104     }                                                  \
1105   }
1106 
1107 #else  /* ^^^  OPENSSL_X86_64 || OPENSSL_X86 */
1108 
1109 static char aesni_capable(void) {
1110   return 0;
1111 }
1112 
1113 #define EVP_CIPHER_FUNCTION(keybits, mode)             \
1114   const EVP_CIPHER *EVP_aes_##keybits##_##mode(void) { \
1115     return aes_##keybits##_##mode##_generic();         \
1116   }
1117 
1118 #endif
1119 
1120 EVP_CIPHER_FUNCTION(128, cbc)
1121 EVP_CIPHER_FUNCTION(128, ctr)
1122 EVP_CIPHER_FUNCTION(128, ecb)
1123 EVP_CIPHER_FUNCTION(128, ofb)
1124 EVP_CIPHER_FUNCTION(128, gcm)
1125 
1126 EVP_CIPHER_FUNCTION(192, cbc)
1127 EVP_CIPHER_FUNCTION(192, ctr)
1128 EVP_CIPHER_FUNCTION(192, ecb)
1129 EVP_CIPHER_FUNCTION(192, gcm)
1130 
1131 EVP_CIPHER_FUNCTION(256, cbc)
1132 EVP_CIPHER_FUNCTION(256, ctr)
1133 EVP_CIPHER_FUNCTION(256, ecb)
1134 EVP_CIPHER_FUNCTION(256, ofb)
1135 EVP_CIPHER_FUNCTION(256, gcm)
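/* The EVP_aes_*_* accessors generated above plug into the generic EVP cipher
 * interface declared in <openssl/cipher.h>. An illustrative sketch only (not
 * part of this file; error checking omitted), encrypting with AES-256-GCM:
 *
 *   EVP_CIPHER_CTX ctx;
 *   EVP_CIPHER_CTX_init(&ctx);
 *   EVP_EncryptInit_ex(&ctx, EVP_aes_256_gcm(), NULL, key, iv);  // 32-byte key, 12-byte IV
 *   int len;
 *   EVP_EncryptUpdate(&ctx, ciphertext, &len, plaintext, plaintext_len);
 *   EVP_EncryptFinal_ex(&ctx, ciphertext + len, &len);
 *   EVP_CIPHER_CTX_ctrl(&ctx, EVP_CTRL_GCM_GET_TAG, 16, tag);    // 16-byte GCM tag
 *   EVP_CIPHER_CTX_cleanup(&ctx);
 */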
1136 
1137 
1138 #define EVP_AEAD_AES_GCM_TAG_LEN 16
1139 
1140 struct aead_aes_gcm_ctx {
1141   union {
1142     double align;
1143     AES_KEY ks;
1144   } ks;
1145   GCM128_CONTEXT gcm;
1146   ctr128_f ctr;
1147 };
1148 
1149 struct aead_aes_gcm_tls12_ctx {
1150   struct aead_aes_gcm_ctx gcm_ctx;
1151   uint64_t counter;
1152 };
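/* The tls12 variant records the last explicit-nonce value so that
 * aead_aes_gcm_tls12_seal_scatter (below) can reject callers that do not use
 * the strictly increasing 64-bit record counter TLS 1.2 requires for
 * AES-GCM. */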
1153 
1154 static int aead_aes_gcm_init_impl(struct aead_aes_gcm_ctx *gcm_ctx,
1155                                   size_t *out_tag_len, const uint8_t *key,
1156                                   size_t key_len, size_t tag_len) {
1157   const size_t key_bits = key_len * 8;
1158 
1159   if (key_bits != 128 && key_bits != 256) {
1160     OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_KEY_LENGTH);
1161     return 0; /* EVP_AEAD_CTX_init should catch this. */
1162   }
1163 
1164   if (tag_len == EVP_AEAD_DEFAULT_TAG_LENGTH) {
1165     tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
1166   }
1167 
1168   if (tag_len > EVP_AEAD_AES_GCM_TAG_LEN) {
1169     OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TAG_TOO_LARGE);
1170     return 0;
1171   }
1172 
1173   gcm_ctx->ctr =
1174       aes_ctr_set_key(&gcm_ctx->ks.ks, &gcm_ctx->gcm, NULL, key, key_len);
1175   *out_tag_len = tag_len;
1176   return 1;
1177 }
1178 
1179 static int aead_aes_gcm_init(EVP_AEAD_CTX *ctx, const uint8_t *key,
1180                              size_t key_len, size_t requested_tag_len) {
1181   struct aead_aes_gcm_ctx *gcm_ctx;
1182   gcm_ctx = OPENSSL_malloc(sizeof(struct aead_aes_gcm_ctx));
1183   if (gcm_ctx == NULL) {
1184     return 0;
1185   }
1186 
1187   size_t actual_tag_len;
1188   if (!aead_aes_gcm_init_impl(gcm_ctx, &actual_tag_len, key, key_len,
1189                               requested_tag_len)) {
1190     OPENSSL_free(gcm_ctx);
1191     return 0;
1192   }
1193 
1194   ctx->aead_state = gcm_ctx;
1195   ctx->tag_len = actual_tag_len;
1196   return 1;
1197 }
1198 
1199 static void aead_aes_gcm_cleanup(EVP_AEAD_CTX *ctx) {
1200   struct aead_aes_gcm_ctx *gcm_ctx = ctx->aead_state;
1201   OPENSSL_cleanse(gcm_ctx, sizeof(struct aead_aes_gcm_ctx));
1202   OPENSSL_free(gcm_ctx);
1203 }
1204 
1205 static int aead_aes_gcm_seal_scatter(const EVP_AEAD_CTX *ctx, uint8_t *out,
1206                                      uint8_t *out_tag, size_t *out_tag_len,
1207                                      size_t max_out_tag_len,
1208                                      const uint8_t *nonce, size_t nonce_len,
1209                                      const uint8_t *in, size_t in_len,
1210                                      const uint8_t *extra_in,
1211                                      size_t extra_in_len,
1212                                      const uint8_t *ad, size_t ad_len) {
1213   const struct aead_aes_gcm_ctx *gcm_ctx = ctx->aead_state;
1214   GCM128_CONTEXT gcm;
1215 
1216   if (extra_in_len + ctx->tag_len < ctx->tag_len) {
1217     OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_TOO_LARGE);
1218     return 0;
1219   }
1220   if (max_out_tag_len < ctx->tag_len + extra_in_len) {
1221     OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BUFFER_TOO_SMALL);
1222     return 0;
1223   }
1224   if (nonce_len == 0) {
1225     OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_INVALID_NONCE_SIZE);
1226     return 0;
1227   }
1228 
1229   if (max_out_tag_len < ctx->tag_len) {
1230     OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BUFFER_TOO_SMALL);
1231     return 0;
1232   }
1233 
1234   const AES_KEY *key = &gcm_ctx->ks.ks;
1235 
1236   OPENSSL_memcpy(&gcm, &gcm_ctx->gcm, sizeof(gcm));
1237   CRYPTO_gcm128_setiv(&gcm, key, nonce, nonce_len);
1238 
1239   if (ad_len > 0 && !CRYPTO_gcm128_aad(&gcm, ad, ad_len)) {
1240     return 0;
1241   }
1242 
1243   if (gcm_ctx->ctr) {
1244     if (!CRYPTO_gcm128_encrypt_ctr32(&gcm, key, in, out, in_len,
1245                                      gcm_ctx->ctr)) {
1246       return 0;
1247     }
1248   } else {
1249     if (!CRYPTO_gcm128_encrypt(&gcm, key, in, out, in_len)) {
1250       return 0;
1251     }
1252   }
1253 
1254   if (extra_in_len) {
1255     if (gcm_ctx->ctr) {
1256       if (!CRYPTO_gcm128_encrypt_ctr32(&gcm, key, extra_in, out_tag,
1257                                        extra_in_len, gcm_ctx->ctr)) {
1258         return 0;
1259       }
1260     } else {
1261       if (!CRYPTO_gcm128_encrypt(&gcm, key, extra_in, out_tag, extra_in_len)) {
1262         return 0;
1263       }
1264     }
1265   }
1266 
1267   CRYPTO_gcm128_tag(&gcm, out_tag + extra_in_len, ctx->tag_len);
1268   *out_tag_len = ctx->tag_len + extra_in_len;
1269 
1270   return 1;
1271 }
1272 
1273 static int aead_aes_gcm_open_gather(const EVP_AEAD_CTX *ctx, uint8_t *out,
1274                                     const uint8_t *nonce, size_t nonce_len,
1275                                     const uint8_t *in, size_t in_len,
1276                                     const uint8_t *in_tag, size_t in_tag_len,
1277                                     const uint8_t *ad, size_t ad_len) {
1278   const struct aead_aes_gcm_ctx *gcm_ctx = ctx->aead_state;
1279   uint8_t tag[EVP_AEAD_AES_GCM_TAG_LEN];
1280   GCM128_CONTEXT gcm;
1281 
1282   if (nonce_len == 0) {
1283     OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_INVALID_NONCE_SIZE);
1284     return 0;
1285   }
1286 
1287   if (in_tag_len != ctx->tag_len) {
1288     OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
1289     return 0;
1290   }
1291 
1292   const AES_KEY *key = &gcm_ctx->ks.ks;
1293 
1294   OPENSSL_memcpy(&gcm, &gcm_ctx->gcm, sizeof(gcm));
1295   CRYPTO_gcm128_setiv(&gcm, key, nonce, nonce_len);
1296 
1297   if (!CRYPTO_gcm128_aad(&gcm, ad, ad_len)) {
1298     return 0;
1299   }
1300 
1301   if (gcm_ctx->ctr) {
1302     if (!CRYPTO_gcm128_decrypt_ctr32(&gcm, key, in, out, in_len,
1303                                      gcm_ctx->ctr)) {
1304       return 0;
1305     }
1306   } else {
1307     if (!CRYPTO_gcm128_decrypt(&gcm, key, in, out, in_len)) {
1308       return 0;
1309     }
1310   }
1311 
1312   CRYPTO_gcm128_tag(&gcm, tag, ctx->tag_len);
1313   if (CRYPTO_memcmp(tag, in_tag, ctx->tag_len) != 0) {
1314     OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_BAD_DECRYPT);
1315     return 0;
1316   }
1317 
1318   return 1;
1319 }
1320 
1321 DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_128_gcm) {
1322   memset(out, 0, sizeof(EVP_AEAD));
1323 
1324   out->key_len = 16;
1325   out->nonce_len = 12;
1326   out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
1327   out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
1328   out->seal_scatter_supports_extra_in = 1;
1329 
1330   out->init = aead_aes_gcm_init;
1331   out->cleanup = aead_aes_gcm_cleanup;
1332   out->seal_scatter = aead_aes_gcm_seal_scatter;
1333   out->open_gather = aead_aes_gcm_open_gather;
1334 }
1335 
1336 DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_256_gcm) {
1337   memset(out, 0, sizeof(EVP_AEAD));
1338 
1339   out->key_len = 32;
1340   out->nonce_len = 12;
1341   out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
1342   out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
1343   out->seal_scatter_supports_extra_in = 1;
1344 
1345   out->init = aead_aes_gcm_init;
1346   out->cleanup = aead_aes_gcm_cleanup;
1347   out->seal_scatter = aead_aes_gcm_seal_scatter;
1348   out->open_gather = aead_aes_gcm_open_gather;
1349 }
1350 
1351 static int aead_aes_gcm_tls12_init(EVP_AEAD_CTX *ctx, const uint8_t *key,
1352                                    size_t key_len, size_t requested_tag_len) {
1353   struct aead_aes_gcm_tls12_ctx *gcm_ctx;
1354   gcm_ctx = OPENSSL_malloc(sizeof(struct aead_aes_gcm_tls12_ctx));
1355   if (gcm_ctx == NULL) {
1356     return 0;
1357   }
1358 
1359   gcm_ctx->counter = 0;
1360 
1361   size_t actual_tag_len;
1362   if (!aead_aes_gcm_init_impl(&gcm_ctx->gcm_ctx, &actual_tag_len, key, key_len,
1363                               requested_tag_len)) {
1364     OPENSSL_free(gcm_ctx);
1365     return 0;
1366   }
1367 
1368   ctx->aead_state = gcm_ctx;
1369   ctx->tag_len = actual_tag_len;
1370   return 1;
1371 }
1372 
1373 static void aead_aes_gcm_tls12_cleanup(EVP_AEAD_CTX *ctx) {
1374   struct aead_aes_gcm_tls12_ctx *gcm_ctx = ctx->aead_state;
1375   OPENSSL_cleanse(gcm_ctx, sizeof(struct aead_aes_gcm_tls12_ctx));
1376   OPENSSL_free(gcm_ctx);
1377 }
1378 
1379 static int aead_aes_gcm_tls12_seal_scatter(
1380     const EVP_AEAD_CTX *ctx, uint8_t *out, uint8_t *out_tag,
1381     size_t *out_tag_len, size_t max_out_tag_len, const uint8_t *nonce,
1382     size_t nonce_len, const uint8_t *in, size_t in_len, const uint8_t *extra_in,
1383     size_t extra_in_len, const uint8_t *ad, size_t ad_len) {
1384   struct aead_aes_gcm_tls12_ctx *gcm_ctx = ctx->aead_state;
1385   if (gcm_ctx->counter == UINT64_MAX) {
1386     OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_INVALID_NONCE);
1387     return 0;
1388   }
1389 
1390   if (nonce_len != 12) {
1391     OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_UNSUPPORTED_NONCE_SIZE);
1392     return 0;
1393   }
1394 
1395   const uint64_t be_counter = CRYPTO_bswap8(gcm_ctx->counter);
1396   if (OPENSSL_memcmp((uint8_t *)&be_counter, nonce + nonce_len - 8, 8) != 0) {
1397     OPENSSL_PUT_ERROR(CIPHER, CIPHER_R_INVALID_NONCE);
1398     return 0;
1399   }
1400 
1401   gcm_ctx->counter++;
1402 
1403   return aead_aes_gcm_seal_scatter(ctx, out, out_tag, out_tag_len,
1404                                    max_out_tag_len, nonce, nonce_len, in,
1405                                    in_len, extra_in, extra_in_len, ad, ad_len);
1406 }
1407 
1408 DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_128_gcm_tls12) {
1409   memset(out, 0, sizeof(EVP_AEAD));
1410 
1411   out->key_len = 16;
1412   out->nonce_len = 12;
1413   out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
1414   out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
1415   out->seal_scatter_supports_extra_in = 1;
1416 
1417   out->init = aead_aes_gcm_tls12_init;
1418   out->cleanup = aead_aes_gcm_tls12_cleanup;
1419   out->seal_scatter = aead_aes_gcm_tls12_seal_scatter;
1420   out->open_gather = aead_aes_gcm_open_gather;
1421 }
1422 
1423 DEFINE_METHOD_FUNCTION(EVP_AEAD, EVP_aead_aes_256_gcm_tls12) {
1424   memset(out, 0, sizeof(EVP_AEAD));
1425 
1426   out->key_len = 32;
1427   out->nonce_len = 12;
1428   out->overhead = EVP_AEAD_AES_GCM_TAG_LEN;
1429   out->max_tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
1430   out->seal_scatter_supports_extra_in = 1;
1431 
1432   out->init = aead_aes_gcm_tls12_init;
1433   out->cleanup = aead_aes_gcm_tls12_cleanup;
1434   out->seal_scatter = aead_aes_gcm_tls12_seal_scatter;
1435   out->open_gather = aead_aes_gcm_open_gather;
1436 }
1437 
1438 int EVP_has_aes_hardware(void) {
1439 #if defined(OPENSSL_X86) || defined(OPENSSL_X86_64)
1440   return aesni_capable() && crypto_gcm_clmul_enabled();
1441 #elif defined(OPENSSL_ARM) || defined(OPENSSL_AARCH64)
1442   return hwaes_capable() && CRYPTO_is_ARMv8_PMULL_capable();
1443 #else
1444   return 0;
1445 #endif
1446 }
1447
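/* Usage sketch for the AEADs defined above (illustrative only, assuming the
 * EVP_AEAD interface from <openssl/aead.h>; error checking omitted):
 *
 *   EVP_AEAD_CTX aead;
 *   EVP_AEAD_CTX_init(&aead, EVP_aead_aes_128_gcm(), key, 16,
 *                     EVP_AEAD_DEFAULT_TAG_LENGTH, NULL);
 *   size_t out_len;
 *   EVP_AEAD_CTX_seal(&aead, out, &out_len, sizeof(out),
 *                     nonce, 12, in, in_len, ad, ad_len);
 *   EVP_AEAD_CTX_cleanup(&aead);
 */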