/* ====================================================================
 * Copyright (c) 2001-2011 The OpenSSL Project.  All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 *
 * 3. All advertising materials mentioning features or use of this
 *    software must display the following acknowledgment:
 *    "This product includes software developed by the OpenSSL Project
 *    for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
 *
 * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
 *    endorse or promote products derived from this software without
 *    prior written permission. For written permission, please contact
 *    openssl-core@openssl.org.
 *
 * 5. Products derived from this software may not be called "OpenSSL"
 *    nor may "OpenSSL" appear in their names without prior written
 *    permission of the OpenSSL Project.
 *
 * 6. Redistributions of any form whatsoever must retain the following
 *    acknowledgment:
 *    "This product includes software developed by the OpenSSL Project
 *    for use in the OpenSSL Toolkit (http://www.openssl.org/)"
 *
 * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
 * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE OpenSSL PROJECT OR
 * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
 * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
 * OF THE POSSIBILITY OF SUCH DAMAGE.
 * ==================================================================== */

#include <stdlib.h>  /* for abort(), used by the dummy fallbacks below */
#include <string.h>

#include <openssl/aead.h>
#include <openssl/aes.h>
#include <openssl/cipher.h>
#include <openssl/cpu.h>
#include <openssl/err.h>
#include <openssl/mem.h>
#include <openssl/modes.h>
#include <openssl/obj.h>
#include <openssl/rand.h>
#include <openssl/sha.h>

#include "internal.h"
#include "../internal.h"
#include "../modes/internal.h"

#if defined(OPENSSL_ARM) || defined(OPENSSL_AARCH64)
#include "../arm_arch.h"
#endif


typedef struct {
  union {
    double align;  /* |align| ensures |ks| has at least a |double|'s alignment. */
    AES_KEY ks;
  } ks;
  block128_f block;
  union {
    cbc128_f cbc;
    ctr128_f ctr;
  } stream;
} EVP_AES_KEY;

typedef struct {
  union {
    double align;
    AES_KEY ks;
  } ks;         /* AES key schedule to use */
  int key_set;  /* Set if key initialised */
  int iv_set;   /* Set if an IV is set */
  GCM128_CONTEXT gcm;
  uint8_t *iv;  /* Temporary IV store */
  int ivlen;    /* IV length */
  int taglen;
  int iv_gen;   /* It is OK to generate IVs */
  ctr128_f ctr;
} EVP_AES_GCM_CTX;

#if !defined(OPENSSL_NO_ASM) && \
    (defined(OPENSSL_X86_64) || defined(OPENSSL_X86))
#define VPAES
extern unsigned int OPENSSL_ia32cap_P[];

static char vpaes_capable(void) {
  return (OPENSSL_ia32cap_P[1] & (1 << (41 - 32))) != 0;
}

#if defined(OPENSSL_X86_64)
#define BSAES
static char bsaes_capable(void) {
  return vpaes_capable();
}
#endif

#elif !defined(OPENSSL_NO_ASM) && \
    (defined(OPENSSL_ARM) || defined(OPENSSL_AARCH64))
#include "../arm_arch.h"

#if defined(OPENSSL_ARM) && __ARM_MAX_ARCH__ >= 7
#define BSAES
static char bsaes_capable(void) {
  return CRYPTO_is_NEON_capable();
}
#endif

#define HWAES
static char hwaes_capable(void) {
  return (OPENSSL_armcap_P & ARMV8_AES) != 0;
}

int aes_v8_set_encrypt_key(const uint8_t *user_key, const int bits,
                           AES_KEY *key);
int aes_v8_set_decrypt_key(const uint8_t *user_key, const int bits,
                           AES_KEY *key);
void aes_v8_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);
void aes_v8_decrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);
void aes_v8_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                        const AES_KEY *key, uint8_t *ivec, const int enc);
void aes_v8_ctr32_encrypt_blocks(const uint8_t *in, uint8_t *out, size_t len,
                                 const AES_KEY *key, const uint8_t ivec[16]);

#endif  /* OPENSSL_ARM || OPENSSL_AARCH64 */

#if defined(BSAES)
/* On platforms where BSAES is defined (just above), these functions are
 * provided by asm. */
void bsaes_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                       const AES_KEY *key, uint8_t ivec[16], int enc);
void bsaes_ctr32_encrypt_blocks(const uint8_t *in, uint8_t *out, size_t len,
                                const AES_KEY *key, const uint8_t ivec[16]);
#else
static char bsaes_capable(void) {
  return 0;
}

/* On other platforms, bsaes_capable() will always return false and so the
 * following will never be called. */
void bsaes_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                       const AES_KEY *key, uint8_t ivec[16], int enc) {
  abort();
}

void bsaes_ctr32_encrypt_blocks(const uint8_t *in, uint8_t *out, size_t len,
                                const AES_KEY *key, const uint8_t ivec[16]) {
  abort();
}
#endif

#if defined(VPAES)
/* On platforms where VPAES is defined (just above), these functions are
 * provided by asm. */
int vpaes_set_encrypt_key(const uint8_t *userKey, int bits, AES_KEY *key);
int vpaes_set_decrypt_key(const uint8_t *userKey, int bits, AES_KEY *key);

void vpaes_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);
void vpaes_decrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);

void vpaes_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                       const AES_KEY *key, uint8_t *ivec, int enc);
#else
static char vpaes_capable(void) {
  return 0;
}

/* On other platforms, vpaes_capable() will always return false and so the
 * following will never be called. */
int vpaes_set_encrypt_key(const uint8_t *userKey, int bits, AES_KEY *key) {
  abort();
}
int vpaes_set_decrypt_key(const uint8_t *userKey, int bits, AES_KEY *key) {
  abort();
}
void vpaes_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key) {
  abort();
}
void vpaes_decrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key) {
  abort();
}
void vpaes_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                       const AES_KEY *key, uint8_t *ivec, int enc) {
  abort();
}
#endif

#if !defined(HWAES)
/* If HWAES isn't defined then we provide dummy functions for each of the hwaes
 * functions. */
int hwaes_capable(void) {
  return 0;
}

int aes_v8_set_encrypt_key(const uint8_t *user_key, int bits, AES_KEY *key) {
  abort();
}

int aes_v8_set_decrypt_key(const uint8_t *user_key, int bits, AES_KEY *key) {
  abort();
}

void aes_v8_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key) {
  abort();
}

void aes_v8_decrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key) {
  abort();
}

void aes_v8_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                        const AES_KEY *key, uint8_t *ivec, int enc) {
  abort();
}

void aes_v8_ctr32_encrypt_blocks(const uint8_t *in, uint8_t *out, size_t len,
                                 const AES_KEY *key, const uint8_t ivec[16]) {
  abort();
}
#endif

#if !defined(OPENSSL_NO_ASM) && \
    (defined(OPENSSL_X86_64) || defined(OPENSSL_X86))
int aesni_set_encrypt_key(const uint8_t *userKey, int bits, AES_KEY *key);
int aesni_set_decrypt_key(const uint8_t *userKey, int bits, AES_KEY *key);

void aesni_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);
void aesni_decrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key);

void aesni_ecb_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                       const AES_KEY *key, int enc);
void aesni_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t length,
                       const AES_KEY *key, uint8_t *ivec, int enc);

void aesni_ctr32_encrypt_blocks(const uint8_t *in, uint8_t *out, size_t blocks,
                                const void *key, const uint8_t *ivec);

#if defined(OPENSSL_X86_64)
size_t aesni_gcm_encrypt(const uint8_t *in, uint8_t *out, size_t len,
                         const void *key, uint8_t ivec[16], uint64_t *Xi);
#define AES_gcm_encrypt aesni_gcm_encrypt
size_t aesni_gcm_decrypt(const uint8_t *in, uint8_t *out, size_t len,
                         const void *key, uint8_t ivec[16], uint64_t *Xi);
#define AES_gcm_decrypt aesni_gcm_decrypt
void gcm_ghash_avx(uint64_t Xi[2], const u128 Htable[16], const uint8_t *in,
                   size_t len);
#define AES_GCM_ASM(gctx) \
  (gctx->ctr == aesni_ctr32_encrypt_blocks && gctx->gcm.ghash == gcm_ghash_avx)
#endif  /* OPENSSL_X86_64 */

#else

/* On other platforms, aesni_capable() will always return false and so the
 * following will never be called. */
void aesni_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key) {
  abort();
}
int aesni_set_encrypt_key(const uint8_t *userKey, int bits, AES_KEY *key) {
  abort();
}
void aesni_ctr32_encrypt_blocks(const uint8_t *in, uint8_t *out, size_t blocks,
                                const void *key, const uint8_t *ivec) {
  abort();
}

#endif

static int aes_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
                        const uint8_t *iv, int enc)
                        OPENSSL_SUPPRESS_UNREACHABLE_CODE_WARNINGS {
  int ret, mode;
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  mode = ctx->cipher->flags & EVP_CIPH_MODE_MASK;
  if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE) && !enc) {
    if (hwaes_capable()) {
      ret = aes_v8_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      dat->block = (block128_f)aes_v8_decrypt;
      dat->stream.cbc = NULL;
      if (mode == EVP_CIPH_CBC_MODE) {
        dat->stream.cbc = (cbc128_f)aes_v8_cbc_encrypt;
      }
    } else if (bsaes_capable() && mode == EVP_CIPH_CBC_MODE) {
      ret = AES_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      dat->block = (block128_f)AES_decrypt;
      dat->stream.cbc = (cbc128_f)bsaes_cbc_encrypt;
    } else if (vpaes_capable()) {
      ret = vpaes_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      dat->block = (block128_f)vpaes_decrypt;
      dat->stream.cbc =
          mode == EVP_CIPH_CBC_MODE ? (cbc128_f)vpaes_cbc_encrypt : NULL;
    } else {
      ret = AES_set_decrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
      dat->block = (block128_f)AES_decrypt;
      dat->stream.cbc =
          mode == EVP_CIPH_CBC_MODE ? (cbc128_f)AES_cbc_encrypt : NULL;
    }
  } else if (hwaes_capable()) {
    ret = aes_v8_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
    dat->block = (block128_f)aes_v8_encrypt;
    dat->stream.cbc = NULL;
    if (mode == EVP_CIPH_CBC_MODE) {
      dat->stream.cbc = (cbc128_f)aes_v8_cbc_encrypt;
    } else if (mode == EVP_CIPH_CTR_MODE) {
      dat->stream.ctr = (ctr128_f)aes_v8_ctr32_encrypt_blocks;
    }
  } else if (bsaes_capable() && mode == EVP_CIPH_CTR_MODE) {
    ret = AES_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
    dat->block = (block128_f)AES_encrypt;
    dat->stream.ctr = (ctr128_f)bsaes_ctr32_encrypt_blocks;
  } else if (vpaes_capable()) {
    ret = vpaes_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
    dat->block = (block128_f)vpaes_encrypt;
    dat->stream.cbc =
        mode == EVP_CIPH_CBC_MODE ? (cbc128_f)vpaes_cbc_encrypt : NULL;
  } else {
    ret = AES_set_encrypt_key(key, ctx->key_len * 8, &dat->ks.ks);
    dat->block = (block128_f)AES_encrypt;
    dat->stream.cbc =
        mode == EVP_CIPH_CBC_MODE ? (cbc128_f)AES_cbc_encrypt : NULL;
  }

  if (ret < 0) {
    OPENSSL_PUT_ERROR(CIPHER, aes_init_key, CIPHER_R_AES_KEY_SETUP_FAILED);
    return 0;
  }

  return 1;
}

static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  if (dat->stream.cbc) {
    (*dat->stream.cbc)(in, out, len, &dat->ks, ctx->iv, ctx->encrypt);
  } else if (ctx->encrypt) {
    CRYPTO_cbc128_encrypt(in, out, len, &dat->ks, ctx->iv, dat->block);
  } else {
    CRYPTO_cbc128_decrypt(in, out, len, &dat->ks, ctx->iv, dat->block);
  }

  return 1;
}

static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  size_t bl = ctx->cipher->block_size;
  size_t i;
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  if (len < bl) {
    return 1;
  }

  for (i = 0, len -= bl; i <= len; i += bl) {
    (*dat->block)(in + i, out + i, &dat->ks);
  }

  return 1;
}

static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  unsigned int num = ctx->num;
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  if (dat->stream.ctr) {
    CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks, ctx->iv, ctx->buf, &num,
                                dat->stream.ctr);
  } else {
    CRYPTO_ctr128_encrypt(in, out, len, &dat->ks, ctx->iv, ctx->buf, &num,
                          dat->block);
  }
  ctx->num = (size_t)num;
  return 1;
}

static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  CRYPTO_ofb128_encrypt(in, out, len, &dat->ks, ctx->iv, &ctx->num, dat->block);
  return 1;
}

static char aesni_capable(void);

static ctr128_f aes_ctr_set_key(AES_KEY *aes_key, GCM128_CONTEXT *gcm_ctx,
                                block128_f *out_block, const uint8_t *key,
                                size_t key_len)
                                OPENSSL_SUPPRESS_UNREACHABLE_CODE_WARNINGS {
  if (aesni_capable()) {
    aesni_set_encrypt_key(key, key_len * 8, aes_key);
    if (gcm_ctx != NULL) {
      CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)aesni_encrypt);
    }
    if (out_block) {
      *out_block = (block128_f)aesni_encrypt;
    }
    return (ctr128_f)aesni_ctr32_encrypt_blocks;
  }

  if (hwaes_capable()) {
    aes_v8_set_encrypt_key(key, key_len * 8, aes_key);
    if (gcm_ctx != NULL) {
      CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)aes_v8_encrypt);
    }
    if (out_block) {
      *out_block = (block128_f)aes_v8_encrypt;
    }
    return (ctr128_f)aes_v8_ctr32_encrypt_blocks;
  }

  if (bsaes_capable()) {
    AES_set_encrypt_key(key, key_len * 8, aes_key);
    if (gcm_ctx != NULL) {
      CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)AES_encrypt);
    }
    if (out_block) {
      *out_block = (block128_f)AES_encrypt;
    }
    return (ctr128_f)bsaes_ctr32_encrypt_blocks;
  }

  if (vpaes_capable()) {
    vpaes_set_encrypt_key(key, key_len * 8, aes_key);
    if (out_block) {
      *out_block = (block128_f)vpaes_encrypt;
    }
    if (gcm_ctx != NULL) {
      CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)vpaes_encrypt);
    }
    return NULL;
  }

  AES_set_encrypt_key(key, key_len * 8, aes_key);
  if (gcm_ctx != NULL) {
    CRYPTO_gcm128_init(gcm_ctx, aes_key, (block128_f)AES_encrypt);
  }
  if (out_block) {
    *out_block = (block128_f)AES_encrypt;
  }
  return NULL;
}

static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
                            const uint8_t *iv, int enc) {
  EVP_AES_GCM_CTX *gctx = ctx->cipher_data;
  if (!iv && !key) {
    return 1;
  }
  if (key) {
    gctx->ctr =
        aes_ctr_set_key(&gctx->ks.ks, &gctx->gcm, NULL, key, ctx->key_len);
    /* If we have an IV, set it directly; otherwise use the saved IV. */
    if (iv == NULL && gctx->iv_set) {
      iv = gctx->iv;
    }
    if (iv) {
      CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
      gctx->iv_set = 1;
    }
    gctx->key_set = 1;
  } else {
    /* If the key is already set, use the IV now; otherwise save a copy. */
    if (gctx->key_set) {
      CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
    } else {
      memcpy(gctx->iv, iv, gctx->ivlen);
    }
    gctx->iv_set = 1;
    gctx->iv_gen = 0;
  }
  return 1;
}

static void aes_gcm_cleanup(EVP_CIPHER_CTX *c) {
  EVP_AES_GCM_CTX *gctx = c->cipher_data;
  OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));
  if (gctx->iv != c->iv) {
    OPENSSL_free(gctx->iv);
  }
}

/* ctr64_inc increments the big-endian, 64-bit counter in |counter| by one. */
static void ctr64_inc(uint8_t *counter) {
  int n = 8;
  uint8_t c;

  do {
    --n;
    c = counter[n];
    ++c;
    counter[n] = c;
    if (c) {
      return;
    }
  } while (n);
}
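
/* Example: the carry stops at the first byte that does not wrap to zero, so
 * a counter whose last two bytes are 0x00 0xff increments to 0x01 0x00:
 *
 *   uint8_t ctr[8] = {0, 0, 0, 0, 0, 0, 0, 0xff};
 *   ctr64_inc(ctr);  // ctr is now {0, 0, 0, 0, 0, 0, 1, 0}
 */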

static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr) {
  EVP_AES_GCM_CTX *gctx = c->cipher_data;
  switch (type) {
    case EVP_CTRL_INIT:
      gctx->key_set = 0;
      gctx->iv_set = 0;
      gctx->ivlen = c->cipher->iv_len;
      gctx->iv = c->iv;
      gctx->taglen = -1;
      gctx->iv_gen = 0;
      return 1;

    case EVP_CTRL_GCM_SET_IVLEN:
      if (arg <= 0) {
        return 0;
      }

      /* Allocate memory for IV if needed */
      if (arg > EVP_MAX_IV_LENGTH && arg > gctx->ivlen) {
        if (gctx->iv != c->iv) {
          OPENSSL_free(gctx->iv);
        }
        gctx->iv = OPENSSL_malloc(arg);
        if (!gctx->iv) {
          return 0;
        }
      }
      gctx->ivlen = arg;
      return 1;

    case EVP_CTRL_GCM_SET_TAG:
      if (arg <= 0 || arg > 16 || c->encrypt) {
        return 0;
      }
      memcpy(c->buf, ptr, arg);
      gctx->taglen = arg;
      return 1;

    case EVP_CTRL_GCM_GET_TAG:
      if (arg <= 0 || arg > 16 || !c->encrypt || gctx->taglen < 0) {
        return 0;
      }
      memcpy(ptr, c->buf, arg);
      return 1;

    case EVP_CTRL_GCM_SET_IV_FIXED:
      /* Special case: -1 length restores whole IV */
      if (arg == -1) {
        memcpy(gctx->iv, ptr, gctx->ivlen);
        gctx->iv_gen = 1;
        return 1;
      }
      /* Fixed field must be at least 4 bytes and invocation field
       * at least 8. */
      if (arg < 4 || (gctx->ivlen - arg) < 8) {
        return 0;
      }
      if (arg) {
        memcpy(gctx->iv, ptr, arg);
      }
      if (c->encrypt && !RAND_bytes(gctx->iv + arg, gctx->ivlen - arg)) {
        return 0;
      }
      gctx->iv_gen = 1;
      return 1;

    case EVP_CTRL_GCM_IV_GEN:
      if (gctx->iv_gen == 0 || gctx->key_set == 0) {
        return 0;
      }
      CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
      if (arg <= 0 || arg > gctx->ivlen) {
        arg = gctx->ivlen;
      }
      memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
      /* Invocation field will be at least 8 bytes in size and
       * so no need to check wrap around or increment more than
       * last 8 bytes. */
      ctr64_inc(gctx->iv + gctx->ivlen - 8);
      gctx->iv_set = 1;
      return 1;

    case EVP_CTRL_GCM_SET_IV_INV:
      if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt) {
        return 0;
      }
      memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
      CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
      gctx->iv_set = 1;
      return 1;

    case EVP_CTRL_COPY: {
      EVP_CIPHER_CTX *out = ptr;
      EVP_AES_GCM_CTX *gctx_out = out->cipher_data;
      if (gctx->gcm.key) {
        if (gctx->gcm.key != &gctx->ks) {
          return 0;
        }
        gctx_out->gcm.key = &gctx_out->ks;
      }
      if (gctx->iv == c->iv) {
        gctx_out->iv = out->iv;
      } else {
        gctx_out->iv = OPENSSL_malloc(gctx->ivlen);
        if (!gctx_out->iv) {
          return 0;
        }
        memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
      }
      return 1;
    }

    default:
      return -1;
  }
}

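/* A minimal usage sketch for these ctrl operations through the public
 * EVP_CIPHER_CTX_ctrl() wrapper (illustrative only; error handling omitted,
 * and |key| and |iv| are assumed to be caller-supplied buffers, with the
 * 16-byte IV length an arbitrary example):
 *
 *   EVP_CIPHER_CTX ctx;
 *   EVP_CIPHER_CTX_init(&ctx);
 *   EVP_EncryptInit_ex(&ctx, EVP_aes_128_gcm(), NULL, NULL, NULL);
 *   EVP_CIPHER_CTX_ctrl(&ctx, EVP_CTRL_GCM_SET_IVLEN, 16, NULL);
 *   EVP_EncryptInit_ex(&ctx, NULL, NULL, key, iv);  // 16-byte IV now valid
 */
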
static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out, const uint8_t *in,
                          size_t len) {
  EVP_AES_GCM_CTX *gctx = ctx->cipher_data;

  /* If not set up, return error */
  if (!gctx->key_set) {
    return -1;
  }
  if (!gctx->iv_set) {
    return -1;
  }

  if (in) {
    if (out == NULL) {
      if (!CRYPTO_gcm128_aad(&gctx->gcm, in, len)) {
        return -1;
      }
    } else if (ctx->encrypt) {
      if (gctx->ctr) {
        size_t bulk = 0;
#if defined(AES_GCM_ASM)
        if (len >= 32 && AES_GCM_ASM(gctx)) {
          size_t res = (16 - gctx->gcm.mres) % 16;

          if (!CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res)) {
            return -1;
          }

          bulk = AES_gcm_encrypt(in + res, out + res, len - res, gctx->gcm.key,
                                 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
          gctx->gcm.len.u[1] += bulk;
          bulk += res;
        }
#endif
        if (!CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm, in + bulk, out + bulk,
                                         len - bulk, gctx->ctr)) {
          return -1;
        }
      } else {
        size_t bulk = 0;
        if (!CRYPTO_gcm128_encrypt(&gctx->gcm, in + bulk, out + bulk,
                                   len - bulk)) {
          return -1;
        }
      }
    } else {
      if (gctx->ctr) {
        size_t bulk = 0;
#if defined(AES_GCM_ASM)
        if (len >= 16 && AES_GCM_ASM(gctx)) {
          size_t res = (16 - gctx->gcm.mres) % 16;

          if (!CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res)) {
            return -1;
          }

          bulk = AES_gcm_decrypt(in + res, out + res, len - res, gctx->gcm.key,
                                 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
          gctx->gcm.len.u[1] += bulk;
          bulk += res;
        }
#endif
        if (!CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm, in + bulk, out + bulk,
                                         len - bulk, gctx->ctr)) {
          return -1;
        }
      } else {
        size_t bulk = 0;
        if (!CRYPTO_gcm128_decrypt(&gctx->gcm, in + bulk, out + bulk,
                                   len - bulk)) {
          return -1;
        }
      }
    }
    return len;
  } else {
    if (!ctx->encrypt) {
      if (gctx->taglen < 0 ||
          !CRYPTO_gcm128_finish(&gctx->gcm, ctx->buf, gctx->taglen)) {
        return -1;
      }
      gctx->iv_set = 0;
      return 0;
    }
    CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16);
    gctx->taglen = 16;
    /* Don't reuse the IV */
    gctx->iv_set = 0;
    return 0;
  }
}

static const EVP_CIPHER aes_128_cbc = {
    NID_aes_128_cbc,     16 /* block_size */, 16 /* key_size */,
    16 /* iv_len */,     sizeof(EVP_AES_KEY), EVP_CIPH_CBC_MODE,
    NULL /* app_data */, aes_init_key,        aes_cbc_cipher,
    NULL /* cleanup */,  NULL /* ctrl */};

static const EVP_CIPHER aes_128_ctr = {
    NID_aes_128_ctr,     1 /* block_size */,  16 /* key_size */,
    16 /* iv_len */,     sizeof(EVP_AES_KEY), EVP_CIPH_CTR_MODE,
    NULL /* app_data */, aes_init_key,        aes_ctr_cipher,
    NULL /* cleanup */,  NULL /* ctrl */};

static const EVP_CIPHER aes_128_ecb = {
    NID_aes_128_ecb,     16 /* block_size */, 16 /* key_size */,
    0 /* iv_len */,      sizeof(EVP_AES_KEY), EVP_CIPH_ECB_MODE,
    NULL /* app_data */, aes_init_key,        aes_ecb_cipher,
    NULL /* cleanup */,  NULL /* ctrl */};

static const EVP_CIPHER aes_128_ofb = {
    NID_aes_128_ofb128,  1 /* block_size */,  16 /* key_size */,
    16 /* iv_len */,     sizeof(EVP_AES_KEY), EVP_CIPH_OFB_MODE,
    NULL /* app_data */, aes_init_key,        aes_ofb_cipher,
    NULL /* cleanup */,  NULL /* ctrl */};

static const EVP_CIPHER aes_128_gcm = {
    NID_aes_128_gcm, 1 /* block_size */, 16 /* key_size */, 12 /* iv_len */,
    sizeof(EVP_AES_GCM_CTX),
    EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER |
        EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT |
        EVP_CIPH_FLAG_AEAD_CIPHER,
    NULL /* app_data */, aes_gcm_init_key, aes_gcm_cipher, aes_gcm_cleanup,
    aes_gcm_ctrl};


static const EVP_CIPHER aes_192_cbc = {
    NID_aes_192_cbc,     16 /* block_size */, 24 /* key_size */,
    16 /* iv_len */,     sizeof(EVP_AES_KEY), EVP_CIPH_CBC_MODE,
    NULL /* app_data */, aes_init_key,        aes_cbc_cipher,
    NULL /* cleanup */,  NULL /* ctrl */};

static const EVP_CIPHER aes_192_ctr = {
    NID_aes_192_ctr,     1 /* block_size */,  24 /* key_size */,
    16 /* iv_len */,     sizeof(EVP_AES_KEY), EVP_CIPH_CTR_MODE,
    NULL /* app_data */, aes_init_key,        aes_ctr_cipher,
    NULL /* cleanup */,  NULL /* ctrl */};

static const EVP_CIPHER aes_192_ecb = {
    NID_aes_192_ecb,     16 /* block_size */, 24 /* key_size */,
    0 /* iv_len */,      sizeof(EVP_AES_KEY), EVP_CIPH_ECB_MODE,
    NULL /* app_data */, aes_init_key,        aes_ecb_cipher,
    NULL /* cleanup */,  NULL /* ctrl */};

static const EVP_CIPHER aes_192_gcm = {
    NID_aes_192_gcm, 1 /* block_size */, 24 /* key_size */, 12 /* iv_len */,
    sizeof(EVP_AES_GCM_CTX),
    EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER |
        EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT |
        EVP_CIPH_FLAG_AEAD_CIPHER,
    NULL /* app_data */, aes_gcm_init_key, aes_gcm_cipher, aes_gcm_cleanup,
    aes_gcm_ctrl};


static const EVP_CIPHER aes_256_cbc = {
    NID_aes_256_cbc,     16 /* block_size */, 32 /* key_size */,
    16 /* iv_len */,     sizeof(EVP_AES_KEY), EVP_CIPH_CBC_MODE,
    NULL /* app_data */, aes_init_key,        aes_cbc_cipher,
    NULL /* cleanup */,  NULL /* ctrl */};

static const EVP_CIPHER aes_256_ctr = {
    NID_aes_256_ctr,     1 /* block_size */,  32 /* key_size */,
    16 /* iv_len */,     sizeof(EVP_AES_KEY), EVP_CIPH_CTR_MODE,
    NULL /* app_data */, aes_init_key,        aes_ctr_cipher,
    NULL /* cleanup */,  NULL /* ctrl */};

static const EVP_CIPHER aes_256_ecb = {
    NID_aes_256_ecb,     16 /* block_size */, 32 /* key_size */,
    0 /* iv_len */,      sizeof(EVP_AES_KEY), EVP_CIPH_ECB_MODE,
    NULL /* app_data */, aes_init_key,        aes_ecb_cipher,
    NULL /* cleanup */,  NULL /* ctrl */};

static const EVP_CIPHER aes_256_ofb = {
    NID_aes_256_ofb128,  1 /* block_size */,  32 /* key_size */,
    16 /* iv_len */,     sizeof(EVP_AES_KEY), EVP_CIPH_OFB_MODE,
    NULL /* app_data */, aes_init_key,        aes_ofb_cipher,
    NULL /* cleanup */,  NULL /* ctrl */};

static const EVP_CIPHER aes_256_gcm = {
    NID_aes_256_gcm, 1 /* block_size */, 32 /* key_size */, 12 /* iv_len */,
    sizeof(EVP_AES_GCM_CTX),
    EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER |
        EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT |
        EVP_CIPH_FLAG_AEAD_CIPHER,
    NULL /* app_data */, aes_gcm_init_key, aes_gcm_cipher, aes_gcm_cleanup,
    aes_gcm_ctrl};

#if !defined(OPENSSL_NO_ASM) && \
    (defined(OPENSSL_X86_64) || defined(OPENSSL_X86))

/* AES-NI section. */

static char aesni_capable(void) {
  return (OPENSSL_ia32cap_P[1] & (1 << (57 - 32))) != 0;
}

static int aesni_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
                          const uint8_t *iv, int enc) {
  int ret, mode;
  EVP_AES_KEY *dat = (EVP_AES_KEY *)ctx->cipher_data;

  mode = ctx->cipher->flags & EVP_CIPH_MODE_MASK;
  if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE) && !enc) {
    ret = aesni_set_decrypt_key(key, ctx->key_len * 8, ctx->cipher_data);
    dat->block = (block128_f)aesni_decrypt;
    dat->stream.cbc =
        mode == EVP_CIPH_CBC_MODE ? (cbc128_f)aesni_cbc_encrypt : NULL;
  } else {
    ret = aesni_set_encrypt_key(key, ctx->key_len * 8, ctx->cipher_data);
    dat->block = (block128_f)aesni_encrypt;
    if (mode == EVP_CIPH_CBC_MODE) {
      dat->stream.cbc = (cbc128_f)aesni_cbc_encrypt;
    } else if (mode == EVP_CIPH_CTR_MODE) {
      dat->stream.ctr = (ctr128_f)aesni_ctr32_encrypt_blocks;
    } else {
      dat->stream.cbc = NULL;
    }
  }

  if (ret < 0) {
    OPENSSL_PUT_ERROR(CIPHER, aesni_init_key, CIPHER_R_AES_KEY_SETUP_FAILED);
    return 0;
  }

  return 1;
}

static int aesni_cbc_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out,
                            const uint8_t *in, size_t len) {
  aesni_cbc_encrypt(in, out, len, ctx->cipher_data, ctx->iv, ctx->encrypt);

  return 1;
}

static int aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, uint8_t *out,
                            const uint8_t *in, size_t len) {
  size_t bl = ctx->cipher->block_size;

  if (len < bl) {
    return 1;
  }

  aesni_ecb_encrypt(in, out, len, ctx->cipher_data, ctx->encrypt);

  return 1;
}

static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const uint8_t *key,
                              const uint8_t *iv, int enc) {
  EVP_AES_GCM_CTX *gctx = ctx->cipher_data;
  if (!iv && !key) {
    return 1;
  }
  if (key) {
    aesni_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
    CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f)aesni_encrypt);
    gctx->ctr = (ctr128_f)aesni_ctr32_encrypt_blocks;
    /* If we have an IV, set it directly; otherwise use the saved IV. */
    if (iv == NULL && gctx->iv_set) {
      iv = gctx->iv;
    }
    if (iv) {
      CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
      gctx->iv_set = 1;
    }
    gctx->key_set = 1;
  } else {
    /* If the key is already set, use the IV now; otherwise save a copy. */
    if (gctx->key_set) {
      CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
    } else {
      memcpy(gctx->iv, iv, gctx->ivlen);
    }
    gctx->iv_set = 1;
    gctx->iv_gen = 0;
  }
  return 1;
}

static const EVP_CIPHER aesni_128_cbc = {
    NID_aes_128_cbc,     16 /* block_size */, 16 /* key_size */,
    16 /* iv_len */,     sizeof(EVP_AES_KEY), EVP_CIPH_CBC_MODE,
    NULL /* app_data */, aesni_init_key,      aesni_cbc_cipher,
    NULL /* cleanup */,  NULL /* ctrl */};

static const EVP_CIPHER aesni_128_ctr = {
    NID_aes_128_ctr,     1 /* block_size */,  16 /* key_size */,
    16 /* iv_len */,     sizeof(EVP_AES_KEY), EVP_CIPH_CTR_MODE,
    NULL /* app_data */, aesni_init_key,      aes_ctr_cipher,
    NULL /* cleanup */,  NULL /* ctrl */};

static const EVP_CIPHER aesni_128_ecb = {
    NID_aes_128_ecb,     16 /* block_size */, 16 /* key_size */,
    0 /* iv_len */,      sizeof(EVP_AES_KEY), EVP_CIPH_ECB_MODE,
    NULL /* app_data */, aesni_init_key,      aesni_ecb_cipher,
    NULL /* cleanup */,  NULL /* ctrl */};

static const EVP_CIPHER aesni_128_ofb = {
    NID_aes_128_ofb128,  1 /* block_size */,  16 /* key_size */,
    16 /* iv_len */,     sizeof(EVP_AES_KEY), EVP_CIPH_OFB_MODE,
    NULL /* app_data */, aesni_init_key,      aes_ofb_cipher,
    NULL /* cleanup */,  NULL /* ctrl */};

static const EVP_CIPHER aesni_128_gcm = {
    NID_aes_128_gcm, 1 /* block_size */, 16 /* key_size */, 12 /* iv_len */,
    sizeof(EVP_AES_GCM_CTX),
    EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER |
        EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT |
        EVP_CIPH_FLAG_AEAD_CIPHER,
    NULL /* app_data */, aesni_gcm_init_key, aes_gcm_cipher, aes_gcm_cleanup,
    aes_gcm_ctrl};


static const EVP_CIPHER aesni_192_cbc = {
    NID_aes_192_cbc,     16 /* block_size */, 24 /* key_size */,
    16 /* iv_len */,     sizeof(EVP_AES_KEY), EVP_CIPH_CBC_MODE,
    NULL /* app_data */, aesni_init_key,      aesni_cbc_cipher,
    NULL /* cleanup */,  NULL /* ctrl */};

static const EVP_CIPHER aesni_192_ctr = {
    NID_aes_192_ctr,     1 /* block_size */,  24 /* key_size */,
    16 /* iv_len */,     sizeof(EVP_AES_KEY), EVP_CIPH_CTR_MODE,
    NULL /* app_data */, aesni_init_key,      aes_ctr_cipher,
    NULL /* cleanup */,  NULL /* ctrl */};

static const EVP_CIPHER aesni_192_ecb = {
    NID_aes_192_ecb,     16 /* block_size */, 24 /* key_size */,
    0 /* iv_len */,      sizeof(EVP_AES_KEY), EVP_CIPH_ECB_MODE,
    NULL /* app_data */, aesni_init_key,      aesni_ecb_cipher,
    NULL /* cleanup */,  NULL /* ctrl */};

static const EVP_CIPHER aesni_192_gcm = {
    NID_aes_192_gcm, 1 /* block_size */, 24 /* key_size */, 12 /* iv_len */,
    sizeof(EVP_AES_GCM_CTX),
    EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER |
        EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT |
        EVP_CIPH_FLAG_AEAD_CIPHER,
    NULL /* app_data */, aesni_gcm_init_key, aes_gcm_cipher, aes_gcm_cleanup,
    aes_gcm_ctrl};


static const EVP_CIPHER aesni_256_cbc = {
    NID_aes_256_cbc,     16 /* block_size */, 32 /* key_size */,
    16 /* iv_len */,     sizeof(EVP_AES_KEY), EVP_CIPH_CBC_MODE,
    NULL /* app_data */, aesni_init_key,      aesni_cbc_cipher,
    NULL /* cleanup */,  NULL /* ctrl */};

static const EVP_CIPHER aesni_256_ctr = {
    NID_aes_256_ctr,     1 /* block_size */,  32 /* key_size */,
    16 /* iv_len */,     sizeof(EVP_AES_KEY), EVP_CIPH_CTR_MODE,
    NULL /* app_data */, aesni_init_key,      aes_ctr_cipher,
    NULL /* cleanup */,  NULL /* ctrl */};

static const EVP_CIPHER aesni_256_ecb = {
    NID_aes_256_ecb,     16 /* block_size */, 32 /* key_size */,
    0 /* iv_len */,      sizeof(EVP_AES_KEY), EVP_CIPH_ECB_MODE,
    NULL /* app_data */, aesni_init_key,      aesni_ecb_cipher,
    NULL /* cleanup */,  NULL /* ctrl */};

static const EVP_CIPHER aesni_256_ofb = {
    NID_aes_256_ofb128,  1 /* block_size */,  32 /* key_size */,
    16 /* iv_len */,     sizeof(EVP_AES_KEY), EVP_CIPH_OFB_MODE,
    NULL /* app_data */, aesni_init_key,      aes_ofb_cipher,
    NULL /* cleanup */,  NULL /* ctrl */};

static const EVP_CIPHER aesni_256_gcm = {
    NID_aes_256_gcm, 1 /* block_size */, 32 /* key_size */, 12 /* iv_len */,
    sizeof(EVP_AES_GCM_CTX),
    EVP_CIPH_GCM_MODE | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER |
        EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT | EVP_CIPH_CUSTOM_COPY |
        EVP_CIPH_FLAG_AEAD_CIPHER,
    NULL /* app_data */, aesni_gcm_init_key, aes_gcm_cipher, aes_gcm_cleanup,
    aes_gcm_ctrl};

#define EVP_CIPHER_FUNCTION(keybits, mode)             \
  const EVP_CIPHER *EVP_aes_##keybits##_##mode(void) { \
    if (aesni_capable()) {                             \
      return &aesni_##keybits##_##mode;                \
    } else {                                           \
      return &aes_##keybits##_##mode;                  \
    }                                                  \
  }

#else  /* ^^^  OPENSSL_X86_64 || OPENSSL_X86 */

static char aesni_capable(void) {
  return 0;
}

#define EVP_CIPHER_FUNCTION(keybits, mode)             \
  const EVP_CIPHER *EVP_aes_##keybits##_##mode(void) { \
    return &aes_##keybits##_##mode;                    \
  }

#endif
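
/* For reference, EVP_CIPHER_FUNCTION(128, gcm) expands (on the x86/x86-64
 * branch above) to a getter of the form:
 *
 *   const EVP_CIPHER *EVP_aes_128_gcm(void) {
 *     if (aesni_capable()) {
 *       return &aesni_128_gcm;
 *     } else {
 *       return &aes_128_gcm;
 *     }
 *   }
 *
 * and, on other platforms, to the unconditional variant. */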

EVP_CIPHER_FUNCTION(128, cbc)
EVP_CIPHER_FUNCTION(128, ctr)
EVP_CIPHER_FUNCTION(128, ecb)
EVP_CIPHER_FUNCTION(128, ofb)
EVP_CIPHER_FUNCTION(128, gcm)

EVP_CIPHER_FUNCTION(192, cbc)
EVP_CIPHER_FUNCTION(192, ctr)
EVP_CIPHER_FUNCTION(192, ecb)
EVP_CIPHER_FUNCTION(192, gcm)

EVP_CIPHER_FUNCTION(256, cbc)
EVP_CIPHER_FUNCTION(256, ctr)
EVP_CIPHER_FUNCTION(256, ecb)
EVP_CIPHER_FUNCTION(256, ofb)
EVP_CIPHER_FUNCTION(256, gcm)


#define EVP_AEAD_AES_GCM_TAG_LEN 16

struct aead_aes_gcm_ctx {
  union {
    double align;
    AES_KEY ks;
  } ks;
  GCM128_CONTEXT gcm;
  ctr128_f ctr;
  uint8_t tag_len;
};

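/* Sketch of how this AEAD is driven through the public API in
 * <openssl/aead.h> (illustrative; |key|, |nonce| and |plaintext| are assumed
 * to be caller-supplied, here with a 16-byte key and 12-byte nonce):
 *
 *   EVP_AEAD_CTX aead;
 *   uint8_t out[PLAINTEXT_LEN + EVP_AEAD_AES_GCM_TAG_LEN];
 *   size_t out_len;
 *   EVP_AEAD_CTX_init(&aead, EVP_aead_aes_128_gcm(), key, 16,
 *                     EVP_AEAD_DEFAULT_TAG_LENGTH, NULL);
 *   EVP_AEAD_CTX_seal(&aead, out, &out_len, sizeof(out), nonce, 12,
 *                     plaintext, PLAINTEXT_LEN, NULL, 0);  // no AD
 *   EVP_AEAD_CTX_cleanup(&aead);
 */
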
static int aead_aes_gcm_init(EVP_AEAD_CTX *ctx, const uint8_t *key,
                             size_t key_len, size_t tag_len) {
  struct aead_aes_gcm_ctx *gcm_ctx;
  const size_t key_bits = key_len * 8;

  if (key_bits != 128 && key_bits != 256) {
    OPENSSL_PUT_ERROR(CIPHER, aead_aes_gcm_init, CIPHER_R_BAD_KEY_LENGTH);
    return 0; /* EVP_AEAD_CTX_init should catch this. */
  }

  if (tag_len == EVP_AEAD_DEFAULT_TAG_LENGTH) {
    tag_len = EVP_AEAD_AES_GCM_TAG_LEN;
  }

  if (tag_len > EVP_AEAD_AES_GCM_TAG_LEN) {
    OPENSSL_PUT_ERROR(CIPHER, aead_aes_gcm_init, CIPHER_R_TAG_TOO_LARGE);
    return 0;
  }

  gcm_ctx = OPENSSL_malloc(sizeof(struct aead_aes_gcm_ctx));
  if (gcm_ctx == NULL) {
    return 0;
  }

  gcm_ctx->ctr =
      aes_ctr_set_key(&gcm_ctx->ks.ks, &gcm_ctx->gcm, NULL, key, key_len);
  gcm_ctx->tag_len = tag_len;
  ctx->aead_state = gcm_ctx;

  return 1;
}

static void aead_aes_gcm_cleanup(EVP_AEAD_CTX *ctx) {
  struct aead_aes_gcm_ctx *gcm_ctx = ctx->aead_state;
  OPENSSL_cleanse(gcm_ctx, sizeof(struct aead_aes_gcm_ctx));
  OPENSSL_free(gcm_ctx);
}

static int aead_aes_gcm_seal(const EVP_AEAD_CTX *ctx, uint8_t *out,
                             size_t *out_len, size_t max_out_len,
                             const uint8_t *nonce, size_t nonce_len,
                             const uint8_t *in, size_t in_len,
                             const uint8_t *ad, size_t ad_len) {
  size_t bulk = 0;
  const struct aead_aes_gcm_ctx *gcm_ctx = ctx->aead_state;
  GCM128_CONTEXT gcm;

  if (in_len + gcm_ctx->tag_len < in_len) {
    OPENSSL_PUT_ERROR(CIPHER, aead_aes_gcm_seal, CIPHER_R_TOO_LARGE);
    return 0;
  }

  if (max_out_len < in_len + gcm_ctx->tag_len) {
    OPENSSL_PUT_ERROR(CIPHER, aead_aes_gcm_seal, CIPHER_R_BUFFER_TOO_SMALL);
    return 0;
  }

  memcpy(&gcm, &gcm_ctx->gcm, sizeof(gcm));
  CRYPTO_gcm128_setiv(&gcm, nonce, nonce_len);

  if (ad_len > 0 && !CRYPTO_gcm128_aad(&gcm, ad, ad_len)) {
    return 0;
  }

  if (gcm_ctx->ctr) {
    if (!CRYPTO_gcm128_encrypt_ctr32(&gcm, in + bulk, out + bulk, in_len - bulk,
                                     gcm_ctx->ctr)) {
      return 0;
    }
  } else {
    if (!CRYPTO_gcm128_encrypt(&gcm, in + bulk, out + bulk, in_len - bulk)) {
      return 0;
    }
  }

  CRYPTO_gcm128_tag(&gcm, out + in_len, gcm_ctx->tag_len);
  *out_len = in_len + gcm_ctx->tag_len;
  return 1;
}

static int aead_aes_gcm_open(const EVP_AEAD_CTX *ctx, uint8_t *out,
                             size_t *out_len, size_t max_out_len,
                             const uint8_t *nonce, size_t nonce_len,
                             const uint8_t *in, size_t in_len,
                             const uint8_t *ad, size_t ad_len) {
  size_t bulk = 0;
  const struct aead_aes_gcm_ctx *gcm_ctx = ctx->aead_state;
  uint8_t tag[EVP_AEAD_AES_GCM_TAG_LEN];
  size_t plaintext_len;
  GCM128_CONTEXT gcm;

  if (in_len < gcm_ctx->tag_len) {
    OPENSSL_PUT_ERROR(CIPHER, aead_aes_gcm_open, CIPHER_R_BAD_DECRYPT);
    return 0;
  }

  plaintext_len = in_len - gcm_ctx->tag_len;

  if (max_out_len < plaintext_len) {
    OPENSSL_PUT_ERROR(CIPHER, aead_aes_gcm_open, CIPHER_R_BUFFER_TOO_SMALL);
    return 0;
  }

  memcpy(&gcm, &gcm_ctx->gcm, sizeof(gcm));
  CRYPTO_gcm128_setiv(&gcm, nonce, nonce_len);

  if (!CRYPTO_gcm128_aad(&gcm, ad, ad_len)) {
    return 0;
  }

  if (gcm_ctx->ctr) {
    if (!CRYPTO_gcm128_decrypt_ctr32(&gcm, in + bulk, out + bulk,
                                     in_len - bulk - gcm_ctx->tag_len,
                                     gcm_ctx->ctr)) {
      return 0;
    }
  } else {
    if (!CRYPTO_gcm128_decrypt(&gcm, in + bulk, out + bulk,
                               in_len - bulk - gcm_ctx->tag_len)) {
      return 0;
    }
  }

  CRYPTO_gcm128_tag(&gcm, tag, gcm_ctx->tag_len);
  if (CRYPTO_memcmp(tag, in + plaintext_len, gcm_ctx->tag_len) != 0) {
    OPENSSL_PUT_ERROR(CIPHER, aead_aes_gcm_open, CIPHER_R_BAD_DECRYPT);
    return 0;
  }

  *out_len = plaintext_len;
  return 1;
}

static const EVP_AEAD aead_aes_128_gcm = {
    16,                       /* key len */
    12,                       /* nonce len */
    EVP_AEAD_AES_GCM_TAG_LEN, /* overhead */
    EVP_AEAD_AES_GCM_TAG_LEN, /* max tag length */
    aead_aes_gcm_init,
    NULL, /* init_with_direction */
    aead_aes_gcm_cleanup,
    aead_aes_gcm_seal,
    aead_aes_gcm_open,
    NULL, /* get_rc4_state */
};

static const EVP_AEAD aead_aes_256_gcm = {
    32,                       /* key len */
    12,                       /* nonce len */
    EVP_AEAD_AES_GCM_TAG_LEN, /* overhead */
    EVP_AEAD_AES_GCM_TAG_LEN, /* max tag length */
    aead_aes_gcm_init,
    NULL, /* init_with_direction */
    aead_aes_gcm_cleanup,
    aead_aes_gcm_seal,
    aead_aes_gcm_open,
    NULL, /* get_rc4_state */
};

const EVP_AEAD *EVP_aead_aes_128_gcm(void) { return &aead_aes_128_gcm; }

const EVP_AEAD *EVP_aead_aes_256_gcm(void) { return &aead_aes_256_gcm; }


/* AES Key Wrap is specified in
 * http://csrc.nist.gov/groups/ST/toolkit/documents/kms/key-wrap.pdf
 * or https://tools.ietf.org/html/rfc3394 */

struct aead_aes_key_wrap_ctx {
  uint8_t key[32];
  unsigned key_bits;
};

static int aead_aes_key_wrap_init(EVP_AEAD_CTX *ctx, const uint8_t *key,
                                  size_t key_len, size_t tag_len) {
  struct aead_aes_key_wrap_ctx *kw_ctx;
  const size_t key_bits = key_len * 8;

  if (key_bits != 128 && key_bits != 256) {
    OPENSSL_PUT_ERROR(CIPHER, aead_aes_key_wrap_init, CIPHER_R_BAD_KEY_LENGTH);
    return 0; /* EVP_AEAD_CTX_init should catch this. */
  }

  if (tag_len == EVP_AEAD_DEFAULT_TAG_LENGTH) {
    tag_len = 8;
  }

  if (tag_len != 8) {
    OPENSSL_PUT_ERROR(CIPHER, aead_aes_key_wrap_init,
                      CIPHER_R_UNSUPPORTED_TAG_SIZE);
    return 0;
  }

  kw_ctx = OPENSSL_malloc(sizeof(struct aead_aes_key_wrap_ctx));
  if (kw_ctx == NULL) {
    OPENSSL_PUT_ERROR(CIPHER, aead_aes_key_wrap_init, ERR_R_MALLOC_FAILURE);
    return 0;
  }

  memcpy(kw_ctx->key, key, key_len);
  kw_ctx->key_bits = key_bits;

  ctx->aead_state = kw_ctx;
  return 1;
}

static void aead_aes_key_wrap_cleanup(EVP_AEAD_CTX *ctx) {
  struct aead_aes_key_wrap_ctx *kw_ctx = ctx->aead_state;
  OPENSSL_cleanse(kw_ctx, sizeof(struct aead_aes_key_wrap_ctx));
  OPENSSL_free(kw_ctx);
}

/* kDefaultAESKeyWrapNonce is the default nonce (initial value) given in
 * section 2.2.3.1 of RFC 3394. */
static const uint8_t kDefaultAESKeyWrapNonce[8] = {0xa6, 0xa6, 0xa6, 0xa6,
                                                   0xa6, 0xa6, 0xa6, 0xa6};

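/* Per RFC 3394, wrapping n 64-bit plaintext blocks produces n+1 64-bit
 * blocks: an 8-byte integrity check value followed by the n wrapped blocks,
 * so e.g. wrapping a 16-byte AES key yields 24 bytes of output. Unwrapping
 * recovers the check value and compares it against the nonce, which is why
 * both the overhead and the tag length of this AEAD are 8 bytes. */
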
static int aead_aes_key_wrap_seal(const EVP_AEAD_CTX *ctx, uint8_t *out,
                                  size_t *out_len, size_t max_out_len,
                                  const uint8_t *nonce, size_t nonce_len,
                                  const uint8_t *in, size_t in_len,
                                  const uint8_t *ad, size_t ad_len) {
  const struct aead_aes_key_wrap_ctx *kw_ctx = ctx->aead_state;
  union {
    double align;
    AES_KEY ks;
  } ks;
  /* Variables in this function match up with the variables in the second half
   * of section 2.2.1. */
  unsigned i, j, n;
  uint8_t A[AES_BLOCK_SIZE];

  if (ad_len != 0) {
    OPENSSL_PUT_ERROR(CIPHER, aead_aes_key_wrap_seal,
                      CIPHER_R_UNSUPPORTED_AD_SIZE);
    return 0;
  }

  if (nonce_len == 0) {
    nonce = kDefaultAESKeyWrapNonce;
    nonce_len = sizeof(kDefaultAESKeyWrapNonce);
  }

  if (nonce_len != 8) {
    OPENSSL_PUT_ERROR(CIPHER, aead_aes_key_wrap_seal,
                      CIPHER_R_UNSUPPORTED_NONCE_SIZE);
    return 0;
  }

  if (in_len % 8 != 0) {
    OPENSSL_PUT_ERROR(CIPHER, aead_aes_key_wrap_seal,
                      CIPHER_R_UNSUPPORTED_INPUT_SIZE);
    return 0;
  }

  /* The code below only handles a 32-bit |t| thus 6*|n| must be less than
   * 2^32, where |n| is |in_len| / 8. So in_len < 4/3 * 2^32 and we
   * conservatively cap it to 2^32-16 to stop 32-bit platforms complaining that
   * a comparison is always true. */
  if (in_len > 0xfffffff0) {
    OPENSSL_PUT_ERROR(CIPHER, aead_aes_key_wrap_seal, CIPHER_R_TOO_LARGE);
    return 0;
  }

  n = in_len / 8;

  if (n < 2) {
    OPENSSL_PUT_ERROR(CIPHER, aead_aes_key_wrap_seal,
                      CIPHER_R_UNSUPPORTED_INPUT_SIZE);
    return 0;
  }

  if (in_len + 8 < in_len) {
    OPENSSL_PUT_ERROR(CIPHER, aead_aes_key_wrap_seal, CIPHER_R_TOO_LARGE);
    return 0;
  }

  if (max_out_len < in_len + 8) {
    OPENSSL_PUT_ERROR(CIPHER, aead_aes_key_wrap_seal,
                      CIPHER_R_BUFFER_TOO_SMALL);
    return 0;
  }

  if (AES_set_encrypt_key(kw_ctx->key, kw_ctx->key_bits, &ks.ks) < 0) {
    OPENSSL_PUT_ERROR(CIPHER, aead_aes_key_wrap_seal,
                      CIPHER_R_AES_KEY_SETUP_FAILED);
    return 0;
  }

  memmove(out + 8, in, in_len);
  memcpy(A, nonce, 8);

  for (j = 0; j < 6; j++) {
    for (i = 1; i <= n; i++) {
      uint32_t t;

      memcpy(A + 8, out + 8 * i, 8);
      AES_encrypt(A, A, &ks.ks);
      t = n * j + i;
      /* XOR the step counter |t| into A as a big-endian value. */
      A[7] ^= t & 0xff;
      A[6] ^= (t >> 8) & 0xff;
      A[5] ^= (t >> 16) & 0xff;
      A[4] ^= (t >> 24) & 0xff;
      memcpy(out + 8 * i, A + 8, 8);
    }
  }

  memcpy(out, A, 8);
  *out_len = in_len + 8;
  return 1;
}

aead_aes_key_wrap_open(const EVP_AEAD_CTX * ctx,uint8_t * out,size_t * out_len,size_t max_out_len,const uint8_t * nonce,size_t nonce_len,const uint8_t * in,size_t in_len,const uint8_t * ad,size_t ad_len)1375 static int aead_aes_key_wrap_open(const EVP_AEAD_CTX *ctx, uint8_t *out,
1376                                   size_t *out_len, size_t max_out_len,
1377                                   const uint8_t *nonce, size_t nonce_len,
1378                                   const uint8_t *in, size_t in_len,
1379                                   const uint8_t *ad, size_t ad_len) {
1380   const struct aead_aes_key_wrap_ctx *kw_ctx = ctx->aead_state;
1381   union {
1382     double align;
1383     AES_KEY ks;
1384   } ks;
1385   /* Variables in this function match up with the variables in the second half
1386    * of section 2.2.1. */
1387   unsigned i, j, n;
1388   uint8_t A[AES_BLOCK_SIZE];
1389 
1390   if (ad_len != 0) {
1391     OPENSSL_PUT_ERROR(CIPHER, aead_aes_key_wrap_open,
1392                       CIPHER_R_UNSUPPORTED_AD_SIZE);
1393     return 0;
1394   }
1395 
1396   if (nonce_len == 0) {
1397     nonce = kDefaultAESKeyWrapNonce;
1398     nonce_len = sizeof(kDefaultAESKeyWrapNonce);
1399   }
1400 
1401   if (nonce_len != 8) {
1402     OPENSSL_PUT_ERROR(CIPHER, aead_aes_key_wrap_open,
1403                       CIPHER_R_UNSUPPORTED_NONCE_SIZE);
1404     return 0;
1405   }
1406 
1407   if (in_len % 8 != 0) {
1408     OPENSSL_PUT_ERROR(CIPHER, aead_aes_key_wrap_open,
1409                       CIPHER_R_UNSUPPORTED_INPUT_SIZE);
1410     return 0;
1411   }
1412 
1413   /* The code below only handles a 32-bit |t| thus 6*|n| must be less than
1414    * 2^32, where |n| is |in_len| / 8. So in_len < 4/3 * 2^32 and we
1415    * conservatively cap it to 2^32-8 to stop 32-bit platforms complaining that
1416    * a comparison is always true. */
1417   if (in_len > 0xfffffff8) {
1418     OPENSSL_PUT_ERROR(CIPHER, aead_aes_key_wrap_open, CIPHER_R_TOO_LARGE);
1419     return 0;
1420   }
1421 
1422   if (in_len < 24) {
1423     OPENSSL_PUT_ERROR(CIPHER, aead_aes_key_wrap_open, CIPHER_R_BAD_DECRYPT);
1424     return 0;
1425   }
1426 
1427   n = (in_len / 8) - 1;
1428 
1429   if (max_out_len < in_len - 8) {
1430     OPENSSL_PUT_ERROR(CIPHER, aead_aes_key_wrap_open,
1431                       CIPHER_R_BUFFER_TOO_SMALL);
1432     return 0;
1433   }
1434 
1435   if (AES_set_decrypt_key(kw_ctx->key, kw_ctx->key_bits, &ks.ks) < 0) {
1436     OPENSSL_PUT_ERROR(CIPHER, aead_aes_key_wrap_open,
1437                       CIPHER_R_AES_KEY_SETUP_FAILED);
1438     return 0;
1439   }
1440 
1441   memcpy(A, in, 8);
1442   memmove(out, in + 8, in_len - 8);
1443 
  for (j = 5; j < 6; j--) {
    for (i = n; i > 0; i--) {
      uint32_t t;

      t = n * j + i;
      A[7] ^= t & 0xff;
      A[6] ^= (t >> 8) & 0xff;
      A[5] ^= (t >> 16) & 0xff;
      A[4] ^= (t >> 24) & 0xff;
      memcpy(A + 8, out + 8 * (i - 1), 8);
      AES_decrypt(A, A, &ks.ks);
      memcpy(out + 8 * (i - 1), A + 8, 8);
    }
  }

  if (CRYPTO_memcmp(A, nonce, 8) != 0) {
    OPENSSL_PUT_ERROR(CIPHER, aead_aes_key_wrap_open, CIPHER_R_BAD_DECRYPT);
    return 0;
  }

  *out_len = in_len - 8;
  return 1;
}

static const EVP_AEAD aead_aes_128_key_wrap = {
    16, /* key len */
    8,  /* nonce len */
    8,  /* overhead */
    8,  /* max tag length */
    aead_aes_key_wrap_init,
    NULL, /* init_with_direction */
    aead_aes_key_wrap_cleanup,
    aead_aes_key_wrap_seal,
    aead_aes_key_wrap_open,
    NULL, /* get_rc4_state */
};

static const EVP_AEAD aead_aes_256_key_wrap = {
    32, /* key len */
    8,  /* nonce len */
    8,  /* overhead */
    8,  /* max tag length */
    aead_aes_key_wrap_init,
    NULL, /* init_with_direction */
    aead_aes_key_wrap_cleanup,
    aead_aes_key_wrap_seal,
    aead_aes_key_wrap_open,
    NULL, /* get_rc4_state */
};

const EVP_AEAD *EVP_aead_aes_128_key_wrap(void) {
  return &aead_aes_128_key_wrap;
}

const EVP_AEAD *EVP_aead_aes_256_key_wrap(void) {
  return &aead_aes_256_key_wrap;
}
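
/* Example use via the public AEAD API (an illustrative sketch, not part of
 * this file; it assumes the |EVP_AEAD_CTX| functions from <openssl/aead.h>).
 * Wrapping a 16-byte key |key_to_wrap| under a 16-byte KEK |kek| produces 24
 * bytes of output: the 8-byte integrity check value plus the wrapped key.
 *
 *   EVP_AEAD_CTX ctx;
 *   uint8_t wrapped[16 + 8];
 *   size_t wrapped_len;
 *   if (!EVP_AEAD_CTX_init(&ctx, EVP_aead_aes_128_key_wrap(), kek, 16,
 *                          EVP_AEAD_DEFAULT_TAG_LENGTH, NULL) ||
 *       !EVP_AEAD_CTX_seal(&ctx, wrapped, &wrapped_len, sizeof(wrapped),
 *                          NULL, 0, key_to_wrap, 16, NULL, 0)) {
 *     // handle error
 *   }
 *   EVP_AEAD_CTX_cleanup(&ctx);
 *
 * Passing a zero-length nonce selects |kDefaultAESKeyWrapNonce|, as in
 * |aead_aes_key_wrap_open| above. */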


#define EVP_AEAD_AES_CTR_HMAC_SHA256_TAG_LEN SHA256_DIGEST_LENGTH
#define EVP_AEAD_AES_CTR_HMAC_SHA256_NONCE_LEN 12

struct aead_aes_ctr_hmac_sha256_ctx {
  union {
    double align;
    AES_KEY ks;
  } ks;
  ctr128_f ctr;
  block128_f block;
  SHA256_CTX inner_init_state;
  SHA256_CTX outer_init_state;
  uint8_t tag_len;
};
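
/* This AEAD is an encrypt-then-MAC construction: the input is encrypted with
 * AES-CTR and a tag is computed with HMAC-SHA-256 over the lengths, nonce,
 * additional data and ciphertext, as laid out in |hmac_calculate| below. */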

static void hmac_init(SHA256_CTX *out_inner, SHA256_CTX *out_outer,
                      const uint8_t hmac_key[32]) {
  static const size_t hmac_key_len = 32;
  uint8_t block[SHA256_CBLOCK];
  memcpy(block, hmac_key, hmac_key_len);
  memset(block + hmac_key_len, 0x36, sizeof(block) - hmac_key_len);

  unsigned i;
  for (i = 0; i < hmac_key_len; i++) {
    block[i] ^= 0x36;
  }

  SHA256_Init(out_inner);
  SHA256_Update(out_inner, block, sizeof(block));

  memset(block + hmac_key_len, 0x5c, sizeof(block) - hmac_key_len);
  for (i = 0; i < hmac_key_len; i++) {
    block[i] ^= (0x36 ^ 0x5c);
  }

  SHA256_Init(out_outer);
  SHA256_Update(out_outer, block, sizeof(block));
}
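
/* Per RFC 2104, HMAC(K, m) = H((K ^ opad) || H((K ^ ipad) || m)), where ipad
 * is the byte 0x36 repeated and opad is 0x5c repeated. |hmac_init| caches the
 * two hash states that result from absorbing (K ^ ipad) and (K ^ opad), so
 * each later HMAC computation skips those two fixed initial blocks. */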

static int aead_aes_ctr_hmac_sha256_init(EVP_AEAD_CTX *ctx, const uint8_t *key,
                                         size_t key_len, size_t tag_len) {
  struct aead_aes_ctr_hmac_sha256_ctx *aes_ctx;
  static const size_t hmac_key_len = 32;

  if (key_len < hmac_key_len) {
    OPENSSL_PUT_ERROR(CIPHER, aead_aes_ctr_hmac_sha256_init,
                      CIPHER_R_BAD_KEY_LENGTH);
    return 0; /* EVP_AEAD_CTX_init should catch this. */
  }

  const size_t aes_key_len = key_len - hmac_key_len;
  if (aes_key_len != 16 && aes_key_len != 32) {
    OPENSSL_PUT_ERROR(CIPHER, aead_aes_ctr_hmac_sha256_init,
                      CIPHER_R_BAD_KEY_LENGTH);
    return 0; /* EVP_AEAD_CTX_init should catch this. */
  }

  if (tag_len == EVP_AEAD_DEFAULT_TAG_LENGTH) {
    tag_len = EVP_AEAD_AES_CTR_HMAC_SHA256_TAG_LEN;
  }

  if (tag_len > EVP_AEAD_AES_CTR_HMAC_SHA256_TAG_LEN) {
    OPENSSL_PUT_ERROR(CIPHER, aead_aes_ctr_hmac_sha256_init,
                      CIPHER_R_TAG_TOO_LARGE);
    return 0;
  }

  aes_ctx = OPENSSL_malloc(sizeof(struct aead_aes_ctr_hmac_sha256_ctx));
  if (aes_ctx == NULL) {
    OPENSSL_PUT_ERROR(CIPHER, aead_aes_ctr_hmac_sha256_init,
                      ERR_R_MALLOC_FAILURE);
    return 0;
  }

  aes_ctx->ctr =
      aes_ctr_set_key(&aes_ctx->ks.ks, NULL, &aes_ctx->block, key, aes_key_len);
  aes_ctx->tag_len = tag_len;
  hmac_init(&aes_ctx->inner_init_state, &aes_ctx->outer_init_state,
            key + aes_key_len);

  ctx->aead_state = aes_ctx;

  return 1;
}

static void aead_aes_ctr_hmac_sha256_cleanup(EVP_AEAD_CTX *ctx) {
  struct aead_aes_ctr_hmac_sha256_ctx *aes_ctx = ctx->aead_state;
  OPENSSL_cleanse(aes_ctx, sizeof(struct aead_aes_ctr_hmac_sha256_ctx));
  OPENSSL_free(aes_ctx);
}

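/* hmac_update_uint64 hashes |value| as eight little-endian bytes. */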
static void hmac_update_uint64(SHA256_CTX *sha256, uint64_t value) {
  unsigned i;
  uint8_t bytes[8];

  for (i = 0; i < sizeof(bytes); i++) {
    bytes[i] = value & 0xff;
    value >>= 8;
  }
  SHA256_Update(sha256, bytes, sizeof(bytes));
}

static void hmac_calculate(uint8_t out[SHA256_DIGEST_LENGTH],
                           const SHA256_CTX *inner_init_state,
                           const SHA256_CTX *outer_init_state,
                           const uint8_t *ad, size_t ad_len,
                           const uint8_t *nonce, const uint8_t *ciphertext,
                           size_t ciphertext_len) {
  SHA256_CTX sha256;
  memcpy(&sha256, inner_init_state, sizeof(sha256));
  hmac_update_uint64(&sha256, ad_len);
  hmac_update_uint64(&sha256, ciphertext_len);
  SHA256_Update(&sha256, nonce, EVP_AEAD_AES_CTR_HMAC_SHA256_NONCE_LEN);
  SHA256_Update(&sha256, ad, ad_len);

  /* Pad with zeros to the end of the SHA-256 block. */
  const unsigned num_padding =
      (SHA256_CBLOCK - ((sizeof(uint64_t) * 2 +
                         EVP_AEAD_AES_CTR_HMAC_SHA256_NONCE_LEN + ad_len) %
                        SHA256_CBLOCK)) %
      SHA256_CBLOCK;
  uint8_t padding[SHA256_CBLOCK];
  memset(padding, 0, num_padding);
  SHA256_Update(&sha256, padding, num_padding);

  SHA256_Update(&sha256, ciphertext, ciphertext_len);

  uint8_t inner_digest[SHA256_DIGEST_LENGTH];
  SHA256_Final(inner_digest, &sha256);

  memcpy(&sha256, outer_init_state, sizeof(sha256));
  SHA256_Update(&sha256, inner_digest, sizeof(inner_digest));
  SHA256_Final(out, &sha256);
}
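
/* The HMAC input, in order, is thus:
 *   ad_len (8 bytes, little-endian) ||
 *   ciphertext_len (8 bytes, little-endian) ||
 *   nonce (12 bytes) ||
 *   ad ||
 *   zero padding to a 64-byte boundary ||
 *   ciphertext
 * The padding ensures that the ciphertext always begins on a SHA-256 block
 * boundary. */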

static void aead_aes_ctr_hmac_sha256_crypt(
    const struct aead_aes_ctr_hmac_sha256_ctx *aes_ctx, uint8_t *out,
    const uint8_t *in, size_t len, const uint8_t *nonce) {
  /* Since the AEAD operation is one-shot, keeping a buffer of unused keystream
   * bytes is pointless. However, |CRYPTO_ctr128_encrypt| requires it. */
  uint8_t partial_block_buffer[AES_BLOCK_SIZE];
  unsigned partial_block_offset = 0;
  memset(partial_block_buffer, 0, sizeof(partial_block_buffer));

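  /* The counter block is the 12-byte nonce followed by a 4-byte, big-endian
   * block counter that starts at zero. */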
  uint8_t counter[AES_BLOCK_SIZE];
  memcpy(counter, nonce, EVP_AEAD_AES_CTR_HMAC_SHA256_NONCE_LEN);
  memset(counter + EVP_AEAD_AES_CTR_HMAC_SHA256_NONCE_LEN, 0, 4);

  if (aes_ctx->ctr) {
    CRYPTO_ctr128_encrypt_ctr32(in, out, len, &aes_ctx->ks.ks, counter,
                                partial_block_buffer, &partial_block_offset,
                                aes_ctx->ctr);
  } else {
    CRYPTO_ctr128_encrypt(in, out, len, &aes_ctx->ks.ks, counter,
                          partial_block_buffer, &partial_block_offset,
                          aes_ctx->block);
  }
}

static int aead_aes_ctr_hmac_sha256_seal(const EVP_AEAD_CTX *ctx, uint8_t *out,
                                         size_t *out_len, size_t max_out_len,
                                         const uint8_t *nonce, size_t nonce_len,
                                         const uint8_t *in, size_t in_len,
                                         const uint8_t *ad, size_t ad_len) {
  const struct aead_aes_ctr_hmac_sha256_ctx *aes_ctx = ctx->aead_state;
  const uint64_t in_len_64 = in_len;

  if (in_len + aes_ctx->tag_len < in_len ||
      /* This input is so large it would overflow the 32-bit block counter. */
      in_len_64 >= (OPENSSL_U64(1) << 32) * AES_BLOCK_SIZE) {
    OPENSSL_PUT_ERROR(CIPHER, aead_aes_ctr_hmac_sha256_seal,
                      CIPHER_R_TOO_LARGE);
    return 0;
  }

  if (max_out_len < in_len + aes_ctx->tag_len) {
    OPENSSL_PUT_ERROR(CIPHER, aead_aes_ctr_hmac_sha256_seal,
                      CIPHER_R_BUFFER_TOO_SMALL);
    return 0;
  }

  if (nonce_len != EVP_AEAD_AES_CTR_HMAC_SHA256_NONCE_LEN) {
    OPENSSL_PUT_ERROR(CIPHER, aead_aes_ctr_hmac_sha256_seal,
                      CIPHER_R_UNSUPPORTED_NONCE_SIZE);
    return 0;
  }

  aead_aes_ctr_hmac_sha256_crypt(aes_ctx, out, in, in_len, nonce);

  uint8_t hmac_result[SHA256_DIGEST_LENGTH];
  hmac_calculate(hmac_result, &aes_ctx->inner_init_state,
                 &aes_ctx->outer_init_state, ad, ad_len, nonce, out, in_len);
  memcpy(out + in_len, hmac_result, aes_ctx->tag_len);
  *out_len = in_len + aes_ctx->tag_len;

  return 1;
}

static int aead_aes_ctr_hmac_sha256_open(const EVP_AEAD_CTX *ctx, uint8_t *out,
                                         size_t *out_len, size_t max_out_len,
                                         const uint8_t *nonce, size_t nonce_len,
                                         const uint8_t *in, size_t in_len,
                                         const uint8_t *ad, size_t ad_len) {
  const struct aead_aes_ctr_hmac_sha256_ctx *aes_ctx = ctx->aead_state;
  size_t plaintext_len;

  if (in_len < aes_ctx->tag_len) {
    OPENSSL_PUT_ERROR(CIPHER, aead_aes_ctr_hmac_sha256_open,
                      CIPHER_R_BAD_DECRYPT);
    return 0;
  }

  plaintext_len = in_len - aes_ctx->tag_len;

  if (max_out_len < plaintext_len) {
    OPENSSL_PUT_ERROR(CIPHER, aead_aes_ctr_hmac_sha256_open,
                      CIPHER_R_BUFFER_TOO_SMALL);
    return 0;
  }

  if (nonce_len != EVP_AEAD_AES_CTR_HMAC_SHA256_NONCE_LEN) {
    OPENSSL_PUT_ERROR(CIPHER, aead_aes_ctr_hmac_sha256_open,
                      CIPHER_R_UNSUPPORTED_NONCE_SIZE);
    return 0;
  }

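  /* Encrypt-then-MAC: recompute the tag over the ciphertext and compare it,
   * in constant time, before doing any decryption. */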
  uint8_t hmac_result[SHA256_DIGEST_LENGTH];
  hmac_calculate(hmac_result, &aes_ctx->inner_init_state,
                 &aes_ctx->outer_init_state, ad, ad_len, nonce, in,
                 plaintext_len);
  if (CRYPTO_memcmp(hmac_result, in + plaintext_len, aes_ctx->tag_len) != 0) {
    OPENSSL_PUT_ERROR(CIPHER, aead_aes_ctr_hmac_sha256_open,
                      CIPHER_R_BAD_DECRYPT);
    return 0;
  }

  aead_aes_ctr_hmac_sha256_crypt(aes_ctx, out, in, plaintext_len, nonce);

  *out_len = plaintext_len;
  return 1;
}

static const EVP_AEAD aead_aes_128_ctr_hmac_sha256 = {
    16 /* AES key */ + 32 /* HMAC key */,
    12,                                   /* nonce length */
    EVP_AEAD_AES_CTR_HMAC_SHA256_TAG_LEN, /* overhead */
    EVP_AEAD_AES_CTR_HMAC_SHA256_TAG_LEN, /* max tag length */

    aead_aes_ctr_hmac_sha256_init,
    NULL /* init_with_direction */,
    aead_aes_ctr_hmac_sha256_cleanup,
    aead_aes_ctr_hmac_sha256_seal,
    aead_aes_ctr_hmac_sha256_open,
    NULL /* get_rc4_state */,
};

static const EVP_AEAD aead_aes_256_ctr_hmac_sha256 = {
    32 /* AES key */ + 32 /* HMAC key */,
    12,                                   /* nonce length */
    EVP_AEAD_AES_CTR_HMAC_SHA256_TAG_LEN, /* overhead */
    EVP_AEAD_AES_CTR_HMAC_SHA256_TAG_LEN, /* max tag length */

    aead_aes_ctr_hmac_sha256_init,
    NULL /* init_with_direction */,
    aead_aes_ctr_hmac_sha256_cleanup,
    aead_aes_ctr_hmac_sha256_seal,
    aead_aes_ctr_hmac_sha256_open,
    NULL /* get_rc4_state */,
};

const EVP_AEAD *EVP_aead_aes_128_ctr_hmac_sha256(void) {
  return &aead_aes_128_ctr_hmac_sha256;
}

const EVP_AEAD *EVP_aead_aes_256_ctr_hmac_sha256(void) {
  return &aead_aes_256_ctr_hmac_sha256;
}
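
/* Example key layout (an illustrative sketch, not part of this file): the
 * AEAD key is the AES key followed by the 32-byte HMAC key, so
 * |EVP_aead_aes_128_ctr_hmac_sha256| takes 48 key bytes in total:
 *
 *   uint8_t key[16 + 32];  // AES-128 key || HMAC-SHA-256 key
 *   EVP_AEAD_CTX ctx;
 *   if (!EVP_AEAD_CTX_init(&ctx, EVP_aead_aes_128_ctr_hmac_sha256(), key,
 *                          sizeof(key), EVP_AEAD_DEFAULT_TAG_LENGTH, NULL)) {
 *     // handle error
 *   }
 */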

int EVP_has_aes_hardware(void) {
#if defined(OPENSSL_X86) || defined(OPENSSL_X86_64)
  return aesni_capable() && crypto_gcm_clmul_enabled();
#elif defined(OPENSSL_ARM) || defined(OPENSSL_AARCH64)
  return hwaes_capable() && (OPENSSL_armcap_P & ARMV8_PMULL);
#else
  return 0;
#endif
}
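
/* Illustrative use (an assumption about caller behaviour, not from this
 * file): callers commonly prefer an AES-based AEAD when AES and GHASH/PMULL
 * hardware is available and fall back to ChaCha20-Poly1305 otherwise, e.g.:
 *
 *   const EVP_AEAD *aead = EVP_has_aes_hardware()
 *                              ? EVP_aead_aes_128_gcm()
 *                              : EVP_aead_chacha20_poly1305();
 */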