/*
 * Copyright 2001-2020 The OpenSSL Project Authors. All Rights Reserved.
 *
 * Licensed under the OpenSSL license (the "License"). You may not use
 * this file except in compliance with the License. You can obtain a copy
 * in the file LICENSE in the source distribution or at
 * https://www.openssl.org/source/license.html
 */

#include <openssl/opensslconf.h>
#include <openssl/crypto.h>
#include <openssl/evp.h>
#include <openssl/err.h>
#include <string.h>
#include <assert.h>
#include <openssl/aes.h>
#include "crypto/evp.h"
#include "modes_local.h"
#include <openssl/rand.h>
#include "evp_local.h"

typedef struct {
    union {
        double align;
        AES_KEY ks;
    } ks;
    block128_f block;
    union {
        cbc128_f cbc;
        ctr128_f ctr;
    } stream;
} EVP_AES_KEY;

typedef struct {
    union {
        double align;
        AES_KEY ks;
    } ks;                       /* AES key schedule to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    GCM128_CONTEXT gcm;
    unsigned char *iv;          /* Temporary IV store */
    int ivlen;                  /* IV length */
    int taglen;
    int iv_gen;                 /* It is OK to generate IVs */
    int tls_aad_len;            /* TLS AAD length */
    ctr128_f ctr;
} EVP_AES_GCM_CTX;

typedef struct {
    union {
        double align;
        AES_KEY ks;
    } ks1, ks2;                 /* AES key schedules to use */
    XTS128_CONTEXT xts;
    void (*stream) (const unsigned char *in,
                    unsigned char *out, size_t length,
                    const AES_KEY *key1, const AES_KEY *key2,
                    const unsigned char iv[16]);
} EVP_AES_XTS_CTX;

typedef struct {
    union {
        double align;
        AES_KEY ks;
    } ks;                       /* AES key schedule to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    int tag_set;                /* Set if tag is valid */
    int len_set;                /* Set if message length set */
    int L, M;                   /* L and M parameters from RFC3610 */
    int tls_aad_len;            /* TLS AAD length */
    CCM128_CONTEXT ccm;
    ccm128_f str;
} EVP_AES_CCM_CTX;

#ifndef OPENSSL_NO_OCB
typedef struct {
    union {
        double align;
        AES_KEY ks;
    } ksenc;                    /* AES key schedule to use for encryption */
    union {
        double align;
        AES_KEY ks;
    } ksdec;                    /* AES key schedule to use for decryption */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    OCB128_CONTEXT ocb;
    unsigned char *iv;          /* Temporary IV store */
    unsigned char tag[16];
    unsigned char data_buf[16]; /* Store partial data blocks */
    unsigned char aad_buf[16];  /* Store partial AAD blocks */
    int data_buf_len;
    int aad_buf_len;
    int ivlen;                  /* IV length */
    int taglen;
} EVP_AES_OCB_CTX;
#endif

#define MAXBITCHUNK ((size_t)1<<(sizeof(size_t)*8-4))
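
/*
 * Illustrative note (not from the original source): MAXBITCHUNK appears
 * intended to cap how many bytes a bit-oriented code path (such as CFB1)
 * processes per call, so that a byte count can be converted to a bit
 * count (a left shift by 3) without overflowing size_t. On an LP64
 * platform sizeof(size_t) == 8, giving (size_t)1 << 60; on a 32-bit
 * platform it is 1 << 28.
 */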

#ifdef VPAES_ASM
int vpaes_set_encrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);
int vpaes_set_decrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);

void vpaes_encrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);
void vpaes_decrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);

void vpaes_cbc_encrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length,
                       const AES_KEY *key, unsigned char *ivec, int enc);
#endif
#ifdef BSAES_ASM
void bsaes_cbc_encrypt(const unsigned char *in, unsigned char *out,
                       size_t length, const AES_KEY *key,
                       unsigned char ivec[16], int enc);
void bsaes_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
                                size_t len, const AES_KEY *key,
                                const unsigned char ivec[16]);
void bsaes_xts_encrypt(const unsigned char *inp, unsigned char *out,
                       size_t len, const AES_KEY *key1,
                       const AES_KEY *key2, const unsigned char iv[16]);
void bsaes_xts_decrypt(const unsigned char *inp, unsigned char *out,
                       size_t len, const AES_KEY *key1,
                       const AES_KEY *key2, const unsigned char iv[16]);
#endif
#ifdef AES_CTR_ASM
void AES_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                       size_t blocks, const AES_KEY *key,
                       const unsigned char ivec[AES_BLOCK_SIZE]);
#endif
#ifdef AES_XTS_ASM
void AES_xts_encrypt(const unsigned char *inp, unsigned char *out, size_t len,
                     const AES_KEY *key1, const AES_KEY *key2,
                     const unsigned char iv[16]);
void AES_xts_decrypt(const unsigned char *inp, unsigned char *out, size_t len,
                     const AES_KEY *key1, const AES_KEY *key2,
                     const unsigned char iv[16]);
#endif

/* increment counter (64-bit int) by 1 */
static void ctr64_inc(unsigned char *counter)
{
    int n = 8;
    unsigned char c;

    do {
        --n;
        c = counter[n];
        ++c;
        counter[n] = c;
        if (c)
            return;
    } while (n);
}
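
/*
 * Usage sketch (illustrative, not part of the original source): the eight
 * bytes are treated as a big-endian integer and incremented with carry
 * propagation, e.g.
 *
 *     unsigned char ctr[8] = { 0, 0, 0, 0, 0, 0, 0x00, 0xff };
 *     ctr64_inc(ctr);          becomes 00 00 00 00 00 00 01 00
 *
 * The loop exits as soon as a byte does not wrap to zero, so the common
 * case touches only the least significant byte.
 */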

#if defined(OPENSSL_CPUID_OBJ) && (defined(__powerpc__) || defined(__ppc__) || defined(_ARCH_PPC))
# include "ppc_arch.h"
# ifdef VPAES_ASM
#  define VPAES_CAPABLE (OPENSSL_ppccap_P & PPC_ALTIVEC)
# endif
# define HWAES_CAPABLE (OPENSSL_ppccap_P & PPC_CRYPTO207)
# define HWAES_set_encrypt_key aes_p8_set_encrypt_key
# define HWAES_set_decrypt_key aes_p8_set_decrypt_key
# define HWAES_encrypt aes_p8_encrypt
# define HWAES_decrypt aes_p8_decrypt
# define HWAES_cbc_encrypt aes_p8_cbc_encrypt
# define HWAES_ctr32_encrypt_blocks aes_p8_ctr32_encrypt_blocks
# define HWAES_xts_encrypt aes_p8_xts_encrypt
# define HWAES_xts_decrypt aes_p8_xts_decrypt
#endif

#if defined(OPENSSL_CPUID_OBJ) && ( \
        ((defined(__i386) || defined(__i386__) || \
          defined(_M_IX86)) && defined(OPENSSL_IA32_SSE2)) || \
        defined(__x86_64) || defined(__x86_64__) || \
        defined(_M_AMD64) || defined(_M_X64) )

extern unsigned int OPENSSL_ia32cap_P[];

# ifdef VPAES_ASM
#  define VPAES_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(41-32)))
# endif
# ifdef BSAES_ASM
#  define BSAES_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(41-32)))
# endif
/*
 * AES-NI section
 */
# define AESNI_CAPABLE (OPENSSL_ia32cap_P[1]&(1<<(57-32)))

int aesni_set_encrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);
int aesni_set_decrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);

void aesni_encrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);
void aesni_decrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);

void aesni_ecb_encrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length, const AES_KEY *key, int enc);
void aesni_cbc_encrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length,
                       const AES_KEY *key, unsigned char *ivec, int enc);

void aesni_ctr32_encrypt_blocks(const unsigned char *in,
                                unsigned char *out,
                                size_t blocks,
                                const void *key, const unsigned char *ivec);

void aesni_xts_encrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length,
                       const AES_KEY *key1, const AES_KEY *key2,
                       const unsigned char iv[16]);

void aesni_xts_decrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length,
                       const AES_KEY *key1, const AES_KEY *key2,
                       const unsigned char iv[16]);

void aesni_ccm64_encrypt_blocks(const unsigned char *in,
                                unsigned char *out,
                                size_t blocks,
                                const void *key,
                                const unsigned char ivec[16],
                                unsigned char cmac[16]);

void aesni_ccm64_decrypt_blocks(const unsigned char *in,
                                unsigned char *out,
                                size_t blocks,
                                const void *key,
                                const unsigned char ivec[16],
                                unsigned char cmac[16]);

# if defined(__x86_64) || defined(__x86_64__) || defined(_M_AMD64) || defined(_M_X64)
size_t aesni_gcm_encrypt(const unsigned char *in,
                         unsigned char *out,
                         size_t len,
                         const void *key, unsigned char ivec[16], u64 *Xi);
#  define AES_gcm_encrypt aesni_gcm_encrypt
size_t aesni_gcm_decrypt(const unsigned char *in,
                         unsigned char *out,
                         size_t len,
                         const void *key, unsigned char ivec[16], u64 *Xi);
#  define AES_gcm_decrypt aesni_gcm_decrypt
void gcm_ghash_avx(u64 Xi[2], const u128 Htable[16], const u8 *in,
                   size_t len);
#  define AES_GCM_ASM(gctx)       (gctx->ctr==aesni_ctr32_encrypt_blocks && \
                                   gctx->gcm.ghash==gcm_ghash_avx)
#  define AES_GCM_ASM2(gctx)      (gctx->gcm.block==(block128_f)aesni_encrypt && \
                                   gctx->gcm.ghash==gcm_ghash_avx)
#  undef AES_GCM_ASM2          /* minor size optimization */
# endif

static int aesni_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                          const unsigned char *iv, int enc)
{
    int ret, mode;
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    mode = EVP_CIPHER_CTX_mode(ctx);
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        && !enc) {
        ret = aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                    &dat->ks.ks);
        dat->block = (block128_f) aesni_decrypt;
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
            (cbc128_f) aesni_cbc_encrypt : NULL;
    } else {
        ret = aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                    &dat->ks.ks);
        dat->block = (block128_f) aesni_encrypt;
        if (mode == EVP_CIPH_CBC_MODE)
            dat->stream.cbc = (cbc128_f) aesni_cbc_encrypt;
        else if (mode == EVP_CIPH_CTR_MODE)
            dat->stream.ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
        else
            dat->stream.cbc = NULL;
    }

    if (ret < 0) {
        EVPerr(EVP_F_AESNI_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
        return 0;
    }

    return 1;
}

static int aesni_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len)
{
    aesni_cbc_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
                      EVP_CIPHER_CTX_iv_noconst(ctx),
                      EVP_CIPHER_CTX_encrypting(ctx));

    return 1;
}

static int aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len)
{
    size_t bl = EVP_CIPHER_CTX_block_size(ctx);

    if (len < bl)
        return 1;

    aesni_ecb_encrypt(in, out, len, &EVP_C_DATA(EVP_AES_KEY,ctx)->ks.ks,
                      EVP_CIPHER_CTX_encrypting(ctx));

    return 1;
}

# define aesni_ofb_cipher aes_ofb_cipher
static int aesni_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

# define aesni_cfb_cipher aes_cfb_cipher
static int aesni_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

# define aesni_cfb8_cipher aes_cfb8_cipher
static int aesni_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aesni_cfb1_cipher aes_cfb1_cipher
static int aesni_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aesni_ctr_cipher aes_ctr_cipher
static int aesni_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                              &gctx->ks.ks);
        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f) aesni_encrypt);
        gctx->ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
        /*
         * If we have an IV, we can set it directly; otherwise use the
         * saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;
        if (iv) {
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        /* If the key is already set, use the IV; otherwise save a copy */
        if (gctx->key_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);
        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}

# define aesni_gcm_cipher aes_gcm_cipher
static int aesni_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

static int aesni_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);

    if (!iv && !key)
        return 1;

    if (key) {
        /* The key is two half-length keys in reality */
        const int bytes = EVP_CIPHER_CTX_key_length(ctx) / 2;

        /*
         * Verify that the two keys are different.
         *
         * This addresses Rogaway's vulnerability.
         * See comment in aes_xts_init_key() below.
         */
        if (enc && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
            EVPerr(EVP_F_AESNI_XTS_INIT_KEY, EVP_R_XTS_DUPLICATED_KEYS);
            return 0;
        }

        /* key_len is two AES keys */
        if (enc) {
            aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
                                  &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aesni_encrypt;
            xctx->stream = aesni_xts_encrypt;
        } else {
            aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
                                  &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aesni_decrypt;
            xctx->stream = aesni_xts_decrypt;
        }

        aesni_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
                              EVP_CIPHER_CTX_key_length(ctx) * 4,
                              &xctx->ks2.ks);
        xctx->xts.block2 = (block128_f) aesni_encrypt;

        xctx->xts.key1 = &xctx->ks1;
    }

    if (iv) {
        xctx->xts.key2 = &xctx->ks2;
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
    }

    return 1;
}

# define aesni_xts_cipher aes_xts_cipher
static int aesni_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

static int aesni_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                              &cctx->ks.ks);
        CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                           &cctx->ks, (block128_f) aesni_encrypt);
        cctx->str = enc ? (ccm128_f) aesni_ccm64_encrypt_blocks :
            (ccm128_f) aesni_ccm64_decrypt_blocks;
        cctx->key_set = 1;
    }
    if (iv) {
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
        cctx->iv_set = 1;
    }
    return 1;
}

# define aesni_ccm_cipher aes_ccm_cipher
static int aesni_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

# ifndef OPENSSL_NO_OCB
void aesni_ocb_encrypt(const unsigned char *in, unsigned char *out,
                       size_t blocks, const void *key,
                       size_t start_block_num,
                       unsigned char offset_i[16],
                       const unsigned char L_[][16],
                       unsigned char checksum[16]);
void aesni_ocb_decrypt(const unsigned char *in, unsigned char *out,
                       size_t blocks, const void *key,
                       size_t start_block_num,
                       unsigned char offset_i[16],
                       const unsigned char L_[][16],
                       unsigned char checksum[16]);

static int aesni_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        do {
            /*
             * We set both the encrypt and decrypt key here because decrypt
             * needs both. We could possibly optimise to remove setting the
             * decrypt for an encryption operation.
             */
            aesni_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                  &octx->ksenc.ks);
            aesni_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                  &octx->ksdec.ks);
            if (!CRYPTO_ocb128_init(&octx->ocb,
                                    &octx->ksenc.ks, &octx->ksdec.ks,
                                    (block128_f) aesni_encrypt,
                                    (block128_f) aesni_decrypt,
                                    enc ? aesni_ocb_encrypt
                                        : aesni_ocb_decrypt))
                return 0;
        }
        while (0);

        /*
         * If we have an IV, we can set it directly; otherwise use the
         * saved IV.
         */
        if (iv == NULL && octx->iv_set)
            iv = octx->iv;
        if (iv) {
            if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
                != 1)
                return 0;
            octx->iv_set = 1;
        }
        octx->key_set = 1;
    } else {
        /* If the key is already set, use the IV; otherwise save a copy */
        if (octx->key_set)
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
        else
            memcpy(octx->iv, iv, octx->ivlen);
        octx->iv_set = 1;
    }
    return 1;
}

# define aesni_ocb_cipher aes_ocb_cipher
static int aesni_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);
# endif /* OPENSSL_NO_OCB */

# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aesni_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aesni_init_key, \
        aesni_##mode##_cipher, \
        NULL, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize, \
        keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_init_key, \
        aes_##mode##_cipher, \
        NULL, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }

# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aesni_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aesni_##mode##_init_key, \
        aesni_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_##mode##_init_key, \
        aes_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }
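
/*
 * Illustrative note (not part of the original source): with an invocation
 * such as BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, gcm, GCM, <flags>), the
 * macro above would emit the two method tables aesni_128_gcm and
 * aes_128_gcm plus a selector
 *
 *     const EVP_CIPHER *EVP_aes_128_gcm(void)
 *     { return AESNI_CAPABLE ? &aesni_128_gcm : &aes_128_gcm; }
 *
 * so a caller doing EVP_EncryptInit_ex(ctx, EVP_aes_128_gcm(), ...) picks
 * up the AES-NI backend exactly when the CPUID-derived AESNI_CAPABLE bit
 * is set, with no per-call dispatch cost afterwards.
 */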

#elif defined(AES_ASM) && (defined(__sparc) || defined(__sparc__))

# include "sparc_arch.h"

extern unsigned int OPENSSL_sparcv9cap_P[];

/*
 * Initial Fujitsu SPARC64 X support
 */
# define HWAES_CAPABLE (OPENSSL_sparcv9cap_P[0] & SPARCV9_FJAESX)
# define HWAES_set_encrypt_key aes_fx_set_encrypt_key
# define HWAES_set_decrypt_key aes_fx_set_decrypt_key
# define HWAES_encrypt aes_fx_encrypt
# define HWAES_decrypt aes_fx_decrypt
# define HWAES_cbc_encrypt aes_fx_cbc_encrypt
# define HWAES_ctr32_encrypt_blocks aes_fx_ctr32_encrypt_blocks

# define SPARC_AES_CAPABLE (OPENSSL_sparcv9cap_P[1] & CFR_AES)

void aes_t4_set_encrypt_key(const unsigned char *key, int bits, AES_KEY *ks);
void aes_t4_set_decrypt_key(const unsigned char *key, int bits, AES_KEY *ks);
void aes_t4_encrypt(const unsigned char *in, unsigned char *out,
                    const AES_KEY *key);
void aes_t4_decrypt(const unsigned char *in, unsigned char *out,
                    const AES_KEY *key);
/*
 * Key-length specific subroutines were chosen for the following reason.
 * Each SPARC T4 core can execute up to 8 threads which share the core's
 * resources. Loading as much key material as possible into registers
 * minimizes references to the shared memory interface, as well as the
 * number of instructions in inner loops [much needed on T4]. But then
 * having non-key-length specific routines would require conditional
 * branches either in inner loops or on subroutine entry. The former is
 * hardly acceptable, while the latter would grow the code to roughly the
 * size occupied by multiple key-length specific subroutines, so why
 * fight?
 */
void aes128_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes128_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes192_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes192_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes256_t4_cbc_encrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes256_t4_cbc_decrypt(const unsigned char *in, unsigned char *out,
                           size_t len, const AES_KEY *key,
                           unsigned char *ivec);
void aes128_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                             size_t blocks, const AES_KEY *key,
                             unsigned char *ivec);
void aes192_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                             size_t blocks, const AES_KEY *key,
                             unsigned char *ivec);
void aes256_t4_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                             size_t blocks, const AES_KEY *key,
                             unsigned char *ivec);
void aes128_t4_xts_encrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);
void aes128_t4_xts_decrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);
void aes256_t4_xts_encrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);
void aes256_t4_xts_decrypt(const unsigned char *in, unsigned char *out,
                           size_t blocks, const AES_KEY *key1,
                           const AES_KEY *key2, const unsigned char *ivec);

static int aes_t4_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                           const unsigned char *iv, int enc)
{
    int ret, mode, bits;
    EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);

    mode = EVP_CIPHER_CTX_mode(ctx);
    bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        && !enc) {
        ret = 0;
        aes_t4_set_decrypt_key(key, bits, &dat->ks.ks);
        dat->block = (block128_f) aes_t4_decrypt;
        switch (bits) {
        case 128:
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes128_t4_cbc_decrypt : NULL;
            break;
        case 192:
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes192_t4_cbc_decrypt : NULL;
            break;
        case 256:
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) aes256_t4_cbc_decrypt : NULL;
            break;
        default:
            ret = -1;
        }
    } else {
        ret = 0;
        aes_t4_set_encrypt_key(key, bits, &dat->ks.ks);
        dat->block = (block128_f) aes_t4_encrypt;
        switch (bits) {
        case 128:
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes128_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
            else
                dat->stream.cbc = NULL;
            break;
        case 192:
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes192_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
            else
                dat->stream.cbc = NULL;
            break;
        case 256:
            if (mode == EVP_CIPH_CBC_MODE)
                dat->stream.cbc = (cbc128_f) aes256_t4_cbc_encrypt;
            else if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
            else
                dat->stream.cbc = NULL;
            break;
        default:
            ret = -1;
        }
    }

    if (ret < 0) {
        EVPerr(EVP_F_AES_T4_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
        return 0;
    }

    return 1;
}

# define aes_t4_cbc_cipher aes_cbc_cipher
static int aes_t4_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_ecb_cipher aes_ecb_cipher
static int aes_t4_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_ofb_cipher aes_ofb_cipher
static int aes_t4_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_cfb_cipher aes_cfb_cipher
static int aes_t4_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aes_t4_cfb8_cipher aes_cfb8_cipher
static int aes_t4_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len);

# define aes_t4_cfb1_cipher aes_cfb1_cipher
static int aes_t4_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len);

# define aes_t4_ctr_cipher aes_ctr_cipher
static int aes_t4_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

static int aes_t4_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        int bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
        aes_t4_set_encrypt_key(key, bits, &gctx->ks.ks);
        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                           (block128_f) aes_t4_encrypt);
        switch (bits) {
        case 128:
            gctx->ctr = (ctr128_f) aes128_t4_ctr32_encrypt;
            break;
        case 192:
            gctx->ctr = (ctr128_f) aes192_t4_ctr32_encrypt;
            break;
        case 256:
            gctx->ctr = (ctr128_f) aes256_t4_ctr32_encrypt;
            break;
        default:
            return 0;
        }
        /*
         * If we have an IV, we can set it directly; otherwise use the
         * saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;
        if (iv) {
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        /* If the key is already set, use the IV; otherwise save a copy */
        if (gctx->key_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);
        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}

# define aes_t4_gcm_cipher aes_gcm_cipher
static int aes_t4_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

static int aes_t4_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);

    if (!iv && !key)
        return 1;

    if (key) {
        /* The key is two half-length keys in reality */
        const int bytes = EVP_CIPHER_CTX_key_length(ctx) / 2;
        const int bits = bytes * 8;

        /*
         * Verify that the two keys are different.
         *
         * This addresses Rogaway's vulnerability.
         * See comment in aes_xts_init_key() below.
         */
        if (enc && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
            EVPerr(EVP_F_AES_T4_XTS_INIT_KEY, EVP_R_XTS_DUPLICATED_KEYS);
            return 0;
        }

        xctx->stream = NULL;
        /* key_len is two AES keys */
        if (enc) {
            aes_t4_set_encrypt_key(key, bits, &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aes_t4_encrypt;
            switch (bits) {
            case 128:
                xctx->stream = aes128_t4_xts_encrypt;
                break;
            case 256:
                xctx->stream = aes256_t4_xts_encrypt;
                break;
            default:
                return 0;
            }
        } else {
            aes_t4_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
                                   &xctx->ks1.ks);
            xctx->xts.block1 = (block128_f) aes_t4_decrypt;
            switch (bits) {
            case 128:
                xctx->stream = aes128_t4_xts_decrypt;
                break;
            case 256:
                xctx->stream = aes256_t4_xts_decrypt;
                break;
            default:
                return 0;
            }
        }

        aes_t4_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
                               EVP_CIPHER_CTX_key_length(ctx) * 4,
                               &xctx->ks2.ks);
        xctx->xts.block2 = (block128_f) aes_t4_encrypt;

        xctx->xts.key1 = &xctx->ks1;
    }

    if (iv) {
        xctx->xts.key2 = &xctx->ks2;
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
    }

    return 1;
}

# define aes_t4_xts_cipher aes_xts_cipher
static int aes_t4_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

static int aes_t4_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        int bits = EVP_CIPHER_CTX_key_length(ctx) * 8;
        aes_t4_set_encrypt_key(key, bits, &cctx->ks.ks);
        CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                           &cctx->ks, (block128_f) aes_t4_encrypt);
        cctx->str = NULL;
        cctx->key_set = 1;
    }
    if (iv) {
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
        cctx->iv_set = 1;
    }
    return 1;
}

# define aes_t4_ccm_cipher aes_ccm_cipher
static int aes_t4_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# ifndef OPENSSL_NO_OCB
static int aes_t4_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        do {
            /*
             * We set both the encrypt and decrypt key here because decrypt
             * needs both. We could possibly optimise to remove setting the
             * decrypt for an encryption operation.
             */
            aes_t4_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                   &octx->ksenc.ks);
            aes_t4_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                   &octx->ksdec.ks);
            if (!CRYPTO_ocb128_init(&octx->ocb,
                                    &octx->ksenc.ks, &octx->ksdec.ks,
                                    (block128_f) aes_t4_encrypt,
                                    (block128_f) aes_t4_decrypt,
                                    NULL))
                return 0;
        }
        while (0);

        /*
         * If we have an IV, we can set it directly; otherwise use the
         * saved IV.
         */
        if (iv == NULL && octx->iv_set)
            iv = octx->iv;
        if (iv) {
            if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
                != 1)
                return 0;
            octx->iv_set = 1;
        }
        octx->key_set = 1;
    } else {
        /* If the key is already set, use the IV; otherwise save a copy */
        if (octx->key_set)
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
        else
            memcpy(octx->iv, iv, octx->ivlen);
        octx->iv_set = 1;
    }
    return 1;
}

# define aes_t4_ocb_cipher aes_ocb_cipher
static int aes_t4_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);
# endif /* OPENSSL_NO_OCB */

# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_t4_init_key, \
        aes_t4_##mode##_cipher, \
        NULL, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize, \
        keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_init_key, \
        aes_##mode##_cipher, \
        NULL, \
        sizeof(EVP_AES_KEY), \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }

# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aes_t4_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_t4_##mode##_init_key, \
        aes_t4_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE, \
        aes_##mode##_init_key, \
        aes_##mode##_cipher, \
        aes_##mode##_cleanup, \
        sizeof(EVP_AES_##MODE##_CTX), \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return SPARC_AES_CAPABLE?&aes_t4_##keylen##_##mode:&aes_##keylen##_##mode; }

#elif defined(OPENSSL_CPUID_OBJ) && defined(__s390__)
/*
 * IBM S390X support
 */
# include "s390x_arch.h"

typedef struct {
    union {
        double align;
        /*-
         * KM-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-06)
         */
        struct {
            unsigned char k[32];
        } param;
        /* KM-AES parameter block - end */
    } km;
    unsigned int fc;
} S390X_AES_ECB_CTX;

typedef struct {
    union {
        double align;
        /*-
         * KMO-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-08)
         */
        struct {
            unsigned char cv[16];
            unsigned char k[32];
        } param;
        /* KMO-AES parameter block - end */
    } kmo;
    unsigned int fc;

    int res;
} S390X_AES_OFB_CTX;

typedef struct {
    union {
        double align;
        /*-
         * KMF-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-08)
         */
        struct {
            unsigned char cv[16];
            unsigned char k[32];
        } param;
        /* KMF-AES parameter block - end */
    } kmf;
    unsigned int fc;

    int res;
} S390X_AES_CFB_CTX;

typedef struct {
    union {
        double align;
        /*-
         * KMA-GCM-AES parameter block - begin
         * (see z/Architecture Principles of Operation >= SA22-7832-11)
         */
        struct {
            unsigned char reserved[12];
            union {
                unsigned int w;
                unsigned char b[4];
            } cv;
            union {
                unsigned long long g[2];
                unsigned char b[16];
            } t;
            unsigned char h[16];
            unsigned long long taadl;
            unsigned long long tpcl;
            union {
                unsigned long long g[2];
                unsigned int w[4];
            } j0;
            unsigned char k[32];
        } param;
        /* KMA-GCM-AES parameter block - end */
    } kma;
    unsigned int fc;
    int key_set;

    unsigned char *iv;
    int ivlen;
    int iv_set;
    int iv_gen;

    int taglen;

    unsigned char ares[16];
    unsigned char mres[16];
    unsigned char kres[16];
    int areslen;
    int mreslen;
    int kreslen;

    int tls_aad_len;
} S390X_AES_GCM_CTX;

typedef struct {
    union {
        double align;
        /*-
         * Padding is chosen so that ccm.kmac_param.k overlaps with key.k and
         * ccm.fc with key.k.rounds. Remember that on s390x, an AES_KEY's
         * rounds field is used to store the function code and that the key
         * schedule is not stored (if aes hardware support is detected).
         */
        struct {
            unsigned char pad[16];
            AES_KEY k;
        } key;

        struct {
            /*-
             * KMAC-AES parameter block - begin
             * (see z/Architecture Principles of Operation >= SA22-7832-08)
             */
            struct {
                union {
                    unsigned long long g[2];
                    unsigned char b[16];
                } icv;
                unsigned char k[32];
            } kmac_param;
            /* KMAC-AES parameter block - end */

            union {
                unsigned long long g[2];
                unsigned char b[16];
            } nonce;
            union {
                unsigned long long g[2];
                unsigned char b[16];
            } buf;

            unsigned long long blocks;
            int l;
            int m;
            int tls_aad_len;
            int iv_set;
            int tag_set;
            int len_set;
            int key_set;

            unsigned char pad[140];
            unsigned int fc;
        } ccm;
    } aes;
} S390X_AES_CCM_CTX;

/* Convert key size to function code: [16,24,32] -> [18,19,20]. */
# define S390X_AES_FC(keylen) (S390X_AES_128 + ((((keylen) << 3) - 128) >> 6))
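
/*
 * Worked example (illustrative, not from the original source): for a
 * 24-byte key, (((24 << 3) - 128) >> 6) == ((192 - 128) >> 6) == 1, so
 * S390X_AES_FC(24) == S390X_AES_128 + 1 == S390X_AES_192, matching the
 * [16,24,32] -> [18,19,20] mapping noted above.
 */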

/* Most modes of operation need km for partial block processing. */
# define S390X_aes_128_CAPABLE (OPENSSL_s390xcap_P.km[0] & \
                                S390X_CAPBIT(S390X_AES_128))
# define S390X_aes_192_CAPABLE (OPENSSL_s390xcap_P.km[0] & \
                                S390X_CAPBIT(S390X_AES_192))
# define S390X_aes_256_CAPABLE (OPENSSL_s390xcap_P.km[0] & \
                                S390X_CAPBIT(S390X_AES_256))

# define s390x_aes_init_key aes_init_key
static int s390x_aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc);

# define S390X_aes_128_cbc_CAPABLE 1 /* checked by callee */
# define S390X_aes_192_cbc_CAPABLE 1
# define S390X_aes_256_cbc_CAPABLE 1
# define S390X_AES_CBC_CTX EVP_AES_KEY

# define s390x_aes_cbc_init_key aes_init_key

# define s390x_aes_cbc_cipher aes_cbc_cipher
static int s390x_aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);

# define S390X_aes_128_ecb_CAPABLE S390X_aes_128_CAPABLE
# define S390X_aes_192_ecb_CAPABLE S390X_aes_192_CAPABLE
# define S390X_aes_256_ecb_CAPABLE S390X_aes_256_CAPABLE

static int s390x_aes_ecb_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc)
{
    S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);

    cctx->fc = S390X_AES_FC(keylen);
    if (!enc)
        cctx->fc |= S390X_DECRYPT;

    memcpy(cctx->km.param.k, key, keylen);
    return 1;
}

static int s390x_aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_ECB_CTX *cctx = EVP_C_DATA(S390X_AES_ECB_CTX, ctx);

    s390x_km(in, len, out, cctx->fc, &cctx->km.param);
    return 1;
}

# define S390X_aes_128_ofb_CAPABLE (S390X_aes_128_CAPABLE && \
                                    (OPENSSL_s390xcap_P.kmo[0] & \
                                     S390X_CAPBIT(S390X_AES_128)))
# define S390X_aes_192_ofb_CAPABLE (S390X_aes_192_CAPABLE && \
                                    (OPENSSL_s390xcap_P.kmo[0] & \
                                     S390X_CAPBIT(S390X_AES_192)))
# define S390X_aes_256_ofb_CAPABLE (S390X_aes_256_CAPABLE && \
                                    (OPENSSL_s390xcap_P.kmo[0] & \
                                     S390X_CAPBIT(S390X_AES_256)))

static int s390x_aes_ofb_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *ivec, int enc)
{
    S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);
    const unsigned char *iv = EVP_CIPHER_CTX_original_iv(ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);
    const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);

    memcpy(cctx->kmo.param.cv, iv, ivlen);
    memcpy(cctx->kmo.param.k, key, keylen);
    cctx->fc = S390X_AES_FC(keylen);
    cctx->res = 0;
    return 1;
}

static int s390x_aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_OFB_CTX *cctx = EVP_C_DATA(S390X_AES_OFB_CTX, ctx);
    int n = cctx->res;
    int rem;

    while (n && len) {
        *out = *in ^ cctx->kmo.param.cv[n];
        n = (n + 1) & 0xf;
        --len;
        ++in;
        ++out;
    }

    rem = len & 0xf;

    len &= ~(size_t)0xf;
    if (len) {
        s390x_kmo(in, len, out, cctx->fc, &cctx->kmo.param);

        out += len;
        in += len;
    }

    if (rem) {
        s390x_km(cctx->kmo.param.cv, 16, cctx->kmo.param.cv, cctx->fc,
                 cctx->kmo.param.k);

        while (rem--) {
            out[n] = in[n] ^ cctx->kmo.param.cv[n];
            ++n;
        }
    }

    cctx->res = n;
    return 1;
}
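
/*
 * Illustrative note (not part of the original source): cctx->res is the
 * byte offset into the current 16-byte keystream block held in
 * kmo.param.cv. A call that ends mid-block (rem != 0) first runs KM once
 * to turn cv into the next keystream block and XORs the trailing bytes by
 * hand; the next call then consumes the leftover keystream bytes in the
 * leading while loop before handing whole blocks back to KMO.
 */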

# define S390X_aes_128_cfb_CAPABLE (S390X_aes_128_CAPABLE && \
                                    (OPENSSL_s390xcap_P.kmf[0] & \
                                     S390X_CAPBIT(S390X_AES_128)))
# define S390X_aes_192_cfb_CAPABLE (S390X_aes_192_CAPABLE && \
                                    (OPENSSL_s390xcap_P.kmf[0] & \
                                     S390X_CAPBIT(S390X_AES_192)))
# define S390X_aes_256_cfb_CAPABLE (S390X_aes_256_CAPABLE && \
                                    (OPENSSL_s390xcap_P.kmf[0] & \
                                     S390X_CAPBIT(S390X_AES_256)))

static int s390x_aes_cfb_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *ivec, int enc)
{
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
    const unsigned char *iv = EVP_CIPHER_CTX_original_iv(ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);
    const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);

    cctx->fc = S390X_AES_FC(keylen);
    cctx->fc |= 16 << 24; /* 16 bytes cipher feedback */
    if (!enc)
        cctx->fc |= S390X_DECRYPT;

    cctx->res = 0;
    memcpy(cctx->kmf.param.cv, iv, ivlen);
    memcpy(cctx->kmf.param.k, key, keylen);
    return 1;
}

static int s390x_aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len)
{
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);
    const int enc = EVP_CIPHER_CTX_encrypting(ctx);
    int n = cctx->res;
    int rem;
    unsigned char tmp;

    while (n && len) {
        tmp = *in;
        *out = cctx->kmf.param.cv[n] ^ tmp;
        cctx->kmf.param.cv[n] = enc ? *out : tmp;
        n = (n + 1) & 0xf;
        --len;
        ++in;
        ++out;
    }

    rem = len & 0xf;

    len &= ~(size_t)0xf;
    if (len) {
        s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param);

        out += len;
        in += len;
    }

    if (rem) {
        s390x_km(cctx->kmf.param.cv, 16, cctx->kmf.param.cv,
                 S390X_AES_FC(keylen), cctx->kmf.param.k);

        while (rem--) {
            tmp = in[n];
            out[n] = cctx->kmf.param.cv[n] ^ tmp;
            cctx->kmf.param.cv[n] = enc ? out[n] : tmp;
            ++n;
        }
    }

    cctx->res = n;
    return 1;
}

# define S390X_aes_128_cfb8_CAPABLE (OPENSSL_s390xcap_P.kmf[0] & \
                                     S390X_CAPBIT(S390X_AES_128))
# define S390X_aes_192_cfb8_CAPABLE (OPENSSL_s390xcap_P.kmf[0] & \
                                     S390X_CAPBIT(S390X_AES_192))
# define S390X_aes_256_cfb8_CAPABLE (OPENSSL_s390xcap_P.kmf[0] & \
                                     S390X_CAPBIT(S390X_AES_256))

static int s390x_aes_cfb8_init_key(EVP_CIPHER_CTX *ctx,
                                   const unsigned char *key,
                                   const unsigned char *ivec, int enc)
{
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);
    const unsigned char *iv = EVP_CIPHER_CTX_original_iv(ctx);
    const int keylen = EVP_CIPHER_CTX_key_length(ctx);
    const int ivlen = EVP_CIPHER_CTX_iv_length(ctx);

    cctx->fc = S390X_AES_FC(keylen);
    cctx->fc |= 1 << 24; /* 1 byte cipher feedback */
    if (!enc)
        cctx->fc |= S390X_DECRYPT;

    memcpy(cctx->kmf.param.cv, iv, ivlen);
    memcpy(cctx->kmf.param.k, key, keylen);
    return 1;
}

static int s390x_aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                 const unsigned char *in, size_t len)
{
    S390X_AES_CFB_CTX *cctx = EVP_C_DATA(S390X_AES_CFB_CTX, ctx);

    s390x_kmf(in, len, out, cctx->fc, &cctx->kmf.param);
    return 1;
}

# define S390X_aes_128_cfb1_CAPABLE 0
# define S390X_aes_192_cfb1_CAPABLE 0
# define S390X_aes_256_cfb1_CAPABLE 0

# define s390x_aes_cfb1_init_key aes_init_key

# define s390x_aes_cfb1_cipher aes_cfb1_cipher
static int s390x_aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                 const unsigned char *in, size_t len);

# define S390X_aes_128_ctr_CAPABLE 1 /* checked by callee */
# define S390X_aes_192_ctr_CAPABLE 1
# define S390X_aes_256_ctr_CAPABLE 1
# define S390X_AES_CTR_CTX EVP_AES_KEY

# define s390x_aes_ctr_init_key aes_init_key

# define s390x_aes_ctr_cipher aes_ctr_cipher
static int s390x_aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                const unsigned char *in, size_t len);

# define S390X_aes_128_gcm_CAPABLE (S390X_aes_128_CAPABLE && \
                                    (OPENSSL_s390xcap_P.kma[0] & \
                                     S390X_CAPBIT(S390X_AES_128)))
# define S390X_aes_192_gcm_CAPABLE (S390X_aes_192_CAPABLE && \
                                    (OPENSSL_s390xcap_P.kma[0] & \
                                     S390X_CAPBIT(S390X_AES_192)))
# define S390X_aes_256_gcm_CAPABLE (S390X_aes_256_CAPABLE && \
                                    (OPENSSL_s390xcap_P.kma[0] & \
                                     S390X_CAPBIT(S390X_AES_256)))

/* iv + padding length for iv lengths != 12 */
# define S390X_gcm_ivpadlen(i) ((((i) + 15) >> 4 << 4) + 16)
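
/*
 * Worked example (illustrative, not from the original source): for a
 * 13-byte IV, (((13 + 15) >> 4) << 4) + 16 == 16 + 16 == 32, i.e. the IV
 * rounded up to a whole 16-byte block plus one extra block carrying the
 * 64-bit bit-length of the IV (see EVP_CTRL_AEAD_SET_IVLEN below, which
 * zero-pads the gap and stores arg << 3 in the final 8 bytes).
 */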

/*-
 * Process additional authenticated data. Returns 0 on success. Code is
 * big-endian.
 */
static int s390x_aes_gcm_aad(S390X_AES_GCM_CTX *ctx, const unsigned char *aad,
                             size_t len)
{
    unsigned long long alen;
    int n, rem;

    if (ctx->kma.param.tpcl)
        return -2;

    alen = ctx->kma.param.taadl + len;
    if (alen > (U64(1) << 61) || (sizeof(len) == 8 && alen < len))
        return -1;
    ctx->kma.param.taadl = alen;

    n = ctx->areslen;
    if (n) {
        while (n && len) {
            ctx->ares[n] = *aad;
            n = (n + 1) & 0xf;
            ++aad;
            --len;
        }
        /* ctx->ares contains a complete block if offset has wrapped around */
        if (!n) {
            s390x_kma(ctx->ares, 16, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
            ctx->fc |= S390X_KMA_HS;
        }
        ctx->areslen = n;
    }

    rem = len & 0xf;

    len &= ~(size_t)0xf;
    if (len) {
        s390x_kma(aad, len, NULL, 0, NULL, ctx->fc, &ctx->kma.param);
        aad += len;
        ctx->fc |= S390X_KMA_HS;
    }

    if (rem) {
        ctx->areslen = rem;

        do {
            --rem;
            ctx->ares[rem] = aad[rem];
        } while (rem);
    }
    return 0;
}
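
/*
 * Illustrative note (not part of the original source): the bound checked
 * above corresponds to GCM's limit on additional authenticated data of
 * 2^64 - 1 bits, i.e. roughly 2^61 bytes; the second clause catches
 * wrap-around of the running 64-bit byte count on 64-bit platforms.
 */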

/*-
 * En/de-crypt plain/cipher-text and authenticate ciphertext. Returns 0 for
 * success. Code is big-endian.
 */
static int s390x_aes_gcm(S390X_AES_GCM_CTX *ctx, const unsigned char *in,
                         unsigned char *out, size_t len)
{
    const unsigned char *inptr;
    unsigned long long mlen;
    union {
        unsigned int w[4];
        unsigned char b[16];
    } buf;
    size_t inlen;
    int n, rem, i;

    mlen = ctx->kma.param.tpcl + len;
    if (mlen > ((U64(1) << 36) - 32) || (sizeof(len) == 8 && mlen < len))
        return -1;
    ctx->kma.param.tpcl = mlen;

    n = ctx->mreslen;
    if (n) {
        inptr = in;
        inlen = len;
        while (n && inlen) {
            ctx->mres[n] = *inptr;
            n = (n + 1) & 0xf;
            ++inptr;
            --inlen;
        }
        /* ctx->mres contains a complete block if offset has wrapped around */
        if (!n) {
            s390x_kma(ctx->ares, ctx->areslen, ctx->mres, 16, buf.b,
                      ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
            ctx->fc |= S390X_KMA_HS;
            ctx->areslen = 0;

            /* previous call already encrypted/decrypted its remainder,
             * see comment below */
            n = ctx->mreslen;
            while (n) {
                *out = buf.b[n];
                n = (n + 1) & 0xf;
                ++out;
                ++in;
                --len;
            }
            ctx->mreslen = 0;
        }
    }

    rem = len & 0xf;

    len &= ~(size_t)0xf;
    if (len) {
        s390x_kma(ctx->ares, ctx->areslen, in, len, out,
                  ctx->fc | S390X_KMA_LAAD, &ctx->kma.param);
        in += len;
        out += len;
        ctx->fc |= S390X_KMA_HS;
        ctx->areslen = 0;
    }

    /*-
     * If there is a remainder, it has to be saved such that it can be
     * processed by kma later. However, we also have to do the for-now
     * unauthenticated encryption/decryption part here and now...
     */
    if (rem) {
        if (!ctx->mreslen) {
            buf.w[0] = ctx->kma.param.j0.w[0];
            buf.w[1] = ctx->kma.param.j0.w[1];
            buf.w[2] = ctx->kma.param.j0.w[2];
            buf.w[3] = ctx->kma.param.cv.w + 1;
            s390x_km(buf.b, 16, ctx->kres, ctx->fc & 0x1f, &ctx->kma.param.k);
        }

        n = ctx->mreslen;
        for (i = 0; i < rem; i++) {
            ctx->mres[n + i] = in[i];
            out[i] = in[i] ^ ctx->kres[n + i];
        }

        ctx->mreslen += rem;
    }
    return 0;
}

/*-
 * Initialize context structure. Code is big-endian.
 */
static void s390x_aes_gcm_setiv(S390X_AES_GCM_CTX *ctx,
                                const unsigned char *iv)
{
    ctx->kma.param.t.g[0] = 0;
    ctx->kma.param.t.g[1] = 0;
    ctx->kma.param.tpcl = 0;
    ctx->kma.param.taadl = 0;
    ctx->mreslen = 0;
    ctx->areslen = 0;
    ctx->kreslen = 0;

    if (ctx->ivlen == 12) {
        memcpy(&ctx->kma.param.j0, iv, ctx->ivlen);
        ctx->kma.param.j0.w[3] = 1;
        ctx->kma.param.cv.w = 1;
    } else {
        /* ctx->iv has the right size and is already padded. */
        memcpy(ctx->iv, iv, ctx->ivlen);
        s390x_kma(ctx->iv, S390X_gcm_ivpadlen(ctx->ivlen), NULL, 0, NULL,
                  ctx->fc, &ctx->kma.param);
        ctx->fc |= S390X_KMA_HS;

        ctx->kma.param.j0.g[0] = ctx->kma.param.t.g[0];
        ctx->kma.param.j0.g[1] = ctx->kma.param.t.g[1];
        ctx->kma.param.cv.w = ctx->kma.param.j0.w[3];
        ctx->kma.param.t.g[0] = 0;
        ctx->kma.param.t.g[1] = 0;
    }
}
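
/*
 * Illustrative note (not part of the original source): this mirrors the
 * two J0 cases of the GCM specification. For a 96-bit IV, J0 is simply
 * IV || 0^31 || 1, which the j0.w[3] = 1 assignment constructs directly;
 * for any other length, J0 = GHASH(zero-padded IV plus a length block),
 * which the KMA pass over the padded ctx->iv accumulates into the tag
 * field before it is copied into j0 and the tag is reset to zero.
 */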

/*-
 * Performs various operations on the context structure depending on control
 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
 * Code is big-endian.
 */
static int s390x_aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
    S390X_AES_GCM_CTX *gctx_out;
    EVP_CIPHER_CTX *out;
    unsigned char *buf, *iv;
    int ivlen, enc, len;

    switch (type) {
    case EVP_CTRL_INIT:
        ivlen = EVP_CIPHER_iv_length(c->cipher);
        iv = EVP_CIPHER_CTX_iv_noconst(c);
        gctx->key_set = 0;
        gctx->iv_set = 0;
        gctx->ivlen = ivlen;
        gctx->iv = iv;
        gctx->taglen = -1;
        gctx->iv_gen = 0;
        gctx->tls_aad_len = -1;
        return 1;

    case EVP_CTRL_GET_IVLEN:
        *(int *)ptr = gctx->ivlen;
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        if (arg <= 0)
            return 0;

        if (arg != 12) {
            iv = EVP_CIPHER_CTX_iv_noconst(c);
            len = S390X_gcm_ivpadlen(arg);

            /* Allocate memory for iv if needed. */
            if (gctx->ivlen == 12 || len > S390X_gcm_ivpadlen(gctx->ivlen)) {
                if (gctx->iv != iv)
                    OPENSSL_free(gctx->iv);

                if ((gctx->iv = OPENSSL_malloc(len)) == NULL) {
                    EVPerr(EVP_F_S390X_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
                    return 0;
                }
            }
            /* Add padding. */
            memset(gctx->iv + arg, 0, len - arg - 8);
            *((unsigned long long *)(gctx->iv + len - 8)) = arg << 3;
        }
        gctx->ivlen = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        buf = EVP_CIPHER_CTX_buf_noconst(c);
        enc = EVP_CIPHER_CTX_encrypting(c);
        if (arg <= 0 || arg > 16 || enc)
            return 0;

        memcpy(buf, ptr, arg);
        gctx->taglen = arg;
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        enc = EVP_CIPHER_CTX_encrypting(c);
        if (arg <= 0 || arg > 16 || !enc || gctx->taglen < 0)
            return 0;

        memcpy(ptr, gctx->kma.param.t.b, arg);
        return 1;

    case EVP_CTRL_GCM_SET_IV_FIXED:
        /* Special case: -1 length restores whole iv */
        if (arg == -1) {
            memcpy(gctx->iv, ptr, gctx->ivlen);
            gctx->iv_gen = 1;
            return 1;
        }
        /*
         * Fixed field must be at least 4 bytes and invocation field at least
         * 8.
         */
        if ((arg < 4) || (gctx->ivlen - arg) < 8)
            return 0;

        if (arg)
            memcpy(gctx->iv, ptr, arg);

        enc = EVP_CIPHER_CTX_encrypting(c);
        if (enc && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
            return 0;

        gctx->iv_gen = 1;
        return 1;

    case EVP_CTRL_GCM_IV_GEN:
        if (gctx->iv_gen == 0 || gctx->key_set == 0)
            return 0;

        s390x_aes_gcm_setiv(gctx, gctx->iv);

        if (arg <= 0 || arg > gctx->ivlen)
            arg = gctx->ivlen;

        memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
        /*
         * Invocation field will be at least 8 bytes in size and so no need
         * to check wrap around or increment more than last 8 bytes.
         */
        ctr64_inc(gctx->iv + gctx->ivlen - 8);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_GCM_SET_IV_INV:
        enc = EVP_CIPHER_CTX_encrypting(c);
        if (gctx->iv_gen == 0 || gctx->key_set == 0 || enc)
            return 0;

        memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
        s390x_aes_gcm_setiv(gctx, gctx->iv);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the aad for later use. */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;

        buf = EVP_CIPHER_CTX_buf_noconst(c);
        memcpy(buf, ptr, arg);
        gctx->tls_aad_len = arg;

        len = buf[arg - 2] << 8 | buf[arg - 1];
        /* Correct length for explicit iv. */
        if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
            return 0;
        len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;

        /* If decrypting correct for tag too. */
        enc = EVP_CIPHER_CTX_encrypting(c);
        if (!enc) {
            if (len < EVP_GCM_TLS_TAG_LEN)
                return 0;
            len -= EVP_GCM_TLS_TAG_LEN;
        }
        buf[arg - 2] = len >> 8;
        buf[arg - 1] = len & 0xff;
        /* Extra padding: tag appended to record. */
        return EVP_GCM_TLS_TAG_LEN;

    case EVP_CTRL_COPY:
        out = ptr;
        gctx_out = EVP_C_DATA(S390X_AES_GCM_CTX, out);
        iv = EVP_CIPHER_CTX_iv_noconst(c);

        if (gctx->iv == iv) {
            gctx_out->iv = EVP_CIPHER_CTX_iv_noconst(out);
        } else {
            len = S390X_gcm_ivpadlen(gctx->ivlen);

            if ((gctx_out->iv = OPENSSL_malloc(len)) == NULL) {
                EVPerr(EVP_F_S390X_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
                return 0;
            }

            memcpy(gctx_out->iv, gctx->iv, len);
        }
        return 1;

    default:
        return -1;
    }
}

/*-
 * Set key and/or iv. Returns 1 on success. Otherwise 0 is returned.
 */
static int s390x_aes_gcm_init_key(EVP_CIPHER_CTX *ctx,
                                  const unsigned char *key,
                                  const unsigned char *iv, int enc)
{
    S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
    int keylen;

    if (iv == NULL && key == NULL)
        return 1;

    if (key != NULL) {
        keylen = EVP_CIPHER_CTX_key_length(ctx);
        memcpy(&gctx->kma.param.k, key, keylen);

        gctx->fc = S390X_AES_FC(keylen);
        if (!enc)
            gctx->fc |= S390X_DECRYPT;

        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;

        if (iv != NULL) {
            s390x_aes_gcm_setiv(gctx, iv);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        if (gctx->key_set)
            s390x_aes_gcm_setiv(gctx, iv);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);

        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}
1812
1813 /*-
1814 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
1815 * if successful. Otherwise -1 is returned. Code is big-endian.
1816 */
1817 static int s390x_aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1818 const unsigned char *in, size_t len)
1819 {
1820 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
1821 const unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
1822 const int enc = EVP_CIPHER_CTX_encrypting(ctx);
1823 int rv = -1;
1824
1825 if (out != in || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
1826 return -1;
1827
1828 if (EVP_CIPHER_CTX_ctrl(ctx, enc ? EVP_CTRL_GCM_IV_GEN
1829 : EVP_CTRL_GCM_SET_IV_INV,
1830 EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
1831 goto err;
1832
1833 in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
1834 out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
1835 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
1836
1837 gctx->kma.param.taadl = gctx->tls_aad_len << 3;
1838 gctx->kma.param.tpcl = len << 3;
1839 s390x_kma(buf, gctx->tls_aad_len, in, len, out,
1840 gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);
1841
1842 if (enc) {
1843 memcpy(out + len, gctx->kma.param.t.b, EVP_GCM_TLS_TAG_LEN);
1844 rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
1845 } else {
1846 if (CRYPTO_memcmp(gctx->kma.param.t.b, in + len,
1847 EVP_GCM_TLS_TAG_LEN)) {
1848 OPENSSL_cleanse(out, len);
1849 goto err;
1850 }
1851 rv = len;
1852 }
1853 err:
1854 gctx->iv_set = 0;
1855 gctx->tls_aad_len = -1;
1856 return rv;
1857 }
1858
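/*-
 * Illustrative sketch (added; not part of the original source): how the
 * record layer is expected to drive the TLS path above.  Handing over the
 * 13-byte TLS AAD via EVP_CTRL_AEAD_TLS1_AAD sets tls_aad_len >= 0, so the
 * following EVP_Cipher() call on the whole record is routed into
 * s390x_aes_gcm_tls_cipher().  The function name is hypothetical and error
 * handling is minimal.
 */
# if 0                          /* example only, not compiled */
static int example_gcm_tls_seal(EVP_CIPHER_CTX *ctx, unsigned char *rec,
                                unsigned int reclen,
                                unsigned char aad[EVP_AEAD_TLS1_AAD_LEN])
{
    /* The ctrl returns the tag length that will be appended to the record. */
    if (EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_TLS1_AAD,
                            EVP_AEAD_TLS1_AAD_LEN, aad) <= 0)
        return -1;

    /* In place: explicit IV || payload || tag; returns bytes written. */
    return EVP_Cipher(ctx, rec, rec, reclen);
}
# endif
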
1859 /*-
1860 * Called from EVP layer to initialize context, process additional
1861 * authenticated data, en/de-crypt plain/cipher-text and authenticate
1862 * ciphertext or process a TLS packet, depending on context. Returns bytes
1863 * written on success. Otherwise -1 is returned. Code is big-endian.
1864 */
1865 static int s390x_aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1866 const unsigned char *in, size_t len)
1867 {
1868 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, ctx);
1869 unsigned char *buf, tmp[16];
1870 int enc;
1871
1872 if (!gctx->key_set)
1873 return -1;
1874
1875 if (gctx->tls_aad_len >= 0)
1876 return s390x_aes_gcm_tls_cipher(ctx, out, in, len);
1877
1878 if (!gctx->iv_set)
1879 return -1;
1880
1881 if (in != NULL) {
1882 if (out == NULL) {
1883 if (s390x_aes_gcm_aad(gctx, in, len))
1884 return -1;
1885 } else {
1886 if (s390x_aes_gcm(gctx, in, out, len))
1887 return -1;
1888 }
1889 return len;
1890 } else {
1891 gctx->kma.param.taadl <<= 3;
1892 gctx->kma.param.tpcl <<= 3;
1893 s390x_kma(gctx->ares, gctx->areslen, gctx->mres, gctx->mreslen, tmp,
1894 gctx->fc | S390X_KMA_LAAD | S390X_KMA_LPC, &gctx->kma.param);
1895 /* Recall that we already en-/decrypted gctx->mres
1896 * and returned it to the caller. */
1897 OPENSSL_cleanse(tmp, gctx->mreslen);
1898 gctx->iv_set = 0;
1899
1900 enc = EVP_CIPHER_CTX_encrypting(ctx);
1901 if (enc) {
1902 gctx->taglen = 16;
1903 } else {
1904 if (gctx->taglen < 0)
1905 return -1;
1906
1907 buf = EVP_CIPHER_CTX_buf_noconst(ctx);
1908 if (CRYPTO_memcmp(buf, gctx->kma.param.t.b, gctx->taglen))
1909 return -1;
1910 }
1911 return 0;
1912 }
1913 }
1914
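/*-
 * Illustrative sketch (added; not upstream code) of the calling convention
 * described above: an Update() call with out == NULL feeds AAD, a normal
 * call en-/decrypts data, and Final() (in == NULL here) computes the tag.
 * Names are hypothetical; error checks are omitted for brevity.
 */
# if 0                          /* example only, not compiled */
static void example_gcm_update_convention(EVP_CIPHER_CTX *ctx,
                                          const unsigned char *aad, int aadlen,
                                          const unsigned char *pt, int ptlen,
                                          unsigned char *ct)
{
    int outl;

    EVP_EncryptUpdate(ctx, NULL, &outl, aad, aadlen); /* AAD only */
    EVP_EncryptUpdate(ctx, ct, &outl, pt, ptlen);     /* encrypt payload */
    EVP_EncryptFinal_ex(ctx, ct + outl, &outl);       /* compute the tag */
}
# endif
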
1915 static int s390x_aes_gcm_cleanup(EVP_CIPHER_CTX *c)
1916 {
1917 S390X_AES_GCM_CTX *gctx = EVP_C_DATA(S390X_AES_GCM_CTX, c);
1918 const unsigned char *iv;
1919
1920 if (gctx == NULL)
1921 return 0;
1922
1923 iv = EVP_CIPHER_CTX_iv(c);
1924 if (iv != gctx->iv)
1925 OPENSSL_free(gctx->iv);
1926
1927 OPENSSL_cleanse(gctx, sizeof(*gctx));
1928 return 1;
1929 }
1930
1931 # define S390X_AES_XTS_CTX EVP_AES_XTS_CTX
1932 # define S390X_aes_128_xts_CAPABLE 1 /* checked by callee */
1933 # define S390X_aes_256_xts_CAPABLE 1
1934
1935 # define s390x_aes_xts_init_key aes_xts_init_key
1936 static int s390x_aes_xts_init_key(EVP_CIPHER_CTX *ctx,
1937 const unsigned char *key,
1938 const unsigned char *iv, int enc);
1939 # define s390x_aes_xts_cipher aes_xts_cipher
1940 static int s390x_aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
1941 const unsigned char *in, size_t len);
1942 # define s390x_aes_xts_ctrl aes_xts_ctrl
1943 static int s390x_aes_xts_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
1944 # define s390x_aes_xts_cleanup aes_xts_cleanup
1945
1946 # define S390X_aes_128_ccm_CAPABLE (S390X_aes_128_CAPABLE && \
1947 (OPENSSL_s390xcap_P.kmac[0] & \
1948 S390X_CAPBIT(S390X_AES_128)))
1949 # define S390X_aes_192_ccm_CAPABLE (S390X_aes_192_CAPABLE && \
1950 (OPENSSL_s390xcap_P.kmac[0] & \
1951 S390X_CAPBIT(S390X_AES_192)))
1952 # define S390X_aes_256_ccm_CAPABLE (S390X_aes_256_CAPABLE && \
1953 (OPENSSL_s390xcap_P.kmac[0] & \
1954 S390X_CAPBIT(S390X_AES_256)))
1955
1956 # define S390X_CCM_AAD_FLAG 0x40
1957
1958 /*-
1959 * Set nonce and length fields. Code is big-endian.
1960 */
1961 static inline void s390x_aes_ccm_setiv(S390X_AES_CCM_CTX *ctx,
1962 const unsigned char *nonce,
1963 size_t mlen)
1964 {
1965 ctx->aes.ccm.nonce.b[0] &= ~S390X_CCM_AAD_FLAG;
1966 ctx->aes.ccm.nonce.g[1] = mlen;
1967 memcpy(ctx->aes.ccm.nonce.b + 1, nonce, 15 - ctx->aes.ccm.l);
1968 }
1969
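/*-
 * Worked example (added for clarity): with the defaults installed by
 * EVP_CTRL_INIT below (l = 8, m = 12), the flags byte stored at key-init
 * time is
 *
 *     ((l - 1) & 0x7) | (((m - 2) >> 1) & 0x7) << 3 = 0x07 | 0x28 = 0x2f
 *
 * and s390x_aes_ccm_aad() ORs in S390X_CCM_AAD_FLAG (0x40) once AAD is
 * present.  The nonce then occupies bytes 1 .. 15 - l, i.e. the iv length
 * reported by EVP_CTRL_GET_IVLEN is 15 - 8 = 7.
 */
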
1970 /*-
1971 * Process additional authenticated data. Code is big-endian.
1972 */
1973 static void s390x_aes_ccm_aad(S390X_AES_CCM_CTX *ctx, const unsigned char *aad,
1974 size_t alen)
1975 {
1976 unsigned char *ptr;
1977 int i, rem;
1978
1979 if (!alen)
1980 return;
1981
1982 ctx->aes.ccm.nonce.b[0] |= S390X_CCM_AAD_FLAG;
1983
1984 /* Suppress 'type-punned pointer dereference' warning. */
1985 ptr = ctx->aes.ccm.buf.b;
1986
1987 if (alen < ((1 << 16) - (1 << 8))) {
1988 *(uint16_t *)ptr = alen;
1989 i = 2;
1990 } else if (sizeof(alen) == 8
1991 && alen >= (size_t)1 << (32 % (sizeof(alen) * 8))) {
1992 *(uint16_t *)ptr = 0xffff;
1993 *(uint64_t *)(ptr + 2) = alen;
1994 i = 10;
1995 } else {
1996 *(uint16_t *)ptr = 0xfffe;
1997 *(uint32_t *)(ptr + 2) = alen;
1998 i = 6;
1999 }
2000
2001 while (i < 16 && alen) {
2002 ctx->aes.ccm.buf.b[i] = *aad;
2003 ++aad;
2004 --alen;
2005 ++i;
2006 }
2007 while (i < 16) {
2008 ctx->aes.ccm.buf.b[i] = 0;
2009 ++i;
2010 }
2011
2012 ctx->aes.ccm.kmac_param.icv.g[0] = 0;
2013 ctx->aes.ccm.kmac_param.icv.g[1] = 0;
2014 s390x_kmac(ctx->aes.ccm.nonce.b, 32, ctx->aes.ccm.fc,
2015 &ctx->aes.ccm.kmac_param);
2016 ctx->aes.ccm.blocks += 2;
2017
2018 rem = alen & 0xf;
2019 alen &= ~(size_t)0xf;
2020 if (alen) {
2021 s390x_kmac(aad, alen, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
2022 ctx->aes.ccm.blocks += alen >> 4;
2023 aad += alen;
2024 }
2025 if (rem) {
2026 for (i = 0; i < rem; i++)
2027 ctx->aes.ccm.kmac_param.icv.b[i] ^= aad[i];
2028
2029 s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
2030 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
2031 ctx->aes.ccm.kmac_param.k);
2032 ctx->aes.ccm.blocks++;
2033 }
2034 }
2035
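/*-
 * Worked example (added for clarity): the length prefix built above follows
 * RFC 3610, and the direct uint16/32/64 stores rely on s390x being
 * big-endian.  For alen = 0x1234 (< 2^16 - 2^8) the first AAD block starts
 *
 *     0x12 0x34 aad[0] aad[1] ... aad[13]
 *
 * while e.g. alen = 0x10000 is encoded as 0xff 0xfe 0x00 0x01 0x00 0x00
 * followed by the first ten AAD bytes.
 */
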
2036 /*-
2037 * En/de-crypt plain/cipher-text. Compute tag from plaintext. Returns 0 for
2038 * success.
2039 */
2040 static int s390x_aes_ccm(S390X_AES_CCM_CTX *ctx, const unsigned char *in,
2041 unsigned char *out, size_t len, int enc)
2042 {
2043 size_t n, rem;
2044 unsigned int i, l, num;
2045 unsigned char flags;
2046
2047 flags = ctx->aes.ccm.nonce.b[0];
2048 if (!(flags & S390X_CCM_AAD_FLAG)) {
2049 s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.kmac_param.icv.b,
2050 ctx->aes.ccm.fc, ctx->aes.ccm.kmac_param.k);
2051 ctx->aes.ccm.blocks++;
2052 }
2053 l = flags & 0x7;
2054 ctx->aes.ccm.nonce.b[0] = l;
2055
2056 /*-
2057 * Reconstruct length from encoded length field
2058 * and initialize it with counter value.
2059 */
2060 n = 0;
2061 for (i = 15 - l; i < 15; i++) {
2062 n |= ctx->aes.ccm.nonce.b[i];
2063 ctx->aes.ccm.nonce.b[i] = 0;
2064 n <<= 8;
2065 }
2066 n |= ctx->aes.ccm.nonce.b[15];
2067 ctx->aes.ccm.nonce.b[15] = 1;
2068
2069 if (n != len)
2070 return -1; /* length mismatch */
2071
2072 if (enc) {
2073 /* Two operations per block plus one for tag encryption */
2074 ctx->aes.ccm.blocks += (((len + 15) >> 4) << 1) + 1;
2075 if (ctx->aes.ccm.blocks > (1ULL << 61))
2076 return -2; /* too much data */
2077 }
2078
2079 num = 0;
2080 rem = len & 0xf;
2081 len &= ~(size_t)0xf;
2082
2083 if (enc) {
2084 /* mac-then-encrypt */
2085 if (len)
2086 s390x_kmac(in, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
2087 if (rem) {
2088 for (i = 0; i < rem; i++)
2089 ctx->aes.ccm.kmac_param.icv.b[i] ^= in[len + i];
2090
2091 s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
2092 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
2093 ctx->aes.ccm.kmac_param.k);
2094 }
2095
2096 CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
2097 ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
2098 &num, (ctr128_f)AES_ctr32_encrypt);
2099 } else {
2100 /* decrypt-then-mac */
2101 CRYPTO_ctr128_encrypt_ctr32(in, out, len + rem, &ctx->aes.key.k,
2102 ctx->aes.ccm.nonce.b, ctx->aes.ccm.buf.b,
2103 &num, (ctr128_f)AES_ctr32_encrypt);
2104
2105 if (len)
2106 s390x_kmac(out, len, ctx->aes.ccm.fc, &ctx->aes.ccm.kmac_param);
2107 if (rem) {
2108 for (i = 0; i < rem; i++)
2109 ctx->aes.ccm.kmac_param.icv.b[i] ^= out[len + i];
2110
2111 s390x_km(ctx->aes.ccm.kmac_param.icv.b, 16,
2112 ctx->aes.ccm.kmac_param.icv.b, ctx->aes.ccm.fc,
2113 ctx->aes.ccm.kmac_param.k);
2114 }
2115 }
2116 /* encrypt tag */
2117 for (i = 15 - l; i < 16; i++)
2118 ctx->aes.ccm.nonce.b[i] = 0;
2119
2120 s390x_km(ctx->aes.ccm.nonce.b, 16, ctx->aes.ccm.buf.b, ctx->aes.ccm.fc,
2121 ctx->aes.ccm.kmac_param.k);
2122 ctx->aes.ccm.kmac_param.icv.g[0] ^= ctx->aes.ccm.buf.g[0];
2123 ctx->aes.ccm.kmac_param.icv.g[1] ^= ctx->aes.ccm.buf.g[1];
2124
2125 ctx->aes.ccm.nonce.b[0] = flags; /* restore flags field */
2126 return 0;
2127 }
2128
2129 /*-
2130 * En/de-crypt and authenticate TLS packet. Returns the number of bytes written
2131 * if successful. Otherwise -1 is returned.
2132 */
2133 static int s390x_aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2134 const unsigned char *in, size_t len)
2135 {
2136 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
2137 unsigned char *ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
2138 unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
2139 const int enc = EVP_CIPHER_CTX_encrypting(ctx);
2140
2141 if (out != in
2142 || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->aes.ccm.m))
2143 return -1;
2144
2145 if (enc) {
2146 /* Set explicit iv (sequence number). */
2147 memcpy(out, buf, EVP_CCM_TLS_EXPLICIT_IV_LEN);
2148 }
2149
2150 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
2151 /*-
2152 * Get explicit iv (sequence number). We already have fixed iv
2153 * (server/client_write_iv) here.
2154 */
2155 memcpy(ivec + EVP_CCM_TLS_FIXED_IV_LEN, in, EVP_CCM_TLS_EXPLICIT_IV_LEN);
2156 s390x_aes_ccm_setiv(cctx, ivec, len);
2157
2158 /* Process aad (sequence number|type|version|length) */
2159 s390x_aes_ccm_aad(cctx, buf, cctx->aes.ccm.tls_aad_len);
2160
2161 in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
2162 out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
2163
2164 if (enc) {
2165 if (s390x_aes_ccm(cctx, in, out, len, enc))
2166 return -1;
2167
2168 memcpy(out + len, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
2169 return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->aes.ccm.m;
2170 } else {
2171 if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
2172 if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, in + len,
2173 cctx->aes.ccm.m))
2174 return len;
2175 }
2176
2177 OPENSSL_cleanse(out, len);
2178 return -1;
2179 }
2180 }
2181
2182 /*-
2183 * Set key and flag field and/or iv. Returns 1 if successful. Otherwise 0 is
2184 * returned.
2185 */
2186 static int s390x_aes_ccm_init_key(EVP_CIPHER_CTX *ctx,
2187 const unsigned char *key,
2188 const unsigned char *iv, int enc)
2189 {
2190 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
2191 unsigned char *ivec;
2192 int keylen;
2193
2194 if (iv == NULL && key == NULL)
2195 return 1;
2196
2197 if (key != NULL) {
2198 keylen = EVP_CIPHER_CTX_key_length(ctx);
2199 cctx->aes.ccm.fc = S390X_AES_FC(keylen);
2200 memcpy(cctx->aes.ccm.kmac_param.k, key, keylen);
2201
2202 /* Store encoded m and l. */
2203 cctx->aes.ccm.nonce.b[0] = ((cctx->aes.ccm.l - 1) & 0x7)
2204 | (((cctx->aes.ccm.m - 2) >> 1) & 0x7) << 3;
2205 memset(cctx->aes.ccm.nonce.b + 1, 0,
2206 sizeof(cctx->aes.ccm.nonce.b) - 1);
2207 cctx->aes.ccm.blocks = 0;
2208
2209 cctx->aes.ccm.key_set = 1;
2210 }
2211
2212 if (iv != NULL) {
2213 ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
2214 memcpy(ivec, iv, 15 - cctx->aes.ccm.l);
2215
2216 cctx->aes.ccm.iv_set = 1;
2217 }
2218
2219 return 1;
2220 }
2221
2222 /*-
2223 * Called from EVP layer to initialize context, process additional
2224 * authenticated data, en/de-crypt plain/cipher-text and authenticate
2225 * plaintext or process a TLS packet, depending on context. Returns bytes
2226 * written on success. Otherwise -1 is returned.
2227 */
2228 static int s390x_aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2229 const unsigned char *in, size_t len)
2230 {
2231 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, ctx);
2232 const int enc = EVP_CIPHER_CTX_encrypting(ctx);
2233 int rv;
2234 unsigned char *buf, *ivec;
2235
2236 if (!cctx->aes.ccm.key_set)
2237 return -1;
2238
2239 if (cctx->aes.ccm.tls_aad_len >= 0)
2240 return s390x_aes_ccm_tls_cipher(ctx, out, in, len);
2241
2242 /*-
2243 * Final(): Does not return any data. Recall that ccm is mac-then-encrypt
2244 * so integrity must be checked already at Update() i.e., before
2245 * potentially corrupted data is output.
2246 */
2247 if (in == NULL && out != NULL)
2248 return 0;
2249
2250 if (!cctx->aes.ccm.iv_set)
2251 return -1;
2252
2253 if (out == NULL) {
2254 /* Update(): Pass message length. */
2255 if (in == NULL) {
2256 ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
2257 s390x_aes_ccm_setiv(cctx, ivec, len);
2258
2259 cctx->aes.ccm.len_set = 1;
2260 return len;
2261 }
2262
2263 /* Update(): Process aad. */
2264 if (!cctx->aes.ccm.len_set && len)
2265 return -1;
2266
2267 s390x_aes_ccm_aad(cctx, in, len);
2268 return len;
2269 }
2270
2271 /* The tag must be set before actually decrypting data */
2272 if (!enc && !cctx->aes.ccm.tag_set)
2273 return -1;
2274
2275 /* Update(): Process message. */
2276
2277 if (!cctx->aes.ccm.len_set) {
2278 /*-
2279 * In case message length was not previously set explicitly via
2280 * Update(), set it now.
2281 */
2282 ivec = EVP_CIPHER_CTX_iv_noconst(ctx);
2283 s390x_aes_ccm_setiv(cctx, ivec, len);
2284
2285 cctx->aes.ccm.len_set = 1;
2286 }
2287
2288 if (enc) {
2289 if (s390x_aes_ccm(cctx, in, out, len, enc))
2290 return -1;
2291
2292 cctx->aes.ccm.tag_set = 1;
2293 return len;
2294 } else {
2295 rv = -1;
2296
2297 if (!s390x_aes_ccm(cctx, in, out, len, enc)) {
2298 buf = EVP_CIPHER_CTX_buf_noconst(ctx);
2299 if (!CRYPTO_memcmp(cctx->aes.ccm.kmac_param.icv.b, buf,
2300 cctx->aes.ccm.m))
2301 rv = len;
2302 }
2303
2304 if (rv == -1)
2305 OPENSSL_cleanse(out, len);
2306
2307 cctx->aes.ccm.iv_set = 0;
2308 cctx->aes.ccm.tag_set = 0;
2309 cctx->aes.ccm.len_set = 0;
2310 return rv;
2311 }
2312 }
2313
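/*-
 * Illustrative sketch (added; not upstream code): the EVP-level CCM calling
 * sequence served by the function above.  Note the extra Update() call that
 * passes only the total plaintext length, which ends up in the len_set
 * branch.  Names and the 7/12 nonce/tag lengths are examples only.
 */
# if 0                          /* example only, not compiled */
static int example_ccm_seal(EVP_CIPHER_CTX *ctx,
                            const unsigned char key[16],
                            const unsigned char nonce[7],
                            const unsigned char *aad, int aadlen,
                            const unsigned char *pt, int ptlen,
                            unsigned char *ct, unsigned char tag[12])
{
    int outl;

    return EVP_EncryptInit_ex(ctx, EVP_aes_128_ccm(), NULL, NULL, NULL)
        && EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_IVLEN, 7, NULL)
        && EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_TAG, 12, NULL)
        && EVP_EncryptInit_ex(ctx, NULL, NULL, key, nonce)
        && EVP_EncryptUpdate(ctx, NULL, &outl, NULL, ptlen) /* total length */
        && EVP_EncryptUpdate(ctx, NULL, &outl, aad, aadlen) /* AAD */
        && EVP_EncryptUpdate(ctx, ct, &outl, pt, ptlen)     /* encrypt */
        && EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_GET_TAG, 12, tag);
}
# endif
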
2314 /*-
2315 * Performs various operations on the context structure depending on control
2316 * type. Returns 1 for success, 0 for failure and -1 for unknown control type.
2317 * Code is big-endian.
2318 */
2319 static int s390x_aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
2320 {
2321 S390X_AES_CCM_CTX *cctx = EVP_C_DATA(S390X_AES_CCM_CTX, c);
2322 unsigned char *buf, *iv;
2323 int enc, len;
2324
2325 switch (type) {
2326 case EVP_CTRL_INIT:
2327 cctx->aes.ccm.key_set = 0;
2328 cctx->aes.ccm.iv_set = 0;
2329 cctx->aes.ccm.l = 8;
2330 cctx->aes.ccm.m = 12;
2331 cctx->aes.ccm.tag_set = 0;
2332 cctx->aes.ccm.len_set = 0;
2333 cctx->aes.ccm.tls_aad_len = -1;
2334 return 1;
2335
2336 case EVP_CTRL_GET_IVLEN:
2337 *(int *)ptr = 15 - cctx->aes.ccm.l;
2338 return 1;
2339
2340 case EVP_CTRL_AEAD_TLS1_AAD:
2341 if (arg != EVP_AEAD_TLS1_AAD_LEN)
2342 return 0;
2343
2344 /* Save the aad for later use. */
2345 buf = EVP_CIPHER_CTX_buf_noconst(c);
2346 memcpy(buf, ptr, arg);
2347 cctx->aes.ccm.tls_aad_len = arg;
2348
2349 len = buf[arg - 2] << 8 | buf[arg - 1];
2350 if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
2351 return 0;
2352
2353 /* Correct length for explicit iv. */
2354 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
2355
2356 enc = EVP_CIPHER_CTX_encrypting(c);
2357 if (!enc) {
2358 if (len < cctx->aes.ccm.m)
2359 return 0;
2360
2361 /* Correct length for tag. */
2362 len -= cctx->aes.ccm.m;
2363 }
2364
2365 buf[arg - 2] = len >> 8;
2366 buf[arg - 1] = len & 0xff;
2367
2368 /* Extra padding: tag appended to record. */
2369 return cctx->aes.ccm.m;
2370
2371 case EVP_CTRL_CCM_SET_IV_FIXED:
2372 if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
2373 return 0;
2374
2375 /* Copy to first part of the iv. */
2376 iv = EVP_CIPHER_CTX_iv_noconst(c);
2377 memcpy(iv, ptr, arg);
2378 return 1;
2379
2380 case EVP_CTRL_AEAD_SET_IVLEN:
2381 arg = 15 - arg;
2382 /* fall-through */
2383
2384 case EVP_CTRL_CCM_SET_L:
2385 if (arg < 2 || arg > 8)
2386 return 0;
2387
2388 cctx->aes.ccm.l = arg;
2389 return 1;
2390
2391 case EVP_CTRL_AEAD_SET_TAG:
2392 if ((arg & 1) || arg < 4 || arg > 16)
2393 return 0;
2394
2395 enc = EVP_CIPHER_CTX_encrypting(c);
2396 if (enc && ptr)
2397 return 0;
2398
2399 if (ptr) {
2400 cctx->aes.ccm.tag_set = 1;
2401 buf = EVP_CIPHER_CTX_buf_noconst(c);
2402 memcpy(buf, ptr, arg);
2403 }
2404
2405 cctx->aes.ccm.m = arg;
2406 return 1;
2407
2408 case EVP_CTRL_AEAD_GET_TAG:
2409 enc = EVP_CIPHER_CTX_encrypting(c);
2410 if (!enc || !cctx->aes.ccm.tag_set)
2411 return 0;
2412
2413 if (arg < cctx->aes.ccm.m)
2414 return 0;
2415
2416 memcpy(ptr, cctx->aes.ccm.kmac_param.icv.b, cctx->aes.ccm.m);
2417 cctx->aes.ccm.tag_set = 0;
2418 cctx->aes.ccm.iv_set = 0;
2419 cctx->aes.ccm.len_set = 0;
2420 return 1;
2421
2422 case EVP_CTRL_COPY:
2423 return 1;
2424
2425 default:
2426 return -1;
2427 }
2428 }
2429
2430 # define s390x_aes_ccm_cleanup aes_ccm_cleanup
2431
2432 # ifndef OPENSSL_NO_OCB
2433 # define S390X_AES_OCB_CTX EVP_AES_OCB_CTX
2434 # define S390X_aes_128_ocb_CAPABLE 0
2435 # define S390X_aes_192_ocb_CAPABLE 0
2436 # define S390X_aes_256_ocb_CAPABLE 0
2437
2438 # define s390x_aes_ocb_init_key aes_ocb_init_key
2439 static int s390x_aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
2440 const unsigned char *iv, int enc);
2441 # define s390x_aes_ocb_cipher aes_ocb_cipher
2442 static int s390x_aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2443 const unsigned char *in, size_t len);
2444 # define s390x_aes_ocb_cleanup aes_ocb_cleanup
2445 static int s390x_aes_ocb_cleanup(EVP_CIPHER_CTX *);
2446 # define s390x_aes_ocb_ctrl aes_ocb_ctrl
2447 static int s390x_aes_ocb_ctrl(EVP_CIPHER_CTX *, int type, int arg, void *ptr);
2448 # endif
2449
2450 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode, \
2451 MODE,flags) \
2452 static const EVP_CIPHER s390x_aes_##keylen##_##mode = { \
2453 nid##_##keylen##_##nmode,blocksize, \
2454 keylen / 8, \
2455 ivlen, \
2456 flags | EVP_CIPH_##MODE##_MODE, \
2457 s390x_aes_##mode##_init_key, \
2458 s390x_aes_##mode##_cipher, \
2459 NULL, \
2460 sizeof(S390X_AES_##MODE##_CTX), \
2461 NULL, \
2462 NULL, \
2463 NULL, \
2464 NULL \
2465 }; \
2466 static const EVP_CIPHER aes_##keylen##_##mode = { \
2467 nid##_##keylen##_##nmode, \
2468 blocksize, \
2469 keylen / 8, \
2470 ivlen, \
2471 flags | EVP_CIPH_##MODE##_MODE, \
2472 aes_init_key, \
2473 aes_##mode##_cipher, \
2474 NULL, \
2475 sizeof(EVP_AES_KEY), \
2476 NULL, \
2477 NULL, \
2478 NULL, \
2479 NULL \
2480 }; \
2481 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2482 { \
2483 return S390X_aes_##keylen##_##mode##_CAPABLE ? \
2484 &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode; \
2485 }
2486
2487 # define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags)\
2488 static const EVP_CIPHER s390x_aes_##keylen##_##mode = { \
2489 nid##_##keylen##_##mode, \
2490 blocksize, \
2491 (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE ? 2 : 1) * keylen / 8, \
2492 ivlen, \
2493 flags | EVP_CIPH_##MODE##_MODE, \
2494 s390x_aes_##mode##_init_key, \
2495 s390x_aes_##mode##_cipher, \
2496 s390x_aes_##mode##_cleanup, \
2497 sizeof(S390X_AES_##MODE##_CTX), \
2498 NULL, \
2499 NULL, \
2500 s390x_aes_##mode##_ctrl, \
2501 NULL \
2502 }; \
2503 static const EVP_CIPHER aes_##keylen##_##mode = { \
2504 nid##_##keylen##_##mode,blocksize, \
2505 (EVP_CIPH_##MODE##_MODE == EVP_CIPH_XTS_MODE ? 2 : 1) * keylen / 8, \
2506 ivlen, \
2507 flags | EVP_CIPH_##MODE##_MODE, \
2508 aes_##mode##_init_key, \
2509 aes_##mode##_cipher, \
2510 aes_##mode##_cleanup, \
2511 sizeof(EVP_AES_##MODE##_CTX), \
2512 NULL, \
2513 NULL, \
2514 aes_##mode##_ctrl, \
2515 NULL \
2516 }; \
2517 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2518 { \
2519 return S390X_aes_##keylen##_##mode##_CAPABLE ? \
2520 &s390x_aes_##keylen##_##mode : &aes_##keylen##_##mode; \
2521 }
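
/*-
 * For reference (added): instantiated as, say,
 *
 *     BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, gcm, GCM, ...)
 *
 * the macro above emits two EVP_CIPHER tables, s390x_aes_128_gcm and
 * aes_128_gcm, plus
 *
 *     const EVP_CIPHER *EVP_aes_128_gcm(void)
 *
 * which returns the s390x table when S390X_aes_128_gcm_CAPABLE is true and
 * falls back to the generic implementation otherwise.
 */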
2522
2523 #else
2524
2525 # define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
2526 static const EVP_CIPHER aes_##keylen##_##mode = { \
2527 nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
2528 flags|EVP_CIPH_##MODE##_MODE, \
2529 aes_init_key, \
2530 aes_##mode##_cipher, \
2531 NULL, \
2532 sizeof(EVP_AES_KEY), \
2533 NULL,NULL,NULL,NULL }; \
2534 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2535 { return &aes_##keylen##_##mode; }
2536
2537 # define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
2538 static const EVP_CIPHER aes_##keylen##_##mode = { \
2539 nid##_##keylen##_##mode,blocksize, \
2540 (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
2541 flags|EVP_CIPH_##MODE##_MODE, \
2542 aes_##mode##_init_key, \
2543 aes_##mode##_cipher, \
2544 aes_##mode##_cleanup, \
2545 sizeof(EVP_AES_##MODE##_CTX), \
2546 NULL,NULL,aes_##mode##_ctrl,NULL }; \
2547 const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
2548 { return &aes_##keylen##_##mode; }
2549
2550 #endif
2551
2552 #if defined(OPENSSL_CPUID_OBJ) && (defined(__arm__) || defined(__arm) || defined(__aarch64__))
2553 # include "arm_arch.h"
2554 # if __ARM_MAX_ARCH__>=7
2555 # if defined(BSAES_ASM)
2556 # define BSAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON)
2557 # endif
2558 # if defined(VPAES_ASM)
2559 # define VPAES_CAPABLE (OPENSSL_armcap_P & ARMV7_NEON)
2560 # endif
2561 # define HWAES_CAPABLE (OPENSSL_armcap_P & ARMV8_AES)
2562 # define HWAES_set_encrypt_key aes_v8_set_encrypt_key
2563 # define HWAES_set_decrypt_key aes_v8_set_decrypt_key
2564 # define HWAES_encrypt aes_v8_encrypt
2565 # define HWAES_decrypt aes_v8_decrypt
2566 # define HWAES_cbc_encrypt aes_v8_cbc_encrypt
2567 # define HWAES_ctr32_encrypt_blocks aes_v8_ctr32_encrypt_blocks
2568 # endif
2569 #endif
2570
2571 #if defined(HWAES_CAPABLE)
2572 int HWAES_set_encrypt_key(const unsigned char *userKey, const int bits,
2573 AES_KEY *key);
2574 int HWAES_set_decrypt_key(const unsigned char *userKey, const int bits,
2575 AES_KEY *key);
2576 void HWAES_encrypt(const unsigned char *in, unsigned char *out,
2577 const AES_KEY *key);
2578 void HWAES_decrypt(const unsigned char *in, unsigned char *out,
2579 const AES_KEY *key);
2580 void HWAES_cbc_encrypt(const unsigned char *in, unsigned char *out,
2581 size_t length, const AES_KEY *key,
2582 unsigned char *ivec, const int enc);
2583 void HWAES_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
2584 size_t len, const AES_KEY *key,
2585 const unsigned char ivec[16]);
2586 void HWAES_xts_encrypt(const unsigned char *inp, unsigned char *out,
2587 size_t len, const AES_KEY *key1,
2588 const AES_KEY *key2, const unsigned char iv[16]);
2589 void HWAES_xts_decrypt(const unsigned char *inp, unsigned char *out,
2590 size_t len, const AES_KEY *key1,
2591 const AES_KEY *key2, const unsigned char iv[16]);
2592 #endif
2593
2594 #define BLOCK_CIPHER_generic_pack(nid,keylen,flags) \
2595 BLOCK_CIPHER_generic(nid,keylen,16,16,cbc,cbc,CBC,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2596 BLOCK_CIPHER_generic(nid,keylen,16,0,ecb,ecb,ECB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2597 BLOCK_CIPHER_generic(nid,keylen,1,16,ofb128,ofb,OFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2598 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb128,cfb,CFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1) \
2599 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb1,cfb1,CFB,flags) \
2600 BLOCK_CIPHER_generic(nid,keylen,1,16,cfb8,cfb8,CFB,flags) \
2601 BLOCK_CIPHER_generic(nid,keylen,1,16,ctr,ctr,CTR,flags)
2602
2603 static int aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
2604 const unsigned char *iv, int enc)
2605 {
2606 int ret, mode;
2607 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2608
2609 mode = EVP_CIPHER_CTX_mode(ctx);
2610 if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
2611 && !enc) {
2612 #ifdef HWAES_CAPABLE
2613 if (HWAES_CAPABLE) {
2614 ret = HWAES_set_decrypt_key(key,
2615 EVP_CIPHER_CTX_key_length(ctx) * 8,
2616 &dat->ks.ks);
2617 dat->block = (block128_f) HWAES_decrypt;
2618 dat->stream.cbc = NULL;
2619 # ifdef HWAES_cbc_encrypt
2620 if (mode == EVP_CIPH_CBC_MODE)
2621 dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
2622 # endif
2623 } else
2624 #endif
2625 #ifdef BSAES_CAPABLE
2626 if (BSAES_CAPABLE && mode == EVP_CIPH_CBC_MODE) {
2627 ret = AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2628 &dat->ks.ks);
2629 dat->block = (block128_f) AES_decrypt;
2630 dat->stream.cbc = (cbc128_f) bsaes_cbc_encrypt;
2631 } else
2632 #endif
2633 #ifdef VPAES_CAPABLE
2634 if (VPAES_CAPABLE) {
2635 ret = vpaes_set_decrypt_key(key,
2636 EVP_CIPHER_CTX_key_length(ctx) * 8,
2637 &dat->ks.ks);
2638 dat->block = (block128_f) vpaes_decrypt;
2639 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2640 (cbc128_f) vpaes_cbc_encrypt : NULL;
2641 } else
2642 #endif
2643 {
2644 ret = AES_set_decrypt_key(key,
2645 EVP_CIPHER_CTX_key_length(ctx) * 8,
2646 &dat->ks.ks);
2647 dat->block = (block128_f) AES_decrypt;
2648 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2649 (cbc128_f) AES_cbc_encrypt : NULL;
2650 }
2651 } else
2652 #ifdef HWAES_CAPABLE
2653 if (HWAES_CAPABLE) {
2654 ret = HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2655 &dat->ks.ks);
2656 dat->block = (block128_f) HWAES_encrypt;
2657 dat->stream.cbc = NULL;
2658 # ifdef HWAES_cbc_encrypt
2659 if (mode == EVP_CIPH_CBC_MODE)
2660 dat->stream.cbc = (cbc128_f) HWAES_cbc_encrypt;
2661 else
2662 # endif
2663 # ifdef HWAES_ctr32_encrypt_blocks
2664 if (mode == EVP_CIPH_CTR_MODE)
2665 dat->stream.ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
2666 else
2667 # endif
2668 (void)0; /* terminate potentially open 'else' */
2669 } else
2670 #endif
2671 #ifdef BSAES_CAPABLE
2672 if (BSAES_CAPABLE && mode == EVP_CIPH_CTR_MODE) {
2673 ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2674 &dat->ks.ks);
2675 dat->block = (block128_f) AES_encrypt;
2676 dat->stream.ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
2677 } else
2678 #endif
2679 #ifdef VPAES_CAPABLE
2680 if (VPAES_CAPABLE) {
2681 ret = vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2682 &dat->ks.ks);
2683 dat->block = (block128_f) vpaes_encrypt;
2684 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2685 (cbc128_f) vpaes_cbc_encrypt : NULL;
2686 } else
2687 #endif
2688 {
2689 ret = AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
2690 &dat->ks.ks);
2691 dat->block = (block128_f) AES_encrypt;
2692 dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
2693 (cbc128_f) AES_cbc_encrypt : NULL;
2694 #ifdef AES_CTR_ASM
2695 if (mode == EVP_CIPH_CTR_MODE)
2696 dat->stream.ctr = (ctr128_f) AES_ctr32_encrypt;
2697 #endif
2698 }
2699
2700 if (ret < 0) {
2701 EVPerr(EVP_F_AES_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
2702 return 0;
2703 }
2704
2705 return 1;
2706 }
2707
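/*-
 * Illustrative sketch (added; not upstream code): aes_init_key() runs
 * inside EVP_EncryptInit_ex() and silently picks the fastest available
 * implementation (HWAES, bsaes, vpaes or the generic tables); callers only
 * see the public API.  Names are hypothetical; padding is left at the
 * EVP default.
 */
#if 0                           /* example only, not compiled */
static int example_cbc_encrypt(const unsigned char key[16],
                               const unsigned char iv[16],
                               const unsigned char *pt, int ptlen,
                               unsigned char *ct, int *ctlen)
{
    EVP_CIPHER_CTX *ctx = EVP_CIPHER_CTX_new();
    int fin, ok;

    ok = ctx != NULL
        && EVP_EncryptInit_ex(ctx, EVP_aes_128_cbc(), NULL, key, iv)
        && EVP_EncryptUpdate(ctx, ct, ctlen, pt, ptlen)
        && EVP_EncryptFinal_ex(ctx, ct + *ctlen, &fin);
    if (ok)
        *ctlen += fin;
    EVP_CIPHER_CTX_free(ctx);
    return ok;
}
#endif
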
2708 static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2709 const unsigned char *in, size_t len)
2710 {
2711 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2712
2713 if (dat->stream.cbc)
2714 (*dat->stream.cbc) (in, out, len, &dat->ks,
2715 EVP_CIPHER_CTX_iv_noconst(ctx),
2716 EVP_CIPHER_CTX_encrypting(ctx));
2717 else if (EVP_CIPHER_CTX_encrypting(ctx))
2718 CRYPTO_cbc128_encrypt(in, out, len, &dat->ks,
2719 EVP_CIPHER_CTX_iv_noconst(ctx), dat->block);
2720 else
2721 CRYPTO_cbc128_decrypt(in, out, len, &dat->ks,
2722 EVP_CIPHER_CTX_iv_noconst(ctx), dat->block);
2723
2724 return 1;
2725 }
2726
2727 static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2728 const unsigned char *in, size_t len)
2729 {
2730 size_t bl = EVP_CIPHER_CTX_block_size(ctx);
2731 size_t i;
2732 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2733
2734 if (len < bl)
2735 return 1;
2736
2737 for (i = 0, len -= bl; i <= len; i += bl)
2738 (*dat->block) (in + i, out + i, &dat->ks);
2739
2740 return 1;
2741 }
2742
2743 static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2744 const unsigned char *in, size_t len)
2745 {
2746 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2747
2748 int num = EVP_CIPHER_CTX_num(ctx);
2749 CRYPTO_ofb128_encrypt(in, out, len, &dat->ks,
2750 EVP_CIPHER_CTX_iv_noconst(ctx), &num, dat->block);
2751 EVP_CIPHER_CTX_set_num(ctx, num);
2752 return 1;
2753 }
2754
2755 static int aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2756 const unsigned char *in, size_t len)
2757 {
2758 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2759
2760 int num = EVP_CIPHER_CTX_num(ctx);
2761 CRYPTO_cfb128_encrypt(in, out, len, &dat->ks,
2762 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2763 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2764 EVP_CIPHER_CTX_set_num(ctx, num);
2765 return 1;
2766 }
2767
2768 static int aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2769 const unsigned char *in, size_t len)
2770 {
2771 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2772
2773 int num = EVP_CIPHER_CTX_num(ctx);
2774 CRYPTO_cfb128_8_encrypt(in, out, len, &dat->ks,
2775 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2776 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2777 EVP_CIPHER_CTX_set_num(ctx, num);
2778 return 1;
2779 }
2780
2781 static int aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2782 const unsigned char *in, size_t len)
2783 {
2784 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2785
2786 if (EVP_CIPHER_CTX_test_flags(ctx, EVP_CIPH_FLAG_LENGTH_BITS)) {
2787 int num = EVP_CIPHER_CTX_num(ctx);
2788 CRYPTO_cfb128_1_encrypt(in, out, len, &dat->ks,
2789 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2790 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2791 EVP_CIPHER_CTX_set_num(ctx, num);
2792 return 1;
2793 }
2794
2795 while (len >= MAXBITCHUNK) {
2796 int num = EVP_CIPHER_CTX_num(ctx);
2797 CRYPTO_cfb128_1_encrypt(in, out, MAXBITCHUNK * 8, &dat->ks,
2798 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2799 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2800 EVP_CIPHER_CTX_set_num(ctx, num);
2801 len -= MAXBITCHUNK;
2802 out += MAXBITCHUNK;
2803 in += MAXBITCHUNK;
2804 }
2805 if (len) {
2806 int num = EVP_CIPHER_CTX_num(ctx);
2807 CRYPTO_cfb128_1_encrypt(in, out, len * 8, &dat->ks,
2808 EVP_CIPHER_CTX_iv_noconst(ctx), &num,
2809 EVP_CIPHER_CTX_encrypting(ctx), dat->block);
2810 EVP_CIPHER_CTX_set_num(ctx, num);
2811 }
2812
2813 return 1;
2814 }
2815
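/*-
 * Note (added for clarity): CRYPTO_cfb128_1_encrypt() takes its length in
 * bits, so the byte count must be small enough that len * 8 cannot
 * overflow size_t.  With the definition used in this file, MAXBITCHUNK is
 * 2^60 bytes on a 64-bit system (2^28 on 32-bit), so MAXBITCHUNK * 8 still
 * fits; the loop above simply feeds the data in chunks of that size.
 */
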
2816 static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
2817 const unsigned char *in, size_t len)
2818 {
2819 unsigned int num = EVP_CIPHER_CTX_num(ctx);
2820 EVP_AES_KEY *dat = EVP_C_DATA(EVP_AES_KEY,ctx);
2821
2822 if (dat->stream.ctr)
2823 CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks,
2824 EVP_CIPHER_CTX_iv_noconst(ctx),
2825 EVP_CIPHER_CTX_buf_noconst(ctx),
2826 &num, dat->stream.ctr);
2827 else
2828 CRYPTO_ctr128_encrypt(in, out, len, &dat->ks,
2829 EVP_CIPHER_CTX_iv_noconst(ctx),
2830 EVP_CIPHER_CTX_buf_noconst(ctx), &num,
2831 dat->block);
2832 EVP_CIPHER_CTX_set_num(ctx, num);
2833 return 1;
2834 }
2835
2836 BLOCK_CIPHER_generic_pack(NID_aes, 128, 0)
2837 BLOCK_CIPHER_generic_pack(NID_aes, 192, 0)
2838 BLOCK_CIPHER_generic_pack(NID_aes, 256, 0)
2839
2840 static int aes_gcm_cleanup(EVP_CIPHER_CTX *c)
2841 {
2842 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);
2843 if (gctx == NULL)
2844 return 0;
2845 OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));
2846 if (gctx->iv != EVP_CIPHER_CTX_iv_noconst(c))
2847 OPENSSL_free(gctx->iv);
2848 return 1;
2849 }
2850
2851 static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
2852 {
2853 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,c);
2854 switch (type) {
2855 case EVP_CTRL_INIT:
2856 gctx->key_set = 0;
2857 gctx->iv_set = 0;
2858 gctx->ivlen = EVP_CIPHER_iv_length(c->cipher);
2859 gctx->iv = c->iv;
2860 gctx->taglen = -1;
2861 gctx->iv_gen = 0;
2862 gctx->tls_aad_len = -1;
2863 return 1;
2864
2865 case EVP_CTRL_GET_IVLEN:
2866 *(int *)ptr = gctx->ivlen;
2867 return 1;
2868
2869 case EVP_CTRL_AEAD_SET_IVLEN:
2870 if (arg <= 0)
2871 return 0;
2872 /* Allocate memory for IV if needed */
2873 if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) {
2874 if (gctx->iv != c->iv)
2875 OPENSSL_free(gctx->iv);
2876 if ((gctx->iv = OPENSSL_malloc(arg)) == NULL) {
2877 EVPerr(EVP_F_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
2878 return 0;
2879 }
2880 }
2881 gctx->ivlen = arg;
2882 return 1;
2883
2884 case EVP_CTRL_AEAD_SET_TAG:
2885 if (arg <= 0 || arg > 16 || c->encrypt)
2886 return 0;
2887 memcpy(c->buf, ptr, arg);
2888 gctx->taglen = arg;
2889 return 1;
2890
2891 case EVP_CTRL_AEAD_GET_TAG:
2892 if (arg <= 0 || arg > 16 || !c->encrypt
2893 || gctx->taglen < 0)
2894 return 0;
2895 memcpy(ptr, c->buf, arg);
2896 return 1;
2897
2898 case EVP_CTRL_GCM_SET_IV_FIXED:
2899 /* Special case: -1 length restores whole IV */
2900 if (arg == -1) {
2901 memcpy(gctx->iv, ptr, gctx->ivlen);
2902 gctx->iv_gen = 1;
2903 return 1;
2904 }
2905 /*
2906 * Fixed field must be at least 4 bytes and invocation field at least
2907 * 8.
2908 */
2909 if ((arg < 4) || (gctx->ivlen - arg) < 8)
2910 return 0;
2911 if (arg)
2912 memcpy(gctx->iv, ptr, arg);
2913 if (c->encrypt && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
2914 return 0;
2915 gctx->iv_gen = 1;
2916 return 1;
2917
2918 case EVP_CTRL_GCM_IV_GEN:
2919 if (gctx->iv_gen == 0 || gctx->key_set == 0)
2920 return 0;
2921 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
2922 if (arg <= 0 || arg > gctx->ivlen)
2923 arg = gctx->ivlen;
2924 memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
2925 /*
2926 * Invocation field will be at least 8 bytes in size and so no need
2927 * to check wrap around or increment more than last 8 bytes.
2928 */
2929 ctr64_inc(gctx->iv + gctx->ivlen - 8);
2930 gctx->iv_set = 1;
2931 return 1;
2932
2933 case EVP_CTRL_GCM_SET_IV_INV:
2934 if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt)
2935 return 0;
2936 memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
2937 CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
2938 gctx->iv_set = 1;
2939 return 1;
2940
2941 case EVP_CTRL_AEAD_TLS1_AAD:
2942 /* Save the AAD for later use */
2943 if (arg != EVP_AEAD_TLS1_AAD_LEN)
2944 return 0;
2945 memcpy(c->buf, ptr, arg);
2946 gctx->tls_aad_len = arg;
2947 {
2948 unsigned int len = c->buf[arg - 2] << 8 | c->buf[arg - 1];
2949 /* Correct length for explicit IV */
2950 if (len < EVP_GCM_TLS_EXPLICIT_IV_LEN)
2951 return 0;
2952 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
2953 /* If decrypting correct for tag too */
2954 if (!c->encrypt) {
2955 if (len < EVP_GCM_TLS_TAG_LEN)
2956 return 0;
2957 len -= EVP_GCM_TLS_TAG_LEN;
2958 }
2959 c->buf[arg - 2] = len >> 8;
2960 c->buf[arg - 1] = len & 0xff;
2961 }
2962 /* Extra padding: tag appended to record */
2963 return EVP_GCM_TLS_TAG_LEN;
2964
2965 case EVP_CTRL_COPY:
2966 {
2967 EVP_CIPHER_CTX *out = ptr;
2968 EVP_AES_GCM_CTX *gctx_out = EVP_C_DATA(EVP_AES_GCM_CTX,out);
2969 if (gctx->gcm.key) {
2970 if (gctx->gcm.key != &gctx->ks)
2971 return 0;
2972 gctx_out->gcm.key = &gctx_out->ks;
2973 }
2974 if (gctx->iv == c->iv)
2975 gctx_out->iv = out->iv;
2976 else {
2977 if ((gctx_out->iv = OPENSSL_malloc(gctx->ivlen)) == NULL) {
2978 EVPerr(EVP_F_AES_GCM_CTRL, ERR_R_MALLOC_FAILURE);
2979 return 0;
2980 }
2981 memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
2982 }
2983 return 1;
2984 }
2985
2986 default:
2987 return -1;
2988
2989 }
2990 }
2991
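/*-
 * Illustrative sketch (added; not upstream code): the TLS-style IV split
 * handled by the ctrls above.  A 4-byte fixed (implicit) part is installed
 * with EVP_CTRL_GCM_SET_IV_FIXED; on encryption the remaining 8 bytes are
 * randomized, and EVP_CTRL_GCM_IV_GEN returns the explicit part for the
 * current record while bumping the 64-bit invocation counter for the next
 * one.  The function name is hypothetical.
 */
#if 0                           /* example only, not compiled */
static int example_gcm_tls_iv_setup(EVP_CIPHER_CTX *ctx,
                                    const unsigned char fixed_iv[4],
                                    unsigned char explicit_iv[8])
{
    if (EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_GCM_SET_IV_FIXED, 4,
                            (void *)fixed_iv) <= 0)
        return 0;
    return EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_GCM_IV_GEN, 8, explicit_iv) > 0;
}
#endif
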
2992 static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
2993 const unsigned char *iv, int enc)
2994 {
2995 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
2996 if (!iv && !key)
2997 return 1;
2998 if (key) {
2999 do {
3000 #ifdef HWAES_CAPABLE
3001 if (HWAES_CAPABLE) {
3002 HWAES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
3003 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
3004 (block128_f) HWAES_encrypt);
3005 # ifdef HWAES_ctr32_encrypt_blocks
3006 gctx->ctr = (ctr128_f) HWAES_ctr32_encrypt_blocks;
3007 # else
3008 gctx->ctr = NULL;
3009 # endif
3010 break;
3011 } else
3012 #endif
3013 #ifdef BSAES_CAPABLE
3014 if (BSAES_CAPABLE) {
3015 AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
3016 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
3017 (block128_f) AES_encrypt);
3018 gctx->ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
3019 break;
3020 } else
3021 #endif
3022 #ifdef VPAES_CAPABLE
3023 if (VPAES_CAPABLE) {
3024 vpaes_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
3025 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
3026 (block128_f) vpaes_encrypt);
3027 gctx->ctr = NULL;
3028 break;
3029 } else
3030 #endif
3031 (void)0; /* terminate potentially open 'else' */
3032
3033 AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks.ks);
3034 CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
3035 (block128_f) AES_encrypt);
3036 #ifdef AES_CTR_ASM
3037 gctx->ctr = (ctr128_f) AES_ctr32_encrypt;
3038 #else
3039 gctx->ctr = NULL;
3040 #endif
3041 } while (0);
3042
3043 /*
3044 * If we have an IV we can set it directly, otherwise use the saved IV.
3045 */
3046 if (iv == NULL && gctx->iv_set)
3047 iv = gctx->iv;
3048 if (iv) {
3049 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
3050 gctx->iv_set = 1;
3051 }
3052 gctx->key_set = 1;
3053 } else {
3054 /* If key set use IV, otherwise copy */
3055 if (gctx->key_set)
3056 CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
3057 else
3058 memcpy(gctx->iv, iv, gctx->ivlen);
3059 gctx->iv_set = 1;
3060 gctx->iv_gen = 0;
3061 }
3062 return 1;
3063 }
3064
3065 /*
3066 * Handle TLS GCM packet format. This consists of the last portion of the IV
3067 * followed by the payload and finally the tag. On encrypt generate IV,
3068 * encrypt payload and write the tag. On verify retrieve IV, decrypt payload
3069 * and verify tag.
3070 */
3071
3072 static int aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3073 const unsigned char *in, size_t len)
3074 {
3075 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
3076 int rv = -1;
3077 /* Encrypt/decrypt must be performed in place */
3078 if (out != in
3079 || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
3080 return -1;
3081 /*
3082 * Set IV from start of buffer or generate IV and write to start of
3083 * buffer.
3084 */
3085 if (EVP_CIPHER_CTX_ctrl(ctx, ctx->encrypt ? EVP_CTRL_GCM_IV_GEN
3086 : EVP_CTRL_GCM_SET_IV_INV,
3087 EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
3088 goto err;
3089 /* Use saved AAD */
3090 if (CRYPTO_gcm128_aad(&gctx->gcm, ctx->buf, gctx->tls_aad_len))
3091 goto err;
3092 /* Fix buffer and length to point to payload */
3093 in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
3094 out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
3095 len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
3096 if (ctx->encrypt) {
3097 /* Encrypt payload */
3098 if (gctx->ctr) {
3099 size_t bulk = 0;
3100 #if defined(AES_GCM_ASM)
3101 if (len >= 32 && AES_GCM_ASM(gctx)) {
3102 if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
3103 return -1;
3104
3105 bulk = AES_gcm_encrypt(in, out, len,
3106 gctx->gcm.key,
3107 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3108 gctx->gcm.len.u[1] += bulk;
3109 }
3110 #endif
3111 if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
3112 in + bulk,
3113 out + bulk,
3114 len - bulk, gctx->ctr))
3115 goto err;
3116 } else {
3117 size_t bulk = 0;
3118 #if defined(AES_GCM_ASM2)
3119 if (len >= 32 && AES_GCM_ASM2(gctx)) {
3120 if (CRYPTO_gcm128_encrypt(&gctx->gcm, NULL, NULL, 0))
3121 return -1;
3122
3123 bulk = AES_gcm_encrypt(in, out, len,
3124 gctx->gcm.key,
3125 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3126 gctx->gcm.len.u[1] += bulk;
3127 }
3128 #endif
3129 if (CRYPTO_gcm128_encrypt(&gctx->gcm,
3130 in + bulk, out + bulk, len - bulk))
3131 goto err;
3132 }
3133 out += len;
3134 /* Finally write tag */
3135 CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN);
3136 rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
3137 } else {
3138 /* Decrypt */
3139 if (gctx->ctr) {
3140 size_t bulk = 0;
3141 #if defined(AES_GCM_ASM)
3142 if (len >= 16 && AES_GCM_ASM(gctx)) {
3143 if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
3144 return -1;
3145
3146 bulk = AES_gcm_decrypt(in, out, len,
3147 gctx->gcm.key,
3148 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3149 gctx->gcm.len.u[1] += bulk;
3150 }
3151 #endif
3152 if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
3153 in + bulk,
3154 out + bulk,
3155 len - bulk, gctx->ctr))
3156 goto err;
3157 } else {
3158 size_t bulk = 0;
3159 #if defined(AES_GCM_ASM2)
3160 if (len >= 16 && AES_GCM_ASM2(gctx)) {
3161 if (CRYPTO_gcm128_decrypt(&gctx->gcm, NULL, NULL, 0))
3162 return -1;
3163
3164 bulk = AES_gcm_decrypt(in, out, len,
3165 gctx->gcm.key,
3166 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3167 gctx->gcm.len.u[1] += bulk;
3168 }
3169 #endif
3170 if (CRYPTO_gcm128_decrypt(&gctx->gcm,
3171 in + bulk, out + bulk, len - bulk))
3172 goto err;
3173 }
3174 /* Retrieve tag */
3175 CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, EVP_GCM_TLS_TAG_LEN);
3176 /* If tag mismatch wipe buffer */
3177 if (CRYPTO_memcmp(ctx->buf, in + len, EVP_GCM_TLS_TAG_LEN)) {
3178 OPENSSL_cleanse(out, len);
3179 goto err;
3180 }
3181 rv = len;
3182 }
3183
3184 err:
3185 gctx->iv_set = 0;
3186 gctx->tls_aad_len = -1;
3187 return rv;
3188 }
3189
3190 static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3191 const unsigned char *in, size_t len)
3192 {
3193 EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
3194 /* If not set up, return error */
3195 if (!gctx->key_set)
3196 return -1;
3197
3198 if (gctx->tls_aad_len >= 0)
3199 return aes_gcm_tls_cipher(ctx, out, in, len);
3200
3201 if (!gctx->iv_set)
3202 return -1;
3203 if (in) {
3204 if (out == NULL) {
3205 if (CRYPTO_gcm128_aad(&gctx->gcm, in, len))
3206 return -1;
3207 } else if (ctx->encrypt) {
3208 if (gctx->ctr) {
3209 size_t bulk = 0;
3210 #if defined(AES_GCM_ASM)
3211 if (len >= 32 && AES_GCM_ASM(gctx)) {
3212 size_t res = (16 - gctx->gcm.mres) % 16;
3213
3214 if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
3215 return -1;
3216
3217 bulk = AES_gcm_encrypt(in + res,
3218 out + res, len - res,
3219 gctx->gcm.key, gctx->gcm.Yi.c,
3220 gctx->gcm.Xi.u);
3221 gctx->gcm.len.u[1] += bulk;
3222 bulk += res;
3223 }
3224 #endif
3225 if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
3226 in + bulk,
3227 out + bulk,
3228 len - bulk, gctx->ctr))
3229 return -1;
3230 } else {
3231 size_t bulk = 0;
3232 #if defined(AES_GCM_ASM2)
3233 if (len >= 32 && AES_GCM_ASM2(gctx)) {
3234 size_t res = (16 - gctx->gcm.mres) % 16;
3235
3236 if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, res))
3237 return -1;
3238
3239 bulk = AES_gcm_encrypt(in + res,
3240 out + res, len - res,
3241 gctx->gcm.key, gctx->gcm.Yi.c,
3242 gctx->gcm.Xi.u);
3243 gctx->gcm.len.u[1] += bulk;
3244 bulk += res;
3245 }
3246 #endif
3247 if (CRYPTO_gcm128_encrypt(&gctx->gcm,
3248 in + bulk, out + bulk, len - bulk))
3249 return -1;
3250 }
3251 } else {
3252 if (gctx->ctr) {
3253 size_t bulk = 0;
3254 #if defined(AES_GCM_ASM)
3255 if (len >= 16 && AES_GCM_ASM(gctx)) {
3256 size_t res = (16 - gctx->gcm.mres) % 16;
3257
3258 if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
3259 return -1;
3260
3261 bulk = AES_gcm_decrypt(in + res,
3262 out + res, len - res,
3263 gctx->gcm.key,
3264 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3265 gctx->gcm.len.u[1] += bulk;
3266 bulk += res;
3267 }
3268 #endif
3269 if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
3270 in + bulk,
3271 out + bulk,
3272 len - bulk, gctx->ctr))
3273 return -1;
3274 } else {
3275 size_t bulk = 0;
3276 #if defined(AES_GCM_ASM2)
3277 if (len >= 16 && AES_GCM_ASM2(gctx)) {
3278 size_t res = (16 - gctx->gcm.mres) % 16;
3279
3280 if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, res))
3281 return -1;
3282
3283 bulk = AES_gcm_decrypt(in + res,
3284 out + res, len - res,
3285 gctx->gcm.key,
3286 gctx->gcm.Yi.c, gctx->gcm.Xi.u);
3287 gctx->gcm.len.u[1] += bulk;
3288 bulk += res;
3289 }
3290 #endif
3291 if (CRYPTO_gcm128_decrypt(&gctx->gcm,
3292 in + bulk, out + bulk, len - bulk))
3293 return -1;
3294 }
3295 }
3296 return len;
3297 } else {
3298 if (!ctx->encrypt) {
3299 if (gctx->taglen < 0)
3300 return -1;
3301 if (CRYPTO_gcm128_finish(&gctx->gcm, ctx->buf, gctx->taglen) != 0)
3302 return -1;
3303 gctx->iv_set = 0;
3304 return 0;
3305 }
3306 CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16);
3307 gctx->taglen = 16;
3308 /* Don't reuse the IV */
3309 gctx->iv_set = 0;
3310 return 0;
3311 }
3312
3313 }
3314
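/*-
 * Illustrative sketch (added; not upstream code): a GCM open operation as
 * served by aes_gcm_cipher() above.  The expected tag is installed with
 * EVP_CTRL_AEAD_SET_TAG (only permitted when decrypting, see aes_gcm_ctrl),
 * and EVP_DecryptFinal_ex() fails iff the tag comparison in the final call
 * fails.  Names are hypothetical; error handling is collapsed.
 */
#if 0                           /* example only, not compiled */
static int example_gcm_open(EVP_CIPHER_CTX *ctx,
                            const unsigned char *key,
                            const unsigned char *iv, int ivlen,
                            const unsigned char *aad, int aadlen,
                            const unsigned char *ct, int ctlen,
                            unsigned char *tag, int taglen,
                            unsigned char *pt)
{
    int outl, fin;

    return EVP_DecryptInit_ex(ctx, EVP_aes_256_gcm(), NULL, NULL, NULL)
        && EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_IVLEN, ivlen, NULL)
        && EVP_DecryptInit_ex(ctx, NULL, NULL, key, iv)
        && EVP_DecryptUpdate(ctx, NULL, &outl, aad, aadlen)
        && EVP_DecryptUpdate(ctx, pt, &outl, ct, ctlen)
        && EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_TAG, taglen, tag)
        && EVP_DecryptFinal_ex(ctx, pt + outl, &fin) > 0;
}
#endif
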
3315 #define CUSTOM_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 \
3316 | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
3317 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
3318 | EVP_CIPH_CUSTOM_COPY | EVP_CIPH_CUSTOM_IV_LENGTH)
3319
3320 BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, gcm, GCM,
3321 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3322 BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, gcm, GCM,
3323 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3324 BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, gcm, GCM,
3325 EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
3326
3327 static int aes_xts_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
3328 {
3329 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX, c);
3330
3331 if (type == EVP_CTRL_COPY) {
3332 EVP_CIPHER_CTX *out = ptr;
3333 EVP_AES_XTS_CTX *xctx_out = EVP_C_DATA(EVP_AES_XTS_CTX,out);
3334
3335 if (xctx->xts.key1) {
3336 if (xctx->xts.key1 != &xctx->ks1)
3337 return 0;
3338 xctx_out->xts.key1 = &xctx_out->ks1;
3339 }
3340 if (xctx->xts.key2) {
3341 if (xctx->xts.key2 != &xctx->ks2)
3342 return 0;
3343 xctx_out->xts.key2 = &xctx_out->ks2;
3344 }
3345 return 1;
3346 } else if (type != EVP_CTRL_INIT)
3347 return -1;
3348 /* key1 and key2 are used as an indicator both key and IV are set */
3349 xctx->xts.key1 = NULL;
3350 xctx->xts.key2 = NULL;
3351 return 1;
3352 }
3353
3354 static int aes_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3355 const unsigned char *iv, int enc)
3356 {
3357 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
3358
3359 if (!iv && !key)
3360 return 1;
3361
3362 if (key)
3363 do {
3364 /* The key is two half length keys in reality */
3365 const int bytes = EVP_CIPHER_CTX_key_length(ctx) / 2;
3366
3367 /*
3368 * Verify that the two keys are different.
3369 *
3370 * This addresses the vulnerability described in Rogaway's
3371 * September 2004 paper:
3372 *
3373 * "Efficient Instantiations of Tweakable Blockciphers and
3374 * Refinements to Modes OCB and PMAC".
3375 * (http://web.cs.ucdavis.edu/~rogaway/papers/offsets.pdf)
3376 *
3377 * FIPS 140-2 IG A.9 XTS-AES Key Generation Requirements states
3378 * that:
3379 * "The check for Key_1 != Key_2 shall be done at any place
3380 * BEFORE using the keys in the XTS-AES algorithm to process
3381 * data with them."
3382 */
3383 if (enc && CRYPTO_memcmp(key, key + bytes, bytes) == 0) {
3384 EVPerr(EVP_F_AES_XTS_INIT_KEY, EVP_R_XTS_DUPLICATED_KEYS);
3385 return 0;
3386 }
3387
3388 #ifdef AES_XTS_ASM
3389 xctx->stream = enc ? AES_xts_encrypt : AES_xts_decrypt;
3390 #else
3391 xctx->stream = NULL;
3392 #endif
3393 /* key_len is two AES keys */
3394 #ifdef HWAES_CAPABLE
3395 if (HWAES_CAPABLE) {
3396 if (enc) {
3397 HWAES_set_encrypt_key(key,
3398 EVP_CIPHER_CTX_key_length(ctx) * 4,
3399 &xctx->ks1.ks);
3400 xctx->xts.block1 = (block128_f) HWAES_encrypt;
3401 # ifdef HWAES_xts_encrypt
3402 xctx->stream = HWAES_xts_encrypt;
3403 # endif
3404 } else {
3405 HWAES_set_decrypt_key(key,
3406 EVP_CIPHER_CTX_key_length(ctx) * 4,
3407 &xctx->ks1.ks);
3408 xctx->xts.block1 = (block128_f) HWAES_decrypt;
3409 # ifdef HWAES_xts_decrypt
3410 xctx->stream = HWAES_xts_decrypt;
3411 # endif
3412 }
3413
3414 HWAES_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
3415 EVP_CIPHER_CTX_key_length(ctx) * 4,
3416 &xctx->ks2.ks);
3417 xctx->xts.block2 = (block128_f) HWAES_encrypt;
3418
3419 xctx->xts.key1 = &xctx->ks1;
3420 break;
3421 } else
3422 #endif
3423 #ifdef BSAES_CAPABLE
3424 if (BSAES_CAPABLE)
3425 xctx->stream = enc ? bsaes_xts_encrypt : bsaes_xts_decrypt;
3426 else
3427 #endif
3428 #ifdef VPAES_CAPABLE
3429 if (VPAES_CAPABLE) {
3430 if (enc) {
3431 vpaes_set_encrypt_key(key,
3432 EVP_CIPHER_CTX_key_length(ctx) * 4,
3433 &xctx->ks1.ks);
3434 xctx->xts.block1 = (block128_f) vpaes_encrypt;
3435 } else {
3436 vpaes_set_decrypt_key(key,
3437 EVP_CIPHER_CTX_key_length(ctx) * 4,
3438 &xctx->ks1.ks);
3439 xctx->xts.block1 = (block128_f) vpaes_decrypt;
3440 }
3441
3442 vpaes_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
3443 EVP_CIPHER_CTX_key_length(ctx) * 4,
3444 &xctx->ks2.ks);
3445 xctx->xts.block2 = (block128_f) vpaes_encrypt;
3446
3447 xctx->xts.key1 = &xctx->ks1;
3448 break;
3449 } else
3450 #endif
3451 (void)0; /* terminate potentially open 'else' */
3452
3453 if (enc) {
3454 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
3455 &xctx->ks1.ks);
3456 xctx->xts.block1 = (block128_f) AES_encrypt;
3457 } else {
3458 AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 4,
3459 &xctx->ks1.ks);
3460 xctx->xts.block1 = (block128_f) AES_decrypt;
3461 }
3462
3463 AES_set_encrypt_key(key + EVP_CIPHER_CTX_key_length(ctx) / 2,
3464 EVP_CIPHER_CTX_key_length(ctx) * 4,
3465 &xctx->ks2.ks);
3466 xctx->xts.block2 = (block128_f) AES_encrypt;
3467
3468 xctx->xts.key1 = &xctx->ks1;
3469 } while (0);
3470
3471 if (iv) {
3472 xctx->xts.key2 = &xctx->ks2;
3473 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 16);
3474 }
3475
3476 return 1;
3477 }
3478
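/*-
 * Illustrative sketch (added; not upstream code): XTS consumes a
 * double-length key, i.e. 64 bytes for "aes-256-xts", split into two
 * independent AES-256 schedules by the init function above (which also
 * rejects key1 == key2 on encryption).  The 16-byte "IV" is the XTS tweak,
 * typically the sector number.  The function name is hypothetical.
 */
#if 0                           /* example only, not compiled */
static int example_xts_encrypt_sector(EVP_CIPHER_CTX *ctx,
                                      const unsigned char key[64],
                                      const unsigned char tweak[16],
                                      const unsigned char *in,
                                      unsigned char *out, int len)
{
    int outl;

    return EVP_EncryptInit_ex(ctx, EVP_aes_256_xts(), NULL, key, tweak)
        && EVP_EncryptUpdate(ctx, out, &outl, in, len);
}
#endif
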
3479 static int aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3480 const unsigned char *in, size_t len)
3481 {
3482 EVP_AES_XTS_CTX *xctx = EVP_C_DATA(EVP_AES_XTS_CTX,ctx);
3483 if (!xctx->xts.key1 || !xctx->xts.key2)
3484 return 0;
3485 if (!out || !in || len < AES_BLOCK_SIZE)
3486 return 0;
3487 if (xctx->stream)
3488 (*xctx->stream) (in, out, len,
3489 xctx->xts.key1, xctx->xts.key2,
3490 EVP_CIPHER_CTX_iv_noconst(ctx));
3491 else if (CRYPTO_xts128_encrypt(&xctx->xts, EVP_CIPHER_CTX_iv_noconst(ctx),
3492 in, out, len,
3493 EVP_CIPHER_CTX_encrypting(ctx)))
3494 return 0;
3495 return 1;
3496 }
3497
3498 #define aes_xts_cleanup NULL
3499
3500 #define XTS_FLAGS (EVP_CIPH_FLAG_DEFAULT_ASN1 | EVP_CIPH_CUSTOM_IV \
3501 | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
3502 | EVP_CIPH_CUSTOM_COPY)
3503
3504 BLOCK_CIPHER_custom(NID_aes, 128, 1, 16, xts, XTS, XTS_FLAGS)
3505 BLOCK_CIPHER_custom(NID_aes, 256, 1, 16, xts, XTS, XTS_FLAGS)
3506
3507 static int aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
3508 {
3509 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,c);
3510 switch (type) {
3511 case EVP_CTRL_INIT:
3512 cctx->key_set = 0;
3513 cctx->iv_set = 0;
3514 cctx->L = 8;
3515 cctx->M = 12;
3516 cctx->tag_set = 0;
3517 cctx->len_set = 0;
3518 cctx->tls_aad_len = -1;
3519 return 1;
3520 case EVP_CTRL_GET_IVLEN:
3521 *(int *)ptr = 15 - cctx->L;
3522 return 1;
3523 case EVP_CTRL_AEAD_TLS1_AAD:
3524 /* Save the AAD for later use */
3525 if (arg != EVP_AEAD_TLS1_AAD_LEN)
3526 return 0;
3527 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
3528 cctx->tls_aad_len = arg;
3529 {
3530 uint16_t len =
3531 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] << 8
3532 | EVP_CIPHER_CTX_buf_noconst(c)[arg - 1];
3533 /* Correct length for explicit IV */
3534 if (len < EVP_CCM_TLS_EXPLICIT_IV_LEN)
3535 return 0;
3536 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN;
3537 /* If decrypting correct for tag too */
3538 if (!EVP_CIPHER_CTX_encrypting(c)) {
3539 if (len < cctx->M)
3540 return 0;
3541 len -= cctx->M;
3542 }
3543 EVP_CIPHER_CTX_buf_noconst(c)[arg - 2] = len >> 8;
3544 EVP_CIPHER_CTX_buf_noconst(c)[arg - 1] = len & 0xff;
3545 }
3546 /* Extra padding: tag appended to record */
3547 return cctx->M;
3548
3549 case EVP_CTRL_CCM_SET_IV_FIXED:
3550 /* Sanity check length */
3551 if (arg != EVP_CCM_TLS_FIXED_IV_LEN)
3552 return 0;
3553 /* Just copy to first part of IV */
3554 memcpy(EVP_CIPHER_CTX_iv_noconst(c), ptr, arg);
3555 return 1;
3556
3557 case EVP_CTRL_AEAD_SET_IVLEN:
3558 arg = 15 - arg;
3559 /* fall-through */
3560 case EVP_CTRL_CCM_SET_L:
3561 if (arg < 2 || arg > 8)
3562 return 0;
3563 cctx->L = arg;
3564 return 1;
3565
3566 case EVP_CTRL_AEAD_SET_TAG:
3567 if ((arg & 1) || arg < 4 || arg > 16)
3568 return 0;
3569 if (EVP_CIPHER_CTX_encrypting(c) && ptr)
3570 return 0;
3571 if (ptr) {
3572 cctx->tag_set = 1;
3573 memcpy(EVP_CIPHER_CTX_buf_noconst(c), ptr, arg);
3574 }
3575 cctx->M = arg;
3576 return 1;
3577
3578 case EVP_CTRL_AEAD_GET_TAG:
3579 if (!EVP_CIPHER_CTX_encrypting(c) || !cctx->tag_set)
3580 return 0;
3581 if (!CRYPTO_ccm128_tag(&cctx->ccm, ptr, (size_t)arg))
3582 return 0;
3583 cctx->tag_set = 0;
3584 cctx->iv_set = 0;
3585 cctx->len_set = 0;
3586 return 1;
3587
3588 case EVP_CTRL_COPY:
3589 {
3590 EVP_CIPHER_CTX *out = ptr;
3591 EVP_AES_CCM_CTX *cctx_out = EVP_C_DATA(EVP_AES_CCM_CTX,out);
3592 if (cctx->ccm.key) {
3593 if (cctx->ccm.key != &cctx->ks)
3594 return 0;
3595 cctx_out->ccm.key = &cctx_out->ks;
3596 }
3597 return 1;
3598 }
3599
3600 default:
3601 return -1;
3602
3603 }
3604 }
3605
3606 static int aes_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
3607 const unsigned char *iv, int enc)
3608 {
3609 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
3610 if (!iv && !key)
3611 return 1;
3612 if (key)
3613 do {
3614 #ifdef HWAES_CAPABLE
3615 if (HWAES_CAPABLE) {
3616 HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3617 &cctx->ks.ks);
3618
3619 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3620 &cctx->ks, (block128_f) HWAES_encrypt);
3621 cctx->str = NULL;
3622 cctx->key_set = 1;
3623 break;
3624 } else
3625 #endif
3626 #ifdef VPAES_CAPABLE
3627 if (VPAES_CAPABLE) {
3628 vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3629 &cctx->ks.ks);
3630 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3631 &cctx->ks, (block128_f) vpaes_encrypt);
3632 cctx->str = NULL;
3633 cctx->key_set = 1;
3634 break;
3635 }
3636 #endif
3637 AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
3638 &cctx->ks.ks);
3639 CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
3640 &cctx->ks, (block128_f) AES_encrypt);
3641 cctx->str = NULL;
3642 cctx->key_set = 1;
3643 } while (0);
3644 if (iv) {
3645 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv, 15 - cctx->L);
3646 cctx->iv_set = 1;
3647 }
3648 return 1;
3649 }
3650
aes_ccm_tls_cipher(EVP_CIPHER_CTX * ctx,unsigned char * out,const unsigned char * in,size_t len)3651 static int aes_ccm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
3652 const unsigned char *in, size_t len)
3653 {
3654 EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
3655 CCM128_CONTEXT *ccm = &cctx->ccm;
3656 /* Encrypt/decrypt must be performed in place */
3657 if (out != in || len < (EVP_CCM_TLS_EXPLICIT_IV_LEN + (size_t)cctx->M))
3658 return -1;
3659 /* If encrypting set explicit IV from sequence number (start of AAD) */
3660 if (EVP_CIPHER_CTX_encrypting(ctx))
3661 memcpy(out, EVP_CIPHER_CTX_buf_noconst(ctx),
3662 EVP_CCM_TLS_EXPLICIT_IV_LEN);
3663 /* Get rest of IV from explicit IV */
3664 memcpy(EVP_CIPHER_CTX_iv_noconst(ctx) + EVP_CCM_TLS_FIXED_IV_LEN, in,
3665 EVP_CCM_TLS_EXPLICIT_IV_LEN);
3666 /* Correct length value */
3667 len -= EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
3668 if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx), 15 - cctx->L,
3669 len))
3670 return -1;
3671 /* Use saved AAD */
3672 CRYPTO_ccm128_aad(ccm, EVP_CIPHER_CTX_buf_noconst(ctx), cctx->tls_aad_len);
3673 /* Fix buffer to point to payload */
3674 in += EVP_CCM_TLS_EXPLICIT_IV_LEN;
3675 out += EVP_CCM_TLS_EXPLICIT_IV_LEN;
3676 if (EVP_CIPHER_CTX_encrypting(ctx)) {
3677 if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
3678 cctx->str) :
3679 CRYPTO_ccm128_encrypt(ccm, in, out, len))
3680 return -1;
3681 if (!CRYPTO_ccm128_tag(ccm, out + len, cctx->M))
3682 return -1;
3683 return len + EVP_CCM_TLS_EXPLICIT_IV_LEN + cctx->M;
3684 } else {
3685 if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
3686 cctx->str) :
3687 !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
3688 unsigned char tag[16];
3689 if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
3690 if (!CRYPTO_memcmp(tag, in + len, cctx->M))
3691 return len;
3692 }
3693 }
3694 OPENSSL_cleanse(out, len);
3695 return -1;
3696 }
3697 }
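
/*
 * For reference (derived from aes_ccm_tls_cipher() above, not normative):
 * the in-place TLS record has the layout
 *
 *   explicit IV (8 bytes) || payload || tag (M bytes)
 *
 * The 4-byte fixed IV installed via EVP_CTRL_CCM_SET_IV_FIXED plus the
 * 8-byte explicit part taken from the record form the 12-byte nonce
 * (L = 3) passed to CRYPTO_ccm128_setiv().
 */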

static int aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_CCM_CTX *cctx = EVP_C_DATA(EVP_AES_CCM_CTX,ctx);
    CCM128_CONTEXT *ccm = &cctx->ccm;
    /* If not set up, return error */
    if (!cctx->key_set)
        return -1;

    if (cctx->tls_aad_len >= 0)
        return aes_ccm_tls_cipher(ctx, out, in, len);

    /* EVP_*Final() doesn't return any data */
    if (in == NULL && out != NULL)
        return 0;

    if (!cctx->iv_set)
        return -1;

    if (!out) {
        if (!in) {
            if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
                                    15 - cctx->L, len))
                return -1;
            cctx->len_set = 1;
            return len;
        }
        /* If have AAD need message length */
        if (!cctx->len_set && len)
            return -1;
        CRYPTO_ccm128_aad(ccm, in, len);
        return len;
    }

    /* The tag must be set before actually decrypting data */
    if (!EVP_CIPHER_CTX_encrypting(ctx) && !cctx->tag_set)
        return -1;

    /* If not set length yet do it */
    if (!cctx->len_set) {
        if (CRYPTO_ccm128_setiv(ccm, EVP_CIPHER_CTX_iv_noconst(ctx),
                                15 - cctx->L, len))
            return -1;
        cctx->len_set = 1;
    }
    if (EVP_CIPHER_CTX_encrypting(ctx)) {
        if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
                                                    cctx->str) :
            CRYPTO_ccm128_encrypt(ccm, in, out, len))
            return -1;
        cctx->tag_set = 1;
        return len;
    } else {
        int rv = -1;
        if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
                                                     cctx->str) :
            !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
            unsigned char tag[16];
            if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
                if (!CRYPTO_memcmp(tag, EVP_CIPHER_CTX_buf_noconst(ctx),
                                   cctx->M))
                    rv = len;
            }
        }
        if (rv == -1)
            OPENSSL_cleanse(out, len);
        cctx->iv_set = 0;
        cctx->tag_set = 0;
        cctx->len_set = 0;
        return rv;
    }
}
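
/*
 * Usage sketch (illustrative, not part of this file): CCM must know the
 * total plaintext length before any AAD is supplied, hence the initial
 * length-only EVP_EncryptUpdate() call with NULL input and output.  The
 * tag length of 16 assumes EVP_CTRL_AEAD_SET_TAG was issued as in the
 * sketch after aes_ccm_ctrl(); all names are hypothetical.
 *
 *   int outl;
 *   EVP_EncryptUpdate(c, NULL, &outl, NULL, msglen);   // set total length
 *   EVP_EncryptUpdate(c, NULL, &outl, aad, aadlen);    // AAD
 *   EVP_EncryptUpdate(c, out, &outl, msg, msglen);     // payload
 *   EVP_EncryptFinal_ex(c, out + outl, &outl);
 *   EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, 16, tag);
 */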

#define aes_ccm_cleanup NULL

BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, ccm, CCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, ccm, CCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, ccm, CCM,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)

typedef struct {
    union {
        double align;
        AES_KEY ks;
    } ks;
    /* IV location if one has been set, NULL otherwise */
    unsigned char *iv;
} EVP_AES_WRAP_CTX;

static int aes_wrap_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                             const unsigned char *iv, int enc)
{
    EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        if (EVP_CIPHER_CTX_encrypting(ctx))
            AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                &wctx->ks.ks);
        else
            AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                &wctx->ks.ks);
        if (!iv)
            wctx->iv = NULL;
    }
    if (iv) {
        memcpy(EVP_CIPHER_CTX_iv_noconst(ctx), iv,
               EVP_CIPHER_CTX_iv_length(ctx));
        wctx->iv = EVP_CIPHER_CTX_iv_noconst(ctx);
    }
    return 1;
}

static int aes_wrap_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t inlen)
{
    EVP_AES_WRAP_CTX *wctx = EVP_C_DATA(EVP_AES_WRAP_CTX,ctx);
    size_t rv;
    /* AES wrap with padding has IV length of 4, without padding 8 */
    int pad = EVP_CIPHER_CTX_iv_length(ctx) == 4;
    /* No final operation so always return zero length */
    if (!in)
        return 0;
    /* Input length must always be non-zero */
    if (!inlen)
        return -1;
    /* If decrypting need at least 16 bytes and multiple of 8 */
    if (!EVP_CIPHER_CTX_encrypting(ctx) && (inlen < 16 || inlen & 0x7))
        return -1;
    /* If not padding input must be multiple of 8 */
    if (!pad && inlen & 0x7)
        return -1;
    if (is_partially_overlapping(out, in, inlen)) {
        EVPerr(EVP_F_AES_WRAP_CIPHER, EVP_R_PARTIALLY_OVERLAPPING);
        return 0;
    }
    if (!out) {
        if (EVP_CIPHER_CTX_encrypting(ctx)) {
            /* If padding round up to multiple of 8 */
            if (pad)
                inlen = (inlen + 7) / 8 * 8;
            /* 8 byte prefix */
            return inlen + 8;
        } else {
            /*
             * If not padding output will be exactly 8 bytes smaller than
             * input. If padding it will be at least 8 bytes smaller but we
             * don't know how much.
             */
            return inlen - 8;
        }
    }
    if (pad) {
        if (EVP_CIPHER_CTX_encrypting(ctx))
            rv = CRYPTO_128_wrap_pad(&wctx->ks.ks, wctx->iv,
                                     out, in, inlen,
                                     (block128_f) AES_encrypt);
        else
            rv = CRYPTO_128_unwrap_pad(&wctx->ks.ks, wctx->iv,
                                       out, in, inlen,
                                       (block128_f) AES_decrypt);
    } else {
        if (EVP_CIPHER_CTX_encrypting(ctx))
            rv = CRYPTO_128_wrap(&wctx->ks.ks, wctx->iv,
                                 out, in, inlen, (block128_f) AES_encrypt);
        else
            rv = CRYPTO_128_unwrap(&wctx->ks.ks, wctx->iv,
                                   out, in, inlen, (block128_f) AES_decrypt);
    }
    return rv ? (int)rv : -1;
}
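
/*
 * Usage sketch (illustrative, not part of this file): wrapping a 16-byte
 * key with AES-128-WRAP (RFC 3394).  The output is the input length plus
 * an 8-byte integrity prefix, and a NULL IV selects the default ICV of
 * repeated 0xA6 bytes.  Names are hypothetical.
 *
 *   unsigned char wrapped[16 + 8];
 *   int outl;
 *   EVP_CIPHER_CTX *c = EVP_CIPHER_CTX_new();
 *   EVP_CIPHER_CTX_set_flags(c, EVP_CIPHER_CTX_FLAG_WRAP_ALLOW);
 *   EVP_EncryptInit_ex(c, EVP_aes_128_wrap(), NULL, kek, NULL);
 *   EVP_EncryptUpdate(c, wrapped, &outl, keydata, 16);
 */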

#define WRAP_FLAGS (EVP_CIPH_WRAP_MODE \
                    | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
                    | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_FLAG_DEFAULT_ASN1)

static const EVP_CIPHER aes_128_wrap = {
    NID_id_aes128_wrap,
    8, 16, 8, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_128_wrap(void)
{
    return &aes_128_wrap;
}

static const EVP_CIPHER aes_192_wrap = {
    NID_id_aes192_wrap,
    8, 24, 8, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_192_wrap(void)
{
    return &aes_192_wrap;
}

static const EVP_CIPHER aes_256_wrap = {
    NID_id_aes256_wrap,
    8, 32, 8, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_256_wrap(void)
{
    return &aes_256_wrap;
}

static const EVP_CIPHER aes_128_wrap_pad = {
    NID_id_aes128_wrap_pad,
    8, 16, 4, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_128_wrap_pad(void)
{
    return &aes_128_wrap_pad;
}

static const EVP_CIPHER aes_192_wrap_pad = {
    NID_id_aes192_wrap_pad,
    8, 24, 4, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_192_wrap_pad(void)
{
    return &aes_192_wrap_pad;
}

static const EVP_CIPHER aes_256_wrap_pad = {
    NID_id_aes256_wrap_pad,
    8, 32, 4, WRAP_FLAGS,
    aes_wrap_init_key, aes_wrap_cipher,
    NULL,
    sizeof(EVP_AES_WRAP_CTX),
    NULL, NULL, NULL, NULL
};

const EVP_CIPHER *EVP_aes_256_wrap_pad(void)
{
    return &aes_256_wrap_pad;
}

#ifndef OPENSSL_NO_OCB
static int aes_ocb_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
    EVP_CIPHER_CTX *newc;
    EVP_AES_OCB_CTX *new_octx;

    switch (type) {
    case EVP_CTRL_INIT:
        octx->key_set = 0;
        octx->iv_set = 0;
        octx->ivlen = EVP_CIPHER_iv_length(c->cipher);
        octx->iv = EVP_CIPHER_CTX_iv_noconst(c);
        octx->taglen = 16;
        octx->data_buf_len = 0;
        octx->aad_buf_len = 0;
        return 1;

    case EVP_CTRL_GET_IVLEN:
        *(int *)ptr = octx->ivlen;
        return 1;

    case EVP_CTRL_AEAD_SET_IVLEN:
        /* IV len must be 1 to 15 */
        if (arg <= 0 || arg > 15)
            return 0;

        octx->ivlen = arg;
        return 1;

    case EVP_CTRL_AEAD_SET_TAG:
        if (!ptr) {
            /* Tag len must be 0 to 16 */
            if (arg < 0 || arg > 16)
                return 0;

            octx->taglen = arg;
            return 1;
        }
        if (arg != octx->taglen || EVP_CIPHER_CTX_encrypting(c))
            return 0;
        memcpy(octx->tag, ptr, arg);
        return 1;

    case EVP_CTRL_AEAD_GET_TAG:
        if (arg != octx->taglen || !EVP_CIPHER_CTX_encrypting(c))
            return 0;

        memcpy(ptr, octx->tag, arg);
        return 1;

    case EVP_CTRL_COPY:
        newc = (EVP_CIPHER_CTX *)ptr;
        new_octx = EVP_C_DATA(EVP_AES_OCB_CTX,newc);
        return CRYPTO_ocb128_copy_ctx(&new_octx->ocb, &octx->ocb,
                                      &new_octx->ksenc.ks,
                                      &new_octx->ksdec.ks);

    default:
        return -1;

    }
}
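
/*
 * Note (restating the handler above): EVP_CTRL_AEAD_SET_TAG with a NULL
 * ptr merely records the tag length to produce, while a non-NULL ptr
 * supplies the expected tag for decryption and must match the previously
 * configured length.
 */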

# ifdef HWAES_CAPABLE
#  ifdef HWAES_ocb_encrypt
void HWAES_ocb_encrypt(const unsigned char *in, unsigned char *out,
                       size_t blocks, const void *key,
                       size_t start_block_num,
                       unsigned char offset_i[16],
                       const unsigned char L_[][16],
                       unsigned char checksum[16]);
#  else
#   define HWAES_ocb_encrypt ((ocb128_f)NULL)
#  endif
#  ifdef HWAES_ocb_decrypt
void HWAES_ocb_decrypt(const unsigned char *in, unsigned char *out,
                       size_t blocks, const void *key,
                       size_t start_block_num,
                       unsigned char offset_i[16],
                       const unsigned char L_[][16],
                       unsigned char checksum[16]);
#  else
#   define HWAES_ocb_decrypt ((ocb128_f)NULL)
#  endif
# endif

static int aes_ocb_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);
    if (!iv && !key)
        return 1;
    if (key) {
        do {
            /*
             * We set both the encrypt and decrypt key here because decrypt
             * needs both. We could possibly optimise to remove setting the
             * decrypt for an encryption operation.
             */
# ifdef HWAES_CAPABLE
            if (HWAES_CAPABLE) {
                HWAES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &octx->ksenc.ks);
                HWAES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &octx->ksdec.ks);
                if (!CRYPTO_ocb128_init(&octx->ocb,
                                        &octx->ksenc.ks, &octx->ksdec.ks,
                                        (block128_f) HWAES_encrypt,
                                        (block128_f) HWAES_decrypt,
                                        enc ? HWAES_ocb_encrypt
                                            : HWAES_ocb_decrypt))
                    return 0;
                break;
            }
# endif
# ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                vpaes_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &octx->ksenc.ks);
                vpaes_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                      &octx->ksdec.ks);
                if (!CRYPTO_ocb128_init(&octx->ocb,
                                        &octx->ksenc.ks, &octx->ksdec.ks,
                                        (block128_f) vpaes_encrypt,
                                        (block128_f) vpaes_decrypt,
                                        NULL))
                    return 0;
                break;
            }
# endif
            AES_set_encrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                &octx->ksenc.ks);
            AES_set_decrypt_key(key, EVP_CIPHER_CTX_key_length(ctx) * 8,
                                &octx->ksdec.ks);
            if (!CRYPTO_ocb128_init(&octx->ocb,
                                    &octx->ksenc.ks, &octx->ksdec.ks,
                                    (block128_f) AES_encrypt,
                                    (block128_f) AES_decrypt,
                                    NULL))
                return 0;
        } while (0);

        /*
         * If we have an iv we can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && octx->iv_set)
            iv = octx->iv;
        if (iv) {
            if (CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen)
                != 1)
                return 0;
            octx->iv_set = 1;
        }
        octx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (octx->key_set)
            CRYPTO_ocb128_setiv(&octx->ocb, iv, octx->ivlen, octx->taglen);
        else
            memcpy(octx->iv, iv, octx->ivlen);
        octx->iv_set = 1;
    }
    return 1;
}

static int aes_ocb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    unsigned char *buf;
    int *buf_len;
    int written_len = 0;
    size_t trailing_len;
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,ctx);

    /* If IV or Key not set then return error */
    if (!octx->iv_set)
        return -1;

    if (!octx->key_set)
        return -1;

    if (in != NULL) {
        /*
         * Need to ensure we are only passing full blocks to low level OCB
         * routines. We do it here rather than in EVP_EncryptUpdate/
         * EVP_DecryptUpdate because we need to pass full blocks of AAD too
         * and those routines don't support that
         */

        /* Are we dealing with AAD or normal data here? */
        if (out == NULL) {
            buf = octx->aad_buf;
            buf_len = &(octx->aad_buf_len);
        } else {
            buf = octx->data_buf;
            buf_len = &(octx->data_buf_len);

            if (is_partially_overlapping(out + *buf_len, in, len)) {
                EVPerr(EVP_F_AES_OCB_CIPHER, EVP_R_PARTIALLY_OVERLAPPING);
                return 0;
            }
        }

        /*
         * If we've got a partially filled buffer from a previous call then
         * use that data first
         */
        if (*buf_len > 0) {
            unsigned int remaining;

            remaining = AES_BLOCK_SIZE - (*buf_len);
            if (remaining > len) {
                memcpy(buf + (*buf_len), in, len);
                *(buf_len) += len;
                return 0;
            }
            memcpy(buf + (*buf_len), in, remaining);

            /*
             * If we get here we've filled the buffer, so process it
             */
            len -= remaining;
            in += remaining;
            if (out == NULL) {
                if (!CRYPTO_ocb128_aad(&octx->ocb, buf, AES_BLOCK_SIZE))
                    return -1;
            } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
                if (!CRYPTO_ocb128_encrypt(&octx->ocb, buf, out,
                                           AES_BLOCK_SIZE))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt(&octx->ocb, buf, out,
                                           AES_BLOCK_SIZE))
                    return -1;
            }
            written_len = AES_BLOCK_SIZE;
            *buf_len = 0;
            if (out != NULL)
                out += AES_BLOCK_SIZE;
        }

        /* Do we have a partial block to handle at the end? */
        trailing_len = len % AES_BLOCK_SIZE;

        /*
         * If we've got some full blocks to handle, then process these first
         */
        if (len != trailing_len) {
            if (out == NULL) {
                if (!CRYPTO_ocb128_aad(&octx->ocb, in, len - trailing_len))
                    return -1;
            } else if (EVP_CIPHER_CTX_encrypting(ctx)) {
                if (!CRYPTO_ocb128_encrypt
                    (&octx->ocb, in, out, len - trailing_len))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt
                    (&octx->ocb, in, out, len - trailing_len))
                    return -1;
            }
            written_len += len - trailing_len;
            in += len - trailing_len;
        }

        /* Handle any trailing partial block */
        if (trailing_len > 0) {
            memcpy(buf, in, trailing_len);
            *buf_len = trailing_len;
        }

        return written_len;
    } else {
        /*
         * First of all empty the buffer of any partial block that we might
         * have been provided - both for data and AAD
         */
        if (octx->data_buf_len > 0) {
            if (EVP_CIPHER_CTX_encrypting(ctx)) {
                if (!CRYPTO_ocb128_encrypt(&octx->ocb, octx->data_buf, out,
                                           octx->data_buf_len))
                    return -1;
            } else {
                if (!CRYPTO_ocb128_decrypt(&octx->ocb, octx->data_buf, out,
                                           octx->data_buf_len))
                    return -1;
            }
            written_len = octx->data_buf_len;
            octx->data_buf_len = 0;
        }
        if (octx->aad_buf_len > 0) {
            if (!CRYPTO_ocb128_aad
                (&octx->ocb, octx->aad_buf, octx->aad_buf_len))
                return -1;
            octx->aad_buf_len = 0;
        }
        /* If decrypting then verify */
        if (!EVP_CIPHER_CTX_encrypting(ctx)) {
            if (octx->taglen < 0)
                return -1;
            if (CRYPTO_ocb128_finish(&octx->ocb,
                                     octx->tag, octx->taglen) != 0)
                return -1;
            octx->iv_set = 0;
            return written_len;
        }
        /* If encrypting then just get the tag */
        if (CRYPTO_ocb128_tag(&octx->ocb, octx->tag, 16) != 1)
            return -1;
        /* Don't reuse the IV */
        octx->iv_set = 0;
        return written_len;
    }
}
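
/*
 * Usage sketch (illustrative, not part of this file): AES-OCB through the
 * EVP interface, relying on the 12-byte default nonce and 16-byte default
 * tag length set up in EVP_CTRL_INIT above.  Names are hypothetical.
 *
 *   int outl;
 *   EVP_EncryptInit_ex(c, EVP_aes_128_ocb(), NULL, key, nonce);
 *   EVP_EncryptUpdate(c, NULL, &outl, aad, aadlen);   // AAD pass
 *   EVP_EncryptUpdate(c, out, &outl, msg, msglen);    // data pass
 *   EVP_EncryptFinal_ex(c, out + outl, &outl);
 *   EVP_CIPHER_CTX_ctrl(c, EVP_CTRL_AEAD_GET_TAG, 16, tag);
 */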

static int aes_ocb_cleanup(EVP_CIPHER_CTX *c)
{
    EVP_AES_OCB_CTX *octx = EVP_C_DATA(EVP_AES_OCB_CTX,c);
    CRYPTO_ocb128_cleanup(&octx->ocb);
    return 1;
}

BLOCK_CIPHER_custom(NID_aes, 128, 16, 12, ocb, OCB,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 192, 16, 12, ocb, OCB,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 16, 12, ocb, OCB,
                    EVP_CIPH_FLAG_AEAD_CIPHER | CUSTOM_FLAGS)
#endif /* OPENSSL_NO_OCB */