1 // SPDX-License-Identifier: GPL-2.0+
2 /*
3 * caam - Freescale FSL CAAM support for crypto API
4 *
5 * Copyright 2008-2011 Freescale Semiconductor, Inc.
6 * Copyright 2016-2019 NXP
7 *
8 * Based on talitos crypto API driver.
9 *
10 * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008):
11 *
12 * --------------- ---------------
13 * | JobDesc #1 |-------------------->| ShareDesc |
14 * | *(packet 1) | | (PDB) |
15 * --------------- |------------->| (hashKey) |
16 * . | | (cipherKey) |
17 * . | |-------->| (operation) |
18 * --------------- | | ---------------
19 * | JobDesc #2 |------| |
20 * | *(packet 2) | |
21 * --------------- |
22 * . |
23 * . |
24 * --------------- |
25 * | JobDesc #3 |------------
26 * | *(packet 3) |
27 * ---------------
28 *
29 * The SharedDesc never changes for a connection unless rekeyed, but
30 * each packet will likely be in a different place. So all we need
31 * to know to process the packet is where the input is, where the
32 * output goes, and what context we want to process with. Context is
33 * in the SharedDesc, packet references in the JobDesc.
34 *
35 * So, a job desc looks like:
36 *
37 * ---------------------
38 * | Header |
39 * | ShareDesc Pointer |
40 * | SEQ_OUT_PTR |
41 * | (output buffer) |
42 * | (output length) |
43 * | SEQ_IN_PTR |
44 * | (input buffer) |
45 * | (input length) |
46 * ---------------------
47 */
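/*
 * As a minimal sketch (for illustration only; see init_aead_job() and
 * init_skcipher_job() below for the real construction), such a job
 * descriptor is built at request time with the inline-append helpers
 * from desc_constr.h:
 *
 *	u32 *desc = edesc->hw_desc;
 *
 *	init_job_desc_shared(desc, sh_desc_dma, desc_len(sh_desc),
 *			     HDR_SHARE_DEFER | HDR_REVERSE);
 *	append_seq_in_ptr(desc, src_dma, in_len, in_options);
 *	append_seq_out_ptr(desc, dst_dma, out_len, out_options);
 */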
48
49 #include "compat.h"
50
51 #include "regs.h"
52 #include "intern.h"
53 #include "desc_constr.h"
54 #include "jr.h"
55 #include "error.h"
56 #include "sg_sw_sec4.h"
57 #include "key_gen.h"
58 #include "caamalg_desc.h"
59 #include <crypto/engine.h>
60 #include <crypto/xts.h>
61 #include <asm/unaligned.h>
62
63 /*
64 * crypto alg
65 */
66 #define CAAM_CRA_PRIORITY 3000
67 /* max key is sum of AES_MAX_KEY_SIZE, max split key size */
68 #define CAAM_MAX_KEY_SIZE (AES_MAX_KEY_SIZE + \
69 CTR_RFC3686_NONCE_SIZE + \
70 SHA512_DIGEST_SIZE * 2)
71
72 #define AEAD_DESC_JOB_IO_LEN (DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2)
73 #define GCM_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + \
74 CAAM_CMD_SZ * 4)
75 #define AUTHENC_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + \
76 CAAM_CMD_SZ * 5)
77
78 #define CHACHAPOLY_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + CAAM_CMD_SZ * 6)
79
80 #define DESC_MAX_USED_BYTES (CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN_MIN)
81 #define DESC_MAX_USED_LEN (DESC_MAX_USED_BYTES / CAAM_CMD_SZ)
82
83 struct caam_alg_entry {
84 int class1_alg_type;
85 int class2_alg_type;
86 bool rfc3686;
87 bool geniv;
88 bool nodkp;
89 };
90
91 struct caam_aead_alg {
92 struct aead_alg aead;
93 struct caam_alg_entry caam;
94 bool registered;
95 };
96
97 struct caam_skcipher_alg {
98 struct skcipher_alg skcipher;
99 struct caam_alg_entry caam;
100 bool registered;
101 };
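/*
 * For illustration, an entry in the algorithm template tables at the end of
 * this file looks roughly like this (hypothetical excerpt):
 *
 *	{
 *		.skcipher.base.cra_name = "cbc(aes)",
 *		.skcipher.base.cra_driver_name = "cbc-aes-caam",
 *		.skcipher.setkey = aes_skcipher_setkey,
 *		.skcipher.encrypt = skcipher_encrypt,
 *		.skcipher.decrypt = skcipher_decrypt,
 *		.skcipher.min_keysize = AES_MIN_KEY_SIZE,
 *		.skcipher.max_keysize = AES_MAX_KEY_SIZE,
 *		.skcipher.ivsize = AES_BLOCK_SIZE,
 *		.caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
 *	},
 */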
102
103 /*
104 * per-session context
105 */
106 struct caam_ctx {
107 struct crypto_engine_ctx enginectx;
108 u32 sh_desc_enc[DESC_MAX_USED_LEN];
109 u32 sh_desc_dec[DESC_MAX_USED_LEN];
110 u8 key[CAAM_MAX_KEY_SIZE];
111 dma_addr_t sh_desc_enc_dma;
112 dma_addr_t sh_desc_dec_dma;
113 dma_addr_t key_dma;
114 enum dma_data_direction dir;
115 struct device *jrdev;
116 struct alginfo adata;
117 struct alginfo cdata;
118 unsigned int authsize;
119 bool xts_key_fallback;
120 struct crypto_skcipher *fallback;
121 };
122
123 struct caam_skcipher_req_ctx {
124 struct skcipher_edesc *edesc;
125 struct skcipher_request fallback_req;
126 };
127
128 struct caam_aead_req_ctx {
129 struct aead_edesc *edesc;
130 };
131
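/*
 * Build the encrypt and decrypt shared descriptors for authenc transforms
 * using a NULL cipher (authentication only). The split authentication key is
 * placed inline when job + shared descriptor fit in the 64-word descriptor
 * buffer, otherwise it is referenced by DMA address.
 */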
132 static int aead_null_set_sh_desc(struct crypto_aead *aead)
133 {
134 struct caam_ctx *ctx = crypto_aead_ctx(aead);
135 struct device *jrdev = ctx->jrdev;
136 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
137 u32 *desc;
138 int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN -
139 ctx->adata.keylen_pad;
140
141 /*
142 * Job Descriptor and Shared Descriptors
143 * must all fit into the 64-word Descriptor h/w Buffer
144 */
145 if (rem_bytes >= DESC_AEAD_NULL_ENC_LEN) {
146 ctx->adata.key_inline = true;
147 ctx->adata.key_virt = ctx->key;
148 } else {
149 ctx->adata.key_inline = false;
150 ctx->adata.key_dma = ctx->key_dma;
151 }
152
153 /* aead_encrypt shared descriptor */
154 desc = ctx->sh_desc_enc;
155 cnstr_shdsc_aead_null_encap(desc, &ctx->adata, ctx->authsize,
156 ctrlpriv->era);
157 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
158 desc_bytes(desc), ctx->dir);
159
160 /*
161 * Job Descriptor and Shared Descriptors
162 * must all fit into the 64-word Descriptor h/w Buffer
163 */
164 if (rem_bytes >= DESC_AEAD_NULL_DEC_LEN) {
165 ctx->adata.key_inline = true;
166 ctx->adata.key_virt = ctx->key;
167 } else {
168 ctx->adata.key_inline = false;
169 ctx->adata.key_dma = ctx->key_dma;
170 }
171
172 /* aead_decrypt shared descriptor */
173 desc = ctx->sh_desc_dec;
174 cnstr_shdsc_aead_null_decap(desc, &ctx->adata, ctx->authsize,
175 ctrlpriv->era);
176 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
177 desc_bytes(desc), ctx->dir);
178
179 return 0;
180 }
181
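/*
 * Build the encrypt, decrypt and (for IV-generating transforms) givencrypt
 * shared descriptors for authenc-class AEAD algorithms. desc_inline_query()
 * decides per descriptor whether the authentication and cipher keys are
 * inlined or referenced by DMA address.
 */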
182 static int aead_set_sh_desc(struct crypto_aead *aead)
183 {
184 struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
185 struct caam_aead_alg, aead);
186 unsigned int ivsize = crypto_aead_ivsize(aead);
187 struct caam_ctx *ctx = crypto_aead_ctx(aead);
188 struct device *jrdev = ctx->jrdev;
189 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
190 u32 ctx1_iv_off = 0;
191 u32 *desc, *nonce = NULL;
192 u32 inl_mask;
193 unsigned int data_len[2];
194 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
195 OP_ALG_AAI_CTR_MOD128);
196 const bool is_rfc3686 = alg->caam.rfc3686;
197
198 if (!ctx->authsize)
199 return 0;
200
201 /* NULL encryption / decryption */
202 if (!ctx->cdata.keylen)
203 return aead_null_set_sh_desc(aead);
204
205 /*
206 * AES-CTR needs to load IV in CONTEXT1 reg
207 * at an offset of 128bits (16bytes)
208 * CONTEXT1[255:128] = IV
209 */
210 if (ctr_mode)
211 ctx1_iv_off = 16;
212
213 /*
214 * RFC3686 specific:
215 * CONTEXT1[255:128] = {NONCE, IV, COUNTER}
216 */
217 if (is_rfc3686) {
218 ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
219 nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad +
220 ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE);
221 }
222
223 /*
224 * In case |user key| > |derived key|, using DKP<imm,imm>
225 * would result in invalid opcodes (last bytes of user key) in
226 * the resulting descriptor. Use DKP<ptr,imm> instead => both
227 * virtual and dma key addresses are needed.
228 */
229 ctx->adata.key_virt = ctx->key;
230 ctx->adata.key_dma = ctx->key_dma;
231
232 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
233 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;
234
235 data_len[0] = ctx->adata.keylen_pad;
236 data_len[1] = ctx->cdata.keylen;
237
238 if (alg->caam.geniv)
239 goto skip_enc;
240
241 /*
242 * Job Descriptor and Shared Descriptors
243 * must all fit into the 64-word Descriptor h/w Buffer
244 */
245 if (desc_inline_query(DESC_AEAD_ENC_LEN +
246 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
247 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
248 ARRAY_SIZE(data_len)) < 0)
249 return -EINVAL;
250
251 ctx->adata.key_inline = !!(inl_mask & 1);
252 ctx->cdata.key_inline = !!(inl_mask & 2);
253
254 /* aead_encrypt shared descriptor */
255 desc = ctx->sh_desc_enc;
256 cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata, ivsize,
257 ctx->authsize, is_rfc3686, nonce, ctx1_iv_off,
258 false, ctrlpriv->era);
259 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
260 desc_bytes(desc), ctx->dir);
261
262 skip_enc:
263 /*
264 * Job Descriptor and Shared Descriptors
265 * must all fit into the 64-word Descriptor h/w Buffer
266 */
267 if (desc_inline_query(DESC_AEAD_DEC_LEN +
268 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
269 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
270 ARRAY_SIZE(data_len)) < 0)
271 return -EINVAL;
272
273 ctx->adata.key_inline = !!(inl_mask & 1);
274 ctx->cdata.key_inline = !!(inl_mask & 2);
275
276 /* aead_decrypt shared descriptor */
277 desc = ctx->sh_desc_dec;
278 cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata, ivsize,
279 ctx->authsize, alg->caam.geniv, is_rfc3686,
280 nonce, ctx1_iv_off, false, ctrlpriv->era);
281 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
282 desc_bytes(desc), ctx->dir);
283
284 if (!alg->caam.geniv)
285 goto skip_givenc;
286
287 /*
288 * Job Descriptor and Shared Descriptors
289 * must all fit into the 64-word Descriptor h/w Buffer
290 */
291 if (desc_inline_query(DESC_AEAD_GIVENC_LEN +
292 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
293 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
294 ARRAY_SIZE(data_len)) < 0)
295 return -EINVAL;
296
297 ctx->adata.key_inline = !!(inl_mask & 1);
298 ctx->cdata.key_inline = !!(inl_mask & 2);
299
300 /* aead_givencrypt shared descriptor */
301 desc = ctx->sh_desc_enc;
302 cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata, ivsize,
303 ctx->authsize, is_rfc3686, nonce,
304 ctx1_iv_off, false, ctrlpriv->era);
305 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
306 desc_bytes(desc), ctx->dir);
307
308 skip_givenc:
309 return 0;
310 }
311
312 static int aead_setauthsize(struct crypto_aead *authenc,
313 unsigned int authsize)
314 {
315 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
316
317 ctx->authsize = authsize;
318 aead_set_sh_desc(authenc);
319
320 return 0;
321 }
322
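/*
 * Build the GCM encrypt and decrypt shared descriptors, inlining the AES key
 * whenever it fits in the 64-word descriptor buffer.
 */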
323 static int gcm_set_sh_desc(struct crypto_aead *aead)
324 {
325 struct caam_ctx *ctx = crypto_aead_ctx(aead);
326 struct device *jrdev = ctx->jrdev;
327 unsigned int ivsize = crypto_aead_ivsize(aead);
328 u32 *desc;
329 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
330 ctx->cdata.keylen;
331
332 if (!ctx->cdata.keylen || !ctx->authsize)
333 return 0;
334
335 /*
336 * AES GCM encrypt shared descriptor
337 * Job Descriptor and Shared Descriptor
338 * must fit into the 64-word Descriptor h/w Buffer
339 */
340 if (rem_bytes >= DESC_GCM_ENC_LEN) {
341 ctx->cdata.key_inline = true;
342 ctx->cdata.key_virt = ctx->key;
343 } else {
344 ctx->cdata.key_inline = false;
345 ctx->cdata.key_dma = ctx->key_dma;
346 }
347
348 desc = ctx->sh_desc_enc;
349 cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
350 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
351 desc_bytes(desc), ctx->dir);
352
353 /*
354 * Job Descriptor and Shared Descriptors
355 * must all fit into the 64-word Descriptor h/w Buffer
356 */
357 if (rem_bytes >= DESC_GCM_DEC_LEN) {
358 ctx->cdata.key_inline = true;
359 ctx->cdata.key_virt = ctx->key;
360 } else {
361 ctx->cdata.key_inline = false;
362 ctx->cdata.key_dma = ctx->key_dma;
363 }
364
365 desc = ctx->sh_desc_dec;
366 cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
367 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
368 desc_bytes(desc), ctx->dir);
369
370 return 0;
371 }
372
373 static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
374 {
375 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
376 int err;
377
378 err = crypto_gcm_check_authsize(authsize);
379 if (err)
380 return err;
381
382 ctx->authsize = authsize;
383 gcm_set_sh_desc(authenc);
384
385 return 0;
386 }
387
388 static int rfc4106_set_sh_desc(struct crypto_aead *aead)
389 {
390 struct caam_ctx *ctx = crypto_aead_ctx(aead);
391 struct device *jrdev = ctx->jrdev;
392 unsigned int ivsize = crypto_aead_ivsize(aead);
393 u32 *desc;
394 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
395 ctx->cdata.keylen;
396
397 if (!ctx->cdata.keylen || !ctx->authsize)
398 return 0;
399
400 /*
401 * RFC4106 encrypt shared descriptor
402 * Job Descriptor and Shared Descriptor
403 * must fit into the 64-word Descriptor h/w Buffer
404 */
405 if (rem_bytes >= DESC_RFC4106_ENC_LEN) {
406 ctx->cdata.key_inline = true;
407 ctx->cdata.key_virt = ctx->key;
408 } else {
409 ctx->cdata.key_inline = false;
410 ctx->cdata.key_dma = ctx->key_dma;
411 }
412
413 desc = ctx->sh_desc_enc;
414 cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
415 false);
416 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
417 desc_bytes(desc), ctx->dir);
418
419 /*
420 * Job Descriptor and Shared Descriptors
421 * must all fit into the 64-word Descriptor h/w Buffer
422 */
423 if (rem_bytes >= DESC_RFC4106_DEC_LEN) {
424 ctx->cdata.key_inline = true;
425 ctx->cdata.key_virt = ctx->key;
426 } else {
427 ctx->cdata.key_inline = false;
428 ctx->cdata.key_dma = ctx->key_dma;
429 }
430
431 desc = ctx->sh_desc_dec;
432 cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
433 false);
434 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
435 desc_bytes(desc), ctx->dir);
436
437 return 0;
438 }
439
440 static int rfc4106_setauthsize(struct crypto_aead *authenc,
441 unsigned int authsize)
442 {
443 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
444 int err;
445
446 err = crypto_rfc4106_check_authsize(authsize);
447 if (err)
448 return err;
449
450 ctx->authsize = authsize;
451 rfc4106_set_sh_desc(authenc);
452
453 return 0;
454 }
455
456 static int rfc4543_set_sh_desc(struct crypto_aead *aead)
457 {
458 struct caam_ctx *ctx = crypto_aead_ctx(aead);
459 struct device *jrdev = ctx->jrdev;
460 unsigned int ivsize = crypto_aead_ivsize(aead);
461 u32 *desc;
462 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
463 ctx->cdata.keylen;
464
465 if (!ctx->cdata.keylen || !ctx->authsize)
466 return 0;
467
468 /*
469 * RFC4543 encrypt shared descriptor
470 * Job Descriptor and Shared Descriptor
471 * must fit into the 64-word Descriptor h/w Buffer
472 */
473 if (rem_bytes >= DESC_RFC4543_ENC_LEN) {
474 ctx->cdata.key_inline = true;
475 ctx->cdata.key_virt = ctx->key;
476 } else {
477 ctx->cdata.key_inline = false;
478 ctx->cdata.key_dma = ctx->key_dma;
479 }
480
481 desc = ctx->sh_desc_enc;
482 cnstr_shdsc_rfc4543_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
483 false);
484 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
485 desc_bytes(desc), ctx->dir);
486
487 /*
488 * Job Descriptor and Shared Descriptors
489 * must all fit into the 64-word Descriptor h/w Buffer
490 */
491 if (rem_bytes >= DESC_RFC4543_DEC_LEN) {
492 ctx->cdata.key_inline = true;
493 ctx->cdata.key_virt = ctx->key;
494 } else {
495 ctx->cdata.key_inline = false;
496 ctx->cdata.key_dma = ctx->key_dma;
497 }
498
499 desc = ctx->sh_desc_dec;
500 cnstr_shdsc_rfc4543_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
501 false);
502 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
503 desc_bytes(desc), ctx->dir);
504
505 return 0;
506 }
507
508 static int rfc4543_setauthsize(struct crypto_aead *authenc,
509 unsigned int authsize)
510 {
511 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
512
513 if (authsize != 16)
514 return -EINVAL;
515
516 ctx->authsize = authsize;
517 rfc4543_set_sh_desc(authenc);
518
519 return 0;
520 }
521
522 static int chachapoly_set_sh_desc(struct crypto_aead *aead)
523 {
524 struct caam_ctx *ctx = crypto_aead_ctx(aead);
525 struct device *jrdev = ctx->jrdev;
526 unsigned int ivsize = crypto_aead_ivsize(aead);
527 u32 *desc;
528
529 if (!ctx->cdata.keylen || !ctx->authsize)
530 return 0;
531
532 desc = ctx->sh_desc_enc;
533 cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize,
534 ctx->authsize, true, false);
535 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
536 desc_bytes(desc), ctx->dir);
537
538 desc = ctx->sh_desc_dec;
539 cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize,
540 ctx->authsize, false, false);
541 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
542 desc_bytes(desc), ctx->dir);
543
544 return 0;
545 }
546
547 static int chachapoly_setauthsize(struct crypto_aead *aead,
548 unsigned int authsize)
549 {
550 struct caam_ctx *ctx = crypto_aead_ctx(aead);
551
552 if (authsize != POLY1305_DIGEST_SIZE)
553 return -EINVAL;
554
555 ctx->authsize = authsize;
556 return chachapoly_set_sh_desc(aead);
557 }
558
559 static int chachapoly_setkey(struct crypto_aead *aead, const u8 *key,
560 unsigned int keylen)
561 {
562 struct caam_ctx *ctx = crypto_aead_ctx(aead);
563 unsigned int ivsize = crypto_aead_ivsize(aead);
564 unsigned int saltlen = CHACHAPOLY_IV_SIZE - ivsize;
565
566 if (keylen != CHACHA_KEY_SIZE + saltlen)
567 return -EINVAL;
568
569 memcpy(ctx->key, key, keylen);
570 ctx->cdata.key_virt = ctx->key;
571 ctx->cdata.keylen = keylen - saltlen;
572
573 return chachapoly_set_sh_desc(aead);
574 }
575
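/*
 * Split the authenc key blob into authentication and encryption keys. On
 * Era 6+ devices the split (HMAC ipad/opad) key is derived by the shared
 * descriptor itself via DKP, so only the raw keys are copied here; on older
 * devices gen_split_key() derives it now and the encryption key is stored
 * right after the padded split key.
 */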
576 static int aead_setkey(struct crypto_aead *aead,
577 const u8 *key, unsigned int keylen)
578 {
579 struct caam_ctx *ctx = crypto_aead_ctx(aead);
580 struct device *jrdev = ctx->jrdev;
581 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
582 struct crypto_authenc_keys keys;
583 int ret = 0;
584
585 if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
586 goto badkey;
587
588 dev_dbg(jrdev, "keylen %d enckeylen %d authkeylen %d\n",
589 keys.authkeylen + keys.enckeylen, keys.enckeylen,
590 keys.authkeylen);
591 print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
592 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
593
594 /*
595 * If DKP is supported, use it in the shared descriptor to generate
596 * the split key.
597 */
598 if (ctrlpriv->era >= 6) {
599 ctx->adata.keylen = keys.authkeylen;
600 ctx->adata.keylen_pad = split_key_len(ctx->adata.algtype &
601 OP_ALG_ALGSEL_MASK);
602
603 if (ctx->adata.keylen_pad + keys.enckeylen > CAAM_MAX_KEY_SIZE)
604 goto badkey;
605
606 memcpy(ctx->key, keys.authkey, keys.authkeylen);
607 memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey,
608 keys.enckeylen);
609 dma_sync_single_for_device(jrdev, ctx->key_dma,
610 ctx->adata.keylen_pad +
611 keys.enckeylen, ctx->dir);
612 goto skip_split_key;
613 }
614
615 ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey,
616 keys.authkeylen, CAAM_MAX_KEY_SIZE -
617 keys.enckeylen);
618 if (ret) {
619 goto badkey;
620 }
621
622 /* append the encryption key after the auth split key */
623 memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);
624 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad +
625 keys.enckeylen, ctx->dir);
626
627 print_hex_dump_debug("ctx.key@"__stringify(__LINE__)": ",
628 DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
629 ctx->adata.keylen_pad + keys.enckeylen, 1);
630
631 skip_split_key:
632 ctx->cdata.keylen = keys.enckeylen;
633 memzero_explicit(&keys, sizeof(keys));
634 return aead_set_sh_desc(aead);
635 badkey:
636 memzero_explicit(&keys, sizeof(keys));
637 return -EINVAL;
638 }
639
640 static int des3_aead_setkey(struct crypto_aead *aead, const u8 *key,
641 unsigned int keylen)
642 {
643 struct crypto_authenc_keys keys;
644 int err;
645
646 err = crypto_authenc_extractkeys(&keys, key, keylen);
647 if (unlikely(err))
648 return err;
649
650 err = verify_aead_des3_key(aead, keys.enckey, keys.enckeylen) ?:
651 aead_setkey(aead, key, keylen);
652
653 memzero_explicit(&keys, sizeof(keys));
654 return err;
655 }
656
657 static int gcm_setkey(struct crypto_aead *aead,
658 const u8 *key, unsigned int keylen)
659 {
660 struct caam_ctx *ctx = crypto_aead_ctx(aead);
661 struct device *jrdev = ctx->jrdev;
662 int err;
663
664 err = aes_check_keylen(keylen);
665 if (err)
666 return err;
667
668 print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
669 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
670
671 memcpy(ctx->key, key, keylen);
672 dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, ctx->dir);
673 ctx->cdata.keylen = keylen;
674
675 return gcm_set_sh_desc(aead);
676 }
677
678 static int rfc4106_setkey(struct crypto_aead *aead,
679 const u8 *key, unsigned int keylen)
680 {
681 struct caam_ctx *ctx = crypto_aead_ctx(aead);
682 struct device *jrdev = ctx->jrdev;
683 int err;
684
685 err = aes_check_keylen(keylen - 4);
686 if (err)
687 return err;
688
689 print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
690 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
691
692 memcpy(ctx->key, key, keylen);
693
694 /*
695 * The last four bytes of the key material are used as the salt value
696 * in the nonce. Update the AES key length.
697 */
698 ctx->cdata.keylen = keylen - 4;
699 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
700 ctx->dir);
701 return rfc4106_set_sh_desc(aead);
702 }
703
704 static int rfc4543_setkey(struct crypto_aead *aead,
705 const u8 *key, unsigned int keylen)
706 {
707 struct caam_ctx *ctx = crypto_aead_ctx(aead);
708 struct device *jrdev = ctx->jrdev;
709 int err;
710
711 err = aes_check_keylen(keylen - 4);
712 if (err)
713 return err;
714
715 print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
716 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
717
718 memcpy(ctx->key, key, keylen);
719
720 /*
721 * The last four bytes of the key material are used as the salt value
722 * in the nonce. Update the AES key length.
723 */
724 ctx->cdata.keylen = keylen - 4;
725 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
726 ctx->dir);
727 return rfc4543_set_sh_desc(aead);
728 }
729
730 static int skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
731 unsigned int keylen, const u32 ctx1_iv_off)
732 {
733 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
734 struct caam_skcipher_alg *alg =
735 container_of(crypto_skcipher_alg(skcipher), typeof(*alg),
736 skcipher);
737 struct device *jrdev = ctx->jrdev;
738 unsigned int ivsize = crypto_skcipher_ivsize(skcipher);
739 u32 *desc;
740 const bool is_rfc3686 = alg->caam.rfc3686;
741
742 print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
743 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
744
745 ctx->cdata.keylen = keylen;
746 ctx->cdata.key_virt = key;
747 ctx->cdata.key_inline = true;
748
749 /* skcipher_encrypt shared descriptor */
750 desc = ctx->sh_desc_enc;
751 cnstr_shdsc_skcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686,
752 ctx1_iv_off);
753 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
754 desc_bytes(desc), ctx->dir);
755
756 /* skcipher_decrypt shared descriptor */
757 desc = ctx->sh_desc_dec;
758 cnstr_shdsc_skcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686,
759 ctx1_iv_off);
760 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
761 desc_bytes(desc), ctx->dir);
762
763 return 0;
764 }
765
766 static int aes_skcipher_setkey(struct crypto_skcipher *skcipher,
767 const u8 *key, unsigned int keylen)
768 {
769 int err;
770
771 err = aes_check_keylen(keylen);
772 if (err)
773 return err;
774
775 return skcipher_setkey(skcipher, key, keylen, 0);
776 }
777
778 static int rfc3686_skcipher_setkey(struct crypto_skcipher *skcipher,
779 const u8 *key, unsigned int keylen)
780 {
781 u32 ctx1_iv_off;
782 int err;
783
784 /*
785 * RFC3686 specific:
786 * | CONTEXT1[255:128] = {NONCE, IV, COUNTER}
787 * | *key = {KEY, NONCE}
788 */
789 ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
790 keylen -= CTR_RFC3686_NONCE_SIZE;
791
792 err = aes_check_keylen(keylen);
793 if (err)
794 return err;
795
796 return skcipher_setkey(skcipher, key, keylen, ctx1_iv_off);
797 }
798
799 static int ctr_skcipher_setkey(struct crypto_skcipher *skcipher,
800 const u8 *key, unsigned int keylen)
801 {
802 u32 ctx1_iv_off;
803 int err;
804
805 /*
806 * AES-CTR needs to load IV in CONTEXT1 reg
807 * at an offset of 128bits (16bytes)
808 * CONTEXT1[255:128] = IV
809 */
810 ctx1_iv_off = 16;
811
812 err = aes_check_keylen(keylen);
813 if (err)
814 return err;
815
816 return skcipher_setkey(skcipher, key, keylen, ctx1_iv_off);
817 }
818
819 static int des_skcipher_setkey(struct crypto_skcipher *skcipher,
820 const u8 *key, unsigned int keylen)
821 {
822 return verify_skcipher_des_key(skcipher, key) ?:
823 skcipher_setkey(skcipher, key, keylen, 0);
824 }
825
826 static int des3_skcipher_setkey(struct crypto_skcipher *skcipher,
827 const u8 *key, unsigned int keylen)
828 {
829 return verify_skcipher_des3_key(skcipher, key) ?:
830 skcipher_setkey(skcipher, key, keylen, 0);
831 }
832
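/*
 * Program the XTS key into the hardware descriptors and, when it might be
 * needed, into the software fallback tfm: for key sizes CAAM cannot handle
 * (anything other than two 128-bit or two 256-bit keys) and on Era <= 8
 * devices, which cannot process a non-zero second half of the IV.
 */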
833 static int xts_skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
834 unsigned int keylen)
835 {
836 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
837 struct device *jrdev = ctx->jrdev;
838 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
839 u32 *desc;
840 int err;
841
842 err = xts_verify_key(skcipher, key, keylen);
843 if (err) {
844 dev_dbg(jrdev, "key size mismatch\n");
845 return err;
846 }
847
848 if (keylen != 2 * AES_KEYSIZE_128 && keylen != 2 * AES_KEYSIZE_256)
849 ctx->xts_key_fallback = true;
850
851 if (ctrlpriv->era <= 8 || ctx->xts_key_fallback) {
852 err = crypto_skcipher_setkey(ctx->fallback, key, keylen);
853 if (err)
854 return err;
855 }
856
857 ctx->cdata.keylen = keylen;
858 ctx->cdata.key_virt = key;
859 ctx->cdata.key_inline = true;
860
861 /* xts_skcipher_encrypt shared descriptor */
862 desc = ctx->sh_desc_enc;
863 cnstr_shdsc_xts_skcipher_encap(desc, &ctx->cdata);
864 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
865 desc_bytes(desc), ctx->dir);
866
867 /* xts_skcipher_decrypt shared descriptor */
868 desc = ctx->sh_desc_dec;
869 cnstr_shdsc_xts_skcipher_decap(desc, &ctx->cdata);
870 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
871 desc_bytes(desc), ctx->dir);
872
873 return 0;
874 }
875
876 /*
877 * aead_edesc - s/w-extended aead descriptor
878 * @src_nents: number of segments in input s/w scatterlist
879 * @dst_nents: number of segments in output s/w scatterlist
880 * @mapped_src_nents: number of segments in input h/w link table
881 * @mapped_dst_nents: number of segments in output h/w link table
882 * @sec4_sg_bytes: length of dma mapped sec4_sg space
883 * @bklog: stored to determine if the request needs backlog
884 * @sec4_sg_dma: bus physical mapped address of h/w link table
885 * @sec4_sg: pointer to h/w link table
886 * @hw_desc: the h/w job descriptor followed by any referenced link tables
887 */
888 struct aead_edesc {
889 int src_nents;
890 int dst_nents;
891 int mapped_src_nents;
892 int mapped_dst_nents;
893 int sec4_sg_bytes;
894 bool bklog;
895 dma_addr_t sec4_sg_dma;
896 struct sec4_sg_entry *sec4_sg;
897 u32 hw_desc[];
898 };
899
900 /*
901 * skcipher_edesc - s/w-extended skcipher descriptor
902 * @src_nents: number of segments in input s/w scatterlist
903 * @dst_nents: number of segments in output s/w scatterlist
904 * @mapped_src_nents: number of segments in input h/w link table
905 * @mapped_dst_nents: number of segments in output h/w link table
906 * @iv_dma: dma address of iv for checking continuity and link table
907 * @sec4_sg_bytes: length of dma mapped sec4_sg space
908 * @bklog: stored to determine if the request needs backlog
909 * @sec4_sg_dma: bus physical mapped address of h/w link table
910 * @sec4_sg: pointer to h/w link table
911 * @hw_desc: the h/w job descriptor followed by any referenced link tables
912 * and IV
913 */
914 struct skcipher_edesc {
915 int src_nents;
916 int dst_nents;
917 int mapped_src_nents;
918 int mapped_dst_nents;
919 dma_addr_t iv_dma;
920 int sec4_sg_bytes;
921 bool bklog;
922 dma_addr_t sec4_sg_dma;
923 struct sec4_sg_entry *sec4_sg;
924 u32 hw_desc[];
925 };
926
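/*
 * Release the DMA mappings taken for a request: the source/destination
 * scatterlists, the IV (if mapped) and the sec4 S/G link table.
 */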
927 static void caam_unmap(struct device *dev, struct scatterlist *src,
928 struct scatterlist *dst, int src_nents,
929 int dst_nents,
930 dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma,
931 int sec4_sg_bytes)
932 {
933 if (dst != src) {
934 if (src_nents)
935 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
936 if (dst_nents)
937 dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
938 } else {
939 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
940 }
941
942 if (iv_dma)
943 dma_unmap_single(dev, iv_dma, ivsize, DMA_BIDIRECTIONAL);
944 if (sec4_sg_bytes)
945 dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
946 DMA_TO_DEVICE);
947 }
948
949 static void aead_unmap(struct device *dev,
950 struct aead_edesc *edesc,
951 struct aead_request *req)
952 {
953 caam_unmap(dev, req->src, req->dst,
954 edesc->src_nents, edesc->dst_nents, 0, 0,
955 edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
956 }
957
958 static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc,
959 struct skcipher_request *req)
960 {
961 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
962 int ivsize = crypto_skcipher_ivsize(skcipher);
963
964 caam_unmap(dev, req->src, req->dst,
965 edesc->src_nents, edesc->dst_nents,
966 edesc->iv_dma, ivsize,
967 edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
968 }
969
970 static void aead_crypt_done(struct device *jrdev, u32 *desc, u32 err,
971 void *context)
972 {
973 struct aead_request *req = context;
974 struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
975 struct caam_drv_private_jr *jrp = dev_get_drvdata(jrdev);
976 struct aead_edesc *edesc;
977 int ecode = 0;
978 bool has_bklog;
979
980 dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
981
982 edesc = rctx->edesc;
983 has_bklog = edesc->bklog;
984
985 if (err)
986 ecode = caam_jr_strstatus(jrdev, err);
987
988 aead_unmap(jrdev, edesc, req);
989
990 kfree(edesc);
991
992 /*
993 * If the backlog flag is not set, the request is completed by CAAM
994 * rather than by the crypto engine.
995 */
996 if (!has_bklog)
997 aead_request_complete(req, ecode);
998 else
999 crypto_finalize_aead_request(jrp->engine, req, ecode);
1000 }
1001
1002 static void skcipher_crypt_done(struct device *jrdev, u32 *desc, u32 err,
1003 void *context)
1004 {
1005 struct skcipher_request *req = context;
1006 struct skcipher_edesc *edesc;
1007 struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
1008 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1009 struct caam_drv_private_jr *jrp = dev_get_drvdata(jrdev);
1010 int ivsize = crypto_skcipher_ivsize(skcipher);
1011 int ecode = 0;
1012 bool has_bklog;
1013
1014 dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
1015
1016 edesc = rctx->edesc;
1017 has_bklog = edesc->bklog;
1018 if (err)
1019 ecode = caam_jr_strstatus(jrdev, err);
1020
1021 skcipher_unmap(jrdev, edesc, req);
1022
1023 /*
1024 * The crypto API expects us to set the IV (req->iv) to the last
1025 * ciphertext block (CBC mode) or last counter (CTR mode).
1026 * This is used e.g. by the CTS mode.
1027 */
1028 if (ivsize && !ecode) {
1029 memcpy(req->iv, (u8 *)edesc->sec4_sg + edesc->sec4_sg_bytes,
1030 ivsize);
1031
1032 print_hex_dump_debug("dstiv @" __stringify(__LINE__)": ",
1033 DUMP_PREFIX_ADDRESS, 16, 4, req->iv,
1034 ivsize, 1);
1035 }
1036
1037 caam_dump_sg("dst @" __stringify(__LINE__)": ",
1038 DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
1039 edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);
1040
1041 kfree(edesc);
1042
1043 /*
1044 * If the backlog flag is not set, the request is completed by CAAM
1045 * rather than by the crypto engine.
1046 */
1047 if (!has_bklog)
1048 skcipher_request_complete(req, ecode);
1049 else
1050 crypto_finalize_skcipher_request(jrp->engine, req, ecode);
1051 }
1052
1053 /*
1054 * Fill in aead job descriptor
1055 */
1056 static void init_aead_job(struct aead_request *req,
1057 struct aead_edesc *edesc,
1058 bool all_contig, bool encrypt)
1059 {
1060 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1061 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1062 int authsize = ctx->authsize;
1063 u32 *desc = edesc->hw_desc;
1064 u32 out_options, in_options;
1065 dma_addr_t dst_dma, src_dma;
1066 int len, sec4_sg_index = 0;
1067 dma_addr_t ptr;
1068 u32 *sh_desc;
1069
1070 sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
1071 ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;
1072
1073 len = desc_len(sh_desc);
1074 init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);
1075
1076 if (all_contig) {
1077 src_dma = edesc->mapped_src_nents ? sg_dma_address(req->src) :
1078 0;
1079 in_options = 0;
1080 } else {
1081 src_dma = edesc->sec4_sg_dma;
1082 sec4_sg_index += edesc->mapped_src_nents;
1083 in_options = LDST_SGF;
1084 }
1085
1086 append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,
1087 in_options);
1088
1089 dst_dma = src_dma;
1090 out_options = in_options;
1091
1092 if (unlikely(req->src != req->dst)) {
1093 if (!edesc->mapped_dst_nents) {
1094 dst_dma = 0;
1095 out_options = 0;
1096 } else if (edesc->mapped_dst_nents == 1) {
1097 dst_dma = sg_dma_address(req->dst);
1098 out_options = 0;
1099 } else {
1100 dst_dma = edesc->sec4_sg_dma +
1101 sec4_sg_index *
1102 sizeof(struct sec4_sg_entry);
1103 out_options = LDST_SGF;
1104 }
1105 }
1106
1107 if (encrypt)
1108 append_seq_out_ptr(desc, dst_dma,
1109 req->assoclen + req->cryptlen + authsize,
1110 out_options);
1111 else
1112 append_seq_out_ptr(desc, dst_dma,
1113 req->assoclen + req->cryptlen - authsize,
1114 out_options);
1115 }
1116
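/*
 * Fill in a GCM job descriptor: reuse the generic AEAD job setup, pass
 * assoclen through REG3 and load the (salt for RFC4106 +) IV as immediate
 * data.
 */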
1117 static void init_gcm_job(struct aead_request *req,
1118 struct aead_edesc *edesc,
1119 bool all_contig, bool encrypt)
1120 {
1121 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1122 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1123 unsigned int ivsize = crypto_aead_ivsize(aead);
1124 u32 *desc = edesc->hw_desc;
1125 bool generic_gcm = (ivsize == GCM_AES_IV_SIZE);
1126 unsigned int last;
1127
1128 init_aead_job(req, edesc, all_contig, encrypt);
1129 append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
1130
1131 /* BUG This should not be specific to generic GCM. */
1132 last = 0;
1133 if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))
1134 last = FIFOLD_TYPE_LAST1;
1135
1136 /* Read GCM IV */
1137 append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE |
1138 FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | GCM_AES_IV_SIZE | last);
1139 /* Append Salt */
1140 if (!generic_gcm)
1141 append_data(desc, ctx->key + ctx->cdata.keylen, 4);
1142 /* Append IV */
1143 append_data(desc, req->iv, ivsize);
1144 /* End of blank commands */
1145 }
1146
1147 static void init_chachapoly_job(struct aead_request *req,
1148 struct aead_edesc *edesc, bool all_contig,
1149 bool encrypt)
1150 {
1151 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1152 unsigned int ivsize = crypto_aead_ivsize(aead);
1153 unsigned int assoclen = req->assoclen;
1154 u32 *desc = edesc->hw_desc;
1155 u32 ctx_iv_off = 4;
1156
1157 init_aead_job(req, edesc, all_contig, encrypt);
1158
1159 if (ivsize != CHACHAPOLY_IV_SIZE) {
1160 /* IPsec specific: CONTEXT1[223:128] = {NONCE, IV} */
1161 ctx_iv_off += 4;
1162
1163 /*
1164 * The associated data comes already with the IV but we need
1165 * to skip it when we authenticate or encrypt...
1166 */
1167 assoclen -= ivsize;
1168 }
1169
1170 append_math_add_imm_u32(desc, REG3, ZERO, IMM, assoclen);
1171
1172 /*
1173 * For IPsec load the IV further in the same register.
1174 * For RFC7539 simply load the 12 bytes nonce in a single operation
1175 */
1176 append_load_as_imm(desc, req->iv, ivsize, LDST_CLASS_1_CCB |
1177 LDST_SRCDST_BYTE_CONTEXT |
1178 ctx_iv_off << LDST_OFFSET_SHIFT);
1179 }
1180
1181 static void init_authenc_job(struct aead_request *req,
1182 struct aead_edesc *edesc,
1183 bool all_contig, bool encrypt)
1184 {
1185 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1186 struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
1187 struct caam_aead_alg, aead);
1188 unsigned int ivsize = crypto_aead_ivsize(aead);
1189 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1190 struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent);
1191 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
1192 OP_ALG_AAI_CTR_MOD128);
1193 const bool is_rfc3686 = alg->caam.rfc3686;
1194 u32 *desc = edesc->hw_desc;
1195 u32 ivoffset = 0;
1196
1197 /*
1198 * AES-CTR needs to load IV in CONTEXT1 reg
1199 * at an offset of 128bits (16bytes)
1200 * CONTEXT1[255:128] = IV
1201 */
1202 if (ctr_mode)
1203 ivoffset = 16;
1204
1205 /*
1206 * RFC3686 specific:
1207 * CONTEXT1[255:128] = {NONCE, IV, COUNTER}
1208 */
1209 if (is_rfc3686)
1210 ivoffset = 16 + CTR_RFC3686_NONCE_SIZE;
1211
1212 init_aead_job(req, edesc, all_contig, encrypt);
1213
1214 /*
1215 * {REG3, DPOVRD} = assoclen, depending on whether MATH command supports
1216 * having DPOVRD as destination.
1217 */
1218 if (ctrlpriv->era < 3)
1219 append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
1220 else
1221 append_math_add_imm_u32(desc, DPOVRD, ZERO, IMM, req->assoclen);
1222
1223 if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv))
1224 append_load_as_imm(desc, req->iv, ivsize,
1225 LDST_CLASS_1_CCB |
1226 LDST_SRCDST_BYTE_CONTEXT |
1227 (ivoffset << LDST_OFFSET_SHIFT));
1228 }
1229
1230 /*
1231 * Fill in skcipher job descriptor
1232 */
1233 static void init_skcipher_job(struct skcipher_request *req,
1234 struct skcipher_edesc *edesc,
1235 const bool encrypt)
1236 {
1237 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1238 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
1239 struct device *jrdev = ctx->jrdev;
1240 int ivsize = crypto_skcipher_ivsize(skcipher);
1241 u32 *desc = edesc->hw_desc;
1242 u32 *sh_desc;
1243 u32 in_options = 0, out_options = 0;
1244 dma_addr_t src_dma, dst_dma, ptr;
1245 int len, sec4_sg_index = 0;
1246
1247 print_hex_dump_debug("presciv@"__stringify(__LINE__)": ",
1248 DUMP_PREFIX_ADDRESS, 16, 4, req->iv, ivsize, 1);
1249 dev_dbg(jrdev, "asked=%d, cryptlen=%d\n",
1250 (int)edesc->src_nents > 1 ? 100 : req->cryptlen, req->cryptlen);
1251
1252 caam_dump_sg("src @" __stringify(__LINE__)": ",
1253 DUMP_PREFIX_ADDRESS, 16, 4, req->src,
1254 edesc->src_nents > 1 ? 100 : req->cryptlen, 1);
1255
1256 sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
1257 ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;
1258
1259 len = desc_len(sh_desc);
1260 init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);
1261
1262 if (ivsize || edesc->mapped_src_nents > 1) {
1263 src_dma = edesc->sec4_sg_dma;
1264 sec4_sg_index = edesc->mapped_src_nents + !!ivsize;
1265 in_options = LDST_SGF;
1266 } else {
1267 src_dma = sg_dma_address(req->src);
1268 }
1269
1270 append_seq_in_ptr(desc, src_dma, req->cryptlen + ivsize, in_options);
1271
1272 if (likely(req->src == req->dst)) {
1273 dst_dma = src_dma + !!ivsize * sizeof(struct sec4_sg_entry);
1274 out_options = in_options;
1275 } else if (!ivsize && edesc->mapped_dst_nents == 1) {
1276 dst_dma = sg_dma_address(req->dst);
1277 } else {
1278 dst_dma = edesc->sec4_sg_dma + sec4_sg_index *
1279 sizeof(struct sec4_sg_entry);
1280 out_options = LDST_SGF;
1281 }
1282
1283 append_seq_out_ptr(desc, dst_dma, req->cryptlen + ivsize, out_options);
1284 }
1285
1286 /*
1287 * allocate and map the aead extended descriptor
1288 */
1289 static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
1290 int desc_bytes, bool *all_contig_ptr,
1291 bool encrypt)
1292 {
1293 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1294 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1295 struct device *jrdev = ctx->jrdev;
1296 struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
1297 gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
1298 GFP_KERNEL : GFP_ATOMIC;
1299 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
1300 int src_len, dst_len = 0;
1301 struct aead_edesc *edesc;
1302 int sec4_sg_index, sec4_sg_len, sec4_sg_bytes;
1303 unsigned int authsize = ctx->authsize;
1304
1305 if (unlikely(req->dst != req->src)) {
1306 src_len = req->assoclen + req->cryptlen;
1307 dst_len = src_len + (encrypt ? authsize : (-authsize));
1308
1309 src_nents = sg_nents_for_len(req->src, src_len);
1310 if (unlikely(src_nents < 0)) {
1311 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
1312 src_len);
1313 return ERR_PTR(src_nents);
1314 }
1315
1316 dst_nents = sg_nents_for_len(req->dst, dst_len);
1317 if (unlikely(dst_nents < 0)) {
1318 dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
1319 dst_len);
1320 return ERR_PTR(dst_nents);
1321 }
1322 } else {
1323 src_len = req->assoclen + req->cryptlen +
1324 (encrypt ? authsize : 0);
1325
1326 src_nents = sg_nents_for_len(req->src, src_len);
1327 if (unlikely(src_nents < 0)) {
1328 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
1329 src_len);
1330 return ERR_PTR(src_nents);
1331 }
1332 }
1333
1334 if (likely(req->src == req->dst)) {
1335 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1336 DMA_BIDIRECTIONAL);
1337 if (unlikely(!mapped_src_nents)) {
1338 dev_err(jrdev, "unable to map source\n");
1339 return ERR_PTR(-ENOMEM);
1340 }
1341 } else {
1342 /* Cover also the case of null (zero length) input data */
1343 if (src_nents) {
1344 mapped_src_nents = dma_map_sg(jrdev, req->src,
1345 src_nents, DMA_TO_DEVICE);
1346 if (unlikely(!mapped_src_nents)) {
1347 dev_err(jrdev, "unable to map source\n");
1348 return ERR_PTR(-ENOMEM);
1349 }
1350 } else {
1351 mapped_src_nents = 0;
1352 }
1353
1354 /* Cover also the case of null (zero length) output data */
1355 if (dst_nents) {
1356 mapped_dst_nents = dma_map_sg(jrdev, req->dst,
1357 dst_nents,
1358 DMA_FROM_DEVICE);
1359 if (unlikely(!mapped_dst_nents)) {
1360 dev_err(jrdev, "unable to map destination\n");
1361 dma_unmap_sg(jrdev, req->src, src_nents,
1362 DMA_TO_DEVICE);
1363 return ERR_PTR(-ENOMEM);
1364 }
1365 } else {
1366 mapped_dst_nents = 0;
1367 }
1368 }
1369
1370 /*
1371 * HW reads 4 S/G entries at a time; make sure the reads don't go beyond
1372 * the end of the table by allocating more S/G entries.
1373 */
1374 sec4_sg_len = mapped_src_nents > 1 ? mapped_src_nents : 0;
1375 if (mapped_dst_nents > 1)
1376 sec4_sg_len += pad_sg_nents(mapped_dst_nents);
1377 else
1378 sec4_sg_len = pad_sg_nents(sec4_sg_len);
1379
1380 sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry);
1381
1382 /* allocate space for base edesc and hw desc commands, link tables */
1383 edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
1384 GFP_DMA | flags);
1385 if (!edesc) {
1386 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
1387 0, 0, 0);
1388 return ERR_PTR(-ENOMEM);
1389 }
1390
1391 edesc->src_nents = src_nents;
1392 edesc->dst_nents = dst_nents;
1393 edesc->mapped_src_nents = mapped_src_nents;
1394 edesc->mapped_dst_nents = mapped_dst_nents;
1395 edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) +
1396 desc_bytes;
1397
1398 rctx->edesc = edesc;
1399
1400 *all_contig_ptr = !(mapped_src_nents > 1);
1401
1402 sec4_sg_index = 0;
1403 if (mapped_src_nents > 1) {
1404 sg_to_sec4_sg_last(req->src, src_len,
1405 edesc->sec4_sg + sec4_sg_index, 0);
1406 sec4_sg_index += mapped_src_nents;
1407 }
1408 if (mapped_dst_nents > 1) {
1409 sg_to_sec4_sg_last(req->dst, dst_len,
1410 edesc->sec4_sg + sec4_sg_index, 0);
1411 }
1412
1413 if (!sec4_sg_bytes)
1414 return edesc;
1415
1416 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
1417 sec4_sg_bytes, DMA_TO_DEVICE);
1418 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
1419 dev_err(jrdev, "unable to map S/G table\n");
1420 aead_unmap(jrdev, edesc, req);
1421 kfree(edesc);
1422 return ERR_PTR(-ENOMEM);
1423 }
1424
1425 edesc->sec4_sg_bytes = sec4_sg_bytes;
1426
1427 return edesc;
1428 }
1429
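/*
 * Submit an AEAD job: backloggable requests go through the crypto engine,
 * all others are enqueued directly on the job ring. On failure the extended
 * descriptor is unmapped and freed here.
 */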
1430 static int aead_enqueue_req(struct device *jrdev, struct aead_request *req)
1431 {
1432 struct caam_drv_private_jr *jrpriv = dev_get_drvdata(jrdev);
1433 struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
1434 struct aead_edesc *edesc = rctx->edesc;
1435 u32 *desc = edesc->hw_desc;
1436 int ret;
1437
1438 /*
1439 * Only backlog requests are sent to the crypto engine; the others can
1440 * be handled directly by CAAM, if free, especially since the JR has up
1441 * to 1024 entries (more than the 10 entries of the crypto engine).
1442 */
1443 if (req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG)
1444 ret = crypto_transfer_aead_request_to_engine(jrpriv->engine,
1445 req);
1446 else
1447 ret = caam_jr_enqueue(jrdev, desc, aead_crypt_done, req);
1448
1449 if ((ret != -EINPROGRESS) && (ret != -EBUSY)) {
1450 aead_unmap(jrdev, edesc, req);
1451 kfree(rctx->edesc);
1452 }
1453
1454 return ret;
1455 }
1456
1457 static inline int chachapoly_crypt(struct aead_request *req, bool encrypt)
1458 {
1459 struct aead_edesc *edesc;
1460 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1461 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1462 struct device *jrdev = ctx->jrdev;
1463 bool all_contig;
1464 u32 *desc;
1465
1466 edesc = aead_edesc_alloc(req, CHACHAPOLY_DESC_JOB_IO_LEN, &all_contig,
1467 encrypt);
1468 if (IS_ERR(edesc))
1469 return PTR_ERR(edesc);
1470
1471 desc = edesc->hw_desc;
1472
1473 init_chachapoly_job(req, edesc, all_contig, encrypt);
1474 print_hex_dump_debug("chachapoly jobdesc@" __stringify(__LINE__)": ",
1475 DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
1476 1);
1477
1478 return aead_enqueue_req(jrdev, req);
1479 }
1480
1481 static int chachapoly_encrypt(struct aead_request *req)
1482 {
1483 return chachapoly_crypt(req, true);
1484 }
1485
1486 static int chachapoly_decrypt(struct aead_request *req)
1487 {
1488 return chachapoly_crypt(req, false);
1489 }
1490
1491 static inline int aead_crypt(struct aead_request *req, bool encrypt)
1492 {
1493 struct aead_edesc *edesc;
1494 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1495 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1496 struct device *jrdev = ctx->jrdev;
1497 bool all_contig;
1498
1499 /* allocate extended descriptor */
1500 edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
1501 &all_contig, encrypt);
1502 if (IS_ERR(edesc))
1503 return PTR_ERR(edesc);
1504
1505 /* Create and submit job descriptor */
1506 init_authenc_job(req, edesc, all_contig, encrypt);
1507
1508 print_hex_dump_debug("aead jobdesc@"__stringify(__LINE__)": ",
1509 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1510 desc_bytes(edesc->hw_desc), 1);
1511
1512 return aead_enqueue_req(jrdev, req);
1513 }
1514
1515 static int aead_encrypt(struct aead_request *req)
1516 {
1517 return aead_crypt(req, true);
1518 }
1519
1520 static int aead_decrypt(struct aead_request *req)
1521 {
1522 return aead_crypt(req, false);
1523 }
1524
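/* crypto-engine callback: enqueue a backlogged AEAD request on the job ring */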
1525 static int aead_do_one_req(struct crypto_engine *engine, void *areq)
1526 {
1527 struct aead_request *req = aead_request_cast(areq);
1528 struct caam_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
1529 struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
1530 u32 *desc = rctx->edesc->hw_desc;
1531 int ret;
1532
1533 rctx->edesc->bklog = true;
1534
1535 ret = caam_jr_enqueue(ctx->jrdev, desc, aead_crypt_done, req);
1536
1537 if (ret != -EINPROGRESS) {
1538 aead_unmap(ctx->jrdev, rctx->edesc, req);
1539 kfree(rctx->edesc);
1540 } else {
1541 ret = 0;
1542 }
1543
1544 return ret;
1545 }
1546
1547 static inline int gcm_crypt(struct aead_request *req, bool encrypt)
1548 {
1549 struct aead_edesc *edesc;
1550 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1551 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1552 struct device *jrdev = ctx->jrdev;
1553 bool all_contig;
1554
1555 /* allocate extended descriptor */
1556 edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig,
1557 encrypt);
1558 if (IS_ERR(edesc))
1559 return PTR_ERR(edesc);
1560
1561 /* Create and submit job descriptor */
1562 init_gcm_job(req, edesc, all_contig, encrypt);
1563
1564 print_hex_dump_debug("aead jobdesc@"__stringify(__LINE__)": ",
1565 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1566 desc_bytes(edesc->hw_desc), 1);
1567
1568 return aead_enqueue_req(jrdev, req);
1569 }
1570
1571 static int gcm_encrypt(struct aead_request *req)
1572 {
1573 return gcm_crypt(req, true);
1574 }
1575
1576 static int gcm_decrypt(struct aead_request *req)
1577 {
1578 return gcm_crypt(req, false);
1579 }
1580
1581 static int ipsec_gcm_encrypt(struct aead_request *req)
1582 {
1583 return crypto_ipsec_check_assoclen(req->assoclen) ? : gcm_encrypt(req);
1584 }
1585
1586 static int ipsec_gcm_decrypt(struct aead_request *req)
1587 {
1588 return crypto_ipsec_check_assoclen(req->assoclen) ? : gcm_decrypt(req);
1589 }
1590
1591 /*
1592 * allocate and map the skcipher extended descriptor for skcipher
1593 */
1594 static struct skcipher_edesc *skcipher_edesc_alloc(struct skcipher_request *req,
1595 int desc_bytes)
1596 {
1597 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1598 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
1599 struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
1600 struct device *jrdev = ctx->jrdev;
1601 gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
1602 GFP_KERNEL : GFP_ATOMIC;
1603 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
1604 struct skcipher_edesc *edesc;
1605 dma_addr_t iv_dma = 0;
1606 u8 *iv;
1607 int ivsize = crypto_skcipher_ivsize(skcipher);
1608 int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;
1609
1610 src_nents = sg_nents_for_len(req->src, req->cryptlen);
1611 if (unlikely(src_nents < 0)) {
1612 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
1613 req->cryptlen);
1614 return ERR_PTR(src_nents);
1615 }
1616
1617 if (req->dst != req->src) {
1618 dst_nents = sg_nents_for_len(req->dst, req->cryptlen);
1619 if (unlikely(dst_nents < 0)) {
1620 dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
1621 req->cryptlen);
1622 return ERR_PTR(dst_nents);
1623 }
1624 }
1625
1626 if (likely(req->src == req->dst)) {
1627 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1628 DMA_BIDIRECTIONAL);
1629 if (unlikely(!mapped_src_nents)) {
1630 dev_err(jrdev, "unable to map source\n");
1631 return ERR_PTR(-ENOMEM);
1632 }
1633 } else {
1634 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1635 DMA_TO_DEVICE);
1636 if (unlikely(!mapped_src_nents)) {
1637 dev_err(jrdev, "unable to map source\n");
1638 return ERR_PTR(-ENOMEM);
1639 }
1640 mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
1641 DMA_FROM_DEVICE);
1642 if (unlikely(!mapped_dst_nents)) {
1643 dev_err(jrdev, "unable to map destination\n");
1644 dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
1645 return ERR_PTR(-ENOMEM);
1646 }
1647 }
1648
1649 if (!ivsize && mapped_src_nents == 1)
1650 sec4_sg_ents = 0; // no need for an input hw s/g table
1651 else
1652 sec4_sg_ents = mapped_src_nents + !!ivsize;
1653 dst_sg_idx = sec4_sg_ents;
1654
1655 /*
1656 * Input, output HW S/G tables: [IV, src][dst, IV]
1657 * IV entries point to the same buffer
1658 * If src == dst, S/G entries are reused (S/G tables overlap)
1659 *
1660 * HW reads 4 S/G entries at a time; make sure the reads don't go beyond
1661 * the end of the table by allocating more S/G entries. Logic:
1662 * if (output S/G)
1663 * pad output S/G, if needed
1664 * else if (input S/G) ...
1665 * pad input S/G, if needed
1666 */
1667 if (ivsize || mapped_dst_nents > 1) {
1668 if (req->src == req->dst)
1669 sec4_sg_ents = !!ivsize + pad_sg_nents(sec4_sg_ents);
1670 else
1671 sec4_sg_ents += pad_sg_nents(mapped_dst_nents +
1672 !!ivsize);
1673 } else {
1674 sec4_sg_ents = pad_sg_nents(sec4_sg_ents);
1675 }
1676
1677 sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);
1678
1679 /*
1680 * allocate space for base edesc and hw desc commands, link tables, IV
1681 */
1682 edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes + ivsize,
1683 GFP_DMA | flags);
1684 if (!edesc) {
1685 dev_err(jrdev, "could not allocate extended descriptor\n");
1686 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
1687 0, 0, 0);
1688 return ERR_PTR(-ENOMEM);
1689 }
1690
1691 edesc->src_nents = src_nents;
1692 edesc->dst_nents = dst_nents;
1693 edesc->mapped_src_nents = mapped_src_nents;
1694 edesc->mapped_dst_nents = mapped_dst_nents;
1695 edesc->sec4_sg_bytes = sec4_sg_bytes;
1696 edesc->sec4_sg = (struct sec4_sg_entry *)((u8 *)edesc->hw_desc +
1697 desc_bytes);
1698 rctx->edesc = edesc;
1699
1700 /* Make sure IV is located in a DMAable area */
1701 if (ivsize) {
1702 iv = (u8 *)edesc->sec4_sg + sec4_sg_bytes;
1703 memcpy(iv, req->iv, ivsize);
1704
1705 iv_dma = dma_map_single(jrdev, iv, ivsize, DMA_BIDIRECTIONAL);
1706 if (dma_mapping_error(jrdev, iv_dma)) {
1707 dev_err(jrdev, "unable to map IV\n");
1708 caam_unmap(jrdev, req->src, req->dst, src_nents,
1709 dst_nents, 0, 0, 0, 0);
1710 kfree(edesc);
1711 return ERR_PTR(-ENOMEM);
1712 }
1713
1714 dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
1715 }
1716 if (dst_sg_idx)
1717 sg_to_sec4_sg(req->src, req->cryptlen, edesc->sec4_sg +
1718 !!ivsize, 0);
1719
1720 if (req->src != req->dst && (ivsize || mapped_dst_nents > 1))
1721 sg_to_sec4_sg(req->dst, req->cryptlen, edesc->sec4_sg +
1722 dst_sg_idx, 0);
1723
1724 if (ivsize)
1725 dma_to_sec4_sg_one(edesc->sec4_sg + dst_sg_idx +
1726 mapped_dst_nents, iv_dma, ivsize, 0);
1727
1728 if (ivsize || mapped_dst_nents > 1)
1729 sg_to_sec4_set_last(edesc->sec4_sg + dst_sg_idx +
1730 mapped_dst_nents - 1 + !!ivsize);
1731
1732 if (sec4_sg_bytes) {
1733 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
1734 sec4_sg_bytes,
1735 DMA_TO_DEVICE);
1736 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
1737 dev_err(jrdev, "unable to map S/G table\n");
1738 caam_unmap(jrdev, req->src, req->dst, src_nents,
1739 dst_nents, iv_dma, ivsize, 0, 0);
1740 kfree(edesc);
1741 return ERR_PTR(-ENOMEM);
1742 }
1743 }
1744
1745 edesc->iv_dma = iv_dma;
1746
1747 print_hex_dump_debug("skcipher sec4_sg@" __stringify(__LINE__)": ",
1748 DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
1749 sec4_sg_bytes, 1);
1750
1751 return edesc;
1752 }
1753
1754 static int skcipher_do_one_req(struct crypto_engine *engine, void *areq)
1755 {
1756 struct skcipher_request *req = skcipher_request_cast(areq);
1757 struct caam_ctx *ctx = crypto_skcipher_ctx(crypto_skcipher_reqtfm(req));
1758 struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
1759 u32 *desc = rctx->edesc->hw_desc;
1760 int ret;
1761
1762 rctx->edesc->bklog = true;
1763
1764 ret = caam_jr_enqueue(ctx->jrdev, desc, skcipher_crypt_done, req);
1765
1766 if (ret != -EINPROGRESS) {
1767 skcipher_unmap(ctx->jrdev, rctx->edesc, req);
1768 kfree(rctx->edesc);
1769 } else {
1770 ret = 0;
1771 }
1772
1773 return ret;
1774 }
1775
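/*
 * Returns true when the upper half of the XTS IV (the high 64 bits of the
 * sector index) is non-zero; judging by the era check in skcipher_crypt(),
 * older CAAM blocks cannot handle such IVs, so those requests are handed
 * to the software fallback.
 */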
1776 static inline bool xts_skcipher_ivsize(struct skcipher_request *req)
1777 {
1778 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1779 unsigned int ivsize = crypto_skcipher_ivsize(skcipher);
1780
1781 return !!get_unaligned((u64 *)(req->iv + (ivsize / 2)));
1782 }
1783
1784 static inline int skcipher_crypt(struct skcipher_request *req, bool encrypt)
1785 {
1786 struct skcipher_edesc *edesc;
1787 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1788 struct caam_ctx *ctx = crypto_skcipher_ctx(skcipher);
1789 struct device *jrdev = ctx->jrdev;
1790 struct caam_drv_private_jr *jrpriv = dev_get_drvdata(jrdev);
1791 struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
1792 u32 *desc;
1793 int ret = 0;
1794
1795 /*
1796 * XTS is expected to return an error even for input length = 0, so the
1797 * zero-length case is short-circuited only when no fallback is registered.
1798 * Input length < block size is caught during HW offloading and errors out.
1799 */
1800 if (!req->cryptlen && !ctx->fallback)
1801 return 0;
1802
1803 if (ctx->fallback && ((ctrlpriv->era <= 8 && xts_skcipher_ivsize(req)) ||
1804 ctx->xts_key_fallback)) {
1805 struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
1806
1807 skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback);
1808 skcipher_request_set_callback(&rctx->fallback_req,
1809 req->base.flags,
1810 req->base.complete,
1811 req->base.data);
1812 skcipher_request_set_crypt(&rctx->fallback_req, req->src,
1813 req->dst, req->cryptlen, req->iv);
1814
1815 return encrypt ? crypto_skcipher_encrypt(&rctx->fallback_req) :
1816 crypto_skcipher_decrypt(&rctx->fallback_req);
1817 }
1818
1819 /* allocate extended descriptor */
1820 edesc = skcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ);
1821 if (IS_ERR(edesc))
1822 return PTR_ERR(edesc);
1823
1824 /* Create and submit job descriptor */
1825 init_skcipher_job(req, edesc, encrypt);
1826
1827 print_hex_dump_debug("skcipher jobdesc@" __stringify(__LINE__)": ",
1828 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1829 desc_bytes(edesc->hw_desc), 1);
1830
1831 desc = edesc->hw_desc;
1832 /*
1833 * Only backlog requests are sent to crypto-engine; the others can be
1834 * handled directly by CAAM, if free, especially since the JR has up to
1835 * 1024 entries (more than the 10 entries of the crypto-engine queue).
1836 */
1837 if (req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG)
1838 ret = crypto_transfer_skcipher_request_to_engine(jrpriv->engine,
1839 req);
1840 else
1841 ret = caam_jr_enqueue(jrdev, desc, skcipher_crypt_done, req);
1842
1843 if ((ret != -EINPROGRESS) && (ret != -EBUSY)) {
1844 skcipher_unmap(jrdev, edesc, req);
1845 kfree(edesc);
1846 }
1847
1848 return ret;
1849 }
1850
1851 static int skcipher_encrypt(struct skcipher_request *req)
1852 {
1853 return skcipher_crypt(req, true);
1854 }
1855
1856 static int skcipher_decrypt(struct skcipher_request *req)
1857 {
1858 return skcipher_crypt(req, false);
1859 }
1860
1861 static struct caam_skcipher_alg driver_algs[] = {
1862 {
1863 .skcipher = {
1864 .base = {
1865 .cra_name = "cbc(aes)",
1866 .cra_driver_name = "cbc-aes-caam",
1867 .cra_blocksize = AES_BLOCK_SIZE,
1868 },
1869 .setkey = aes_skcipher_setkey,
1870 .encrypt = skcipher_encrypt,
1871 .decrypt = skcipher_decrypt,
1872 .min_keysize = AES_MIN_KEY_SIZE,
1873 .max_keysize = AES_MAX_KEY_SIZE,
1874 .ivsize = AES_BLOCK_SIZE,
1875 },
1876 .caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1877 },
1878 {
1879 .skcipher = {
1880 .base = {
1881 .cra_name = "cbc(des3_ede)",
1882 .cra_driver_name = "cbc-3des-caam",
1883 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1884 },
1885 .setkey = des3_skcipher_setkey,
1886 .encrypt = skcipher_encrypt,
1887 .decrypt = skcipher_decrypt,
1888 .min_keysize = DES3_EDE_KEY_SIZE,
1889 .max_keysize = DES3_EDE_KEY_SIZE,
1890 .ivsize = DES3_EDE_BLOCK_SIZE,
1891 },
1892 .caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
1893 },
1894 {
1895 .skcipher = {
1896 .base = {
1897 .cra_name = "cbc(des)",
1898 .cra_driver_name = "cbc-des-caam",
1899 .cra_blocksize = DES_BLOCK_SIZE,
1900 },
1901 .setkey = des_skcipher_setkey,
1902 .encrypt = skcipher_encrypt,
1903 .decrypt = skcipher_decrypt,
1904 .min_keysize = DES_KEY_SIZE,
1905 .max_keysize = DES_KEY_SIZE,
1906 .ivsize = DES_BLOCK_SIZE,
1907 },
1908 .caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
1909 },
1910 {
1911 .skcipher = {
1912 .base = {
1913 .cra_name = "ctr(aes)",
1914 .cra_driver_name = "ctr-aes-caam",
1915 .cra_blocksize = 1,
1916 },
1917 .setkey = ctr_skcipher_setkey,
1918 .encrypt = skcipher_encrypt,
1919 .decrypt = skcipher_decrypt,
1920 .min_keysize = AES_MIN_KEY_SIZE,
1921 .max_keysize = AES_MAX_KEY_SIZE,
1922 .ivsize = AES_BLOCK_SIZE,
1923 .chunksize = AES_BLOCK_SIZE,
1924 },
1925 .caam.class1_alg_type = OP_ALG_ALGSEL_AES |
1926 OP_ALG_AAI_CTR_MOD128,
1927 },
1928 {
1929 .skcipher = {
1930 .base = {
1931 .cra_name = "rfc3686(ctr(aes))",
1932 .cra_driver_name = "rfc3686-ctr-aes-caam",
1933 .cra_blocksize = 1,
1934 },
1935 .setkey = rfc3686_skcipher_setkey,
1936 .encrypt = skcipher_encrypt,
1937 .decrypt = skcipher_decrypt,
1938 .min_keysize = AES_MIN_KEY_SIZE +
1939 CTR_RFC3686_NONCE_SIZE,
1940 .max_keysize = AES_MAX_KEY_SIZE +
1941 CTR_RFC3686_NONCE_SIZE,
1942 .ivsize = CTR_RFC3686_IV_SIZE,
1943 .chunksize = AES_BLOCK_SIZE,
1944 },
1945 .caam = {
1946 .class1_alg_type = OP_ALG_ALGSEL_AES |
1947 OP_ALG_AAI_CTR_MOD128,
1948 .rfc3686 = true,
1949 },
1950 },
1951 {
1952 .skcipher = {
1953 .base = {
1954 .cra_name = "xts(aes)",
1955 .cra_driver_name = "xts-aes-caam",
1956 .cra_flags = CRYPTO_ALG_NEED_FALLBACK,
1957 .cra_blocksize = AES_BLOCK_SIZE,
1958 },
1959 .setkey = xts_skcipher_setkey,
1960 .encrypt = skcipher_encrypt,
1961 .decrypt = skcipher_decrypt,
1962 .min_keysize = 2 * AES_MIN_KEY_SIZE,
1963 .max_keysize = 2 * AES_MAX_KEY_SIZE,
1964 .ivsize = AES_BLOCK_SIZE,
1965 },
1966 .caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
1967 },
1968 {
1969 .skcipher = {
1970 .base = {
1971 .cra_name = "ecb(des)",
1972 .cra_driver_name = "ecb-des-caam",
1973 .cra_blocksize = DES_BLOCK_SIZE,
1974 },
1975 .setkey = des_skcipher_setkey,
1976 .encrypt = skcipher_encrypt,
1977 .decrypt = skcipher_decrypt,
1978 .min_keysize = DES_KEY_SIZE,
1979 .max_keysize = DES_KEY_SIZE,
1980 },
1981 .caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_ECB,
1982 },
1983 {
1984 .skcipher = {
1985 .base = {
1986 .cra_name = "ecb(aes)",
1987 .cra_driver_name = "ecb-aes-caam",
1988 .cra_blocksize = AES_BLOCK_SIZE,
1989 },
1990 .setkey = aes_skcipher_setkey,
1991 .encrypt = skcipher_encrypt,
1992 .decrypt = skcipher_decrypt,
1993 .min_keysize = AES_MIN_KEY_SIZE,
1994 .max_keysize = AES_MAX_KEY_SIZE,
1995 },
1996 .caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_ECB,
1997 },
1998 {
1999 .skcipher = {
2000 .base = {
2001 .cra_name = "ecb(des3_ede)",
2002 .cra_driver_name = "ecb-des3-caam",
2003 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2004 },
2005 .setkey = des3_skcipher_setkey,
2006 .encrypt = skcipher_encrypt,
2007 .decrypt = skcipher_decrypt,
2008 .min_keysize = DES3_EDE_KEY_SIZE,
2009 .max_keysize = DES3_EDE_KEY_SIZE,
2010 },
2011 .caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_ECB,
2012 },
2013 };
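/*
 * A note on the .caam fields used in the tables above and below, as far as
 * their use in this file goes: rfc3686 selects the RFC3686 (CTR + nonce)
 * descriptor variants, geniv marks templates whose IV is produced by the
 * wrapping seqiv/echainiv generator, and nodkp indicates that no split
 * (DKP-derived) authentication key is involved (e.g. GCM, ChaCha20-Poly1305).
 */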
2014
2015 static struct caam_aead_alg driver_aeads[] = {
2016 {
2017 .aead = {
2018 .base = {
2019 .cra_name = "rfc4106(gcm(aes))",
2020 .cra_driver_name = "rfc4106-gcm-aes-caam",
2021 .cra_blocksize = 1,
2022 },
2023 .setkey = rfc4106_setkey,
2024 .setauthsize = rfc4106_setauthsize,
2025 .encrypt = ipsec_gcm_encrypt,
2026 .decrypt = ipsec_gcm_decrypt,
2027 .ivsize = GCM_RFC4106_IV_SIZE,
2028 .maxauthsize = AES_BLOCK_SIZE,
2029 },
2030 .caam = {
2031 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2032 .nodkp = true,
2033 },
2034 },
2035 {
2036 .aead = {
2037 .base = {
2038 .cra_name = "rfc4543(gcm(aes))",
2039 .cra_driver_name = "rfc4543-gcm-aes-caam",
2040 .cra_blocksize = 1,
2041 },
2042 .setkey = rfc4543_setkey,
2043 .setauthsize = rfc4543_setauthsize,
2044 .encrypt = ipsec_gcm_encrypt,
2045 .decrypt = ipsec_gcm_decrypt,
2046 .ivsize = GCM_RFC4543_IV_SIZE,
2047 .maxauthsize = AES_BLOCK_SIZE,
2048 },
2049 .caam = {
2050 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2051 .nodkp = true,
2052 },
2053 },
2054 /* Galois Counter Mode */
2055 {
2056 .aead = {
2057 .base = {
2058 .cra_name = "gcm(aes)",
2059 .cra_driver_name = "gcm-aes-caam",
2060 .cra_blocksize = 1,
2061 },
2062 .setkey = gcm_setkey,
2063 .setauthsize = gcm_setauthsize,
2064 .encrypt = gcm_encrypt,
2065 .decrypt = gcm_decrypt,
2066 .ivsize = GCM_AES_IV_SIZE,
2067 .maxauthsize = AES_BLOCK_SIZE,
2068 },
2069 .caam = {
2070 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2071 .nodkp = true,
2072 },
2073 },
2074 /* single-pass ipsec_esp descriptor */
2075 {
2076 .aead = {
2077 .base = {
2078 .cra_name = "authenc(hmac(md5),"
2079 "ecb(cipher_null))",
2080 .cra_driver_name = "authenc-hmac-md5-"
2081 "ecb-cipher_null-caam",
2082 .cra_blocksize = NULL_BLOCK_SIZE,
2083 },
2084 .setkey = aead_setkey,
2085 .setauthsize = aead_setauthsize,
2086 .encrypt = aead_encrypt,
2087 .decrypt = aead_decrypt,
2088 .ivsize = NULL_IV_SIZE,
2089 .maxauthsize = MD5_DIGEST_SIZE,
2090 },
2091 .caam = {
2092 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2093 OP_ALG_AAI_HMAC_PRECOMP,
2094 },
2095 },
2096 {
2097 .aead = {
2098 .base = {
2099 .cra_name = "authenc(hmac(sha1),"
2100 "ecb(cipher_null))",
2101 .cra_driver_name = "authenc-hmac-sha1-"
2102 "ecb-cipher_null-caam",
2103 .cra_blocksize = NULL_BLOCK_SIZE,
2104 },
2105 .setkey = aead_setkey,
2106 .setauthsize = aead_setauthsize,
2107 .encrypt = aead_encrypt,
2108 .decrypt = aead_decrypt,
2109 .ivsize = NULL_IV_SIZE,
2110 .maxauthsize = SHA1_DIGEST_SIZE,
2111 },
2112 .caam = {
2113 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2114 OP_ALG_AAI_HMAC_PRECOMP,
2115 },
2116 },
2117 {
2118 .aead = {
2119 .base = {
2120 .cra_name = "authenc(hmac(sha224),"
2121 "ecb(cipher_null))",
2122 .cra_driver_name = "authenc-hmac-sha224-"
2123 "ecb-cipher_null-caam",
2124 .cra_blocksize = NULL_BLOCK_SIZE,
2125 },
2126 .setkey = aead_setkey,
2127 .setauthsize = aead_setauthsize,
2128 .encrypt = aead_encrypt,
2129 .decrypt = aead_decrypt,
2130 .ivsize = NULL_IV_SIZE,
2131 .maxauthsize = SHA224_DIGEST_SIZE,
2132 },
2133 .caam = {
2134 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2135 OP_ALG_AAI_HMAC_PRECOMP,
2136 },
2137 },
2138 {
2139 .aead = {
2140 .base = {
2141 .cra_name = "authenc(hmac(sha256),"
2142 "ecb(cipher_null))",
2143 .cra_driver_name = "authenc-hmac-sha256-"
2144 "ecb-cipher_null-caam",
2145 .cra_blocksize = NULL_BLOCK_SIZE,
2146 },
2147 .setkey = aead_setkey,
2148 .setauthsize = aead_setauthsize,
2149 .encrypt = aead_encrypt,
2150 .decrypt = aead_decrypt,
2151 .ivsize = NULL_IV_SIZE,
2152 .maxauthsize = SHA256_DIGEST_SIZE,
2153 },
2154 .caam = {
2155 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2156 OP_ALG_AAI_HMAC_PRECOMP,
2157 },
2158 },
2159 {
2160 .aead = {
2161 .base = {
2162 .cra_name = "authenc(hmac(sha384),"
2163 "ecb(cipher_null))",
2164 .cra_driver_name = "authenc-hmac-sha384-"
2165 "ecb-cipher_null-caam",
2166 .cra_blocksize = NULL_BLOCK_SIZE,
2167 },
2168 .setkey = aead_setkey,
2169 .setauthsize = aead_setauthsize,
2170 .encrypt = aead_encrypt,
2171 .decrypt = aead_decrypt,
2172 .ivsize = NULL_IV_SIZE,
2173 .maxauthsize = SHA384_DIGEST_SIZE,
2174 },
2175 .caam = {
2176 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2177 OP_ALG_AAI_HMAC_PRECOMP,
2178 },
2179 },
2180 {
2181 .aead = {
2182 .base = {
2183 .cra_name = "authenc(hmac(sha512),"
2184 "ecb(cipher_null))",
2185 .cra_driver_name = "authenc-hmac-sha512-"
2186 "ecb-cipher_null-caam",
2187 .cra_blocksize = NULL_BLOCK_SIZE,
2188 },
2189 .setkey = aead_setkey,
2190 .setauthsize = aead_setauthsize,
2191 .encrypt = aead_encrypt,
2192 .decrypt = aead_decrypt,
2193 .ivsize = NULL_IV_SIZE,
2194 .maxauthsize = SHA512_DIGEST_SIZE,
2195 },
2196 .caam = {
2197 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2198 OP_ALG_AAI_HMAC_PRECOMP,
2199 },
2200 },
2201 {
2202 .aead = {
2203 .base = {
2204 .cra_name = "authenc(hmac(md5),cbc(aes))",
2205 .cra_driver_name = "authenc-hmac-md5-"
2206 "cbc-aes-caam",
2207 .cra_blocksize = AES_BLOCK_SIZE,
2208 },
2209 .setkey = aead_setkey,
2210 .setauthsize = aead_setauthsize,
2211 .encrypt = aead_encrypt,
2212 .decrypt = aead_decrypt,
2213 .ivsize = AES_BLOCK_SIZE,
2214 .maxauthsize = MD5_DIGEST_SIZE,
2215 },
2216 .caam = {
2217 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2218 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2219 OP_ALG_AAI_HMAC_PRECOMP,
2220 },
2221 },
2222 {
2223 .aead = {
2224 .base = {
2225 .cra_name = "echainiv(authenc(hmac(md5),"
2226 "cbc(aes)))",
2227 .cra_driver_name = "echainiv-authenc-hmac-md5-"
2228 "cbc-aes-caam",
2229 .cra_blocksize = AES_BLOCK_SIZE,
2230 },
2231 .setkey = aead_setkey,
2232 .setauthsize = aead_setauthsize,
2233 .encrypt = aead_encrypt,
2234 .decrypt = aead_decrypt,
2235 .ivsize = AES_BLOCK_SIZE,
2236 .maxauthsize = MD5_DIGEST_SIZE,
2237 },
2238 .caam = {
2239 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2240 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2241 OP_ALG_AAI_HMAC_PRECOMP,
2242 .geniv = true,
2243 },
2244 },
2245 {
2246 .aead = {
2247 .base = {
2248 .cra_name = "authenc(hmac(sha1),cbc(aes))",
2249 .cra_driver_name = "authenc-hmac-sha1-"
2250 "cbc-aes-caam",
2251 .cra_blocksize = AES_BLOCK_SIZE,
2252 },
2253 .setkey = aead_setkey,
2254 .setauthsize = aead_setauthsize,
2255 .encrypt = aead_encrypt,
2256 .decrypt = aead_decrypt,
2257 .ivsize = AES_BLOCK_SIZE,
2258 .maxauthsize = SHA1_DIGEST_SIZE,
2259 },
2260 .caam = {
2261 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2262 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2263 OP_ALG_AAI_HMAC_PRECOMP,
2264 },
2265 },
2266 {
2267 .aead = {
2268 .base = {
2269 .cra_name = "echainiv(authenc(hmac(sha1),"
2270 "cbc(aes)))",
2271 .cra_driver_name = "echainiv-authenc-"
2272 "hmac-sha1-cbc-aes-caam",
2273 .cra_blocksize = AES_BLOCK_SIZE,
2274 },
2275 .setkey = aead_setkey,
2276 .setauthsize = aead_setauthsize,
2277 .encrypt = aead_encrypt,
2278 .decrypt = aead_decrypt,
2279 .ivsize = AES_BLOCK_SIZE,
2280 .maxauthsize = SHA1_DIGEST_SIZE,
2281 },
2282 .caam = {
2283 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2284 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2285 OP_ALG_AAI_HMAC_PRECOMP,
2286 .geniv = true,
2287 },
2288 },
2289 {
2290 .aead = {
2291 .base = {
2292 .cra_name = "authenc(hmac(sha224),cbc(aes))",
2293 .cra_driver_name = "authenc-hmac-sha224-"
2294 "cbc-aes-caam",
2295 .cra_blocksize = AES_BLOCK_SIZE,
2296 },
2297 .setkey = aead_setkey,
2298 .setauthsize = aead_setauthsize,
2299 .encrypt = aead_encrypt,
2300 .decrypt = aead_decrypt,
2301 .ivsize = AES_BLOCK_SIZE,
2302 .maxauthsize = SHA224_DIGEST_SIZE,
2303 },
2304 .caam = {
2305 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2306 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2307 OP_ALG_AAI_HMAC_PRECOMP,
2308 },
2309 },
2310 {
2311 .aead = {
2312 .base = {
2313 .cra_name = "echainiv(authenc(hmac(sha224),"
2314 "cbc(aes)))",
2315 .cra_driver_name = "echainiv-authenc-"
2316 "hmac-sha224-cbc-aes-caam",
2317 .cra_blocksize = AES_BLOCK_SIZE,
2318 },
2319 .setkey = aead_setkey,
2320 .setauthsize = aead_setauthsize,
2321 .encrypt = aead_encrypt,
2322 .decrypt = aead_decrypt,
2323 .ivsize = AES_BLOCK_SIZE,
2324 .maxauthsize = SHA224_DIGEST_SIZE,
2325 },
2326 .caam = {
2327 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2328 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2329 OP_ALG_AAI_HMAC_PRECOMP,
2330 .geniv = true,
2331 },
2332 },
2333 {
2334 .aead = {
2335 .base = {
2336 .cra_name = "authenc(hmac(sha256),cbc(aes))",
2337 .cra_driver_name = "authenc-hmac-sha256-"
2338 "cbc-aes-caam",
2339 .cra_blocksize = AES_BLOCK_SIZE,
2340 },
2341 .setkey = aead_setkey,
2342 .setauthsize = aead_setauthsize,
2343 .encrypt = aead_encrypt,
2344 .decrypt = aead_decrypt,
2345 .ivsize = AES_BLOCK_SIZE,
2346 .maxauthsize = SHA256_DIGEST_SIZE,
2347 },
2348 .caam = {
2349 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2350 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2351 OP_ALG_AAI_HMAC_PRECOMP,
2352 },
2353 },
2354 {
2355 .aead = {
2356 .base = {
2357 .cra_name = "echainiv(authenc(hmac(sha256),"
2358 "cbc(aes)))",
2359 .cra_driver_name = "echainiv-authenc-"
2360 "hmac-sha256-cbc-aes-caam",
2361 .cra_blocksize = AES_BLOCK_SIZE,
2362 },
2363 .setkey = aead_setkey,
2364 .setauthsize = aead_setauthsize,
2365 .encrypt = aead_encrypt,
2366 .decrypt = aead_decrypt,
2367 .ivsize = AES_BLOCK_SIZE,
2368 .maxauthsize = SHA256_DIGEST_SIZE,
2369 },
2370 .caam = {
2371 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2372 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2373 OP_ALG_AAI_HMAC_PRECOMP,
2374 .geniv = true,
2375 },
2376 },
2377 {
2378 .aead = {
2379 .base = {
2380 .cra_name = "authenc(hmac(sha384),cbc(aes))",
2381 .cra_driver_name = "authenc-hmac-sha384-"
2382 "cbc-aes-caam",
2383 .cra_blocksize = AES_BLOCK_SIZE,
2384 },
2385 .setkey = aead_setkey,
2386 .setauthsize = aead_setauthsize,
2387 .encrypt = aead_encrypt,
2388 .decrypt = aead_decrypt,
2389 .ivsize = AES_BLOCK_SIZE,
2390 .maxauthsize = SHA384_DIGEST_SIZE,
2391 },
2392 .caam = {
2393 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2394 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2395 OP_ALG_AAI_HMAC_PRECOMP,
2396 },
2397 },
2398 {
2399 .aead = {
2400 .base = {
2401 .cra_name = "echainiv(authenc(hmac(sha384),"
2402 "cbc(aes)))",
2403 .cra_driver_name = "echainiv-authenc-"
2404 "hmac-sha384-cbc-aes-caam",
2405 .cra_blocksize = AES_BLOCK_SIZE,
2406 },
2407 .setkey = aead_setkey,
2408 .setauthsize = aead_setauthsize,
2409 .encrypt = aead_encrypt,
2410 .decrypt = aead_decrypt,
2411 .ivsize = AES_BLOCK_SIZE,
2412 .maxauthsize = SHA384_DIGEST_SIZE,
2413 },
2414 .caam = {
2415 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2416 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2417 OP_ALG_AAI_HMAC_PRECOMP,
2418 .geniv = true,
2419 },
2420 },
2421 {
2422 .aead = {
2423 .base = {
2424 .cra_name = "authenc(hmac(sha512),cbc(aes))",
2425 .cra_driver_name = "authenc-hmac-sha512-"
2426 "cbc-aes-caam",
2427 .cra_blocksize = AES_BLOCK_SIZE,
2428 },
2429 .setkey = aead_setkey,
2430 .setauthsize = aead_setauthsize,
2431 .encrypt = aead_encrypt,
2432 .decrypt = aead_decrypt,
2433 .ivsize = AES_BLOCK_SIZE,
2434 .maxauthsize = SHA512_DIGEST_SIZE,
2435 },
2436 .caam = {
2437 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2438 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2439 OP_ALG_AAI_HMAC_PRECOMP,
2440 },
2441 },
2442 {
2443 .aead = {
2444 .base = {
2445 .cra_name = "echainiv(authenc(hmac(sha512),"
2446 "cbc(aes)))",
2447 .cra_driver_name = "echainiv-authenc-"
2448 "hmac-sha512-cbc-aes-caam",
2449 .cra_blocksize = AES_BLOCK_SIZE,
2450 },
2451 .setkey = aead_setkey,
2452 .setauthsize = aead_setauthsize,
2453 .encrypt = aead_encrypt,
2454 .decrypt = aead_decrypt,
2455 .ivsize = AES_BLOCK_SIZE,
2456 .maxauthsize = SHA512_DIGEST_SIZE,
2457 },
2458 .caam = {
2459 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2460 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2461 OP_ALG_AAI_HMAC_PRECOMP,
2462 .geniv = true,
2463 },
2464 },
2465 {
2466 .aead = {
2467 .base = {
2468 .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
2469 .cra_driver_name = "authenc-hmac-md5-"
2470 "cbc-des3_ede-caam",
2471 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2472 },
2473 .setkey = des3_aead_setkey,
2474 .setauthsize = aead_setauthsize,
2475 .encrypt = aead_encrypt,
2476 .decrypt = aead_decrypt,
2477 .ivsize = DES3_EDE_BLOCK_SIZE,
2478 .maxauthsize = MD5_DIGEST_SIZE,
2479 },
2480 .caam = {
2481 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2482 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2483 OP_ALG_AAI_HMAC_PRECOMP,
2484 }
2485 },
2486 {
2487 .aead = {
2488 .base = {
2489 .cra_name = "echainiv(authenc(hmac(md5),"
2490 "cbc(des3_ede)))",
2491 .cra_driver_name = "echainiv-authenc-hmac-md5-"
2492 "cbc-des3_ede-caam",
2493 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2494 },
2495 .setkey = des3_aead_setkey,
2496 .setauthsize = aead_setauthsize,
2497 .encrypt = aead_encrypt,
2498 .decrypt = aead_decrypt,
2499 .ivsize = DES3_EDE_BLOCK_SIZE,
2500 .maxauthsize = MD5_DIGEST_SIZE,
2501 },
2502 .caam = {
2503 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2504 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2505 OP_ALG_AAI_HMAC_PRECOMP,
2506 .geniv = true,
2507 }
2508 },
2509 {
2510 .aead = {
2511 .base = {
2512 .cra_name = "authenc(hmac(sha1),"
2513 "cbc(des3_ede))",
2514 .cra_driver_name = "authenc-hmac-sha1-"
2515 "cbc-des3_ede-caam",
2516 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2517 },
2518 .setkey = des3_aead_setkey,
2519 .setauthsize = aead_setauthsize,
2520 .encrypt = aead_encrypt,
2521 .decrypt = aead_decrypt,
2522 .ivsize = DES3_EDE_BLOCK_SIZE,
2523 .maxauthsize = SHA1_DIGEST_SIZE,
2524 },
2525 .caam = {
2526 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2527 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2528 OP_ALG_AAI_HMAC_PRECOMP,
2529 },
2530 },
2531 {
2532 .aead = {
2533 .base = {
2534 .cra_name = "echainiv(authenc(hmac(sha1),"
2535 "cbc(des3_ede)))",
2536 .cra_driver_name = "echainiv-authenc-"
2537 "hmac-sha1-"
2538 "cbc-des3_ede-caam",
2539 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2540 },
2541 .setkey = des3_aead_setkey,
2542 .setauthsize = aead_setauthsize,
2543 .encrypt = aead_encrypt,
2544 .decrypt = aead_decrypt,
2545 .ivsize = DES3_EDE_BLOCK_SIZE,
2546 .maxauthsize = SHA1_DIGEST_SIZE,
2547 },
2548 .caam = {
2549 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2550 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2551 OP_ALG_AAI_HMAC_PRECOMP,
2552 .geniv = true,
2553 },
2554 },
2555 {
2556 .aead = {
2557 .base = {
2558 .cra_name = "authenc(hmac(sha224),"
2559 "cbc(des3_ede))",
2560 .cra_driver_name = "authenc-hmac-sha224-"
2561 "cbc-des3_ede-caam",
2562 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2563 },
2564 .setkey = des3_aead_setkey,
2565 .setauthsize = aead_setauthsize,
2566 .encrypt = aead_encrypt,
2567 .decrypt = aead_decrypt,
2568 .ivsize = DES3_EDE_BLOCK_SIZE,
2569 .maxauthsize = SHA224_DIGEST_SIZE,
2570 },
2571 .caam = {
2572 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2573 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2574 OP_ALG_AAI_HMAC_PRECOMP,
2575 },
2576 },
2577 {
2578 .aead = {
2579 .base = {
2580 .cra_name = "echainiv(authenc(hmac(sha224),"
2581 "cbc(des3_ede)))",
2582 .cra_driver_name = "echainiv-authenc-"
2583 "hmac-sha224-"
2584 "cbc-des3_ede-caam",
2585 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2586 },
2587 .setkey = des3_aead_setkey,
2588 .setauthsize = aead_setauthsize,
2589 .encrypt = aead_encrypt,
2590 .decrypt = aead_decrypt,
2591 .ivsize = DES3_EDE_BLOCK_SIZE,
2592 .maxauthsize = SHA224_DIGEST_SIZE,
2593 },
2594 .caam = {
2595 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2596 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2597 OP_ALG_AAI_HMAC_PRECOMP,
2598 .geniv = true,
2599 },
2600 },
2601 {
2602 .aead = {
2603 .base = {
2604 .cra_name = "authenc(hmac(sha256),"
2605 "cbc(des3_ede))",
2606 .cra_driver_name = "authenc-hmac-sha256-"
2607 "cbc-des3_ede-caam",
2608 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2609 },
2610 .setkey = des3_aead_setkey,
2611 .setauthsize = aead_setauthsize,
2612 .encrypt = aead_encrypt,
2613 .decrypt = aead_decrypt,
2614 .ivsize = DES3_EDE_BLOCK_SIZE,
2615 .maxauthsize = SHA256_DIGEST_SIZE,
2616 },
2617 .caam = {
2618 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2619 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2620 OP_ALG_AAI_HMAC_PRECOMP,
2621 },
2622 },
2623 {
2624 .aead = {
2625 .base = {
2626 .cra_name = "echainiv(authenc(hmac(sha256),"
2627 "cbc(des3_ede)))",
2628 .cra_driver_name = "echainiv-authenc-"
2629 "hmac-sha256-"
2630 "cbc-des3_ede-caam",
2631 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2632 },
2633 .setkey = des3_aead_setkey,
2634 .setauthsize = aead_setauthsize,
2635 .encrypt = aead_encrypt,
2636 .decrypt = aead_decrypt,
2637 .ivsize = DES3_EDE_BLOCK_SIZE,
2638 .maxauthsize = SHA256_DIGEST_SIZE,
2639 },
2640 .caam = {
2641 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2642 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2643 OP_ALG_AAI_HMAC_PRECOMP,
2644 .geniv = true,
2645 },
2646 },
2647 {
2648 .aead = {
2649 .base = {
2650 .cra_name = "authenc(hmac(sha384),"
2651 "cbc(des3_ede))",
2652 .cra_driver_name = "authenc-hmac-sha384-"
2653 "cbc-des3_ede-caam",
2654 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2655 },
2656 .setkey = des3_aead_setkey,
2657 .setauthsize = aead_setauthsize,
2658 .encrypt = aead_encrypt,
2659 .decrypt = aead_decrypt,
2660 .ivsize = DES3_EDE_BLOCK_SIZE,
2661 .maxauthsize = SHA384_DIGEST_SIZE,
2662 },
2663 .caam = {
2664 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2665 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2666 OP_ALG_AAI_HMAC_PRECOMP,
2667 },
2668 },
2669 {
2670 .aead = {
2671 .base = {
2672 .cra_name = "echainiv(authenc(hmac(sha384),"
2673 "cbc(des3_ede)))",
2674 .cra_driver_name = "echainiv-authenc-"
2675 "hmac-sha384-"
2676 "cbc-des3_ede-caam",
2677 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2678 },
2679 .setkey = des3_aead_setkey,
2680 .setauthsize = aead_setauthsize,
2681 .encrypt = aead_encrypt,
2682 .decrypt = aead_decrypt,
2683 .ivsize = DES3_EDE_BLOCK_SIZE,
2684 .maxauthsize = SHA384_DIGEST_SIZE,
2685 },
2686 .caam = {
2687 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2688 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2689 OP_ALG_AAI_HMAC_PRECOMP,
2690 .geniv = true,
2691 },
2692 },
2693 {
2694 .aead = {
2695 .base = {
2696 .cra_name = "authenc(hmac(sha512),"
2697 "cbc(des3_ede))",
2698 .cra_driver_name = "authenc-hmac-sha512-"
2699 "cbc-des3_ede-caam",
2700 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2701 },
2702 .setkey = des3_aead_setkey,
2703 .setauthsize = aead_setauthsize,
2704 .encrypt = aead_encrypt,
2705 .decrypt = aead_decrypt,
2706 .ivsize = DES3_EDE_BLOCK_SIZE,
2707 .maxauthsize = SHA512_DIGEST_SIZE,
2708 },
2709 .caam = {
2710 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2711 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2712 OP_ALG_AAI_HMAC_PRECOMP,
2713 },
2714 },
2715 {
2716 .aead = {
2717 .base = {
2718 .cra_name = "echainiv(authenc(hmac(sha512),"
2719 "cbc(des3_ede)))",
2720 .cra_driver_name = "echainiv-authenc-"
2721 "hmac-sha512-"
2722 "cbc-des3_ede-caam",
2723 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2724 },
2725 .setkey = des3_aead_setkey,
2726 .setauthsize = aead_setauthsize,
2727 .encrypt = aead_encrypt,
2728 .decrypt = aead_decrypt,
2729 .ivsize = DES3_EDE_BLOCK_SIZE,
2730 .maxauthsize = SHA512_DIGEST_SIZE,
2731 },
2732 .caam = {
2733 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2734 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2735 OP_ALG_AAI_HMAC_PRECOMP,
2736 .geniv = true,
2737 },
2738 },
2739 {
2740 .aead = {
2741 .base = {
2742 .cra_name = "authenc(hmac(md5),cbc(des))",
2743 .cra_driver_name = "authenc-hmac-md5-"
2744 "cbc-des-caam",
2745 .cra_blocksize = DES_BLOCK_SIZE,
2746 },
2747 .setkey = aead_setkey,
2748 .setauthsize = aead_setauthsize,
2749 .encrypt = aead_encrypt,
2750 .decrypt = aead_decrypt,
2751 .ivsize = DES_BLOCK_SIZE,
2752 .maxauthsize = MD5_DIGEST_SIZE,
2753 },
2754 .caam = {
2755 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2756 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2757 OP_ALG_AAI_HMAC_PRECOMP,
2758 },
2759 },
2760 {
2761 .aead = {
2762 .base = {
2763 .cra_name = "echainiv(authenc(hmac(md5),"
2764 "cbc(des)))",
2765 .cra_driver_name = "echainiv-authenc-hmac-md5-"
2766 "cbc-des-caam",
2767 .cra_blocksize = DES_BLOCK_SIZE,
2768 },
2769 .setkey = aead_setkey,
2770 .setauthsize = aead_setauthsize,
2771 .encrypt = aead_encrypt,
2772 .decrypt = aead_decrypt,
2773 .ivsize = DES_BLOCK_SIZE,
2774 .maxauthsize = MD5_DIGEST_SIZE,
2775 },
2776 .caam = {
2777 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2778 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2779 OP_ALG_AAI_HMAC_PRECOMP,
2780 .geniv = true,
2781 },
2782 },
2783 {
2784 .aead = {
2785 .base = {
2786 .cra_name = "authenc(hmac(sha1),cbc(des))",
2787 .cra_driver_name = "authenc-hmac-sha1-"
2788 "cbc-des-caam",
2789 .cra_blocksize = DES_BLOCK_SIZE,
2790 },
2791 .setkey = aead_setkey,
2792 .setauthsize = aead_setauthsize,
2793 .encrypt = aead_encrypt,
2794 .decrypt = aead_decrypt,
2795 .ivsize = DES_BLOCK_SIZE,
2796 .maxauthsize = SHA1_DIGEST_SIZE,
2797 },
2798 .caam = {
2799 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2800 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2801 OP_ALG_AAI_HMAC_PRECOMP,
2802 },
2803 },
2804 {
2805 .aead = {
2806 .base = {
2807 .cra_name = "echainiv(authenc(hmac(sha1),"
2808 "cbc(des)))",
2809 .cra_driver_name = "echainiv-authenc-"
2810 "hmac-sha1-cbc-des-caam",
2811 .cra_blocksize = DES_BLOCK_SIZE,
2812 },
2813 .setkey = aead_setkey,
2814 .setauthsize = aead_setauthsize,
2815 .encrypt = aead_encrypt,
2816 .decrypt = aead_decrypt,
2817 .ivsize = DES_BLOCK_SIZE,
2818 .maxauthsize = SHA1_DIGEST_SIZE,
2819 },
2820 .caam = {
2821 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2822 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2823 OP_ALG_AAI_HMAC_PRECOMP,
2824 .geniv = true,
2825 },
2826 },
2827 {
2828 .aead = {
2829 .base = {
2830 .cra_name = "authenc(hmac(sha224),cbc(des))",
2831 .cra_driver_name = "authenc-hmac-sha224-"
2832 "cbc-des-caam",
2833 .cra_blocksize = DES_BLOCK_SIZE,
2834 },
2835 .setkey = aead_setkey,
2836 .setauthsize = aead_setauthsize,
2837 .encrypt = aead_encrypt,
2838 .decrypt = aead_decrypt,
2839 .ivsize = DES_BLOCK_SIZE,
2840 .maxauthsize = SHA224_DIGEST_SIZE,
2841 },
2842 .caam = {
2843 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2844 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2845 OP_ALG_AAI_HMAC_PRECOMP,
2846 },
2847 },
2848 {
2849 .aead = {
2850 .base = {
2851 .cra_name = "echainiv(authenc(hmac(sha224),"
2852 "cbc(des)))",
2853 .cra_driver_name = "echainiv-authenc-"
2854 "hmac-sha224-cbc-des-caam",
2855 .cra_blocksize = DES_BLOCK_SIZE,
2856 },
2857 .setkey = aead_setkey,
2858 .setauthsize = aead_setauthsize,
2859 .encrypt = aead_encrypt,
2860 .decrypt = aead_decrypt,
2861 .ivsize = DES_BLOCK_SIZE,
2862 .maxauthsize = SHA224_DIGEST_SIZE,
2863 },
2864 .caam = {
2865 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2866 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2867 OP_ALG_AAI_HMAC_PRECOMP,
2868 .geniv = true,
2869 },
2870 },
2871 {
2872 .aead = {
2873 .base = {
2874 .cra_name = "authenc(hmac(sha256),cbc(des))",
2875 .cra_driver_name = "authenc-hmac-sha256-"
2876 "cbc-des-caam",
2877 .cra_blocksize = DES_BLOCK_SIZE,
2878 },
2879 .setkey = aead_setkey,
2880 .setauthsize = aead_setauthsize,
2881 .encrypt = aead_encrypt,
2882 .decrypt = aead_decrypt,
2883 .ivsize = DES_BLOCK_SIZE,
2884 .maxauthsize = SHA256_DIGEST_SIZE,
2885 },
2886 .caam = {
2887 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2888 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2889 OP_ALG_AAI_HMAC_PRECOMP,
2890 },
2891 },
2892 {
2893 .aead = {
2894 .base = {
2895 .cra_name = "echainiv(authenc(hmac(sha256),"
2896 "cbc(des)))",
2897 .cra_driver_name = "echainiv-authenc-"
2898 "hmac-sha256-cbc-des-caam",
2899 .cra_blocksize = DES_BLOCK_SIZE,
2900 },
2901 .setkey = aead_setkey,
2902 .setauthsize = aead_setauthsize,
2903 .encrypt = aead_encrypt,
2904 .decrypt = aead_decrypt,
2905 .ivsize = DES_BLOCK_SIZE,
2906 .maxauthsize = SHA256_DIGEST_SIZE,
2907 },
2908 .caam = {
2909 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2910 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2911 OP_ALG_AAI_HMAC_PRECOMP,
2912 .geniv = true,
2913 },
2914 },
2915 {
2916 .aead = {
2917 .base = {
2918 .cra_name = "authenc(hmac(sha384),cbc(des))",
2919 .cra_driver_name = "authenc-hmac-sha384-"
2920 "cbc-des-caam",
2921 .cra_blocksize = DES_BLOCK_SIZE,
2922 },
2923 .setkey = aead_setkey,
2924 .setauthsize = aead_setauthsize,
2925 .encrypt = aead_encrypt,
2926 .decrypt = aead_decrypt,
2927 .ivsize = DES_BLOCK_SIZE,
2928 .maxauthsize = SHA384_DIGEST_SIZE,
2929 },
2930 .caam = {
2931 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2932 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2933 OP_ALG_AAI_HMAC_PRECOMP,
2934 },
2935 },
2936 {
2937 .aead = {
2938 .base = {
2939 .cra_name = "echainiv(authenc(hmac(sha384),"
2940 "cbc(des)))",
2941 .cra_driver_name = "echainiv-authenc-"
2942 "hmac-sha384-cbc-des-caam",
2943 .cra_blocksize = DES_BLOCK_SIZE,
2944 },
2945 .setkey = aead_setkey,
2946 .setauthsize = aead_setauthsize,
2947 .encrypt = aead_encrypt,
2948 .decrypt = aead_decrypt,
2949 .ivsize = DES_BLOCK_SIZE,
2950 .maxauthsize = SHA384_DIGEST_SIZE,
2951 },
2952 .caam = {
2953 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2954 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2955 OP_ALG_AAI_HMAC_PRECOMP,
2956 .geniv = true,
2957 },
2958 },
2959 {
2960 .aead = {
2961 .base = {
2962 .cra_name = "authenc(hmac(sha512),cbc(des))",
2963 .cra_driver_name = "authenc-hmac-sha512-"
2964 "cbc-des-caam",
2965 .cra_blocksize = DES_BLOCK_SIZE,
2966 },
2967 .setkey = aead_setkey,
2968 .setauthsize = aead_setauthsize,
2969 .encrypt = aead_encrypt,
2970 .decrypt = aead_decrypt,
2971 .ivsize = DES_BLOCK_SIZE,
2972 .maxauthsize = SHA512_DIGEST_SIZE,
2973 },
2974 .caam = {
2975 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2976 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2977 OP_ALG_AAI_HMAC_PRECOMP,
2978 },
2979 },
2980 {
2981 .aead = {
2982 .base = {
2983 .cra_name = "echainiv(authenc(hmac(sha512),"
2984 "cbc(des)))",
2985 .cra_driver_name = "echainiv-authenc-"
2986 "hmac-sha512-cbc-des-caam",
2987 .cra_blocksize = DES_BLOCK_SIZE,
2988 },
2989 .setkey = aead_setkey,
2990 .setauthsize = aead_setauthsize,
2991 .encrypt = aead_encrypt,
2992 .decrypt = aead_decrypt,
2993 .ivsize = DES_BLOCK_SIZE,
2994 .maxauthsize = SHA512_DIGEST_SIZE,
2995 },
2996 .caam = {
2997 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2998 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2999 OP_ALG_AAI_HMAC_PRECOMP,
3000 .geniv = true,
3001 },
3002 },
3003 {
3004 .aead = {
3005 .base = {
3006 .cra_name = "authenc(hmac(md5),"
3007 "rfc3686(ctr(aes)))",
3008 .cra_driver_name = "authenc-hmac-md5-"
3009 "rfc3686-ctr-aes-caam",
3010 .cra_blocksize = 1,
3011 },
3012 .setkey = aead_setkey,
3013 .setauthsize = aead_setauthsize,
3014 .encrypt = aead_encrypt,
3015 .decrypt = aead_decrypt,
3016 .ivsize = CTR_RFC3686_IV_SIZE,
3017 .maxauthsize = MD5_DIGEST_SIZE,
3018 },
3019 .caam = {
3020 .class1_alg_type = OP_ALG_ALGSEL_AES |
3021 OP_ALG_AAI_CTR_MOD128,
3022 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3023 OP_ALG_AAI_HMAC_PRECOMP,
3024 .rfc3686 = true,
3025 },
3026 },
3027 {
3028 .aead = {
3029 .base = {
3030 .cra_name = "seqiv(authenc("
3031 "hmac(md5),rfc3686(ctr(aes))))",
3032 .cra_driver_name = "seqiv-authenc-hmac-md5-"
3033 "rfc3686-ctr-aes-caam",
3034 .cra_blocksize = 1,
3035 },
3036 .setkey = aead_setkey,
3037 .setauthsize = aead_setauthsize,
3038 .encrypt = aead_encrypt,
3039 .decrypt = aead_decrypt,
3040 .ivsize = CTR_RFC3686_IV_SIZE,
3041 .maxauthsize = MD5_DIGEST_SIZE,
3042 },
3043 .caam = {
3044 .class1_alg_type = OP_ALG_ALGSEL_AES |
3045 OP_ALG_AAI_CTR_MOD128,
3046 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3047 OP_ALG_AAI_HMAC_PRECOMP,
3048 .rfc3686 = true,
3049 .geniv = true,
3050 },
3051 },
3052 {
3053 .aead = {
3054 .base = {
3055 .cra_name = "authenc(hmac(sha1),"
3056 "rfc3686(ctr(aes)))",
3057 .cra_driver_name = "authenc-hmac-sha1-"
3058 "rfc3686-ctr-aes-caam",
3059 .cra_blocksize = 1,
3060 },
3061 .setkey = aead_setkey,
3062 .setauthsize = aead_setauthsize,
3063 .encrypt = aead_encrypt,
3064 .decrypt = aead_decrypt,
3065 .ivsize = CTR_RFC3686_IV_SIZE,
3066 .maxauthsize = SHA1_DIGEST_SIZE,
3067 },
3068 .caam = {
3069 .class1_alg_type = OP_ALG_ALGSEL_AES |
3070 OP_ALG_AAI_CTR_MOD128,
3071 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3072 OP_ALG_AAI_HMAC_PRECOMP,
3073 .rfc3686 = true,
3074 },
3075 },
3076 {
3077 .aead = {
3078 .base = {
3079 .cra_name = "seqiv(authenc("
3080 "hmac(sha1),rfc3686(ctr(aes))))",
3081 .cra_driver_name = "seqiv-authenc-hmac-sha1-"
3082 "rfc3686-ctr-aes-caam",
3083 .cra_blocksize = 1,
3084 },
3085 .setkey = aead_setkey,
3086 .setauthsize = aead_setauthsize,
3087 .encrypt = aead_encrypt,
3088 .decrypt = aead_decrypt,
3089 .ivsize = CTR_RFC3686_IV_SIZE,
3090 .maxauthsize = SHA1_DIGEST_SIZE,
3091 },
3092 .caam = {
3093 .class1_alg_type = OP_ALG_ALGSEL_AES |
3094 OP_ALG_AAI_CTR_MOD128,
3095 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3096 OP_ALG_AAI_HMAC_PRECOMP,
3097 .rfc3686 = true,
3098 .geniv = true,
3099 },
3100 },
3101 {
3102 .aead = {
3103 .base = {
3104 .cra_name = "authenc(hmac(sha224),"
3105 "rfc3686(ctr(aes)))",
3106 .cra_driver_name = "authenc-hmac-sha224-"
3107 "rfc3686-ctr-aes-caam",
3108 .cra_blocksize = 1,
3109 },
3110 .setkey = aead_setkey,
3111 .setauthsize = aead_setauthsize,
3112 .encrypt = aead_encrypt,
3113 .decrypt = aead_decrypt,
3114 .ivsize = CTR_RFC3686_IV_SIZE,
3115 .maxauthsize = SHA224_DIGEST_SIZE,
3116 },
3117 .caam = {
3118 .class1_alg_type = OP_ALG_ALGSEL_AES |
3119 OP_ALG_AAI_CTR_MOD128,
3120 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3121 OP_ALG_AAI_HMAC_PRECOMP,
3122 .rfc3686 = true,
3123 },
3124 },
3125 {
3126 .aead = {
3127 .base = {
3128 .cra_name = "seqiv(authenc("
3129 "hmac(sha224),rfc3686(ctr(aes))))",
3130 .cra_driver_name = "seqiv-authenc-hmac-sha224-"
3131 "rfc3686-ctr-aes-caam",
3132 .cra_blocksize = 1,
3133 },
3134 .setkey = aead_setkey,
3135 .setauthsize = aead_setauthsize,
3136 .encrypt = aead_encrypt,
3137 .decrypt = aead_decrypt,
3138 .ivsize = CTR_RFC3686_IV_SIZE,
3139 .maxauthsize = SHA224_DIGEST_SIZE,
3140 },
3141 .caam = {
3142 .class1_alg_type = OP_ALG_ALGSEL_AES |
3143 OP_ALG_AAI_CTR_MOD128,
3144 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3145 OP_ALG_AAI_HMAC_PRECOMP,
3146 .rfc3686 = true,
3147 .geniv = true,
3148 },
3149 },
3150 {
3151 .aead = {
3152 .base = {
3153 .cra_name = "authenc(hmac(sha256),"
3154 "rfc3686(ctr(aes)))",
3155 .cra_driver_name = "authenc-hmac-sha256-"
3156 "rfc3686-ctr-aes-caam",
3157 .cra_blocksize = 1,
3158 },
3159 .setkey = aead_setkey,
3160 .setauthsize = aead_setauthsize,
3161 .encrypt = aead_encrypt,
3162 .decrypt = aead_decrypt,
3163 .ivsize = CTR_RFC3686_IV_SIZE,
3164 .maxauthsize = SHA256_DIGEST_SIZE,
3165 },
3166 .caam = {
3167 .class1_alg_type = OP_ALG_ALGSEL_AES |
3168 OP_ALG_AAI_CTR_MOD128,
3169 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3170 OP_ALG_AAI_HMAC_PRECOMP,
3171 .rfc3686 = true,
3172 },
3173 },
3174 {
3175 .aead = {
3176 .base = {
3177 .cra_name = "seqiv(authenc(hmac(sha256),"
3178 "rfc3686(ctr(aes))))",
3179 .cra_driver_name = "seqiv-authenc-hmac-sha256-"
3180 "rfc3686-ctr-aes-caam",
3181 .cra_blocksize = 1,
3182 },
3183 .setkey = aead_setkey,
3184 .setauthsize = aead_setauthsize,
3185 .encrypt = aead_encrypt,
3186 .decrypt = aead_decrypt,
3187 .ivsize = CTR_RFC3686_IV_SIZE,
3188 .maxauthsize = SHA256_DIGEST_SIZE,
3189 },
3190 .caam = {
3191 .class1_alg_type = OP_ALG_ALGSEL_AES |
3192 OP_ALG_AAI_CTR_MOD128,
3193 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3194 OP_ALG_AAI_HMAC_PRECOMP,
3195 .rfc3686 = true,
3196 .geniv = true,
3197 },
3198 },
3199 {
3200 .aead = {
3201 .base = {
3202 .cra_name = "authenc(hmac(sha384),"
3203 "rfc3686(ctr(aes)))",
3204 .cra_driver_name = "authenc-hmac-sha384-"
3205 "rfc3686-ctr-aes-caam",
3206 .cra_blocksize = 1,
3207 },
3208 .setkey = aead_setkey,
3209 .setauthsize = aead_setauthsize,
3210 .encrypt = aead_encrypt,
3211 .decrypt = aead_decrypt,
3212 .ivsize = CTR_RFC3686_IV_SIZE,
3213 .maxauthsize = SHA384_DIGEST_SIZE,
3214 },
3215 .caam = {
3216 .class1_alg_type = OP_ALG_ALGSEL_AES |
3217 OP_ALG_AAI_CTR_MOD128,
3218 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3219 OP_ALG_AAI_HMAC_PRECOMP,
3220 .rfc3686 = true,
3221 },
3222 },
3223 {
3224 .aead = {
3225 .base = {
3226 .cra_name = "seqiv(authenc(hmac(sha384),"
3227 "rfc3686(ctr(aes))))",
3228 .cra_driver_name = "seqiv-authenc-hmac-sha384-"
3229 "rfc3686-ctr-aes-caam",
3230 .cra_blocksize = 1,
3231 },
3232 .setkey = aead_setkey,
3233 .setauthsize = aead_setauthsize,
3234 .encrypt = aead_encrypt,
3235 .decrypt = aead_decrypt,
3236 .ivsize = CTR_RFC3686_IV_SIZE,
3237 .maxauthsize = SHA384_DIGEST_SIZE,
3238 },
3239 .caam = {
3240 .class1_alg_type = OP_ALG_ALGSEL_AES |
3241 OP_ALG_AAI_CTR_MOD128,
3242 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3243 OP_ALG_AAI_HMAC_PRECOMP,
3244 .rfc3686 = true,
3245 .geniv = true,
3246 },
3247 },
3248 {
3249 .aead = {
3250 .base = {
3251 .cra_name = "authenc(hmac(sha512),"
3252 "rfc3686(ctr(aes)))",
3253 .cra_driver_name = "authenc-hmac-sha512-"
3254 "rfc3686-ctr-aes-caam",
3255 .cra_blocksize = 1,
3256 },
3257 .setkey = aead_setkey,
3258 .setauthsize = aead_setauthsize,
3259 .encrypt = aead_encrypt,
3260 .decrypt = aead_decrypt,
3261 .ivsize = CTR_RFC3686_IV_SIZE,
3262 .maxauthsize = SHA512_DIGEST_SIZE,
3263 },
3264 .caam = {
3265 .class1_alg_type = OP_ALG_ALGSEL_AES |
3266 OP_ALG_AAI_CTR_MOD128,
3267 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3268 OP_ALG_AAI_HMAC_PRECOMP,
3269 .rfc3686 = true,
3270 },
3271 },
3272 {
3273 .aead = {
3274 .base = {
3275 .cra_name = "seqiv(authenc(hmac(sha512),"
3276 "rfc3686(ctr(aes))))",
3277 .cra_driver_name = "seqiv-authenc-hmac-sha512-"
3278 "rfc3686-ctr-aes-caam",
3279 .cra_blocksize = 1,
3280 },
3281 .setkey = aead_setkey,
3282 .setauthsize = aead_setauthsize,
3283 .encrypt = aead_encrypt,
3284 .decrypt = aead_decrypt,
3285 .ivsize = CTR_RFC3686_IV_SIZE,
3286 .maxauthsize = SHA512_DIGEST_SIZE,
3287 },
3288 .caam = {
3289 .class1_alg_type = OP_ALG_ALGSEL_AES |
3290 OP_ALG_AAI_CTR_MOD128,
3291 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3292 OP_ALG_AAI_HMAC_PRECOMP,
3293 .rfc3686 = true,
3294 .geniv = true,
3295 },
3296 },
3297 {
3298 .aead = {
3299 .base = {
3300 .cra_name = "rfc7539(chacha20,poly1305)",
3301 .cra_driver_name = "rfc7539-chacha20-poly1305-"
3302 "caam",
3303 .cra_blocksize = 1,
3304 },
3305 .setkey = chachapoly_setkey,
3306 .setauthsize = chachapoly_setauthsize,
3307 .encrypt = chachapoly_encrypt,
3308 .decrypt = chachapoly_decrypt,
3309 .ivsize = CHACHAPOLY_IV_SIZE,
3310 .maxauthsize = POLY1305_DIGEST_SIZE,
3311 },
3312 .caam = {
3313 .class1_alg_type = OP_ALG_ALGSEL_CHACHA20 |
3314 OP_ALG_AAI_AEAD,
3315 .class2_alg_type = OP_ALG_ALGSEL_POLY1305 |
3316 OP_ALG_AAI_AEAD,
3317 .nodkp = true,
3318 },
3319 },
3320 {
3321 .aead = {
3322 .base = {
3323 .cra_name = "rfc7539esp(chacha20,poly1305)",
3324 .cra_driver_name = "rfc7539esp-chacha20-"
3325 "poly1305-caam",
3326 .cra_blocksize = 1,
3327 },
3328 .setkey = chachapoly_setkey,
3329 .setauthsize = chachapoly_setauthsize,
3330 .encrypt = chachapoly_encrypt,
3331 .decrypt = chachapoly_decrypt,
3332 .ivsize = 8,
3333 .maxauthsize = POLY1305_DIGEST_SIZE,
3334 },
3335 .caam = {
3336 .class1_alg_type = OP_ALG_ALGSEL_CHACHA20 |
3337 OP_ALG_AAI_AEAD,
3338 .class2_alg_type = OP_ALG_ALGSEL_POLY1305 |
3339 OP_ALG_AAI_AEAD,
3340 .nodkp = true,
3341 },
3342 },
3343 };
3344
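/*
 * The DMA mapping below spans from sh_desc_enc up to (but not including)
 * sh_desc_enc_dma, i.e. it covers the encrypt descriptor, the decrypt
 * descriptor and the key in one go; sh_desc_dec_dma and key_dma are then
 * derived from the base address via structure offsets.
 */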
3345 static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam,
3346 bool uses_dkp)
3347 {
3348 dma_addr_t dma_addr;
3349 struct caam_drv_private *priv;
3350 const size_t sh_desc_enc_offset = offsetof(struct caam_ctx,
3351 sh_desc_enc);
3352
3353 ctx->jrdev = caam_jr_alloc();
3354 if (IS_ERR(ctx->jrdev)) {
3355 pr_err("Job Ring Device allocation for transform failed\n");
3356 return PTR_ERR(ctx->jrdev);
3357 }
3358
3359 priv = dev_get_drvdata(ctx->jrdev->parent);
3360 if (priv->era >= 6 && uses_dkp)
3361 ctx->dir = DMA_BIDIRECTIONAL;
3362 else
3363 ctx->dir = DMA_TO_DEVICE;
3364
3365 dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc,
3366 offsetof(struct caam_ctx,
3367 sh_desc_enc_dma) -
3368 sh_desc_enc_offset,
3369 ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
3370 if (dma_mapping_error(ctx->jrdev, dma_addr)) {
3371 dev_err(ctx->jrdev, "unable to map key, shared descriptors\n");
3372 caam_jr_free(ctx->jrdev);
3373 return -ENOMEM;
3374 }
3375
3376 ctx->sh_desc_enc_dma = dma_addr;
3377 ctx->sh_desc_dec_dma = dma_addr + offsetof(struct caam_ctx,
3378 sh_desc_dec) -
3379 sh_desc_enc_offset;
3380 ctx->key_dma = dma_addr + offsetof(struct caam_ctx, key) -
3381 sh_desc_enc_offset;
3382
3383 /* copy descriptor header template value */
3384 ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
3385 ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;
3386
3387 return 0;
3388 }
3389
3390 static int caam_cra_init(struct crypto_skcipher *tfm)
3391 {
3392 struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
3393 struct caam_skcipher_alg *caam_alg =
3394 container_of(alg, typeof(*caam_alg), skcipher);
3395 struct caam_ctx *ctx = crypto_skcipher_ctx(tfm);
3396 u32 alg_aai = caam_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;
3397 int ret = 0;
3398
3399 ctx->enginectx.op.do_one_request = skcipher_do_one_req;
3400
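	/*
	 * Only XTS needs a software fallback: judging by the checks in
	 * skcipher_crypt(), it is used for IVs whose upper half is non-zero
	 * on era <= 8 hardware and for key sizes flagged by xts_key_fallback,
	 * so room for the fallback request is also reserved in the reqsize.
	 */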
3401 if (alg_aai == OP_ALG_AAI_XTS) {
3402 const char *tfm_name = crypto_tfm_alg_name(&tfm->base);
3403 struct crypto_skcipher *fallback;
3404
3405 fallback = crypto_alloc_skcipher(tfm_name, 0,
3406 CRYPTO_ALG_NEED_FALLBACK);
3407 if (IS_ERR(fallback)) {
3408 pr_err("Failed to allocate %s fallback: %ld\n",
3409 tfm_name, PTR_ERR(fallback));
3410 return PTR_ERR(fallback);
3411 }
3412
3413 ctx->fallback = fallback;
3414 crypto_skcipher_set_reqsize(tfm, sizeof(struct caam_skcipher_req_ctx) +
3415 crypto_skcipher_reqsize(fallback));
3416 } else {
3417 crypto_skcipher_set_reqsize(tfm, sizeof(struct caam_skcipher_req_ctx));
3418 }
3419
3420 ret = caam_init_common(ctx, &caam_alg->caam, false);
3421 if (ret && ctx->fallback)
3422 crypto_free_skcipher(ctx->fallback);
3423
3424 return ret;
3425 }
3426
3427 static int caam_aead_init(struct crypto_aead *tfm)
3428 {
3429 struct aead_alg *alg = crypto_aead_alg(tfm);
3430 struct caam_aead_alg *caam_alg =
3431 container_of(alg, struct caam_aead_alg, aead);
3432 struct caam_ctx *ctx = crypto_aead_ctx(tfm);
3433
3434 crypto_aead_set_reqsize(tfm, sizeof(struct caam_aead_req_ctx));
3435
3436 ctx->enginectx.op.do_one_request = aead_do_one_req;
3437
3438 return caam_init_common(ctx, &caam_alg->caam, !caam_alg->caam.nodkp);
3439 }
3440
3441 static void caam_exit_common(struct caam_ctx *ctx)
3442 {
3443 dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma,
3444 offsetof(struct caam_ctx, sh_desc_enc_dma) -
3445 offsetof(struct caam_ctx, sh_desc_enc),
3446 ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
3447 caam_jr_free(ctx->jrdev);
3448 }
3449
3450 static void caam_cra_exit(struct crypto_skcipher *tfm)
3451 {
3452 struct caam_ctx *ctx = crypto_skcipher_ctx(tfm);
3453
3454 if (ctx->fallback)
3455 crypto_free_skcipher(ctx->fallback);
3456 caam_exit_common(ctx);
3457 }
3458
3459 static void caam_aead_exit(struct crypto_aead *tfm)
3460 {
3461 caam_exit_common(crypto_aead_ctx(tfm));
3462 }
3463
3464 void caam_algapi_exit(void)
3465 {
3466 int i;
3467
3468 for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
3469 struct caam_aead_alg *t_alg = driver_aeads + i;
3470
3471 if (t_alg->registered)
3472 crypto_unregister_aead(&t_alg->aead);
3473 }
3474
3475 for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
3476 struct caam_skcipher_alg *t_alg = driver_algs + i;
3477
3478 if (t_alg->registered)
3479 crypto_unregister_skcipher(&t_alg->skcipher);
3480 }
3481 }
3482
3483 static void caam_skcipher_alg_init(struct caam_skcipher_alg *t_alg)
3484 {
3485 struct skcipher_alg *alg = &t_alg->skcipher;
3486
3487 alg->base.cra_module = THIS_MODULE;
3488 alg->base.cra_priority = CAAM_CRA_PRIORITY;
3489 alg->base.cra_ctxsize = sizeof(struct caam_ctx);
3490 alg->base.cra_flags |= (CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
3491 CRYPTO_ALG_KERN_DRIVER_ONLY);
3492
3493 alg->init = caam_cra_init;
3494 alg->exit = caam_cra_exit;
3495 }
3496
3497 static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
3498 {
3499 struct aead_alg *alg = &t_alg->aead;
3500
3501 alg->base.cra_module = THIS_MODULE;
3502 alg->base.cra_priority = CAAM_CRA_PRIORITY;
3503 alg->base.cra_ctxsize = sizeof(struct caam_ctx);
3504 alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
3505 CRYPTO_ALG_KERN_DRIVER_ONLY;
3506
3507 alg->init = caam_aead_init;
3508 alg->exit = caam_aead_exit;
3509 }
3510
3511 int caam_algapi_init(struct device *ctrldev)
3512 {
3513 struct caam_drv_private *priv = dev_get_drvdata(ctrldev);
3514 int i = 0, err = 0;
3515 u32 aes_vid, aes_inst, des_inst, md_vid, md_inst, ccha_inst, ptha_inst;
3516 unsigned int md_limit = SHA512_DIGEST_SIZE;
3517 bool registered = false, gcm_support;
3518
3519 /*
3520 * Register crypto algorithms the device supports.
3521 * First, detect presence and attributes of DES, AES, and MD blocks.
3522 */
3523 if (priv->era < 10) {
3524 u32 cha_vid, cha_inst, aes_rn;
3525
3526 cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
3527 aes_vid = cha_vid & CHA_ID_LS_AES_MASK;
3528 md_vid = (cha_vid & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
3529
3530 cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
3531 des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >>
3532 CHA_ID_LS_DES_SHIFT;
3533 aes_inst = cha_inst & CHA_ID_LS_AES_MASK;
3534 md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
3535 ccha_inst = 0;
3536 ptha_inst = 0;
3537
3538 aes_rn = rd_reg32(&priv->ctrl->perfmon.cha_rev_ls) &
3539 CHA_ID_LS_AES_MASK;
3540 gcm_support = !(aes_vid == CHA_VER_VID_AES_LP && aes_rn < 8);
3541 } else {
3542 u32 aesa, mdha;
3543
3544 aesa = rd_reg32(&priv->ctrl->vreg.aesa);
3545 mdha = rd_reg32(&priv->ctrl->vreg.mdha);
3546
3547 aes_vid = (aesa & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT;
3548 md_vid = (mdha & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT;
3549
3550 des_inst = rd_reg32(&priv->ctrl->vreg.desa) & CHA_VER_NUM_MASK;
3551 aes_inst = aesa & CHA_VER_NUM_MASK;
3552 md_inst = mdha & CHA_VER_NUM_MASK;
3553 ccha_inst = rd_reg32(&priv->ctrl->vreg.ccha) & CHA_VER_NUM_MASK;
3554 ptha_inst = rd_reg32(&priv->ctrl->vreg.ptha) & CHA_VER_NUM_MASK;
3555
3556 gcm_support = aesa & CHA_VER_MISC_AES_GCM;
3557 }
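	/*
	 * Era < 10 parts report capabilities through the global CHA ID /
	 * instantiation registers; newer parts expose per-accelerator version
	 * registers instead. The gcm_support test above reflects that
	 * low-power AES accelerators before revision 8 apparently lack GCM.
	 */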
3558
3559 /* If MD is present, limit digest size based on LP256 */
3560 if (md_inst && md_vid == CHA_VER_VID_MD_LP256)
3561 md_limit = SHA256_DIGEST_SIZE;
3562
3563 for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
3564 struct caam_skcipher_alg *t_alg = driver_algs + i;
3565 u32 alg_sel = t_alg->caam.class1_alg_type & OP_ALG_ALGSEL_MASK;
3566
3567 /* Skip DES algorithms if not supported by device */
3568 if (!des_inst &&
3569 ((alg_sel == OP_ALG_ALGSEL_3DES) ||
3570 (alg_sel == OP_ALG_ALGSEL_DES)))
3571 continue;
3572
3573 /* Skip AES algorithms if not supported by device */
3574 if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
3575 continue;
3576
3577 /*
3578 * Check support for AES modes not available
3579 * on LP devices.
3580 */
3581 if (aes_vid == CHA_VER_VID_AES_LP &&
3582 (t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK) ==
3583 OP_ALG_AAI_XTS)
3584 continue;
3585
3586 caam_skcipher_alg_init(t_alg);
3587
3588 err = crypto_register_skcipher(&t_alg->skcipher);
3589 if (err) {
3590 pr_warn("%s alg registration failed\n",
3591 t_alg->skcipher.base.cra_driver_name);
3592 continue;
3593 }
3594
3595 t_alg->registered = true;
3596 registered = true;
3597 }
3598
3599 for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
3600 struct caam_aead_alg *t_alg = driver_aeads + i;
3601 u32 c1_alg_sel = t_alg->caam.class1_alg_type &
3602 OP_ALG_ALGSEL_MASK;
3603 u32 c2_alg_sel = t_alg->caam.class2_alg_type &
3604 OP_ALG_ALGSEL_MASK;
3605 u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;
3606
3607 /* Skip DES algorithms if not supported by device */
3608 if (!des_inst &&
3609 ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
3610 (c1_alg_sel == OP_ALG_ALGSEL_DES)))
3611 continue;
3612
3613 /* Skip AES algorithms if not supported by device */
3614 if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
3615 continue;
3616
3617 /* Skip CHACHA20 algorithms if not supported by device */
3618 if (c1_alg_sel == OP_ALG_ALGSEL_CHACHA20 && !ccha_inst)
3619 continue;
3620
3621 /* Skip POLY1305 algorithms if not supported by device */
3622 if (c2_alg_sel == OP_ALG_ALGSEL_POLY1305 && !ptha_inst)
3623 continue;
3624
3625 /* Skip GCM algorithms if not supported by device */
3626 if (c1_alg_sel == OP_ALG_ALGSEL_AES &&
3627 alg_aai == OP_ALG_AAI_GCM && !gcm_support)
3628 continue;
3629
3630 /*
3631 * Skip algorithms requiring message digests
3632 * if MD or MD size is not supported by device.
3633 */
3634 if (is_mdha(c2_alg_sel) &&
3635 (!md_inst || t_alg->aead.maxauthsize > md_limit))
3636 continue;
3637
3638 caam_aead_alg_init(t_alg);
3639
3640 err = crypto_register_aead(&t_alg->aead);
3641 if (err) {
3642 pr_warn("%s alg registration failed\n",
3643 t_alg->aead.base.cra_driver_name);
3644 continue;
3645 }
3646
3647 t_alg->registered = true;
3648 registered = true;
3649 }
3650
3651 if (registered)
3652 pr_info("caam algorithms registered in /proc/crypto\n");
3653
3654 return err;
3655 }
3656