// SPDX-License-Identifier: GPL-2.0-only
// SPDX-FileCopyrightText: Copyright (c) 2023 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
/*
 * Crypto driver to handle HASH algorithms using NVIDIA Security Engine.
 */

#include <linux/clk.h>
#include <linux/dma-mapping.h>
#include <linux/module.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>

#include <crypto/aes.h>
#include <crypto/sha1.h>
#include <crypto/sha2.h>
#include <crypto/sha3.h>
#include <crypto/internal/des.h>
#include <crypto/engine.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>

#include "tegra-se.h"

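/* Per-transform context: owning SE instance, algorithm ID, key slot and optional software fallback */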
struct tegra_sha_ctx {
	struct tegra_se *se;
	unsigned int alg;
	bool fallback;
	u32 key_id;
	struct crypto_ahash *fallback_tfm;
};

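/* Per-request context: DMA buffers, running hash state and the pending SHA_UPDATE/SHA_FINAL tasks */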
struct tegra_sha_reqctx {
	struct scatterlist *src_sg;
	struct tegra_se_datbuf datbuf;
	struct tegra_se_datbuf residue;
	struct tegra_se_datbuf digest;
	unsigned int alg;
	unsigned int config;
	unsigned int total_len;
	unsigned int blk_size;
	unsigned int task;
	u32 key_id;
	u32 result[HASH_RESULT_REG_COUNT];
	struct ahash_request fallback_req;
};

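/* Translate the SE algorithm ID into the SE_SHA_CFG encoding expected by the hardware */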
static int tegra_sha_get_config(u32 alg)
{
	int cfg = 0;

	switch (alg) {
	case SE_ALG_SHA1:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA1;
		break;

	case SE_ALG_HMAC_SHA224:
		cfg |= SE_SHA_ENC_ALG_HMAC;
		fallthrough;
	case SE_ALG_SHA224:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA224;
		break;

	case SE_ALG_HMAC_SHA256:
		cfg |= SE_SHA_ENC_ALG_HMAC;
		fallthrough;
	case SE_ALG_SHA256:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA256;
		break;

	case SE_ALG_HMAC_SHA384:
		cfg |= SE_SHA_ENC_ALG_HMAC;
		fallthrough;
	case SE_ALG_SHA384:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA384;
		break;

	case SE_ALG_HMAC_SHA512:
		cfg |= SE_SHA_ENC_ALG_HMAC;
		fallthrough;
	case SE_ALG_SHA512:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA512;
		break;

	case SE_ALG_SHA3_224:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA3_224;
		break;
	case SE_ALG_SHA3_256:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA3_256;
		break;
	case SE_ALG_SHA3_384:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA3_384;
		break;
	case SE_ALG_SHA3_512:
		cfg |= SE_SHA_ENC_ALG_SHA;
		cfg |= SE_SHA_ENC_MODE_SHA3_512;
		break;
	default:
		return -EINVAL;
	}

	return cfg;
}

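/*
 * The tegra_sha_fallback_*() helpers mirror the ahash operations onto the
 * software fallback transform allocated in tegra_sha_init_fallback(). They
 * are used whenever ctx->fallback is set, e.g. for HMAC keys that cannot be
 * loaded into a hardware keyslot.
 */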
static int tegra_sha_fallback_init(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	rctx->fallback_req.base.flags = req->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;

	return crypto_ahash_init(&rctx->fallback_req);
}

static int tegra_sha_fallback_update(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	rctx->fallback_req.base.flags = req->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;
	rctx->fallback_req.nbytes = req->nbytes;
	rctx->fallback_req.src = req->src;

	return crypto_ahash_update(&rctx->fallback_req);
}

static int tegra_sha_fallback_final(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	rctx->fallback_req.base.flags = req->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;
	rctx->fallback_req.result = req->result;

	return crypto_ahash_final(&rctx->fallback_req);
}

static int tegra_sha_fallback_finup(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	rctx->fallback_req.base.flags = req->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;

	rctx->fallback_req.nbytes = req->nbytes;
	rctx->fallback_req.src = req->src;
	rctx->fallback_req.result = req->result;

	return crypto_ahash_finup(&rctx->fallback_req);
}

static int tegra_sha_fallback_digest(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	rctx->fallback_req.base.flags = req->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;

	rctx->fallback_req.nbytes = req->nbytes;
	rctx->fallback_req.src = req->src;
	rctx->fallback_req.result = req->result;

	return crypto_ahash_digest(&rctx->fallback_req);
}

static int tegra_sha_fallback_import(struct ahash_request *req, const void *in)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	rctx->fallback_req.base.flags = req->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;

	return crypto_ahash_import(&rctx->fallback_req, in);
}

static int tegra_sha_fallback_export(struct ahash_request *req, void *out)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
	rctx->fallback_req.base.flags = req->base.flags &
					CRYPTO_TFM_REQ_MAY_SLEEP;

	return crypto_ahash_export(&rctx->fallback_req, out);
}

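/*
 * Build the host1x opcode stream that programs the SHA engine: message
 * length and bytes left, SE_SHA_CFG, the input and output buffer addresses,
 * an optional keyslot index, the operation trigger and a final sync point
 * increment. Returns the number of 32-bit words written to cpuvaddr.
 */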
static int tegra_sha_prep_cmd(struct tegra_se *se, u32 *cpuvaddr,
			      struct tegra_sha_reqctx *rctx)
{
	u64 msg_len, msg_left;
	int i = 0;

	msg_len = rctx->total_len * 8;
	msg_left = rctx->datbuf.size * 8;

	/*
	 * If IN_ADDR_HI_0.SZ > SHA_MSG_LEFT_[0-3] to the HASH engine,
	 * HW treats it as the last buffer and processes the data.
	 * Therefore, add an extra byte to msg_left if it is not the
	 * last buffer.
	 */
	if (rctx->task & SHA_UPDATE) {
		msg_left += 8;
		msg_len += 8;
	}

	cpuvaddr[i++] = host1x_opcode_setpayload(8);
	cpuvaddr[i++] = se_host1x_opcode_incr_w(SE_SHA_MSG_LENGTH);
	cpuvaddr[i++] = lower_32_bits(msg_len);
	cpuvaddr[i++] = upper_32_bits(msg_len);
	cpuvaddr[i++] = 0;
	cpuvaddr[i++] = 0;
	cpuvaddr[i++] = lower_32_bits(msg_left);
	cpuvaddr[i++] = upper_32_bits(msg_left);
	cpuvaddr[i++] = 0;
	cpuvaddr[i++] = 0;
	cpuvaddr[i++] = host1x_opcode_setpayload(6);
	cpuvaddr[i++] = se_host1x_opcode_incr_w(SE_SHA_CFG);
	cpuvaddr[i++] = rctx->config;

	if (rctx->task & SHA_FIRST) {
		cpuvaddr[i++] = SE_SHA_TASK_HASH_INIT;
		rctx->task &= ~SHA_FIRST;
	} else {
		cpuvaddr[i++] = 0;
	}

	cpuvaddr[i++] = rctx->datbuf.addr;
	cpuvaddr[i++] = (u32)(SE_ADDR_HI_MSB(upper_32_bits(rctx->datbuf.addr)) |
			      SE_ADDR_HI_SZ(rctx->datbuf.size));
	cpuvaddr[i++] = rctx->digest.addr;
	cpuvaddr[i++] = (u32)(SE_ADDR_HI_MSB(upper_32_bits(rctx->digest.addr)) |
			      SE_ADDR_HI_SZ(rctx->digest.size));
	if (rctx->key_id) {
		cpuvaddr[i++] = host1x_opcode_setpayload(1);
		cpuvaddr[i++] = se_host1x_opcode_nonincr_w(SE_SHA_CRYPTO_CFG);
		cpuvaddr[i++] = SE_AES_KEY_INDEX(rctx->key_id);
	}

	cpuvaddr[i++] = host1x_opcode_setpayload(1);
	cpuvaddr[i++] = se_host1x_opcode_nonincr_w(SE_SHA_OPERATION);
	cpuvaddr[i++] = SE_SHA_OP_WRSTALL |
			SE_SHA_OP_START |
			SE_SHA_OP_LASTBUF;
	cpuvaddr[i++] = se_host1x_opcode_nonincr(host1x_uclass_incr_syncpt_r(), 1);
	cpuvaddr[i++] = host1x_uclass_incr_syncpt_cond_f(1) |
			host1x_uclass_incr_syncpt_indx_f(se->syncpt_id);

	dev_dbg(se->dev, "msg len %llu msg left %llu cfg %#x",
		msg_len, msg_left, rctx->config);

	return i;
}

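/*
 * The engine holds the intermediate digest in its HASH_RESULT registers.
 * copy_hash_result() saves that state into the request context and
 * paste_hash_result() writes it back, so a hash can be resumed across
 * separate update() calls and across export()/import().
 */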
static void tegra_sha_copy_hash_result(struct tegra_se *se, struct tegra_sha_reqctx *rctx)
{
	int i;

	for (i = 0; i < HASH_RESULT_REG_COUNT; i++)
		rctx->result[i] = readl(se->base + se->hw->regs->result + (i * 4));
}

static void tegra_sha_paste_hash_result(struct tegra_se *se, struct tegra_sha_reqctx *rctx)
{
	int i;

	for (i = 0; i < HASH_RESULT_REG_COUNT; i++)
		writel(rctx->result[i],
		       se->base + se->hw->regs->result + (i * 4));
}

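/*
 * Stage request data for the hardware in whole blocks and keep the tail in
 * the residue buffer. For example, with SHA-256 (64-byte blocks), an update
 * of 150 bytes on top of 10 bytes of prior residue gives 160 bytes total:
 * 128 bytes (two full blocks) are sent to the engine and the remaining
 * 32 bytes are saved as residue for the next update()/final(). When the
 * total is an exact multiple of the block size, one full block is held back
 * so that final() always has data to process.
 */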
static int tegra_sha_do_update(struct ahash_request *req)
{
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct tegra_se *se = ctx->se;
	unsigned int nblks, nresidue, size;
	u32 *cpuvaddr = se->cmdbuf->addr;
	int ret;

	nresidue = (req->nbytes + rctx->residue.size) % rctx->blk_size;
	nblks = (req->nbytes + rctx->residue.size) / rctx->blk_size;

	/*
	 * If nbytes is a multiple of the block size and there is no residue,
	 * then reserve the last block as residue, to be processed in final().
	 */
	if (!nresidue && nblks) {
		nresidue = rctx->blk_size;
		nblks--;
	}

	rctx->src_sg = req->src;
	rctx->datbuf.size = (req->nbytes + rctx->residue.size) - nresidue;
	rctx->total_len += rctx->datbuf.size;

	/*
	 * If nbytes is less than a block size, copy it into the residue and
	 * return. The bytes will be processed in final().
	 */
	if (nblks < 1) {
		scatterwalk_map_and_copy(rctx->residue.buf + rctx->residue.size,
					 rctx->src_sg, 0, req->nbytes, 0);

		rctx->residue.size += req->nbytes;
		return 0;
	}

	rctx->datbuf.buf = dma_alloc_coherent(ctx->se->dev, rctx->datbuf.size,
					      &rctx->datbuf.addr, GFP_KERNEL);
	if (!rctx->datbuf.buf)
		return -ENOMEM;

	/* Copy the previous residue first */
	if (rctx->residue.size)
		memcpy(rctx->datbuf.buf, rctx->residue.buf, rctx->residue.size);

	scatterwalk_map_and_copy(rctx->datbuf.buf + rctx->residue.size,
				 rctx->src_sg, 0, req->nbytes - nresidue, 0);

	scatterwalk_map_and_copy(rctx->residue.buf, rctx->src_sg,
				 req->nbytes - nresidue, nresidue, 0);

	/* Update residue value with the residue after current block */
	rctx->residue.size = nresidue;

	rctx->config = tegra_sha_get_config(rctx->alg) |
		       SE_SHA_DST_HASH_REG;

	/*
	 * If this is not the first 'update' call, restore the previously
	 * copied intermediate results to the registers so that they get
	 * picked up. This is to support the import/export functionality.
	 */
	if (!(rctx->task & SHA_FIRST))
		tegra_sha_paste_hash_result(se, rctx);

	size = tegra_sha_prep_cmd(se, cpuvaddr, rctx);

	ret = tegra_se_host1x_submit(se, se->cmdbuf, size);

	/*
	 * If this is not the final update, copy the intermediate results
	 * from the registers so that they can be used in the next 'update'
	 * call. This is to support the import/export functionality.
	 */
	if (!(rctx->task & SHA_FINAL))
		tegra_sha_copy_hash_result(se, rctx);

	dma_free_coherent(ctx->se->dev, rctx->datbuf.size,
			  rctx->datbuf.buf, rctx->datbuf.addr);

	return ret;
}

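/*
 * Hash the remaining residue (if any) as the last buffer, have the engine
 * write the digest to memory, copy it to req->result and free the
 * per-request DMA buffers.
 */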
static int tegra_sha_do_final(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);
	struct tegra_se *se = ctx->se;
	u32 *cpuvaddr = se->cmdbuf->addr;
	int size, ret = 0;

	if (rctx->residue.size) {
		rctx->datbuf.buf = dma_alloc_coherent(se->dev, rctx->residue.size,
						      &rctx->datbuf.addr, GFP_KERNEL);
		if (!rctx->datbuf.buf) {
			ret = -ENOMEM;
			goto out_free;
		}

		memcpy(rctx->datbuf.buf, rctx->residue.buf, rctx->residue.size);
	}

	rctx->datbuf.size = rctx->residue.size;
	rctx->total_len += rctx->residue.size;

	rctx->config = tegra_sha_get_config(rctx->alg) |
		       SE_SHA_DST_MEMORY;

	size = tegra_sha_prep_cmd(se, cpuvaddr, rctx);

	ret = tegra_se_host1x_submit(se, se->cmdbuf, size);
	if (ret)
		goto out;

	/* Copy result */
	memcpy(req->result, rctx->digest.buf, rctx->digest.size);

out:
	if (rctx->residue.size)
		dma_free_coherent(se->dev, rctx->datbuf.size,
				  rctx->datbuf.buf, rctx->datbuf.addr);
out_free:
	dma_free_coherent(se->dev, crypto_ahash_blocksize(tfm),
			  rctx->residue.buf, rctx->residue.addr);
	dma_free_coherent(se->dev, rctx->digest.size, rctx->digest.buf,
			  rctx->digest.addr);
	return ret;
}

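/*
 * crypto_engine callback: run the pending SHA_UPDATE and/or SHA_FINAL work
 * for the request and then complete it on the engine.
 */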
static int tegra_sha_do_one_req(struct crypto_engine *engine, void *areq)
{
	struct ahash_request *req = ahash_request_cast(areq);
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);
	struct tegra_se *se = ctx->se;
	int ret = 0;

	if (rctx->task & SHA_UPDATE) {
		ret = tegra_sha_do_update(req);
		if (ret)
			goto out;

		rctx->task &= ~SHA_UPDATE;
	}

	if (rctx->task & SHA_FINAL) {
		ret = tegra_sha_do_final(req);
		if (ret)
			goto out;

		rctx->task &= ~SHA_FINAL;
	}

out:
	crypto_finalize_hash_request(se->engine, req, ret);

	return 0;
}

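/*
 * Allocate the software ahash fallback used when a request cannot be handled
 * by the engine (e.g. HMAC keys whose length the hardware keyslots do not
 * support), and enlarge the state/request sizes to accommodate it.
 */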
static void tegra_sha_init_fallback(struct crypto_ahash *tfm, struct tegra_sha_ctx *ctx,
				    const char *algname)
{
	unsigned int statesize;

	ctx->fallback_tfm = crypto_alloc_ahash(algname, 0, CRYPTO_ALG_ASYNC |
					       CRYPTO_ALG_NEED_FALLBACK);

	if (IS_ERR(ctx->fallback_tfm)) {
		dev_warn(ctx->se->dev,
			 "failed to allocate fallback for %s\n", algname);
		ctx->fallback_tfm = NULL;
		return;
	}

	statesize = crypto_ahash_statesize(ctx->fallback_tfm);

	if (statesize > sizeof(struct tegra_sha_reqctx))
		crypto_ahash_set_statesize(tfm, statesize);

	/* Update reqsize if fallback is added */
	crypto_ahash_set_reqsize(tfm,
				 sizeof(struct tegra_sha_reqctx) +
				 crypto_ahash_reqsize(ctx->fallback_tfm));
}

static int tegra_sha_cra_init(struct crypto_tfm *tfm)
{
	struct tegra_sha_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_ahash *ahash_tfm = __crypto_ahash_cast(tfm);
	struct ahash_alg *alg = __crypto_ahash_alg(tfm->__crt_alg);
	struct tegra_se_alg *se_alg;
	const char *algname;
	int ret;

	algname = crypto_tfm_alg_name(tfm);
	se_alg = container_of(alg, struct tegra_se_alg, alg.ahash.base);

	crypto_ahash_set_reqsize(ahash_tfm, sizeof(struct tegra_sha_reqctx));

	ctx->se = se_alg->se_dev;
	ctx->fallback = false;
	ctx->key_id = 0;

	ret = se_algname_to_algid(algname);
	if (ret < 0) {
		dev_err(ctx->se->dev, "invalid algorithm\n");
		return ret;
	}

	if (se_alg->alg_base)
		tegra_sha_init_fallback(ahash_tfm, ctx, algname);

	ctx->alg = ret;

	return 0;
}

static void tegra_sha_cra_exit(struct crypto_tfm *tfm)
{
	struct tegra_sha_ctx *ctx = crypto_tfm_ctx(tfm);

	if (ctx->fallback_tfm)
		crypto_free_ahash(ctx->fallback_tfm);

	tegra_key_invalidate(ctx->se, ctx->key_id, ctx->alg);
}

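/*
 * Per-request initialisation: reset the running state and allocate
 * DMA-coherent buffers for the digest and for up to one block of residue.
 */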
static int tegra_sha_init(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);
	struct tegra_se *se = ctx->se;

	if (ctx->fallback)
		return tegra_sha_fallback_init(req);

	rctx->total_len = 0;
	rctx->datbuf.size = 0;
	rctx->residue.size = 0;
	rctx->key_id = ctx->key_id;
	rctx->task = SHA_FIRST;
	rctx->alg = ctx->alg;
	rctx->blk_size = crypto_ahash_blocksize(tfm);
	rctx->digest.size = crypto_ahash_digestsize(tfm);

	rctx->digest.buf = dma_alloc_coherent(se->dev, rctx->digest.size,
					      &rctx->digest.addr, GFP_KERNEL);
	if (!rctx->digest.buf)
		goto digbuf_fail;

	rctx->residue.buf = dma_alloc_coherent(se->dev, rctx->blk_size,
					       &rctx->residue.addr, GFP_KERNEL);
	if (!rctx->residue.buf)
		goto resbuf_fail;

	return 0;

resbuf_fail:
	dma_free_coherent(se->dev, rctx->digest.size, rctx->digest.buf,
			  rctx->digest.addr);
digbuf_fail:
	return -ENOMEM;
}

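/*
 * HMAC keys with an AES-compatible length (128/192/256 bits) are loaded into
 * a hardware keyslot; any other length, or a failed keyslot load, is handled
 * by the software fallback transform.
 */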
static int tegra_hmac_fallback_setkey(struct tegra_sha_ctx *ctx, const u8 *key,
				      unsigned int keylen)
{
	if (!ctx->fallback_tfm) {
		dev_dbg(ctx->se->dev, "invalid key length (%d)\n", keylen);
		return -EINVAL;
	}

	ctx->fallback = true;
	return crypto_ahash_setkey(ctx->fallback_tfm, key, keylen);
}

static int tegra_hmac_setkey(struct crypto_ahash *tfm, const u8 *key,
			     unsigned int keylen)
{
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);
	int ret;

	if (aes_check_keylen(keylen))
		return tegra_hmac_fallback_setkey(ctx, key, keylen);

	ret = tegra_key_submit(ctx->se, key, keylen, ctx->alg, &ctx->key_id);
	if (ret)
		return tegra_hmac_fallback_setkey(ctx, key, keylen);

	ctx->fallback = false;

	return 0;
}

static int tegra_sha_update(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	if (ctx->fallback)
		return tegra_sha_fallback_update(req);

	rctx->task |= SHA_UPDATE;

	return crypto_transfer_hash_request_to_engine(ctx->se->engine, req);
}

static int tegra_sha_final(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	if (ctx->fallback)
		return tegra_sha_fallback_final(req);

	rctx->task |= SHA_FINAL;

	return crypto_transfer_hash_request_to_engine(ctx->se->engine, req);
}

static int tegra_sha_finup(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	if (ctx->fallback)
		return tegra_sha_fallback_finup(req);

	rctx->task |= SHA_UPDATE | SHA_FINAL;

	return crypto_transfer_hash_request_to_engine(ctx->se->engine, req);
}

static int tegra_sha_digest(struct ahash_request *req)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);
	int ret;

	if (ctx->fallback)
		return tegra_sha_fallback_digest(req);

	ret = tegra_sha_init(req);
	if (ret)
		return ret;

	rctx->task |= SHA_UPDATE | SHA_FINAL;
	return crypto_transfer_hash_request_to_engine(ctx->se->engine, req);
}

static int tegra_sha_export(struct ahash_request *req, void *out)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	if (ctx->fallback)
		return tegra_sha_fallback_export(req, out);

	memcpy(out, rctx, sizeof(*rctx));

	return 0;
}

static int tegra_sha_import(struct ahash_request *req, const void *in)
{
	struct tegra_sha_reqctx *rctx = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct tegra_sha_ctx *ctx = crypto_ahash_ctx(tfm);

	if (ctx->fallback)
		return tegra_sha_fallback_import(req, in);

	memcpy(rctx, in, sizeof(*rctx));

	return 0;
}

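/* SHA-1/2/3 and HMAC-SHA algorithms exposed by this driver */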
static struct tegra_se_alg tegra_hash_algs[] = {
	{
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA1_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha1",
				.cra_driver_name = "tegra-se-sha1",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA1_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA224_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha224",
				.cra_driver_name = "tegra-se-sha224",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA256_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha256",
				.cra_driver_name = "tegra-se-sha256",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA384_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha384",
				.cra_driver_name = "tegra-se-sha384",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA512_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha512",
				.cra_driver_name = "tegra-se-sha512",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA3_224_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha3-224",
				.cra_driver_name = "tegra-se-sha3-224",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA3_224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA3_256_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha3-256",
				.cra_driver_name = "tegra-se-sha3-256",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA3_256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA3_384_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha3-384",
				.cra_driver_name = "tegra-se-sha3-384",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA3_384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.halg.digestsize = SHA3_512_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "sha3-512",
				.cra_driver_name = "tegra-se-sha3-512",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH,
				.cra_blocksize = SHA3_512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg_base = "sha224",
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.setkey = tegra_hmac_setkey,
			.halg.digestsize = SHA224_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "hmac(sha224)",
				.cra_driver_name = "tegra-se-hmac-sha224",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA224_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg_base = "sha256",
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.setkey = tegra_hmac_setkey,
			.halg.digestsize = SHA256_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "hmac(sha256)",
				.cra_driver_name = "tegra-se-hmac-sha256",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg_base = "sha384",
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.setkey = tegra_hmac_setkey,
			.halg.digestsize = SHA384_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "hmac(sha384)",
				.cra_driver_name = "tegra-se-hmac-sha384",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA384_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}, {
		.alg_base = "sha512",
		.alg.ahash.op.do_one_request = tegra_sha_do_one_req,
		.alg.ahash.base = {
			.init = tegra_sha_init,
			.update = tegra_sha_update,
			.final = tegra_sha_final,
			.finup = tegra_sha_finup,
			.digest = tegra_sha_digest,
			.export = tegra_sha_export,
			.import = tegra_sha_import,
			.setkey = tegra_hmac_setkey,
			.halg.digestsize = SHA512_DIGEST_SIZE,
			.halg.statesize = sizeof(struct tegra_sha_reqctx),
			.halg.base = {
				.cra_name = "hmac(sha512)",
				.cra_driver_name = "tegra-se-hmac-sha512",
				.cra_priority = 300,
				.cra_flags = CRYPTO_ALG_TYPE_AHASH | CRYPTO_ALG_NEED_FALLBACK,
				.cra_blocksize = SHA512_BLOCK_SIZE,
				.cra_ctxsize = sizeof(struct tegra_sha_ctx),
				.cra_alignmask = 0,
				.cra_module = THIS_MODULE,
				.cra_init = tegra_sha_cra_init,
				.cra_exit = tegra_sha_cra_exit,
			}
		}
	}
};

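/*
 * Build the KAC manifest word describing an HMAC key for the hardware
 * keyslot: owning namespace, purpose (HMAC) and key size.
 */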
static int tegra_hash_kac_manifest(u32 user, u32 alg, u32 keylen)
{
	int manifest;

	manifest = SE_KAC_USER_NS;

	switch (alg) {
	case SE_ALG_HMAC_SHA224:
	case SE_ALG_HMAC_SHA256:
	case SE_ALG_HMAC_SHA384:
	case SE_ALG_HMAC_SHA512:
		manifest |= SE_KAC_HMAC;
		break;
	default:
		return -EINVAL;
	}

	switch (keylen) {
	case AES_KEYSIZE_128:
		manifest |= SE_KAC_SIZE_128;
		break;
	case AES_KEYSIZE_192:
		manifest |= SE_KAC_SIZE_192;
		break;
	case AES_KEYSIZE_256:
	default:
		manifest |= SE_KAC_SIZE_256;
		break;
	}

	return manifest;
}

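/*
 * Register every hash algorithm in tegra_hash_algs[] with the crypto engine;
 * on failure, unregister the ones already registered.
 */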
int tegra_init_hash(struct tegra_se *se)
{
	struct ahash_engine_alg *alg;
	int i, ret;

	se->manifest = tegra_hash_kac_manifest;

	for (i = 0; i < ARRAY_SIZE(tegra_hash_algs); i++) {
		tegra_hash_algs[i].se_dev = se;
		alg = &tegra_hash_algs[i].alg.ahash;

		ret = crypto_engine_register_ahash(alg);
		if (ret) {
			dev_err(se->dev, "failed to register %s\n",
				alg->base.halg.base.cra_name);
			goto sha_err;
		}
	}

	return 0;

sha_err:
	while (i--)
		crypto_engine_unregister_ahash(&tegra_hash_algs[i].alg.ahash);

	return ret;
}

void tegra_deinit_hash(struct tegra_se *se)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(tegra_hash_algs); i++)
		crypto_engine_unregister_ahash(&tegra_hash_algs[i].alg.ahash);
}