// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/scatterwalk.h>
#include <linux/cryptouser.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/string.h>
#include <net/netlink.h>

#include "hash.h"

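/*
 * Upper bound on ->cra_alignmask for shash algorithms.  The unaligned-I/O
 * helpers below bounce data through on-stack buffers sized from this
 * constant, so it has to stay small enough to keep those frames sane.
 */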
#define MAX_SHASH_ALIGNMASK 63

static const struct crypto_type crypto_shash_type;

static inline struct crypto_istat_hash *shash_get_stat(struct shash_alg *alg)
{
	return hash_get_stat(&alg->halg);
}

static inline int crypto_shash_errstat(struct shash_alg *alg, int err)
{
	return crypto_hash_errstat(&alg->halg, err);
}

int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
		    unsigned int keylen)
{
	return -ENOSYS;
}
EXPORT_SYMBOL_GPL(shash_no_setkey);

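/*
 * The key buffer the caller passed in is not aligned to the algorithm's
 * alignmask, so copy it into a properly aligned heap buffer before calling
 * ->setkey().  kfree_sensitive() wipes the copy so no key material is left
 * behind.
 */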
static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
				  unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned long absize;
	u8 *buffer, *alignbuffer;
	int err;

	absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
	buffer = kmalloc(absize, GFP_ATOMIC);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	err = shash->setkey(tfm, alignbuffer, keylen);
	kfree_sensitive(buffer);
	return err;
}

static void shash_set_needkey(struct crypto_shash *tfm, struct shash_alg *alg)
{
	if (crypto_shash_alg_needs_key(alg))
		crypto_shash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
}

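/*
 * Set the key for a keyed shash, taking the bounce-buffer path above for
 * unaligned keys.  On failure, CRYPTO_TFM_NEED_KEY is re-asserted for
 * algorithms that require a key, so a tfm with a half-set key cannot be
 * used; on success the flag is cleared.
 */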
int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
			unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	int err;

	if ((unsigned long)key & alignmask)
		err = shash_setkey_unaligned(tfm, key, keylen);
	else
		err = shash->setkey(tfm, key, keylen);

	if (unlikely(err)) {
		shash_set_needkey(tfm, shash);
		return err;
	}

	crypto_shash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);

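/*
 * Process an update whose data pointer violates the algorithm's alignmask:
 * copy the unaligned head into an aligned on-stack buffer and hash it, then
 * hash the now-aligned remainder in place.  PTR_ALIGN is used instead of
 * __aligned() because the latter cannot be trusted for large alignments.
 */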
static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned int unaligned_len = alignmask + 1 -
				     ((unsigned long)data & alignmask);
	/*
	 * We cannot count on __aligned() working for large values:
	 * https://patchwork.kernel.org/patch/9507697/
	 */
	u8 ubuf[MAX_SHASH_ALIGNMASK * 2];
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	if (WARN_ON(buf + unaligned_len > ubuf + sizeof(ubuf)))
		return -EINVAL;

	if (unaligned_len > len)
		unaligned_len = len;

	memcpy(buf, data, unaligned_len);
	err = shash->update(desc, buf, unaligned_len);
	memset(buf, 0, unaligned_len);

	return err ?:
	       shash->update(desc, data + unaligned_len, len - unaligned_len);
}

int crypto_shash_update(struct shash_desc *desc, const u8 *data,
			unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	int err;

	if (IS_ENABLED(CONFIG_CRYPTO_STATS))
		atomic64_add(len, &shash_get_stat(shash)->hash_tlen);

	if ((unsigned long)data & alignmask)
		err = shash_update_unaligned(desc, data, len);
	else
		err = shash->update(desc, data, len);

	return crypto_shash_errstat(shash, err);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);

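/*
 * Produce the final digest into an aligned on-stack buffer, then copy it to
 * the caller's unaligned output pointer and wipe the intermediate copy.
 */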
static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned int ds = crypto_shash_digestsize(tfm);
	/*
	 * We cannot count on __aligned() working for large values:
	 * https://patchwork.kernel.org/patch/9507697/
	 */
	u8 ubuf[MAX_SHASH_ALIGNMASK + HASH_MAX_DIGESTSIZE];
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	if (WARN_ON(buf + ds > ubuf + sizeof(ubuf)))
		return -EINVAL;

	err = shash->final(desc, buf);
	if (err)
		goto out;

	memcpy(out, buf, ds);

out:
	memset(buf, 0, ds);
	return err;
}

int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	int err;

	if (IS_ENABLED(CONFIG_CRYPTO_STATS))
		atomic64_inc(&shash_get_stat(shash)->hash_cnt);

	if ((unsigned long)out & alignmask)
		err = shash_final_unaligned(desc, out);
	else
		err = shash->final(desc, out);

	return crypto_shash_errstat(shash, err);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);

static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
				 unsigned int len, u8 *out)
{
	return shash_update_unaligned(desc, data, len) ?:
	       shash_final_unaligned(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
		       unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	int err;

	if (IS_ENABLED(CONFIG_CRYPTO_STATS)) {
		struct crypto_istat_hash *istat = shash_get_stat(shash);

		atomic64_inc(&istat->hash_cnt);
		atomic64_add(len, &istat->hash_tlen);
	}

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		err = shash_finup_unaligned(desc, data, len, out);
	else
		err = shash->finup(desc, data, len, out);

	return crypto_shash_errstat(shash, err);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);

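/*
 * Serial fallback for multibuffer finup: finish the first num_msgs - 1
 * messages one at a time on a stack copy of the descriptor, then finish
 * the last message on the caller's descriptor.
 */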
static noinline_for_stack int
shash_finup_mb_fallback(struct shash_desc *desc, const u8 * const data[],
			unsigned int len, u8 * const outs[],
			unsigned int num_msgs)
{
	struct crypto_shash *tfm = desc->tfm;
	SHASH_DESC_ON_STACK(desc2, tfm);
	unsigned int i;
	int err;

	for (i = 0; i < num_msgs - 1; i++) {
		desc2->tfm = tfm;
		memcpy(shash_desc_ctx(desc2), shash_desc_ctx(desc),
		       crypto_shash_descsize(tfm));
		err = crypto_shash_finup(desc2, data[i], len, outs[i]);
		if (err)
			return err;
	}
	return crypto_shash_finup(desc, data[i], len, outs[i]);
}

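/*
 * Finish hashing num_msgs messages, all of length len and all sharing the
 * prefix state in @desc, writing one digest per message.  The heavy
 * lifting is the algorithm's ->finup_mb(); an over-large count or an
 * -EOPNOTSUPP return falls back to the serial loop above.
 */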
int crypto_shash_finup_mb(struct shash_desc *desc, const u8 * const data[],
			  unsigned int len, u8 * const outs[],
			  unsigned int num_msgs)
{
	struct shash_alg *alg = crypto_shash_alg(desc->tfm);
	int err;

	if (num_msgs == 1)
		return crypto_shash_finup(desc, data[0], len, outs[0]);

	if (num_msgs == 0)
		return 0;

	if (WARN_ON_ONCE(num_msgs > alg->mb_max_msgs))
		goto fallback;

	err = alg->finup_mb(desc, data, len, outs, num_msgs);
	if (unlikely(err == -EOPNOTSUPP))
		goto fallback;
	return err;

fallback:
	return shash_finup_mb_fallback(desc, data, len, outs, num_msgs);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup_mb);

static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len, u8 *out)
{
	return crypto_shash_init(desc) ?:
	       shash_update_unaligned(desc, data, len) ?:
	       shash_final_unaligned(desc, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	int err;

	if (IS_ENABLED(CONFIG_CRYPTO_STATS)) {
		struct crypto_istat_hash *istat = shash_get_stat(shash);

		atomic64_inc(&istat->hash_cnt);
		atomic64_add(len, &istat->hash_tlen);
	}

	if (crypto_shash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		err = -ENOKEY;
	else if (((unsigned long)data | (unsigned long)out) & alignmask)
		err = shash_digest_unaligned(desc, data, len, out);
	else
		err = shash->digest(desc, data, len, out);

	return crypto_shash_errstat(shash, err);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);

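/*
 * One-shot digest of a flat buffer using a caller-provided tfm; the
 * on-stack descriptor is wiped before returning.  A minimal illustrative
 * sketch of a caller (error handling omitted):
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("sha256", 0, 0);
 *	u8 digest[SHA256_DIGEST_SIZE];
 *
 *	crypto_shash_tfm_digest(tfm, data, len, digest);
 *	crypto_free_shash(tfm);
 */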
int crypto_shash_tfm_digest(struct crypto_shash *tfm, const u8 *data,
			    unsigned int len, u8 *out)
{
	SHASH_DESC_ON_STACK(desc, tfm);
	int err;

	desc->tfm = tfm;

	err = crypto_shash_digest(desc, data, len, out);

	shash_desc_zero(desc);

	return err;
}
EXPORT_SYMBOL_GPL(crypto_shash_tfm_digest);

static int shash_default_export(struct shash_desc *desc, void *out)
{
	memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_default_import(struct shash_desc *desc, const void *in)
{
	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

	return crypto_shash_setkey(*ctx, key, keylen);
}

static int shash_async_init(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return crypto_shash_init(desc);
}

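/*
 * Feed an ahash request's scatterlist into a shash descriptor: the hash
 * walk maps each chunk in turn and crypto_shash_update() consumes it.
 * Returns 0 on success or the first error encountered.
 */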
int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);

static int shash_async_update(struct ahash_request *req)
{
	return shash_ahash_update(req, ahash_request_ctx(req));
}

static int shash_async_final(struct ahash_request *req)
{
	return crypto_shash_final(ahash_request_ctx(req), req->result);
}

int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	nbytes = crypto_hash_walk_first(req, &walk);
	if (!nbytes)
		return crypto_shash_final(desc, req->result);

	do {
		nbytes = crypto_hash_walk_last(&walk) ?
			 crypto_shash_finup(desc, walk.data, nbytes,
					    req->result) :
			 crypto_shash_update(desc, walk.data, nbytes);
		nbytes = crypto_hash_walk_done(&walk, nbytes);
	} while (nbytes > 0);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);

static int shash_async_finup(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return shash_ahash_finup(req, desc);
}

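/*
 * Digest an ahash request through a shash descriptor.  If the data is
 * non-empty and sits entirely within one page of the first scatterlist
 * entry, hash it directly from a kmap_local_page() mapping; otherwise
 * fall back to init plus the walking finup above.
 */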
int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
	unsigned int nbytes = req->nbytes;
	struct scatterlist *sg;
	unsigned int offset;
	int err;

	if (nbytes &&
	    (sg = req->src, offset = sg->offset,
	     nbytes <= min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) {
		void *data;

		data = kmap_local_page(sg_page(sg));
		err = crypto_shash_digest(desc, data + offset, nbytes,
					  req->result);
		kunmap_local(data);
	} else
		err = crypto_shash_init(desc) ?:
		      shash_ahash_finup(req, desc);

	return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);

static int shash_async_digest(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return shash_ahash_digest(req, desc);
}

static int shash_async_export(struct ahash_request *req, void *out)
{
	return crypto_shash_export(ahash_request_ctx(req), out);
}

static int shash_async_import(struct ahash_request *req, const void *in)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return crypto_shash_import(desc, in);
}

static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}

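/*
 * Expose a shash algorithm through the ahash interface: allocate an
 * underlying shash tfm, stash it in the ahash context and wire every
 * ahash entry point to the shash_async_*() adapters above.  ->setkey is
 * only installed if the algorithm actually implements it.
 */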
int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	*ctx = shash;
	tfm->exit = crypto_exit_shash_ops_async;

	crt->init = shash_async_init;
	crt->update = shash_async_update;
	crt->final = shash_async_final;
	crt->finup = shash_async_finup;
	crt->digest = shash_async_digest;
	if (crypto_shash_alg_has_setkey(alg))
		crt->setkey = shash_async_setkey;

	crypto_ahash_set_flags(crt, crypto_shash_get_flags(shash) &
				    CRYPTO_TFM_NEED_KEY);

	crt->export = shash_async_export;
	crt->import = shash_async_import;

	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

	return 0;
}

struct crypto_ahash *crypto_clone_shash_ops_async(struct crypto_ahash *nhash,
						  struct crypto_ahash *hash)
{
	struct crypto_shash **nctx = crypto_ahash_ctx(nhash);
	struct crypto_shash **ctx = crypto_ahash_ctx(hash);
	struct crypto_shash *shash;

	shash = crypto_clone_shash(*ctx);
	if (IS_ERR(shash)) {
		crypto_free_ahash(nhash);
		return ERR_CAST(shash);
	}

	*nctx = shash;

	return nhash;
}

static void crypto_shash_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);
	struct shash_alg *alg = crypto_shash_alg(hash);

	alg->exit_tfm(hash);
}

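/*
 * Per-tfm setup: seed ->descsize from the algorithm, assert NEED_KEY where
 * applicable and run the optional ->init_tfm() hook.  Since ->init_tfm()
 * is allowed to grow ->descsize, re-check it against HASH_MAX_DESCSIZE
 * afterwards and unwind if it no longer fits.
 */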
static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);
	struct shash_alg *alg = crypto_shash_alg(hash);
	int err;

	hash->descsize = alg->descsize;

	shash_set_needkey(hash, alg);

	if (alg->exit_tfm)
		tfm->exit = crypto_shash_exit_tfm;

	if (!alg->init_tfm)
		return 0;

	err = alg->init_tfm(hash);
	if (err)
		return err;

	/* ->init_tfm() may have increased the descsize. */
	if (WARN_ON_ONCE(hash->descsize > HASH_MAX_DESCSIZE)) {
		if (alg->exit_tfm)
			alg->exit_tfm(hash);
		return -EINVAL;
	}

	return 0;
}

static void crypto_shash_free_instance(struct crypto_instance *inst)
{
	struct shash_instance *shash = shash_instance(inst);

	shash->free(shash);
}

static int __maybe_unused crypto_shash_report(
	struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_hash rhash;
	struct shash_alg *salg = __crypto_shash_alg(alg);

	memset(&rhash, 0, sizeof(rhash));

	strscpy(rhash.type, "shash", sizeof(rhash.type));

	rhash.blocksize = alg->cra_blocksize;
	rhash.digestsize = salg->digestsize;

	return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash);
}

static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
	__maybe_unused;
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	seq_printf(m, "type         : shash\n");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
}

static int __maybe_unused crypto_shash_report_stat(
	struct sk_buff *skb, struct crypto_alg *alg)
{
	return crypto_hash_report_stat(skb, alg, "shash");
}

static const struct crypto_type crypto_shash_type = {
	.extsize = crypto_alg_extsize,
	.init_tfm = crypto_shash_init_tfm,
	.free = crypto_shash_free_instance,
#ifdef CONFIG_PROC_FS
	.show = crypto_shash_show,
#endif
#if IS_ENABLED(CONFIG_CRYPTO_USER)
	.report = crypto_shash_report,
#endif
#ifdef CONFIG_CRYPTO_STATS
	.report_stat = crypto_shash_report_stat,
#endif
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_SHASH,
	.tfmsize = offsetof(struct crypto_shash, base),
};

int crypto_grab_shash(struct crypto_shash_spawn *spawn,
		      struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask)
{
	spawn->base.frontend = &crypto_shash_type;
	return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_grab_shash);

struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);

int crypto_has_shash(const char *alg_name, u32 type, u32 mask)
{
	return crypto_type_has_alg(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_has_shash);

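/*
 * Clone a shash tfm.  A keyless algorithm only needs another reference on
 * the existing tfm; a keyed one needs a real copy, and if it also keeps
 * per-tfm state (->init_tfm() or ->cra_init) it must provide ->clone_tfm()
 * to duplicate that state.
 */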
struct crypto_shash *crypto_clone_shash(struct crypto_shash *hash)
{
	struct crypto_tfm *tfm = crypto_shash_tfm(hash);
	struct shash_alg *alg = crypto_shash_alg(hash);
	struct crypto_shash *nhash;
	int err;

	if (!crypto_shash_alg_has_setkey(alg)) {
		tfm = crypto_tfm_get(tfm);
		if (IS_ERR(tfm))
			return ERR_CAST(tfm);

		return hash;
	}

	if (!alg->clone_tfm && (alg->init_tfm || alg->base.cra_init))
		return ERR_PTR(-ENOSYS);

	nhash = crypto_clone_tfm(&crypto_shash_type, tfm);
	if (IS_ERR(nhash))
		return nhash;

	nhash->descsize = hash->descsize;

	if (alg->clone_tfm) {
		err = alg->clone_tfm(nhash, hash);
		if (err) {
			crypto_free_shash(nhash);
			return ERR_PTR(err);
		}
	}

	return nhash;
}
EXPORT_SYMBOL_GPL(crypto_clone_shash);

int hash_prepare_alg(struct hash_alg_common *alg)
{
	struct crypto_istat_hash *istat = hash_get_stat(alg);
	struct crypto_alg *base = &alg->base;

	if (alg->digestsize > HASH_MAX_DIGESTSIZE)
		return -EINVAL;

	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;

	if (IS_ENABLED(CONFIG_CRYPTO_STATS))
		memset(istat, 0, sizeof(*istat));

	return 0;
}

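/*
 * Validate a shash algorithm before registration and fill in defaults:
 * descsize and alignmask must be within bounds, export/import must come as
 * a pair, and the multibuffer fields must be consistent (->finup_mb
 * requires 1 < mb_max_msgs <= HASH_MAX_MB_MSGS).  Missing optional ops are
 * pointed at the generic helpers above.
 */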
static int shash_prepare_alg(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->halg.base;
	int err;

	if (alg->descsize > HASH_MAX_DESCSIZE)
		return -EINVAL;

	if (base->cra_alignmask > MAX_SHASH_ALIGNMASK)
		return -EINVAL;

	if ((alg->export && !alg->import) || (alg->import && !alg->export))
		return -EINVAL;

	if (alg->mb_max_msgs > 1) {
		if (alg->mb_max_msgs > HASH_MAX_MB_MSGS)
			return -EINVAL;
		if (!alg->finup_mb)
			return -EINVAL;
	} else {
		if (alg->finup_mb)
			return -EINVAL;
		alg->mb_max_msgs = 1;
	}

	err = hash_prepare_alg(&alg->halg);
	if (err)
		return err;

	base->cra_type = &crypto_shash_type;
	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

	if (!alg->finup)
		alg->finup = shash_finup_unaligned;
	if (!alg->digest)
		alg->digest = shash_digest_unaligned;
	if (!alg->export) {
		alg->export = shash_default_export;
		alg->import = shash_default_import;
		alg->halg.statesize = alg->descsize;
	}
	if (!alg->setkey)
		alg->setkey = shash_no_setkey;

	return 0;
}

int crypto_register_shash(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;
	int err;

	err = shash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);

void crypto_unregister_shash(struct shash_alg *alg)
{
	crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);

int crypto_register_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_shash(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_shash(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_shashes);

void crypto_unregister_shashes(struct shash_alg *algs, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_shash(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shashes);

int shash_register_instance(struct crypto_template *tmpl,
			    struct shash_instance *inst)
{
	int err;

	if (WARN_ON(!inst->free))
		return -EINVAL;

	err = shash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, shash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_register_instance);

void shash_free_singlespawn_instance(struct shash_instance *inst)
{
	crypto_drop_spawn(shash_instance_ctx(inst));
	kfree(inst);
}
EXPORT_SYMBOL_GPL(shash_free_singlespawn_instance);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");