/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/cryptouser.h>
#include <net/netlink.h>

#include "internal.h"

static const struct crypto_type crypto_shash_type;

int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
		    unsigned int keylen)
{
	return -ENOSYS;
}
EXPORT_SYMBOL_GPL(shash_no_setkey);

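/*
 * Bounce the key through a temporary buffer that satisfies the
 * algorithm's alignment mask before handing it to ->setkey().
 */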
static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
				  unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned long absize;
	u8 *buffer, *alignbuffer;
	int err;

	absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
	buffer = kmalloc(absize, GFP_KERNEL);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	err = shash->setkey(tfm, alignbuffer, keylen);
	kzfree(buffer);
	return err;
}

int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
			unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)key & alignmask)
		return shash_setkey_unaligned(tfm, key, keylen);

	return shash->setkey(tfm, key, keylen);
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);

static inline unsigned int shash_align_buffer_size(unsigned len,
						   unsigned long mask)
{
	typedef u8 __attribute__ ((aligned)) u8_aligned;
	return len + (mask & ~(__alignof__(u8_aligned) - 1));
}

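/*
 * Hash the misaligned head of the data through an aligned on-stack
 * buffer, then pass the remaining, now aligned, bytes straight through.
 */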
static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned int unaligned_len = alignmask + 1 -
				     ((unsigned long)data & alignmask);
	u8 ubuf[shash_align_buffer_size(unaligned_len, alignmask)]
		__attribute__ ((aligned));
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	if (unaligned_len > len)
		unaligned_len = len;

	memcpy(buf, data, unaligned_len);
	err = shash->update(desc, buf, unaligned_len);
	memset(buf, 0, unaligned_len);

	return err ?:
	       shash->update(desc, data + unaligned_len, len - unaligned_len);
}

int crypto_shash_update(struct shash_desc *desc, const u8 *data,
			unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)data & alignmask)
		return shash_update_unaligned(desc, data, len);

	return shash->update(desc, data, len);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);

static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned int ds = crypto_shash_digestsize(tfm);
	u8 ubuf[shash_align_buffer_size(ds, alignmask)]
		__attribute__ ((aligned));
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	err = shash->final(desc, buf);
	if (err)
		goto out;

	memcpy(out, buf, ds);

out:
	memset(buf, 0, ds);
	return err;
}

int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)out & alignmask)
		return shash_final_unaligned(desc, out);

	return shash->final(desc, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);

static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
				 unsigned int len, u8 *out)
{
	return crypto_shash_update(desc, data, len) ?:
	       crypto_shash_final(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
		       unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_finup_unaligned(desc, data, len, out);

	return shash->finup(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);

static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len, u8 *out)
{
	return crypto_shash_init(desc) ?:
	       crypto_shash_finup(desc, data, len, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_digest_unaligned(desc, data, len, out);

	return shash->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);

static int shash_default_export(struct shash_desc *desc, void *out)
{
	memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_default_import(struct shash_desc *desc, const void *in)
{
	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

	return crypto_shash_setkey(*ctx, key, keylen);
}

static int shash_async_init(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return crypto_shash_init(desc);
}

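/*
 * Walk the request's scatterlist and feed each mapped chunk to the
 * underlying shash ->update().
 */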
int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);

static int shash_async_update(struct ahash_request *req)
{
	return shash_ahash_update(req, ahash_request_ctx(req));
}

static int shash_async_final(struct ahash_request *req)
{
	return crypto_shash_final(ahash_request_ctx(req), req->result);
}

int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	nbytes = crypto_hash_walk_first(req, &walk);
	if (!nbytes)
		return crypto_shash_final(desc, req->result);

	do {
		nbytes = crypto_hash_walk_last(&walk) ?
			 crypto_shash_finup(desc, walk.data, nbytes,
					    req->result) :
			 crypto_shash_update(desc, walk.data, nbytes);
		nbytes = crypto_hash_walk_done(&walk, nbytes);
	} while (nbytes > 0);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);

static int shash_async_finup(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return shash_ahash_finup(req, desc);
}

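/*
 * Fast path: if the data is non-empty and lies within a single page,
 * hash it in one call; otherwise fall back to init + finup over the
 * scatterlist walk.
 */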
int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
	unsigned int nbytes = req->nbytes;
	struct scatterlist *sg;
	unsigned int offset;
	int err;

	if (nbytes &&
	    (sg = req->src, offset = sg->offset,
	     nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) {
		void *data;

		data = kmap_atomic(sg_page(sg));
		err = crypto_shash_digest(desc, data + offset, nbytes,
					  req->result);
		kunmap_atomic(data);
		crypto_yield(desc->flags);
	} else
		err = crypto_shash_init(desc) ?:
		      shash_ahash_finup(req, desc);

	return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);

static int shash_async_digest(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return shash_ahash_digest(req, desc);
}

static int shash_async_export(struct ahash_request *req, void *out)
{
	return crypto_shash_export(ahash_request_ctx(req), out);
}

static int shash_async_import(struct ahash_request *req, const void *in)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;
	desc->flags = req->base.flags;

	return crypto_shash_import(desc, in);
}

static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}

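/*
 * Wrap a shash algorithm so it can be driven through the asynchronous
 * ahash interface; the ahash transform's context holds the shash tfm.
 */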
int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	*ctx = shash;
	tfm->exit = crypto_exit_shash_ops_async;

	crt->init = shash_async_init;
	crt->update = shash_async_update;
	crt->final = shash_async_final;
	crt->finup = shash_async_finup;
	crt->digest = shash_async_digest;
	crt->setkey = shash_async_setkey;

	crt->has_setkey = alg->setkey != shash_no_setkey;

	if (alg->export)
		crt->export = shash_async_export;
	if (alg->import)
		crt->import = shash_async_import;

	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

	return 0;
}

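/*
 * Compatibility wrappers that implement the legacy crypto_hash
 * (hash_desc) interface on top of a shash transform.
 */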
static int shash_compat_setkey(struct crypto_hash *tfm, const u8 *key,
			       unsigned int keylen)
{
	struct shash_desc **descp = crypto_hash_ctx(tfm);
	struct shash_desc *desc = *descp;

	return crypto_shash_setkey(desc->tfm, key, keylen);
}

static int shash_compat_init(struct hash_desc *hdesc)
{
	struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
	struct shash_desc *desc = *descp;

	desc->flags = hdesc->flags;

	return crypto_shash_init(desc);
}

static int shash_compat_update(struct hash_desc *hdesc, struct scatterlist *sg,
			       unsigned int len)
{
	struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
	struct shash_desc *desc = *descp;
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first_compat(hdesc, &walk, sg, len);
	     nbytes > 0; nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}

static int shash_compat_final(struct hash_desc *hdesc, u8 *out)
{
	struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);

	return crypto_shash_final(*descp, out);
}

static int shash_compat_digest(struct hash_desc *hdesc, struct scatterlist *sg,
			       unsigned int nbytes, u8 *out)
{
	unsigned int offset = sg->offset;
	int err;

	if (nbytes < min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset)) {
		struct shash_desc **descp = crypto_hash_ctx(hdesc->tfm);
		struct shash_desc *desc = *descp;
		void *data;

		desc->flags = hdesc->flags;

		data = kmap_atomic(sg_page(sg));
		err = crypto_shash_digest(desc, data + offset, nbytes, out);
		kunmap_atomic(data);
		crypto_yield(desc->flags);
		goto out;
	}

	err = shash_compat_init(hdesc);
	if (err)
		goto out;

	err = shash_compat_update(hdesc, sg, nbytes);
	if (err)
		goto out;

	err = shash_compat_final(hdesc, out);

out:
	return err;
}

static void crypto_exit_shash_ops_compat(struct crypto_tfm *tfm)
{
	struct shash_desc **descp = crypto_tfm_ctx(tfm);
	struct shash_desc *desc = *descp;

	crypto_free_shash(desc->tfm);
	kzfree(desc);
}

static int crypto_init_shash_ops_compat(struct crypto_tfm *tfm)
{
	struct hash_tfm *crt = &tfm->crt_hash;
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct shash_desc **descp = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;
	struct shash_desc *desc;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(shash),
		       GFP_KERNEL);
	if (!desc) {
		crypto_free_shash(shash);
		return -ENOMEM;
	}

	*descp = desc;
	desc->tfm = shash;
	tfm->exit = crypto_exit_shash_ops_compat;

	crt->init = shash_compat_init;
	crt->update = shash_compat_update;
	crt->final  = shash_compat_final;
	crt->digest = shash_compat_digest;
	crt->setkey = shash_compat_setkey;

	crt->digestsize = alg->digestsize;

	return 0;
}

static int crypto_init_shash_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	switch (mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_HASH_MASK:
		return crypto_init_shash_ops_compat(tfm);
	}

	return -EINVAL;
}

static unsigned int crypto_shash_ctxsize(struct crypto_alg *alg, u32 type,
					 u32 mask)
{
	switch (mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_HASH_MASK:
		return sizeof(struct shash_desc *);
	}

	return 0;
}

static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);

	hash->descsize = crypto_shash_alg(hash)->descsize;
	return 0;
}

static unsigned int crypto_shash_extsize(struct crypto_alg *alg)
{
	return alg->cra_ctxsize;
}

#ifdef CONFIG_NET
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_hash rhash;
	struct shash_alg *salg = __crypto_shash_alg(alg);

	strncpy(rhash.type, "shash", sizeof(rhash.type));

	rhash.blocksize = alg->cra_blocksize;
	rhash.digestsize = salg->digestsize;

	if (nla_put(skb, CRYPTOCFGA_REPORT_HASH,
		    sizeof(struct crypto_report_hash), &rhash))
		goto nla_put_failure;
	return 0;

nla_put_failure:
	return -EMSGSIZE;
}
#else
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	return -ENOSYS;
}
#endif

static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
	__attribute__ ((unused));
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	seq_printf(m, "type         : shash\n");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
}

static const struct crypto_type crypto_shash_type = {
	.ctxsize = crypto_shash_ctxsize,
	.extsize = crypto_shash_extsize,
	.init = crypto_init_shash_ops,
	.init_tfm = crypto_shash_init_tfm,
#ifdef CONFIG_PROC_FS
	.show = crypto_shash_show,
#endif
	.report = crypto_shash_report,
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_SHASH,
	.tfmsize = offsetof(struct crypto_shash, base),
};

struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);

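/*
 * Validate the algorithm's size limits and fill in default operations
 * before registration.
 */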
static int shash_prepare_alg(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;

	if (alg->digestsize > PAGE_SIZE / 8 ||
	    alg->descsize > PAGE_SIZE / 8 ||
	    alg->statesize > PAGE_SIZE / 8)
		return -EINVAL;

	base->cra_type = &crypto_shash_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

	if (!alg->finup)
		alg->finup = shash_finup_unaligned;
	if (!alg->digest)
		alg->digest = shash_digest_unaligned;
	if (!alg->export) {
		alg->export = shash_default_export;
		alg->import = shash_default_import;
		alg->statesize = alg->descsize;
	}
	if (!alg->setkey)
		alg->setkey = shash_no_setkey;

	return 0;
}

int crypto_register_shash(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;
	int err;

	err = shash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);

int crypto_unregister_shash(struct shash_alg *alg)
{
	return crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);

int crypto_register_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_shash(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_shash(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_shashes);

int crypto_unregister_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = count - 1; i >= 0; --i) {
		ret = crypto_unregister_shash(&algs[i]);
		if (ret)
			pr_err("Failed to unregister %s %s: %d\n",
			       algs[i].base.cra_driver_name,
			       algs[i].base.cra_name, ret);
	}

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_unregister_shashes);

int shash_register_instance(struct crypto_template *tmpl,
			    struct shash_instance *inst)
{
	int err;

	err = shash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, shash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_register_instance);

void shash_free_instance(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(shash_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_free_instance);

int crypto_init_shash_spawn(struct crypto_shash_spawn *spawn,
			    struct shash_alg *alg,
			    struct crypto_instance *inst)
{
	return crypto_init_spawn2(&spawn->base, &alg->base, inst,
				  &crypto_shash_type);
}
EXPORT_SYMBOL_GPL(crypto_init_shash_spawn);

struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	alg = crypto_attr_alg2(rta, &crypto_shash_type, type, mask);
	return IS_ERR(alg) ? ERR_CAST(alg) :
	       container_of(alg, struct shash_alg, base);
}
EXPORT_SYMBOL_GPL(shash_attr_alg);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");