1 // SPDX-License-Identifier: GPL-2.0-or-later
2 /*
3  * Cryptographic API for algorithms (i.e., low-level API).
4  *
5  * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
6  */
7 
8 #include <crypto/algapi.h>
9 #include <linux/err.h>
10 #include <linux/errno.h>
11 #include <linux/fips.h>
12 #include <linux/init.h>
13 #include <linux/kernel.h>
14 #include <linux/list.h>
15 #include <linux/module.h>
16 #include <linux/rtnetlink.h>
17 #include <linux/slab.h>
18 #include <linux/string.h>
19 
20 #include "internal.h"
21 
22 static LIST_HEAD(crypto_template_list);
23 
24 static inline void crypto_check_module_sig(struct module *mod)
25 {
26 	if (fips_enabled && mod && !module_sig_ok(mod))
27 		panic("Module %s signature verification failed in FIPS mode\n",
28 		      module_name(mod));
29 }
30 
31 static int crypto_check_alg(struct crypto_alg *alg)
32 {
33 	crypto_check_module_sig(alg->cra_module);
34 
35 	if (!alg->cra_name[0] || !alg->cra_driver_name[0])
36 		return -EINVAL;
37 
38 	if (alg->cra_alignmask & (alg->cra_alignmask + 1))
39 		return -EINVAL;
40 
41 	/* General maximums for all algs. */
42 	if (alg->cra_alignmask > MAX_ALGAPI_ALIGNMASK)
43 		return -EINVAL;
44 
45 	if (alg->cra_blocksize > MAX_ALGAPI_BLOCKSIZE)
46 		return -EINVAL;
47 
48 	/* Lower maximums for specific alg types. */
49 	if (!alg->cra_type && (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
50 			       CRYPTO_ALG_TYPE_CIPHER) {
51 		if (alg->cra_alignmask > MAX_CIPHER_ALIGNMASK)
52 			return -EINVAL;
53 
54 		if (alg->cra_blocksize > MAX_CIPHER_BLOCKSIZE)
55 			return -EINVAL;
56 	}
57 
58 	if (alg->cra_priority < 0)
59 		return -EINVAL;
60 
61 	refcount_set(&alg->cra_refcnt, 1);
62 
63 	return 0;
64 }
65 
66 static void crypto_free_instance(struct crypto_instance *inst)
67 {
68 	inst->alg.cra_type->free(inst);
69 }
70 
71 static void crypto_destroy_instance(struct crypto_alg *alg)
72 {
73 	struct crypto_instance *inst = (void *)alg;
74 	struct crypto_template *tmpl = inst->tmpl;
75 
76 	crypto_free_instance(inst);
77 	crypto_tmpl_put(tmpl);
78 }
79 
80 /*
81  * This function adds a spawn to the list secondary_spawns which
82  * will be used at the end of crypto_remove_spawns to unregister
83  * instances, unless the spawn happens to be one that is depended
84  * on by the new algorithm (nalg in crypto_remove_spawns).
85  *
86  * This function is also responsible for resurrecting any algorithms
87  * in the dependency chain of nalg by unsetting n->dead.
88  */
89 static struct list_head *crypto_more_spawns(struct crypto_alg *alg,
90 					    struct list_head *stack,
91 					    struct list_head *top,
92 					    struct list_head *secondary_spawns)
93 {
94 	struct crypto_spawn *spawn, *n;
95 
96 	spawn = list_first_entry_or_null(stack, struct crypto_spawn, list);
97 	if (!spawn)
98 		return NULL;
99 
100 	n = list_prev_entry(spawn, list);
101 	list_move(&spawn->list, secondary_spawns);
102 
103 	if (list_is_last(&n->list, stack))
104 		return top;
105 
106 	n = list_next_entry(n, list);
107 	if (!spawn->dead)
108 		n->dead = false;
109 
110 	return &n->inst->alg.cra_users;
111 }
112 
113 static void crypto_remove_instance(struct crypto_instance *inst,
114 				   struct list_head *list)
115 {
116 	struct crypto_template *tmpl = inst->tmpl;
117 
118 	if (crypto_is_dead(&inst->alg))
119 		return;
120 
121 	inst->alg.cra_flags |= CRYPTO_ALG_DEAD;
122 
123 	if (!tmpl || !crypto_tmpl_get(tmpl))
124 		return;
125 
126 	list_move(&inst->alg.cra_list, list);
127 	hlist_del(&inst->list);
128 	inst->alg.cra_destroy = crypto_destroy_instance;
129 
130 	BUG_ON(!list_empty(&inst->alg.cra_users));
131 }
132 
133 /*
134  * Given an algorithm alg, remove all algorithms that depend on it
135  * through spawns.  If nalg is not null, then exempt any algorithms
136  * that are depended on by nalg.  This is useful when nalg itself
137  * depends on alg.
138  */
139 void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list,
140 			  struct crypto_alg *nalg)
141 {
142 	u32 new_type = (nalg ?: alg)->cra_flags;
143 	struct crypto_spawn *spawn, *n;
144 	LIST_HEAD(secondary_spawns);
145 	struct list_head *spawns;
146 	LIST_HEAD(stack);
147 	LIST_HEAD(top);
148 
149 	spawns = &alg->cra_users;
150 	list_for_each_entry_safe(spawn, n, spawns, list) {
151 		if ((spawn->alg->cra_flags ^ new_type) & spawn->mask)
152 			continue;
153 
154 		list_move(&spawn->list, &top);
155 	}
156 
157 	/*
158 	 * Perform a depth-first walk starting from alg through
159 	 * the cra_users tree.  The list stack records the path
160 	 * from alg to the current spawn.
161 	 */
162 	spawns = &top;
163 	do {
164 		while (!list_empty(spawns)) {
165 			struct crypto_instance *inst;
166 
167 			spawn = list_first_entry(spawns, struct crypto_spawn,
168 						 list);
169 			inst = spawn->inst;
170 
171 			list_move(&spawn->list, &stack);
172 			spawn->dead = !spawn->registered || &inst->alg != nalg;
173 
174 			if (!spawn->registered)
175 				break;
176 
177 			BUG_ON(&inst->alg == alg);
178 
179 			if (&inst->alg == nalg)
180 				break;
181 
182 			spawns = &inst->alg.cra_users;
183 
184 			/*
185 			 * Even if spawn->registered is true, the
186 			 * instance itself may still be unregistered.
187 			 * This is because it may have failed during
188 			 * registration.  Therefore we still need to
189 			 * make the following test.
190 			 *
191 			 * We may encounter an unregistered instance here, since
192 			 * an instance's spawns are set up prior to the instance
193 			 * being registered.  An unregistered instance will have
194 			 * NULL ->cra_users.next, since ->cra_users isn't
195 			 * properly initialized until registration.  But an
196 			 * unregistered instance cannot have any users, so treat
197 			 * it the same as ->cra_users being empty.
198 			 */
199 			if (spawns->next == NULL)
200 				break;
201 		}
202 	} while ((spawns = crypto_more_spawns(alg, &stack, &top,
203 					      &secondary_spawns)));
204 
205 	/*
206 	 * Remove all instances that are marked as dead.  Also
207 	 * complete the resurrection of the others by moving them
208 	 * back to the cra_users list.
209 	 */
210 	list_for_each_entry_safe(spawn, n, &secondary_spawns, list) {
211 		if (!spawn->dead)
212 			list_move(&spawn->list, &spawn->alg->cra_users);
213 		else if (spawn->registered)
214 			crypto_remove_instance(spawn->inst, list);
215 	}
216 }
217 EXPORT_SYMBOL_GPL(crypto_remove_spawns);
218 
219 static struct crypto_larval *__crypto_register_alg(struct crypto_alg *alg)
220 {
221 	struct crypto_alg *q;
222 	struct crypto_larval *larval;
223 	int ret = -EAGAIN;
224 
225 	if (crypto_is_dead(alg))
226 		goto err;
227 
228 	INIT_LIST_HEAD(&alg->cra_users);
229 
230 	/* No cheating! */
231 	alg->cra_flags &= ~CRYPTO_ALG_TESTED;
232 
233 	ret = -EEXIST;
234 
235 	list_for_each_entry(q, &crypto_alg_list, cra_list) {
236 		if (q == alg)
237 			goto err;
238 
239 		if (crypto_is_moribund(q))
240 			continue;
241 
242 		if (crypto_is_larval(q)) {
243 			if (!strcmp(alg->cra_driver_name, q->cra_driver_name))
244 				goto err;
245 			continue;
246 		}
247 
248 		if (!strcmp(q->cra_driver_name, alg->cra_name) ||
249 		    !strcmp(q->cra_driver_name, alg->cra_driver_name) ||
250 		    !strcmp(q->cra_name, alg->cra_driver_name))
251 			goto err;
252 	}
253 
254 	larval = crypto_larval_alloc(alg->cra_name,
255 				     alg->cra_flags | CRYPTO_ALG_TESTED, 0);
256 	if (IS_ERR(larval))
257 		goto out;
258 
259 	ret = -ENOENT;
260 	larval->adult = crypto_mod_get(alg);
261 	if (!larval->adult)
262 		goto free_larval;
263 
264 	refcount_set(&larval->alg.cra_refcnt, 1);
265 	memcpy(larval->alg.cra_driver_name, alg->cra_driver_name,
266 	       CRYPTO_MAX_ALG_NAME);
267 	larval->alg.cra_priority = alg->cra_priority;
268 
269 	list_add(&alg->cra_list, &crypto_alg_list);
270 	list_add(&larval->alg.cra_list, &crypto_alg_list);
271 
272 	crypto_stats_init(alg);
273 
274 out:
275 	return larval;
276 
277 free_larval:
278 	kfree(larval);
279 err:
280 	larval = ERR_PTR(ret);
281 	goto out;
282 }
283 
284 void crypto_alg_tested(const char *name, int err)
285 {
286 	struct crypto_larval *test;
287 	struct crypto_alg *alg;
288 	struct crypto_alg *q;
289 	LIST_HEAD(list);
290 	bool best;
291 
292 	down_write(&crypto_alg_sem);
293 	list_for_each_entry(q, &crypto_alg_list, cra_list) {
294 		if (crypto_is_moribund(q) || !crypto_is_larval(q))
295 			continue;
296 
297 		test = (struct crypto_larval *)q;
298 
299 		if (!strcmp(q->cra_driver_name, name))
300 			goto found;
301 	}
302 
303 	pr_err("alg: Unexpected test result for %s: %d\n", name, err);
304 	goto unlock;
305 
306 found:
307 	q->cra_flags |= CRYPTO_ALG_DEAD;
308 	alg = test->adult;
309 	if (err || list_empty(&alg->cra_list))
310 		goto complete;
311 
312 	alg->cra_flags |= CRYPTO_ALG_TESTED;
313 
314 	/* Only satisfy larval waiters if we are the best. */
315 	best = true;
316 	list_for_each_entry(q, &crypto_alg_list, cra_list) {
317 		if (crypto_is_moribund(q) || !crypto_is_larval(q))
318 			continue;
319 
320 		if (strcmp(alg->cra_name, q->cra_name))
321 			continue;
322 
323 		if (q->cra_priority > alg->cra_priority) {
324 			best = false;
325 			break;
326 		}
327 	}
328 
329 	list_for_each_entry(q, &crypto_alg_list, cra_list) {
330 		if (q == alg)
331 			continue;
332 
333 		if (crypto_is_moribund(q))
334 			continue;
335 
336 		if (crypto_is_larval(q)) {
337 			struct crypto_larval *larval = (void *)q;
338 
339 			/*
340 			 * Check to see if either our generic name or
341 			 * specific name can satisfy the name requested
342 			 * by the larval entry q.
343 			 */
344 			if (strcmp(alg->cra_name, q->cra_name) &&
345 			    strcmp(alg->cra_driver_name, q->cra_name))
346 				continue;
347 
348 			if (larval->adult)
349 				continue;
350 			if ((q->cra_flags ^ alg->cra_flags) & larval->mask)
351 				continue;
352 
353 			if (best && crypto_mod_get(alg))
354 				larval->adult = alg;
355 			else
356 				larval->adult = ERR_PTR(-EAGAIN);
357 
358 			continue;
359 		}
360 
361 		if (strcmp(alg->cra_name, q->cra_name))
362 			continue;
363 
364 		if (strcmp(alg->cra_driver_name, q->cra_driver_name) &&
365 		    q->cra_priority > alg->cra_priority)
366 			continue;
367 
368 		crypto_remove_spawns(q, &list, alg);
369 	}
370 
371 complete:
372 	complete_all(&test->completion);
373 
374 unlock:
375 	up_write(&crypto_alg_sem);
376 
377 	crypto_remove_final(&list);
378 }
379 EXPORT_SYMBOL_GPL(crypto_alg_tested);
380 
381 void crypto_remove_final(struct list_head *list)
382 {
383 	struct crypto_alg *alg;
384 	struct crypto_alg *n;
385 
386 	list_for_each_entry_safe(alg, n, list, cra_list) {
387 		list_del_init(&alg->cra_list);
388 		crypto_alg_put(alg);
389 	}
390 }
391 EXPORT_SYMBOL_GPL(crypto_remove_final);
392 
393 static void crypto_wait_for_test(struct crypto_larval *larval)
394 {
395 	int err;
396 
397 	err = crypto_probing_notify(CRYPTO_MSG_ALG_REGISTER, larval->adult);
398 	if (err != NOTIFY_STOP) {
399 		if (WARN_ON(err != NOTIFY_DONE))
400 			goto out;
401 		crypto_alg_tested(larval->alg.cra_driver_name, 0);
402 	}
403 
404 	err = wait_for_completion_killable(&larval->completion);
405 	WARN_ON(err);
406 	if (!err)
407 		crypto_notify(CRYPTO_MSG_ALG_LOADED, larval);
408 
409 out:
410 	crypto_larval_kill(&larval->alg);
411 }
412 
413 int crypto_register_alg(struct crypto_alg *alg)
414 {
415 	struct crypto_larval *larval;
416 	int err;
417 
418 	alg->cra_flags &= ~CRYPTO_ALG_DEAD;
419 	err = crypto_check_alg(alg);
420 	if (err)
421 		return err;
422 
423 	down_write(&crypto_alg_sem);
424 	larval = __crypto_register_alg(alg);
425 	up_write(&crypto_alg_sem);
426 
427 	if (IS_ERR(larval))
428 		return PTR_ERR(larval);
429 
430 	crypto_wait_for_test(larval);
431 	return 0;
432 }
433 EXPORT_SYMBOL_GPL(crypto_register_alg);
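/*
 * Usage sketch (illustrative only, not part of this file): a driver
 * normally fills in a statically allocated struct crypto_alg and
 * registers it from its module init function.  The names and context
 * structure below are hypothetical.
 *
 *	static struct crypto_alg my_alg = {
 *		.cra_name		= "myalg",
 *		.cra_driver_name	= "myalg-mydriver",
 *		.cra_priority		= 100,
 *		.cra_blocksize		= 16,
 *		.cra_ctxsize		= sizeof(struct my_ctx),
 *		.cra_module		= THIS_MODULE,
 *	};
 *
 *	static int __init my_mod_init(void)
 *	{
 *		return crypto_register_alg(&my_alg);
 *	}
 *
 * crypto_check_alg() above rejects an empty cra_name/cra_driver_name,
 * an alignmask that is not a power of two minus one, and oversized
 * alignmask/blocksize values before the algorithm reaches the list.
 */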
434 
435 static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list)
436 {
437 	if (unlikely(list_empty(&alg->cra_list)))
438 		return -ENOENT;
439 
440 	alg->cra_flags |= CRYPTO_ALG_DEAD;
441 
442 	list_del_init(&alg->cra_list);
443 	crypto_remove_spawns(alg, list, NULL);
444 
445 	return 0;
446 }
447 
448 void crypto_unregister_alg(struct crypto_alg *alg)
449 {
450 	int ret;
451 	LIST_HEAD(list);
452 
453 	down_write(&crypto_alg_sem);
454 	ret = crypto_remove_alg(alg, &list);
455 	up_write(&crypto_alg_sem);
456 
457 	if (WARN(ret, "Algorithm %s is not registered", alg->cra_driver_name))
458 		return;
459 
460 	if (WARN_ON(refcount_read(&alg->cra_refcnt) != 1))
461 		return;
462 
463 	if (alg->cra_destroy)
464 		alg->cra_destroy(alg);
465 
466 	crypto_remove_final(&list);
467 }
468 EXPORT_SYMBOL_GPL(crypto_unregister_alg);
469 
470 int crypto_register_algs(struct crypto_alg *algs, int count)
471 {
472 	int i, ret;
473 
474 	for (i = 0; i < count; i++) {
475 		ret = crypto_register_alg(&algs[i]);
476 		if (ret)
477 			goto err;
478 	}
479 
480 	return 0;
481 
482 err:
483 	for (--i; i >= 0; --i)
484 		crypto_unregister_alg(&algs[i]);
485 
486 	return ret;
487 }
488 EXPORT_SYMBOL_GPL(crypto_register_algs);
489 
490 void crypto_unregister_algs(struct crypto_alg *algs, int count)
491 {
492 	int i;
493 
494 	for (i = 0; i < count; i++)
495 		crypto_unregister_alg(&algs[i]);
496 }
497 EXPORT_SYMBOL_GPL(crypto_unregister_algs);
498 
499 int crypto_register_template(struct crypto_template *tmpl)
500 {
501 	struct crypto_template *q;
502 	int err = -EEXIST;
503 
504 	down_write(&crypto_alg_sem);
505 
506 	crypto_check_module_sig(tmpl->module);
507 
508 	list_for_each_entry(q, &crypto_template_list, list) {
509 		if (q == tmpl)
510 			goto out;
511 	}
512 
513 	list_add(&tmpl->list, &crypto_template_list);
514 	err = 0;
515 out:
516 	up_write(&crypto_alg_sem);
517 	return err;
518 }
519 EXPORT_SYMBOL_GPL(crypto_register_template);
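/*
 * Usage sketch (illustrative only): a template such as "cbc" registers a
 * struct crypto_template from module init; instances of the template are
 * created later through its ->create() callback and
 * crypto_register_instance().  "mytmpl" and my_tmpl_create() are
 * hypothetical.
 *
 *	static struct crypto_template my_tmpl = {
 *		.name	= "mytmpl",
 *		.create	= my_tmpl_create,
 *		.module	= THIS_MODULE,
 *	};
 *
 *	err = crypto_register_template(&my_tmpl);
 */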
520 
521 int crypto_register_templates(struct crypto_template *tmpls, int count)
522 {
523 	int i, err;
524 
525 	for (i = 0; i < count; i++) {
526 		err = crypto_register_template(&tmpls[i]);
527 		if (err)
528 			goto out;
529 	}
530 	return 0;
531 
532 out:
533 	for (--i; i >= 0; --i)
534 		crypto_unregister_template(&tmpls[i]);
535 	return err;
536 }
537 EXPORT_SYMBOL_GPL(crypto_register_templates);
538 
539 void crypto_unregister_template(struct crypto_template *tmpl)
540 {
541 	struct crypto_instance *inst;
542 	struct hlist_node *n;
543 	struct hlist_head *list;
544 	LIST_HEAD(users);
545 
546 	down_write(&crypto_alg_sem);
547 
548 	BUG_ON(list_empty(&tmpl->list));
549 	list_del_init(&tmpl->list);
550 
551 	list = &tmpl->instances;
552 	hlist_for_each_entry(inst, list, list) {
553 		int err = crypto_remove_alg(&inst->alg, &users);
554 
555 		BUG_ON(err);
556 	}
557 
558 	up_write(&crypto_alg_sem);
559 
560 	hlist_for_each_entry_safe(inst, n, list, list) {
561 		BUG_ON(refcount_read(&inst->alg.cra_refcnt) != 1);
562 		crypto_free_instance(inst);
563 	}
564 	crypto_remove_final(&users);
565 }
566 EXPORT_SYMBOL_GPL(crypto_unregister_template);
567 
568 void crypto_unregister_templates(struct crypto_template *tmpls, int count)
569 {
570 	int i;
571 
572 	for (i = count - 1; i >= 0; --i)
573 		crypto_unregister_template(&tmpls[i]);
574 }
575 EXPORT_SYMBOL_GPL(crypto_unregister_templates);
576 
577 static struct crypto_template *__crypto_lookup_template(const char *name)
578 {
579 	struct crypto_template *q, *tmpl = NULL;
580 
581 	down_read(&crypto_alg_sem);
582 	list_for_each_entry(q, &crypto_template_list, list) {
583 		if (strcmp(q->name, name))
584 			continue;
585 		if (unlikely(!crypto_tmpl_get(q)))
586 			continue;
587 
588 		tmpl = q;
589 		break;
590 	}
591 	up_read(&crypto_alg_sem);
592 
593 	return tmpl;
594 }
595 
596 struct crypto_template *crypto_lookup_template(const char *name)
597 {
598 	return try_then_request_module(__crypto_lookup_template(name),
599 				       "crypto-%s", name);
600 }
601 EXPORT_SYMBOL_GPL(crypto_lookup_template);
602 
603 int crypto_register_instance(struct crypto_template *tmpl,
604 			     struct crypto_instance *inst)
605 {
606 	struct crypto_larval *larval;
607 	struct crypto_spawn *spawn;
608 	int err;
609 
610 	err = crypto_check_alg(&inst->alg);
611 	if (err)
612 		return err;
613 
614 	inst->alg.cra_module = tmpl->module;
615 	inst->alg.cra_flags |= CRYPTO_ALG_INSTANCE;
616 
617 	down_write(&crypto_alg_sem);
618 
619 	larval = ERR_PTR(-EAGAIN);
620 	for (spawn = inst->spawns; spawn;) {
621 		struct crypto_spawn *next;
622 
623 		if (spawn->dead)
624 			goto unlock;
625 
626 		next = spawn->next;
627 		spawn->inst = inst;
628 		spawn->registered = true;
629 
630 		crypto_mod_put(spawn->alg);
631 
632 		spawn = next;
633 	}
634 
635 	larval = __crypto_register_alg(&inst->alg);
636 	if (IS_ERR(larval))
637 		goto unlock;
638 
639 	hlist_add_head(&inst->list, &tmpl->instances);
640 	inst->tmpl = tmpl;
641 
642 unlock:
643 	up_write(&crypto_alg_sem);
644 
645 	err = PTR_ERR(larval);
646 	if (IS_ERR(larval))
647 		goto err;
648 
649 	crypto_wait_for_test(larval);
650 	err = 0;
651 
652 err:
653 	return err;
654 }
655 EXPORT_SYMBOL_GPL(crypto_register_instance);
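/*
 * Usage sketch (illustrative only): a template's ->create() callback
 * typically allocates the instance, grabs its inner algorithm(s) with
 * crypto_grab_spawn() (or a type-specific wrapper), fills in the
 * algorithm ops and names, and then calls crypto_register_instance().
 * inst and my_free_inst() are hypothetical here.
 *
 *	err = crypto_register_instance(tmpl, inst);
 *	if (err)
 *		my_free_inst(inst);
 */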
656 
657 void crypto_unregister_instance(struct crypto_instance *inst)
658 {
659 	LIST_HEAD(list);
660 
661 	down_write(&crypto_alg_sem);
662 
663 	crypto_remove_spawns(&inst->alg, &list, NULL);
664 	crypto_remove_instance(inst, &list);
665 
666 	up_write(&crypto_alg_sem);
667 
668 	crypto_remove_final(&list);
669 }
670 EXPORT_SYMBOL_GPL(crypto_unregister_instance);
671 
672 int crypto_grab_spawn(struct crypto_spawn *spawn, struct crypto_instance *inst,
673 		      const char *name, u32 type, u32 mask)
674 {
675 	struct crypto_alg *alg;
676 	int err = -EAGAIN;
677 
678 	if (WARN_ON_ONCE(inst == NULL))
679 		return -EINVAL;
680 
681 	/* Allow the result of crypto_attr_alg_name() to be passed directly */
682 	if (IS_ERR(name))
683 		return PTR_ERR(name);
684 
685 	alg = crypto_find_alg(name, spawn->frontend, type, mask);
686 	if (IS_ERR(alg))
687 		return PTR_ERR(alg);
688 
689 	down_write(&crypto_alg_sem);
690 	if (!crypto_is_moribund(alg)) {
691 		list_add(&spawn->list, &alg->cra_users);
692 		spawn->alg = alg;
693 		spawn->mask = mask;
694 		spawn->next = inst->spawns;
695 		inst->spawns = spawn;
696 		inst->alg.cra_flags |=
697 			(alg->cra_flags & CRYPTO_ALG_INHERITED_FLAGS);
698 		err = 0;
699 	}
700 	up_write(&crypto_alg_sem);
701 	if (err)
702 		crypto_mod_put(alg);
703 	return err;
704 }
705 EXPORT_SYMBOL_GPL(crypto_grab_spawn);
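/*
 * Usage sketch (illustrative only): inside a template's ->create()
 * callback, the spawn embedded in the instance context is bound to the
 * inner algorithm named by the template parameters.  spawn, inst, tb and
 * mask are hypothetical locals of that callback.
 *
 *	err = crypto_grab_spawn(spawn, inst,
 *				crypto_attr_alg_name(tb[1]), 0, mask);
 *	if (err)
 *		goto err_free_inst;
 *
 * As noted above, the result of crypto_attr_alg_name() may be passed in
 * directly: an ERR_PTR() name is simply returned as the error code.
 */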
706 
707 void crypto_drop_spawn(struct crypto_spawn *spawn)
708 {
709 	if (!spawn->alg) /* not yet initialized? */
710 		return;
711 
712 	down_write(&crypto_alg_sem);
713 	if (!spawn->dead)
714 		list_del(&spawn->list);
715 	up_write(&crypto_alg_sem);
716 
717 	if (!spawn->registered)
718 		crypto_mod_put(spawn->alg);
719 }
720 EXPORT_SYMBOL_GPL(crypto_drop_spawn);
721 
722 static struct crypto_alg *crypto_spawn_alg(struct crypto_spawn *spawn)
723 {
724 	struct crypto_alg *alg = ERR_PTR(-EAGAIN);
725 	struct crypto_alg *target;
726 	bool shoot = false;
727 
728 	down_read(&crypto_alg_sem);
729 	if (!spawn->dead) {
730 		alg = spawn->alg;
731 		if (!crypto_mod_get(alg)) {
732 			target = crypto_alg_get(alg);
733 			shoot = true;
734 			alg = ERR_PTR(-EAGAIN);
735 		}
736 	}
737 	up_read(&crypto_alg_sem);
738 
739 	if (shoot) {
740 		crypto_shoot_alg(target);
741 		crypto_alg_put(target);
742 	}
743 
744 	return alg;
745 }
746 
747 struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
748 				    u32 mask)
749 {
750 	struct crypto_alg *alg;
751 	struct crypto_tfm *tfm;
752 
753 	alg = crypto_spawn_alg(spawn);
754 	if (IS_ERR(alg))
755 		return ERR_CAST(alg);
756 
757 	tfm = ERR_PTR(-EINVAL);
758 	if (unlikely((alg->cra_flags ^ type) & mask))
759 		goto out_put_alg;
760 
761 	tfm = __crypto_alloc_tfm(alg, type, mask);
762 	if (IS_ERR(tfm))
763 		goto out_put_alg;
764 
765 	return tfm;
766 
767 out_put_alg:
768 	crypto_mod_put(alg);
769 	return tfm;
770 }
771 EXPORT_SYMBOL_GPL(crypto_spawn_tfm);
772 
773 void *crypto_spawn_tfm2(struct crypto_spawn *spawn)
774 {
775 	struct crypto_alg *alg;
776 	struct crypto_tfm *tfm;
777 
778 	alg = crypto_spawn_alg(spawn);
779 	if (IS_ERR(alg))
780 		return ERR_CAST(alg);
781 
782 	tfm = crypto_create_tfm(alg, spawn->frontend);
783 	if (IS_ERR(tfm))
784 		goto out_put_alg;
785 
786 	return tfm;
787 
788 out_put_alg:
789 	crypto_mod_put(alg);
790 	return tfm;
791 }
792 EXPORT_SYMBOL_GPL(crypto_spawn_tfm2);
793 
794 int crypto_register_notifier(struct notifier_block *nb)
795 {
796 	return blocking_notifier_chain_register(&crypto_chain, nb);
797 }
798 EXPORT_SYMBOL_GPL(crypto_register_notifier);
799 
800 int crypto_unregister_notifier(struct notifier_block *nb)
801 {
802 	return blocking_notifier_chain_unregister(&crypto_chain, nb);
803 }
804 EXPORT_SYMBOL_GPL(crypto_unregister_notifier);
805 
806 struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb)
807 {
808 	struct rtattr *rta = tb[0];
809 	struct crypto_attr_type *algt;
810 
811 	if (!rta)
812 		return ERR_PTR(-ENOENT);
813 	if (RTA_PAYLOAD(rta) < sizeof(*algt))
814 		return ERR_PTR(-EINVAL);
815 	if (rta->rta_type != CRYPTOA_TYPE)
816 		return ERR_PTR(-EINVAL);
817 
818 	algt = RTA_DATA(rta);
819 
820 	return algt;
821 }
822 EXPORT_SYMBOL_GPL(crypto_get_attr_type);
823 
824 /**
825  * crypto_check_attr_type() - check algorithm type and compute inherited mask
826  * @tb: the template parameters
827  * @type: the algorithm type the template would be instantiated as
828  * @mask_ret: (output) the mask that should be passed to crypto_grab_*()
829  *	      to restrict the flags of any inner algorithms
830  *
831  * Validate that the algorithm type the user requested is compatible with the
832  * one the template would actually be instantiated as.  E.g., if the user is
833  * doing crypto_alloc_shash("cbc(aes)", ...), this would return an error because
834  * the "cbc" template creates an "skcipher" algorithm, not an "shash" algorithm.
835  *
836  * Also compute the mask to use to restrict the flags of any inner algorithms.
837  *
838  * Return: 0 on success; -errno on failure
839  */
840 int crypto_check_attr_type(struct rtattr **tb, u32 type, u32 *mask_ret)
841 {
842 	struct crypto_attr_type *algt;
843 
844 	algt = crypto_get_attr_type(tb);
845 	if (IS_ERR(algt))
846 		return PTR_ERR(algt);
847 
848 	if ((algt->type ^ type) & algt->mask)
849 		return -EINVAL;
850 
851 	*mask_ret = crypto_algt_inherited_mask(algt);
852 	return 0;
853 }
854 EXPORT_SYMBOL_GPL(crypto_check_attr_type);
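/*
 * Usage sketch (illustrative only): a template that instantiates as an
 * skcipher would normally begin its ->create() callback with
 *
 *	u32 mask;
 *
 *	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER, &mask);
 *	if (err)
 *		return err;
 *
 * and then pass mask to its crypto_grab_*() calls so that inherited
 * flags are restricted consistently.
 */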
855 
856 const char *crypto_attr_alg_name(struct rtattr *rta)
857 {
858 	struct crypto_attr_alg *alga;
859 
860 	if (!rta)
861 		return ERR_PTR(-ENOENT);
862 	if (RTA_PAYLOAD(rta) < sizeof(*alga))
863 		return ERR_PTR(-EINVAL);
864 	if (rta->rta_type != CRYPTOA_ALG)
865 		return ERR_PTR(-EINVAL);
866 
867 	alga = RTA_DATA(rta);
868 	alga->name[CRYPTO_MAX_ALG_NAME - 1] = 0;
869 
870 	return alga->name;
871 }
872 EXPORT_SYMBOL_GPL(crypto_attr_alg_name);
873 
874 int crypto_attr_u32(struct rtattr *rta, u32 *num)
875 {
876 	struct crypto_attr_u32 *nu32;
877 
878 	if (!rta)
879 		return -ENOENT;
880 	if (RTA_PAYLOAD(rta) < sizeof(*nu32))
881 		return -EINVAL;
882 	if (rta->rta_type != CRYPTOA_U32)
883 		return -EINVAL;
884 
885 	nu32 = RTA_DATA(rta);
886 	*num = nu32->num;
887 
888 	return 0;
889 }
890 EXPORT_SYMBOL_GPL(crypto_attr_u32);
891 
892 int crypto_inst_setname(struct crypto_instance *inst, const char *name,
893 			struct crypto_alg *alg)
894 {
895 	if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name,
896 		     alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
897 		return -ENAMETOOLONG;
898 
899 	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s(%s)",
900 		     name, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
901 		return -ENAMETOOLONG;
902 
903 	return 0;
904 }
905 EXPORT_SYMBOL_GPL(crypto_inst_setname);
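/*
 * Example (illustrative): for a template named "cbc" wrapping an
 * algorithm with cra_name "aes" and cra_driver_name "aes-generic",
 * crypto_inst_setname() produces
 *
 *	inst->alg.cra_name        = "cbc(aes)"
 *	inst->alg.cra_driver_name = "cbc(aes-generic)"
 *
 * and returns -ENAMETOOLONG if either composed name would not fit in
 * CRYPTO_MAX_ALG_NAME bytes.
 */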
906 
907 void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen)
908 {
909 	INIT_LIST_HEAD(&queue->list);
910 	queue->backlog = &queue->list;
911 	queue->qlen = 0;
912 	queue->max_qlen = max_qlen;
913 }
914 EXPORT_SYMBOL_GPL(crypto_init_queue);
915 
916 int crypto_enqueue_request(struct crypto_queue *queue,
917 			   struct crypto_async_request *request)
918 {
919 	int err = -EINPROGRESS;
920 
921 	if (unlikely(queue->qlen >= queue->max_qlen)) {
922 		if (!(request->flags & CRYPTO_TFM_REQ_MAY_BACKLOG)) {
923 			err = -ENOSPC;
924 			goto out;
925 		}
926 		err = -EBUSY;
927 		if (queue->backlog == &queue->list)
928 			queue->backlog = &request->list;
929 	}
930 
931 	queue->qlen++;
932 	list_add_tail(&request->list, &queue->list);
933 
934 out:
935 	return err;
936 }
937 EXPORT_SYMBOL_GPL(crypto_enqueue_request);
938 
939 void crypto_enqueue_request_head(struct crypto_queue *queue,
940 				 struct crypto_async_request *request)
941 {
942 	queue->qlen++;
943 	list_add(&request->list, &queue->list);
944 }
945 EXPORT_SYMBOL_GPL(crypto_enqueue_request_head);
946 
947 struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue)
948 {
949 	struct list_head *request;
950 
951 	if (unlikely(!queue->qlen))
952 		return NULL;
953 
954 	queue->qlen--;
955 
956 	if (queue->backlog != &queue->list)
957 		queue->backlog = queue->backlog->next;
958 
959 	request = queue->list.next;
960 	list_del(request);
961 
962 	return list_entry(request, struct crypto_async_request, list);
963 }
964 EXPORT_SYMBOL_GPL(crypto_dequeue_request);
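/*
 * Usage sketch (illustrative only): drivers that serialize requests
 * through a crypto_queue typically pair the enqueue and dequeue helpers
 * as below.  dev, req and the required locking are hypothetical/elided.
 *
 *	crypto_init_queue(&dev->queue, 50);
 *
 *	// submission path:
 *	//   -EINPROGRESS: queued
 *	//   -EBUSY:       queued on the backlog (MAY_BACKLOG set)
 *	//   -ENOSPC:      queue full and MAY_BACKLOG not set
 *	err = crypto_enqueue_request(&dev->queue, &req->base);
 *
 *	// worker/completion path
 *	struct crypto_async_request *async_req =
 *		crypto_dequeue_request(&dev->queue);
 *	if (async_req)
 *		...process async_req...
 */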
965 
966 static inline void crypto_inc_byte(u8 *a, unsigned int size)
967 {
968 	u8 *b = (a + size);
969 	u8 c;
970 
971 	for (; size; size--) {
972 		c = *--b + 1;
973 		*b = c;
974 		if (c)
975 			break;
976 	}
977 }
978 
979 void crypto_inc(u8 *a, unsigned int size)
980 {
981 	__be32 *b = (__be32 *)(a + size);
982 	u32 c;
983 
984 	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) ||
985 	    IS_ALIGNED((unsigned long)b, __alignof__(*b)))
986 		for (; size >= 4; size -= 4) {
987 			c = be32_to_cpu(*--b) + 1;
988 			*b = cpu_to_be32(c);
989 			if (likely(c))
990 				return;
991 		}
992 
993 	crypto_inc_byte(a, size);
994 }
995 EXPORT_SYMBOL_GPL(crypto_inc);
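/*
 * Example (illustrative): crypto_inc() treats the buffer as a big-endian
 * counter, e.g. a CTR-mode IV.  With a 4-byte counter:
 *
 *	u8 ctr[4] = { 0x00, 0x00, 0x00, 0xff };
 *
 *	crypto_inc(ctr, sizeof(ctr));	// ctr is now 00 00 01 00
 *
 * The 32-bit fast path above is only used when the buffer is suitably
 * aligned or the architecture handles unaligned accesses efficiently;
 * otherwise crypto_inc_byte() performs byte-wise carries.
 */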
996 
997 void __crypto_xor(u8 *dst, const u8 *src1, const u8 *src2, unsigned int len)
998 {
999 	int relalign = 0;
1000 
1001 	if (!IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)) {
1002 		int size = sizeof(unsigned long);
1003 		int d = (((unsigned long)dst ^ (unsigned long)src1) |
1004 			 ((unsigned long)dst ^ (unsigned long)src2)) &
1005 			(size - 1);
1006 
1007 		relalign = d ? 1 << __ffs(d) : size;
1008 
1009 		/*
1010 		 * If we care about alignment, process as many bytes as
1011 		 * needed to advance dst and src to values whose alignments
1012 		 * equal their relative alignment. This will allow us to
1013 		 * process the remainder of the input using optimal strides.
1014 		 */
1015 		while (((unsigned long)dst & (relalign - 1)) && len > 0) {
1016 			*dst++ = *src1++ ^ *src2++;
1017 			len--;
1018 		}
1019 	}
1020 
1021 	while (IS_ENABLED(CONFIG_64BIT) && len >= 8 && !(relalign & 7)) {
1022 		*(u64 *)dst = *(u64 *)src1 ^  *(u64 *)src2;
1023 		dst += 8;
1024 		src1 += 8;
1025 		src2 += 8;
1026 		len -= 8;
1027 	}
1028 
1029 	while (len >= 4 && !(relalign & 3)) {
1030 		*(u32 *)dst = *(u32 *)src1 ^ *(u32 *)src2;
1031 		dst += 4;
1032 		src1 += 4;
1033 		src2 += 4;
1034 		len -= 4;
1035 	}
1036 
1037 	while (len >= 2 && !(relalign & 1)) {
1038 		*(u16 *)dst = *(u16 *)src1 ^ *(u16 *)src2;
1039 		dst += 2;
1040 		src1 += 2;
1041 		src2 += 2;
1042 		len -= 2;
1043 	}
1044 
1045 	while (len--)
1046 		*dst++ = *src1++ ^ *src2++;
1047 }
1048 EXPORT_SYMBOL_GPL(__crypto_xor);
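/*
 * Note: callers normally use the crypto_xor()/crypto_xor_cpy() inline
 * helpers from <crypto/algapi.h>, which fall back to __crypto_xor() when
 * the buffers are not known to be suitably aligned at compile time.  A
 * minimal illustrative call:
 *
 *	u8 out[16], a[16], b[16];
 *
 *	__crypto_xor(out, a, b, sizeof(out));	// out[i] = a[i] ^ b[i]
 */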
1049 
1050 unsigned int crypto_alg_extsize(struct crypto_alg *alg)
1051 {
1052 	return alg->cra_ctxsize +
1053 	       (alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1));
1054 }
1055 EXPORT_SYMBOL_GPL(crypto_alg_extsize);
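/*
 * Worked example (illustrative, assuming an 8-byte
 * crypto_tfm_ctx_alignment()): for cra_ctxsize = 96 and
 * cra_alignmask = 15, the extra size is 96 + (15 & ~7) = 104 bytes,
 * leaving enough slack to realign the context up to the algorithm's
 * 16-byte requirement at runtime.
 */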
1056 
1057 int crypto_type_has_alg(const char *name, const struct crypto_type *frontend,
1058 			u32 type, u32 mask)
1059 {
1060 	int ret = 0;
1061 	struct crypto_alg *alg = crypto_find_alg(name, frontend, type, mask);
1062 
1063 	if (!IS_ERR(alg)) {
1064 		crypto_mod_put(alg);
1065 		ret = 1;
1066 	}
1067 
1068 	return ret;
1069 }
1070 EXPORT_SYMBOL_GPL(crypto_type_has_alg);
1071 
1072 #ifdef CONFIG_CRYPTO_STATS
1073 void crypto_stats_init(struct crypto_alg *alg)
1074 {
1075 	memset(&alg->stats, 0, sizeof(alg->stats));
1076 }
1077 EXPORT_SYMBOL_GPL(crypto_stats_init);
1078 
1079 void crypto_stats_get(struct crypto_alg *alg)
1080 {
1081 	crypto_alg_get(alg);
1082 }
1083 EXPORT_SYMBOL_GPL(crypto_stats_get);
1084 
1085 void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg,
1086 			       int ret)
1087 {
1088 	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1089 		atomic64_inc(&alg->stats.aead.err_cnt);
1090 	} else {
1091 		atomic64_inc(&alg->stats.aead.encrypt_cnt);
1092 		atomic64_add(cryptlen, &alg->stats.aead.encrypt_tlen);
1093 	}
1094 	crypto_alg_put(alg);
1095 }
1096 EXPORT_SYMBOL_GPL(crypto_stats_aead_encrypt);
1097 
1098 void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg,
1099 			       int ret)
1100 {
1101 	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1102 		atomic64_inc(&alg->stats.aead.err_cnt);
1103 	} else {
1104 		atomic64_inc(&alg->stats.aead.decrypt_cnt);
1105 		atomic64_add(cryptlen, &alg->stats.aead.decrypt_tlen);
1106 	}
1107 	crypto_alg_put(alg);
1108 }
1109 EXPORT_SYMBOL_GPL(crypto_stats_aead_decrypt);
1110 
1111 void crypto_stats_akcipher_encrypt(unsigned int src_len, int ret,
1112 				   struct crypto_alg *alg)
1113 {
1114 	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1115 		atomic64_inc(&alg->stats.akcipher.err_cnt);
1116 	} else {
1117 		atomic64_inc(&alg->stats.akcipher.encrypt_cnt);
1118 		atomic64_add(src_len, &alg->stats.akcipher.encrypt_tlen);
1119 	}
1120 	crypto_alg_put(alg);
1121 }
1122 EXPORT_SYMBOL_GPL(crypto_stats_akcipher_encrypt);
1123 
1124 void crypto_stats_akcipher_decrypt(unsigned int src_len, int ret,
1125 				   struct crypto_alg *alg)
1126 {
1127 	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1128 		atomic64_inc(&alg->stats.akcipher.err_cnt);
1129 	} else {
1130 		atomic64_inc(&alg->stats.akcipher.decrypt_cnt);
1131 		atomic64_add(src_len, &alg->stats.akcipher.decrypt_tlen);
1132 	}
1133 	crypto_alg_put(alg);
1134 }
1135 EXPORT_SYMBOL_GPL(crypto_stats_akcipher_decrypt);
1136 
1137 void crypto_stats_akcipher_sign(int ret, struct crypto_alg *alg)
1138 {
1139 	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
1140 		atomic64_inc(&alg->stats.akcipher.err_cnt);
1141 	else
1142 		atomic64_inc(&alg->stats.akcipher.sign_cnt);
1143 	crypto_alg_put(alg);
1144 }
1145 EXPORT_SYMBOL_GPL(crypto_stats_akcipher_sign);
1146 
1147 void crypto_stats_akcipher_verify(int ret, struct crypto_alg *alg)
1148 {
1149 	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
1150 		atomic64_inc(&alg->stats.akcipher.err_cnt);
1151 	else
1152 		atomic64_inc(&alg->stats.akcipher.verify_cnt);
1153 	crypto_alg_put(alg);
1154 }
1155 EXPORT_SYMBOL_GPL(crypto_stats_akcipher_verify);
1156 
1157 void crypto_stats_compress(unsigned int slen, int ret, struct crypto_alg *alg)
1158 {
1159 	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1160 		atomic64_inc(&alg->stats.compress.err_cnt);
1161 	} else {
1162 		atomic64_inc(&alg->stats.compress.compress_cnt);
1163 		atomic64_add(slen, &alg->stats.compress.compress_tlen);
1164 	}
1165 	crypto_alg_put(alg);
1166 }
1167 EXPORT_SYMBOL_GPL(crypto_stats_compress);
1168 
1169 void crypto_stats_decompress(unsigned int slen, int ret, struct crypto_alg *alg)
1170 {
1171 	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1172 		atomic64_inc(&alg->stats.compress.err_cnt);
1173 	} else {
1174 		atomic64_inc(&alg->stats.compress.decompress_cnt);
1175 		atomic64_add(slen, &alg->stats.compress.decompress_tlen);
1176 	}
1177 	crypto_alg_put(alg);
1178 }
1179 EXPORT_SYMBOL_GPL(crypto_stats_decompress);
1180 
1181 void crypto_stats_ahash_update(unsigned int nbytes, int ret,
1182 			       struct crypto_alg *alg)
1183 {
1184 	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
1185 		atomic64_inc(&alg->stats.hash.err_cnt);
1186 	else
1187 		atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
1188 	crypto_alg_put(alg);
1189 }
1190 EXPORT_SYMBOL_GPL(crypto_stats_ahash_update);
1191 
1192 void crypto_stats_ahash_final(unsigned int nbytes, int ret,
1193 			      struct crypto_alg *alg)
1194 {
1195 	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1196 		atomic64_inc(&alg->stats.hash.err_cnt);
1197 	} else {
1198 		atomic64_inc(&alg->stats.hash.hash_cnt);
1199 		atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
1200 	}
1201 	crypto_alg_put(alg);
1202 }
1203 EXPORT_SYMBOL_GPL(crypto_stats_ahash_final);
1204 
1205 void crypto_stats_kpp_set_secret(struct crypto_alg *alg, int ret)
1206 {
1207 	if (ret)
1208 		atomic64_inc(&alg->stats.kpp.err_cnt);
1209 	else
1210 		atomic64_inc(&alg->stats.kpp.setsecret_cnt);
1211 	crypto_alg_put(alg);
1212 }
1213 EXPORT_SYMBOL_GPL(crypto_stats_kpp_set_secret);
1214 
1215 void crypto_stats_kpp_generate_public_key(struct crypto_alg *alg, int ret)
1216 {
1217 	if (ret)
1218 		atomic64_inc(&alg->stats.kpp.err_cnt);
1219 	else
1220 		atomic64_inc(&alg->stats.kpp.generate_public_key_cnt);
1221 	crypto_alg_put(alg);
1222 }
1223 EXPORT_SYMBOL_GPL(crypto_stats_kpp_generate_public_key);
1224 
1225 void crypto_stats_kpp_compute_shared_secret(struct crypto_alg *alg, int ret)
1226 {
1227 	if (ret)
1228 		atomic64_inc(&alg->stats.kpp.err_cnt);
1229 	else
1230 		atomic64_inc(&alg->stats.kpp.compute_shared_secret_cnt);
1231 	crypto_alg_put(alg);
1232 }
1233 EXPORT_SYMBOL_GPL(crypto_stats_kpp_compute_shared_secret);
1234 
1235 void crypto_stats_rng_seed(struct crypto_alg *alg, int ret)
1236 {
1237 	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
1238 		atomic64_inc(&alg->stats.rng.err_cnt);
1239 	else
1240 		atomic64_inc(&alg->stats.rng.seed_cnt);
1241 	crypto_alg_put(alg);
1242 }
1243 EXPORT_SYMBOL_GPL(crypto_stats_rng_seed);
1244 
1245 void crypto_stats_rng_generate(struct crypto_alg *alg, unsigned int dlen,
1246 			       int ret)
1247 {
1248 	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1249 		atomic64_inc(&alg->stats.rng.err_cnt);
1250 	} else {
1251 		atomic64_inc(&alg->stats.rng.generate_cnt);
1252 		atomic64_add(dlen, &alg->stats.rng.generate_tlen);
1253 	}
1254 	crypto_alg_put(alg);
1255 }
1256 EXPORT_SYMBOL_GPL(crypto_stats_rng_generate);
1257 
1258 void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret,
1259 				   struct crypto_alg *alg)
1260 {
1261 	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1262 		atomic64_inc(&alg->stats.cipher.err_cnt);
1263 	} else {
1264 		atomic64_inc(&alg->stats.cipher.encrypt_cnt);
1265 		atomic64_add(cryptlen, &alg->stats.cipher.encrypt_tlen);
1266 	}
1267 	crypto_alg_put(alg);
1268 }
1269 EXPORT_SYMBOL_GPL(crypto_stats_skcipher_encrypt);
1270 
1271 void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret,
1272 				   struct crypto_alg *alg)
1273 {
1274 	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1275 		atomic64_inc(&alg->stats.cipher.err_cnt);
1276 	} else {
1277 		atomic64_inc(&alg->stats.cipher.decrypt_cnt);
1278 		atomic64_add(cryptlen, &alg->stats.cipher.decrypt_tlen);
1279 	}
1280 	crypto_alg_put(alg);
1281 }
1282 EXPORT_SYMBOL_GPL(crypto_stats_skcipher_decrypt);
1283 #endif
1284 
1285 static int __init crypto_algapi_init(void)
1286 {
1287 	crypto_init_proc();
1288 	return 0;
1289 }
1290 
1291 static void __exit crypto_algapi_exit(void)
1292 {
1293 	crypto_exit_proc();
1294 }
1295 
1296 module_init(crypto_algapi_init);
1297 module_exit(crypto_algapi_exit);
1298 
1299 MODULE_LICENSE("GPL");
1300 MODULE_DESCRIPTION("Cryptographic algorithms API");
1301 MODULE_SOFTDEP("pre: cryptomgr");
1302