// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/algapi.h>
#include <linux/err.h>
#include <linux/errno.h>
#include <linux/fips.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/rtnetlink.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/workqueue.h>

#include "internal.h"

static LIST_HEAD(crypto_template_list);

static inline void crypto_check_module_sig(struct module *mod)
{
	if (fips_enabled && mod && !module_sig_ok(mod))
		panic("Module %s signature verification failed in FIPS mode\n",
		      module_name(mod));
}

static int crypto_check_alg(struct crypto_alg *alg)
{
	crypto_check_module_sig(alg->cra_module);

	if (!alg->cra_name[0] || !alg->cra_driver_name[0])
		return -EINVAL;

	if (alg->cra_alignmask & (alg->cra_alignmask + 1))
		return -EINVAL;

	/* General maximums for all algs. */
	if (alg->cra_alignmask > MAX_ALGAPI_ALIGNMASK)
		return -EINVAL;

	if (alg->cra_blocksize > MAX_ALGAPI_BLOCKSIZE)
		return -EINVAL;

	/* Lower maximums for specific alg types. */
	if (!alg->cra_type && (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
			       CRYPTO_ALG_TYPE_CIPHER) {
		if (alg->cra_alignmask > MAX_CIPHER_ALIGNMASK)
			return -EINVAL;

		if (alg->cra_blocksize > MAX_CIPHER_BLOCKSIZE)
			return -EINVAL;
	}

	if (alg->cra_priority < 0)
		return -EINVAL;

	refcount_set(&alg->cra_refcnt, 1);

	return 0;
}
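
/*
 * Note on the cra_alignmask test in crypto_check_alg() above: a valid
 * alignmask must be one less than a power of two, so that
 * "mask & (mask + 1)" is zero exactly for well-formed masks.  Two
 * illustrative values:
 *
 *	0x07 & 0x08 == 0	(valid: 2^3 - 1, i.e. 8-byte alignment)
 *	0x06 & 0x07 == 0x06	(invalid: not of the form 2^n - 1)
 */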

static void crypto_free_instance(struct crypto_instance *inst)
{
	inst->alg.cra_type->free(inst);
}

static void crypto_destroy_instance_workfn(struct work_struct *w)
{
	struct crypto_instance *inst = container_of(w, struct crypto_instance,
						    free_work);
	struct crypto_template *tmpl = inst->tmpl;

	crypto_free_instance(inst);
	crypto_tmpl_put(tmpl);
}

static void crypto_destroy_instance(struct crypto_alg *alg)
{
	struct crypto_instance *inst = container_of(alg,
						    struct crypto_instance,
						    alg);

	INIT_WORK(&inst->free_work, crypto_destroy_instance_workfn);
	schedule_work(&inst->free_work);
}

/*
 * This function adds a spawn to the list secondary_spawns which
 * will be used at the end of crypto_remove_spawns to unregister
 * instances, unless the spawn happens to be one that is depended
 * on by the new algorithm (nalg in crypto_remove_spawns).
 *
 * This function is also responsible for resurrecting any algorithms
 * in the dependency chain of nalg by unsetting n->dead.
 */
static struct list_head *crypto_more_spawns(struct crypto_alg *alg,
					    struct list_head *stack,
					    struct list_head *top,
					    struct list_head *secondary_spawns)
{
	struct crypto_spawn *spawn, *n;

	spawn = list_first_entry_or_null(stack, struct crypto_spawn, list);
	if (!spawn)
		return NULL;

	n = list_prev_entry(spawn, list);
	list_move(&spawn->list, secondary_spawns);

	if (list_is_last(&n->list, stack))
		return top;

	n = list_next_entry(n, list);
	if (!spawn->dead)
		n->dead = false;

	return &n->inst->alg.cra_users;
}

static void crypto_remove_instance(struct crypto_instance *inst,
				   struct list_head *list)
{
	struct crypto_template *tmpl = inst->tmpl;

	if (crypto_is_dead(&inst->alg))
		return;

	inst->alg.cra_flags |= CRYPTO_ALG_DEAD;

	if (!tmpl || !crypto_tmpl_get(tmpl))
		return;

	list_move(&inst->alg.cra_list, list);
	hlist_del(&inst->list);
	inst->alg.cra_destroy = crypto_destroy_instance;

	BUG_ON(!list_empty(&inst->alg.cra_users));
}

/*
 * Given an algorithm alg, remove all algorithms that depend on it
 * through spawns.  If nalg is not null, then exempt any algorithms
 * that are depended on by nalg.  This is useful when nalg itself
 * depends on alg.
 */
void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list,
			  struct crypto_alg *nalg)
{
	u32 new_type = (nalg ?: alg)->cra_flags;
	struct crypto_spawn *spawn, *n;
	LIST_HEAD(secondary_spawns);
	struct list_head *spawns;
	LIST_HEAD(stack);
	LIST_HEAD(top);

	spawns = &alg->cra_users;
	list_for_each_entry_safe(spawn, n, spawns, list) {
		if ((spawn->alg->cra_flags ^ new_type) & spawn->mask)
			continue;

		list_move(&spawn->list, &top);
	}

	/*
	 * Perform a depth-first walk starting from alg through
	 * the cra_users tree.  The list stack records the path
	 * from alg to the current spawn.
	 */
	spawns = &top;
	do {
		while (!list_empty(spawns)) {
			struct crypto_instance *inst;

			spawn = list_first_entry(spawns, struct crypto_spawn,
						 list);
			inst = spawn->inst;

			list_move(&spawn->list, &stack);
			spawn->dead = !spawn->registered || &inst->alg != nalg;

			if (!spawn->registered)
				break;

			BUG_ON(&inst->alg == alg);

			if (&inst->alg == nalg)
				break;

			spawns = &inst->alg.cra_users;

			/*
			 * Even if spawn->registered is true, the
			 * instance itself may still be unregistered.
			 * This is because it may have failed during
			 * registration.  Therefore we still need to
			 * make the following test.
			 *
			 * We may encounter an unregistered instance here, since
			 * an instance's spawns are set up prior to the instance
			 * being registered.  An unregistered instance will have
			 * NULL ->cra_users.next, since ->cra_users isn't
			 * properly initialized until registration.  But an
			 * unregistered instance cannot have any users, so treat
			 * it the same as ->cra_users being empty.
			 */
			if (spawns->next == NULL)
				break;
		}
	} while ((spawns = crypto_more_spawns(alg, &stack, &top,
					      &secondary_spawns)));

	/*
	 * Remove all instances that are marked as dead.  Also
	 * complete the resurrection of the others by moving them
	 * back to the cra_users list.
	 */
	list_for_each_entry_safe(spawn, n, &secondary_spawns, list) {
		if (!spawn->dead)
			list_move(&spawn->list, &spawn->alg->cra_users);
		else if (spawn->registered)
			crypto_remove_instance(spawn->inst, list);
	}
}
EXPORT_SYMBOL_GPL(crypto_remove_spawns);
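
/*
 * Illustrative example (not from the original source): unregistering
 * aes-generic while a "cbc(aes)" instance holds a spawn on it walks
 * alg->cra_users depth-first, marks that instance's spawns dead, and
 * then visits anything built on top of cbc(aes) in turn, e.g. an
 * "authenc(hmac(sha256),cbc(aes))" instance, so the entire dependent
 * subtree is torn down together.
 */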

static struct crypto_larval *__crypto_register_alg(struct crypto_alg *alg)
{
	struct crypto_alg *q;
	struct crypto_larval *larval;
	int ret = -EAGAIN;

	if (crypto_is_dead(alg))
		goto err;

	INIT_LIST_HEAD(&alg->cra_users);

	/* No cheating! */
	alg->cra_flags &= ~CRYPTO_ALG_TESTED;

	ret = -EEXIST;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			goto err;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			if (!strcmp(alg->cra_driver_name, q->cra_driver_name))
				goto err;
			continue;
		}

		if (!strcmp(q->cra_driver_name, alg->cra_name) ||
		    !strcmp(q->cra_name, alg->cra_driver_name))
			goto err;
	}

	larval = crypto_larval_alloc(alg->cra_name,
				     alg->cra_flags | CRYPTO_ALG_TESTED, 0);
	if (IS_ERR(larval))
		goto out;

	ret = -ENOENT;
	larval->adult = crypto_mod_get(alg);
	if (!larval->adult)
		goto free_larval;

	refcount_set(&larval->alg.cra_refcnt, 1);
	memcpy(larval->alg.cra_driver_name, alg->cra_driver_name,
	       CRYPTO_MAX_ALG_NAME);
	larval->alg.cra_priority = alg->cra_priority;

	list_add(&alg->cra_list, &crypto_alg_list);
	list_add(&larval->alg.cra_list, &crypto_alg_list);

	crypto_stats_init(alg);

out:
	return larval;

free_larval:
	kfree(larval);
err:
	larval = ERR_PTR(ret);
	goto out;
}

void crypto_alg_tested(const char *name, int err)
{
	struct crypto_larval *test;
	struct crypto_alg *alg;
	struct crypto_alg *q;
	LIST_HEAD(list);
	bool best;

	down_write(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (crypto_is_moribund(q) || !crypto_is_larval(q))
			continue;

		test = (struct crypto_larval *)q;

		if (!strcmp(q->cra_driver_name, name))
			goto found;
	}

	pr_err("alg: Unexpected test result for %s: %d\n", name, err);
	goto unlock;

found:
	q->cra_flags |= CRYPTO_ALG_DEAD;
	alg = test->adult;
	if (err || list_empty(&alg->cra_list))
		goto complete;

	alg->cra_flags |= CRYPTO_ALG_TESTED;

	/* Only satisfy larval waiters if we are the best. */
	best = true;
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (crypto_is_moribund(q) || !crypto_is_larval(q))
			continue;

		if (strcmp(alg->cra_name, q->cra_name))
			continue;

		if (q->cra_priority > alg->cra_priority) {
			best = false;
			break;
		}
	}

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			continue;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			struct crypto_larval *larval = (void *)q;

			/*
			 * Check to see if either our generic name or
			 * specific name can satisfy the name requested
			 * by the larval entry q.
			 */
			if (strcmp(alg->cra_name, q->cra_name) &&
			    strcmp(alg->cra_driver_name, q->cra_name))
				continue;

			if (larval->adult)
				continue;
			if ((q->cra_flags ^ alg->cra_flags) & larval->mask)
				continue;

			if (best && crypto_mod_get(alg))
				larval->adult = alg;
			else
				larval->adult = ERR_PTR(-EAGAIN);

			continue;
		}

		if (strcmp(alg->cra_name, q->cra_name))
			continue;

		if (strcmp(alg->cra_driver_name, q->cra_driver_name) &&
		    q->cra_priority > alg->cra_priority)
			continue;

		crypto_remove_spawns(q, &list, alg);
	}

complete:
	complete_all(&test->completion);

unlock:
	up_write(&crypto_alg_sem);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_alg_tested);

void crypto_remove_final(struct list_head *list)
{
	struct crypto_alg *alg;
	struct crypto_alg *n;

	list_for_each_entry_safe(alg, n, list, cra_list) {
		list_del_init(&alg->cra_list);
		crypto_alg_put(alg);
	}
}
EXPORT_SYMBOL_GPL(crypto_remove_final);

static void crypto_wait_for_test(struct crypto_larval *larval)
{
	int err;

	err = crypto_probing_notify(CRYPTO_MSG_ALG_REGISTER, larval->adult);
	if (err != NOTIFY_STOP) {
		if (WARN_ON(err != NOTIFY_DONE))
			goto out;
		crypto_alg_tested(larval->alg.cra_driver_name, 0);
	}

	err = wait_for_completion_killable(&larval->completion);
	WARN_ON(err);
	if (!err)
		crypto_notify(CRYPTO_MSG_ALG_LOADED, larval);

out:
	crypto_larval_kill(&larval->alg);
}

int crypto_register_alg(struct crypto_alg *alg)
{
	struct crypto_larval *larval;
	int err;

	alg->cra_flags &= ~CRYPTO_ALG_DEAD;
	err = crypto_check_alg(alg);
	if (err)
		return err;

	down_write(&crypto_alg_sem);
	larval = __crypto_register_alg(alg);
	up_write(&crypto_alg_sem);

	if (IS_ERR(larval))
		return PTR_ERR(larval);

	crypto_wait_for_test(larval);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_register_alg);
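
/*
 * Registration flow, in brief: crypto_register_alg() links the new
 * algorithm onto crypto_alg_list together with a larval placeholder
 * carrying the same driver name, lets the cryptomgr notifier chain
 * schedule the self-tests, and blocks in crypto_wait_for_test() until
 * crypto_alg_tested() marks the algorithm CRYPTO_ALG_TESTED and
 * completes the larval.
 */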

static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list)
{
	if (unlikely(list_empty(&alg->cra_list)))
		return -ENOENT;

	alg->cra_flags |= CRYPTO_ALG_DEAD;

	list_del_init(&alg->cra_list);
	crypto_remove_spawns(alg, list, NULL);

	return 0;
}

void crypto_unregister_alg(struct crypto_alg *alg)
{
	int ret;
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);
	ret = crypto_remove_alg(alg, &list);
	up_write(&crypto_alg_sem);

	if (WARN(ret, "Algorithm %s is not registered", alg->cra_driver_name))
		return;

	if (WARN_ON(refcount_read(&alg->cra_refcnt) != 1))
		return;

	if (alg->cra_destroy)
		alg->cra_destroy(alg);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_unregister_alg);

int crypto_register_algs(struct crypto_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_alg(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_alg(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_algs);
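
/*
 * Typical usage (illustrative sketch; my_algs and the function names
 * are hypothetical, not part of this file):
 *
 *	static struct crypto_alg my_algs[] = { ... };
 *
 *	static int __init my_driver_init(void)
 *	{
 *		return crypto_register_algs(my_algs, ARRAY_SIZE(my_algs));
 *	}
 *
 *	static void __exit my_driver_exit(void)
 *	{
 *		crypto_unregister_algs(my_algs, ARRAY_SIZE(my_algs));
 *	}
 */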

void crypto_unregister_algs(struct crypto_alg *algs, int count)
{
	int i;

	for (i = 0; i < count; i++)
		crypto_unregister_alg(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_algs);

int crypto_register_template(struct crypto_template *tmpl)
{
	struct crypto_template *q;
	int err = -EEXIST;

	down_write(&crypto_alg_sem);

	crypto_check_module_sig(tmpl->module);

	list_for_each_entry(q, &crypto_template_list, list) {
		if (q == tmpl)
			goto out;
	}

	list_add(&tmpl->list, &crypto_template_list);
	err = 0;
out:
	up_write(&crypto_alg_sem);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_template);

int crypto_register_templates(struct crypto_template *tmpls, int count)
{
	int i, err;

	for (i = 0; i < count; i++) {
		err = crypto_register_template(&tmpls[i]);
		if (err)
			goto out;
	}
	return 0;

out:
	for (--i; i >= 0; --i)
		crypto_unregister_template(&tmpls[i]);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_templates);

void crypto_unregister_template(struct crypto_template *tmpl)
{
	struct crypto_instance *inst;
	struct hlist_node *n;
	struct hlist_head *list;
	LIST_HEAD(users);

	down_write(&crypto_alg_sem);

	BUG_ON(list_empty(&tmpl->list));
	list_del_init(&tmpl->list);

	list = &tmpl->instances;
	hlist_for_each_entry(inst, list, list) {
		int err = crypto_remove_alg(&inst->alg, &users);

		BUG_ON(err);
	}

	up_write(&crypto_alg_sem);

	hlist_for_each_entry_safe(inst, n, list, list) {
		BUG_ON(refcount_read(&inst->alg.cra_refcnt) != 1);
		crypto_free_instance(inst);
	}
	crypto_remove_final(&users);
}
EXPORT_SYMBOL_GPL(crypto_unregister_template);

void crypto_unregister_templates(struct crypto_template *tmpls, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_template(&tmpls[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_templates);

static struct crypto_template *__crypto_lookup_template(const char *name)
{
	struct crypto_template *q, *tmpl = NULL;

	down_read(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_template_list, list) {
		if (strcmp(q->name, name))
			continue;
		if (unlikely(!crypto_tmpl_get(q)))
			continue;

		tmpl = q;
		break;
	}
	up_read(&crypto_alg_sem);

	return tmpl;
}

struct crypto_template *crypto_lookup_template(const char *name)
{
	return try_then_request_module(__crypto_lookup_template(name),
				       "crypto-%s", name);
}
EXPORT_SYMBOL_GPL(crypto_lookup_template);
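
/*
 * Note: the "crypto-%s" autoload above relies on template modules
 * declaring a matching alias; a template named "cbc" would typically
 * carry MODULE_ALIAS_CRYPTO("cbc") so that looking up a not-yet-loaded
 * template triggers request_module("crypto-cbc").
 */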

int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst)
{
	struct crypto_larval *larval;
	struct crypto_spawn *spawn;
	int err;

	err = crypto_check_alg(&inst->alg);
	if (err)
		return err;

	inst->alg.cra_module = tmpl->module;
	inst->alg.cra_flags |= CRYPTO_ALG_INSTANCE;

	down_write(&crypto_alg_sem);

	larval = ERR_PTR(-EAGAIN);
	for (spawn = inst->spawns; spawn;) {
		struct crypto_spawn *next;

		if (spawn->dead)
			goto unlock;

		next = spawn->next;
		spawn->inst = inst;
		spawn->registered = true;

		crypto_mod_put(spawn->alg);

		spawn = next;
	}

	larval = __crypto_register_alg(&inst->alg);
	if (IS_ERR(larval))
		goto unlock;

	hlist_add_head(&inst->list, &tmpl->instances);
	inst->tmpl = tmpl;

unlock:
	up_write(&crypto_alg_sem);

	err = PTR_ERR(larval);
	if (IS_ERR(larval))
		goto err;

	crypto_wait_for_test(larval);
	err = 0;

err:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_instance);

void crypto_unregister_instance(struct crypto_instance *inst)
{
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);

	crypto_remove_spawns(&inst->alg, &list, NULL);
	crypto_remove_instance(inst, &list);

	up_write(&crypto_alg_sem);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_unregister_instance);

int crypto_grab_spawn(struct crypto_spawn *spawn, struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	int err = -EAGAIN;

	if (WARN_ON_ONCE(inst == NULL))
		return -EINVAL;

	/* Allow the result of crypto_attr_alg_name() to be passed directly */
	if (IS_ERR(name))
		return PTR_ERR(name);

	alg = crypto_find_alg(name, spawn->frontend, type, mask);
	if (IS_ERR(alg))
		return PTR_ERR(alg);

	down_write(&crypto_alg_sem);
	if (!crypto_is_moribund(alg)) {
		list_add(&spawn->list, &alg->cra_users);
		spawn->alg = alg;
		spawn->mask = mask;
		spawn->next = inst->spawns;
		inst->spawns = spawn;
		inst->alg.cra_flags |=
			(alg->cra_flags & CRYPTO_ALG_INHERITED_FLAGS);
		err = 0;
	}
	up_write(&crypto_alg_sem);
	if (err)
		crypto_mod_put(alg);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_grab_spawn);
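
/*
 * Spawn lifecycle, in brief: crypto_grab_spawn() takes a module
 * reference on the underlying algorithm and links the spawn onto both
 * the algorithm's cra_users list and the instance's spawns list.  The
 * reference is dropped in crypto_register_instance() once the spawn is
 * marked registered, or by crypto_drop_spawn() below if the instance
 * is torn down before it was ever registered.
 */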

void crypto_drop_spawn(struct crypto_spawn *spawn)
{
	if (!spawn->alg) /* not yet initialized? */
		return;

	down_write(&crypto_alg_sem);
	if (!spawn->dead)
		list_del(&spawn->list);
	up_write(&crypto_alg_sem);

	if (!spawn->registered)
		crypto_mod_put(spawn->alg);
}
EXPORT_SYMBOL_GPL(crypto_drop_spawn);

static struct crypto_alg *crypto_spawn_alg(struct crypto_spawn *spawn)
{
	struct crypto_alg *alg = ERR_PTR(-EAGAIN);
	struct crypto_alg *target;
	bool shoot = false;

	down_read(&crypto_alg_sem);
	if (!spawn->dead) {
		alg = spawn->alg;
		if (!crypto_mod_get(alg)) {
			target = crypto_alg_get(alg);
			shoot = true;
			alg = ERR_PTR(-EAGAIN);
		}
	}
	up_read(&crypto_alg_sem);

	if (shoot) {
		crypto_shoot_alg(target);
		crypto_alg_put(target);
	}

	return alg;
}

struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_tfm *tfm;

	alg = crypto_spawn_alg(spawn);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	tfm = ERR_PTR(-EINVAL);
	if (unlikely((alg->cra_flags ^ type) & mask))
		goto out_put_alg;

	tfm = __crypto_alloc_tfm(alg, type, mask);
	if (IS_ERR(tfm))
		goto out_put_alg;

	return tfm;

out_put_alg:
	crypto_mod_put(alg);
	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm);

void *crypto_spawn_tfm2(struct crypto_spawn *spawn)
{
	struct crypto_alg *alg;
	struct crypto_tfm *tfm;

	alg = crypto_spawn_alg(spawn);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	tfm = crypto_create_tfm(alg, spawn->frontend);
	if (IS_ERR(tfm))
		goto out_put_alg;

	return tfm;

out_put_alg:
	crypto_mod_put(alg);
	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm2);

int crypto_register_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_register(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_register_notifier);

int crypto_unregister_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_unregister(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_unregister_notifier);

struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb)
{
	struct rtattr *rta = tb[0];
	struct crypto_attr_type *algt;

	if (!rta)
		return ERR_PTR(-ENOENT);
	if (RTA_PAYLOAD(rta) < sizeof(*algt))
		return ERR_PTR(-EINVAL);
	if (rta->rta_type != CRYPTOA_TYPE)
		return ERR_PTR(-EINVAL);

	algt = RTA_DATA(rta);

	return algt;
}
EXPORT_SYMBOL_GPL(crypto_get_attr_type);

/**
 * crypto_check_attr_type() - check algorithm type and compute inherited mask
 * @tb: the template parameters
 * @type: the algorithm type the template would be instantiated as
 * @mask_ret: (output) the mask that should be passed to crypto_grab_*()
 *	      to restrict the flags of any inner algorithms
 *
 * Validate that the algorithm type the user requested is compatible with the
 * one the template would actually be instantiated as.  E.g., if the user is
 * doing crypto_alloc_shash("cbc(aes)", ...), this would return an error because
 * the "cbc" template creates an "skcipher" algorithm, not an "shash" algorithm.
 *
 * Also compute the mask to use to restrict the flags of any inner algorithms.
 *
 * Return: 0 on success; -errno on failure
 */
int crypto_check_attr_type(struct rtattr **tb, u32 type, u32 *mask_ret)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ type) & algt->mask)
		return -EINVAL;

	*mask_ret = crypto_algt_inherited_mask(algt);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_check_attr_type);

const char *crypto_attr_alg_name(struct rtattr *rta)
{
	struct crypto_attr_alg *alga;

	if (!rta)
		return ERR_PTR(-ENOENT);
	if (RTA_PAYLOAD(rta) < sizeof(*alga))
		return ERR_PTR(-EINVAL);
	if (rta->rta_type != CRYPTOA_ALG)
		return ERR_PTR(-EINVAL);

	alga = RTA_DATA(rta);
	alga->name[CRYPTO_MAX_ALG_NAME - 1] = 0;

	return alga->name;
}
EXPORT_SYMBOL_GPL(crypto_attr_alg_name);

int crypto_attr_u32(struct rtattr *rta, u32 *num)
{
	struct crypto_attr_u32 *nu32;

	if (!rta)
		return -ENOENT;
	if (RTA_PAYLOAD(rta) < sizeof(*nu32))
		return -EINVAL;
	if (rta->rta_type != CRYPTOA_U32)
		return -EINVAL;

	nu32 = RTA_DATA(rta);
	*num = nu32->num;

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_attr_u32);

int crypto_inst_setname(struct crypto_instance *inst, const char *name,
			struct crypto_alg *alg)
{
	if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name,
		     alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s(%s)",
		     name, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_inst_setname);
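
/*
 * Example (illustrative): instantiating the "cbc" template around the
 * aes-generic driver, crypto_inst_setname() yields cra_name
 * "cbc(aes)" and cra_driver_name "cbc(aes-generic)".
 */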

void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen)
{
	INIT_LIST_HEAD(&queue->list);
	queue->backlog = &queue->list;
	queue->qlen = 0;
	queue->max_qlen = max_qlen;
}
EXPORT_SYMBOL_GPL(crypto_init_queue);

int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request)
{
	int err = -EINPROGRESS;

	if (unlikely(queue->qlen >= queue->max_qlen)) {
		if (!(request->flags & CRYPTO_TFM_REQ_MAY_BACKLOG)) {
			err = -ENOSPC;
			goto out;
		}
		err = -EBUSY;
		if (queue->backlog == &queue->list)
			queue->backlog = &request->list;
	}

	queue->qlen++;
	list_add_tail(&request->list, &queue->list);

out:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_enqueue_request);
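
/*
 * Return-value contract, as implemented above: -EINPROGRESS means the
 * request was queued normally; -EBUSY means it was accepted only onto
 * the backlog, so the caller should throttle submissions; -ENOSPC
 * means the queue is full and the request was rejected because it did
 * not set CRYPTO_TFM_REQ_MAY_BACKLOG.
 */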

void crypto_enqueue_request_head(struct crypto_queue *queue,
				 struct crypto_async_request *request)
{
	queue->qlen++;
	list_add(&request->list, &queue->list);
}
EXPORT_SYMBOL_GPL(crypto_enqueue_request_head);

struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue)
{
	struct list_head *request;

	if (unlikely(!queue->qlen))
		return NULL;

	queue->qlen--;

	if (queue->backlog != &queue->list)
		queue->backlog = queue->backlog->next;

	request = queue->list.next;
	list_del(request);

	return list_entry(request, struct crypto_async_request, list);
}
EXPORT_SYMBOL_GPL(crypto_dequeue_request);
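
/*
 * Typical consumer loop (illustrative sketch; my_lock, my_queue and
 * my_process_request() are hypothetical): a driver drains the queue
 * under its own lock and signals backlogged submitters once their
 * request is accepted for processing.
 *
 *	struct crypto_async_request *req, *backlog;
 *
 *	spin_lock_bh(&my_lock);
 *	backlog = crypto_get_backlog(&my_queue);
 *	req = crypto_dequeue_request(&my_queue);
 *	spin_unlock_bh(&my_lock);
 *
 *	if (backlog)
 *		backlog->complete(backlog, -EINPROGRESS);
 *	if (req)
 *		my_process_request(req);
 */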

static inline void crypto_inc_byte(u8 *a, unsigned int size)
{
	u8 *b = (a + size);
	u8 c;

	for (; size; size--) {
		c = *--b + 1;
		*b = c;
		if (c)
			break;
	}
}

void crypto_inc(u8 *a, unsigned int size)
{
	__be32 *b = (__be32 *)(a + size);
	u32 c;

	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) ||
	    IS_ALIGNED((unsigned long)b, __alignof__(*b)))
		for (; size >= 4; size -= 4) {
			c = be32_to_cpu(*--b) + 1;
			*b = cpu_to_be32(c);
			if (likely(c))
				return;
		}

	crypto_inc_byte(a, size);
}
EXPORT_SYMBOL_GPL(crypto_inc);
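
/*
 * crypto_inc() treats the buffer as a single big-endian integer and
 * adds one with carry propagation, which is how CTR-mode counter
 * blocks are stepped.  For example, incrementing the 4-byte counter
 * { 0x00, 0x00, 0x00, 0xff } yields { 0x00, 0x00, 0x01, 0x00 }.
 */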

void __crypto_xor(u8 *dst, const u8 *src1, const u8 *src2, unsigned int len)
{
	int relalign = 0;

	if (!IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)) {
		int size = sizeof(unsigned long);
		int d = (((unsigned long)dst ^ (unsigned long)src1) |
			 ((unsigned long)dst ^ (unsigned long)src2)) &
			(size - 1);

		relalign = d ? 1 << __ffs(d) : size;

		/*
		 * If we care about alignment, process as many bytes as
		 * needed to advance dst and src to values whose alignments
		 * equal their relative alignment. This will allow us to
		 * process the remainder of the input using optimal strides.
		 */
		while (((unsigned long)dst & (relalign - 1)) && len > 0) {
			*dst++ = *src1++ ^ *src2++;
			len--;
		}
	}

	while (IS_ENABLED(CONFIG_64BIT) && len >= 8 && !(relalign & 7)) {
		*(u64 *)dst = *(u64 *)src1 ^ *(u64 *)src2;
		dst += 8;
		src1 += 8;
		src2 += 8;
		len -= 8;
	}

	while (len >= 4 && !(relalign & 3)) {
		*(u32 *)dst = *(u32 *)src1 ^ *(u32 *)src2;
		dst += 4;
		src1 += 4;
		src2 += 4;
		len -= 4;
	}

	while (len >= 2 && !(relalign & 1)) {
		*(u16 *)dst = *(u16 *)src1 ^ *(u16 *)src2;
		dst += 2;
		src1 += 2;
		src2 += 2;
		len -= 2;
	}

	while (len--)
		*dst++ = *src1++ ^ *src2++;
}
EXPORT_SYMBOL_GPL(__crypto_xor);
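
/*
 * Worked example for the relalign computation above (illustrative
 * addresses): with dst ending in 0x4, src1 in 0x6 and src2 in 0x2 on
 * a 64-bit machine, d = ((0x4 ^ 0x6) | (0x4 ^ 0x2)) & 7 = 0x6, so
 * relalign = 1 << __ffs(0x6) = 2: the 8- and 4-byte loops are skipped
 * and the bulk of the data is XORed in 16-bit strides.
 */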

unsigned int crypto_alg_extsize(struct crypto_alg *alg)
{
	return alg->cra_ctxsize +
	       (alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1));
}
EXPORT_SYMBOL_GPL(crypto_alg_extsize);
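
/*
 * Example (illustrative numbers): with cra_ctxsize = 64,
 * cra_alignmask = 15 and an 8-byte crypto_tfm_ctx_alignment(), this
 * reserves 64 + (15 & ~7) = 72 bytes, i.e. enough slack to realign
 * the context area from 8-byte up to 16-byte alignment.
 */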

int crypto_type_has_alg(const char *name, const struct crypto_type *frontend,
			u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_find_alg(name, frontend, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_type_has_alg);

#ifdef CONFIG_CRYPTO_STATS
void crypto_stats_init(struct crypto_alg *alg)
{
	memset(&alg->stats, 0, sizeof(alg->stats));
}
EXPORT_SYMBOL_GPL(crypto_stats_init);

void crypto_stats_get(struct crypto_alg *alg)
{
	crypto_alg_get(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_get);

void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.aead.err_cnt);
	} else {
		atomic64_inc(&alg->stats.aead.encrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.aead.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_aead_encrypt);

void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.aead.err_cnt);
	} else {
		atomic64_inc(&alg->stats.aead.decrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.aead.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_aead_decrypt);

void crypto_stats_akcipher_encrypt(unsigned int src_len, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.akcipher.encrypt_cnt);
		atomic64_add(src_len, &alg->stats.akcipher.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_encrypt);

void crypto_stats_akcipher_decrypt(unsigned int src_len, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.akcipher.decrypt_cnt);
		atomic64_add(src_len, &alg->stats.akcipher.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_decrypt);

void crypto_stats_akcipher_sign(int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	else
		atomic64_inc(&alg->stats.akcipher.sign_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_sign);

void crypto_stats_akcipher_verify(int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	else
		atomic64_inc(&alg->stats.akcipher.verify_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_verify);

void crypto_stats_compress(unsigned int slen, int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.compress.err_cnt);
	} else {
		atomic64_inc(&alg->stats.compress.compress_cnt);
		atomic64_add(slen, &alg->stats.compress.compress_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_compress);

void crypto_stats_decompress(unsigned int slen, int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.compress.err_cnt);
	} else {
		atomic64_inc(&alg->stats.compress.decompress_cnt);
		atomic64_add(slen, &alg->stats.compress.decompress_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_decompress);

void crypto_stats_ahash_update(unsigned int nbytes, int ret,
			       struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.hash.err_cnt);
	else
		atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_ahash_update);

void crypto_stats_ahash_final(unsigned int nbytes, int ret,
			      struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.hash.err_cnt);
	} else {
		atomic64_inc(&alg->stats.hash.hash_cnt);
		atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_ahash_final);

void crypto_stats_kpp_set_secret(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.setsecret_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_set_secret);

void crypto_stats_kpp_generate_public_key(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.generate_public_key_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_generate_public_key);

void crypto_stats_kpp_compute_shared_secret(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.compute_shared_secret_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_compute_shared_secret);

void crypto_stats_rng_seed(struct crypto_alg *alg, int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.rng.err_cnt);
	else
		atomic64_inc(&alg->stats.rng.seed_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_rng_seed);

void crypto_stats_rng_generate(struct crypto_alg *alg, unsigned int dlen,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.rng.err_cnt);
	} else {
		atomic64_inc(&alg->stats.rng.generate_cnt);
		atomic64_add(dlen, &alg->stats.rng.generate_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_rng_generate);

void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.cipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.cipher.encrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.cipher.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_skcipher_encrypt);

void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.cipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.cipher.decrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.cipher.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_skcipher_decrypt);
#endif

static int __init crypto_algapi_init(void)
{
	crypto_init_proc();
	return 0;
}

static void __exit crypto_algapi_exit(void)
{
	crypto_exit_proc();
}

module_init(crypto_algapi_init);
module_exit(crypto_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Cryptographic algorithms API");
MODULE_SOFTDEP("pre: cryptomgr");