1 // SPDX-License-Identifier: GPL-2.0-or-later
2 /*
3 * Cryptographic API for algorithms (i.e., low-level API).
4 *
5 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
6 */
7
8 #include <crypto/algapi.h>
9 #include <linux/err.h>
10 #include <linux/errno.h>
11 #include <linux/fips.h>
12 #include <linux/init.h>
13 #include <linux/kernel.h>
14 #include <linux/list.h>
15 #include <linux/module.h>
16 #include <linux/rtnetlink.h>
17 #include <linux/slab.h>
18 #include <linux/string.h>
19
20 #include "internal.h"
21
22 static LIST_HEAD(crypto_template_list);
23
24 static inline void crypto_check_module_sig(struct module *mod)
25 {
26 if (fips_enabled && mod && !module_sig_ok(mod))
27 panic("Module %s signature verification failed in FIPS mode\n",
28 module_name(mod));
29 }
30
31 static int crypto_check_alg(struct crypto_alg *alg)
32 {
33 crypto_check_module_sig(alg->cra_module);
34
35 if (!alg->cra_name[0] || !alg->cra_driver_name[0])
36 return -EINVAL;
37
38 if (alg->cra_alignmask & (alg->cra_alignmask + 1))
39 return -EINVAL;
40
41 /* General maximums for all algs. */
42 if (alg->cra_alignmask > MAX_ALGAPI_ALIGNMASK)
43 return -EINVAL;
44
45 if (alg->cra_blocksize > MAX_ALGAPI_BLOCKSIZE)
46 return -EINVAL;
47
48 /* Lower maximums for specific alg types. */
49 if (!alg->cra_type && (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
50 CRYPTO_ALG_TYPE_CIPHER) {
51 if (alg->cra_alignmask > MAX_CIPHER_ALIGNMASK)
52 return -EINVAL;
53
54 if (alg->cra_blocksize > MAX_CIPHER_BLOCKSIZE)
55 return -EINVAL;
56 }
57
58 if (alg->cra_priority < 0)
59 return -EINVAL;
60
61 refcount_set(&alg->cra_refcnt, 1);
62
63 return 0;
64 }
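/*
 * Illustrative sketch (not part of this file; all names are hypothetical):
 * a minimal crypto_alg definition that satisfies the checks above -- both
 * names set, cra_alignmask one less than a power of two and within
 * MAX_ALGAPI_ALIGNMASK, cra_blocksize within MAX_ALGAPI_BLOCKSIZE, and a
 * non-negative priority.
 *
 *	static struct crypto_alg example_alg = {
 *		.cra_name		= "example",
 *		.cra_driver_name	= "example-generic",
 *		.cra_priority		= 100,
 *		.cra_blocksize		= 16,
 *		.cra_alignmask		= 3,
 *		.cra_ctxsize		= sizeof(struct example_ctx),
 *		.cra_module		= THIS_MODULE,
 *	};
 */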
65
66 static void crypto_free_instance(struct crypto_instance *inst)
67 {
68 inst->alg.cra_type->free(inst);
69 }
70
71 static void crypto_destroy_instance(struct crypto_alg *alg)
72 {
73 struct crypto_instance *inst = (void *)alg;
74 struct crypto_template *tmpl = inst->tmpl;
75
76 crypto_free_instance(inst);
77 crypto_tmpl_put(tmpl);
78 }
79
80 /*
81 * This function adds a spawn to the list secondary_spawns which
82 * will be used at the end of crypto_remove_spawns to unregister
83 * instances, unless the spawn happens to be one that is depended
84 * on by the new algorithm (nalg in crypto_remove_spawns).
85 *
86 * This function is also responsible for resurrecting any algorithms
87 * in the dependency chain of nalg by unsetting n->dead.
88 */
89 static struct list_head *crypto_more_spawns(struct crypto_alg *alg,
90 struct list_head *stack,
91 struct list_head *top,
92 struct list_head *secondary_spawns)
93 {
94 struct crypto_spawn *spawn, *n;
95
96 spawn = list_first_entry_or_null(stack, struct crypto_spawn, list);
97 if (!spawn)
98 return NULL;
99
100 n = list_prev_entry(spawn, list);
101 list_move(&spawn->list, secondary_spawns);
102
103 if (list_is_last(&n->list, stack))
104 return top;
105
106 n = list_next_entry(n, list);
107 if (!spawn->dead)
108 n->dead = false;
109
110 return &n->inst->alg.cra_users;
111 }
112
113 static void crypto_remove_instance(struct crypto_instance *inst,
114 struct list_head *list)
115 {
116 struct crypto_template *tmpl = inst->tmpl;
117
118 if (crypto_is_dead(&inst->alg))
119 return;
120
121 inst->alg.cra_flags |= CRYPTO_ALG_DEAD;
122
123 if (!tmpl || !crypto_tmpl_get(tmpl))
124 return;
125
126 list_move(&inst->alg.cra_list, list);
127 hlist_del(&inst->list);
128 inst->alg.cra_destroy = crypto_destroy_instance;
129
130 BUG_ON(!list_empty(&inst->alg.cra_users));
131 }
132
133 /*
134 * Given an algorithm alg, remove all algorithms that depend on it
135 * through spawns. If nalg is not null, then exempt any algorithms
136 * that are depended on by nalg. This is useful when nalg itself
137 * depends on alg.
138 */
139 void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list,
140 struct crypto_alg *nalg)
141 {
142 u32 new_type = (nalg ?: alg)->cra_flags;
143 struct crypto_spawn *spawn, *n;
144 LIST_HEAD(secondary_spawns);
145 struct list_head *spawns;
146 LIST_HEAD(stack);
147 LIST_HEAD(top);
148
149 spawns = &alg->cra_users;
150 list_for_each_entry_safe(spawn, n, spawns, list) {
151 if ((spawn->alg->cra_flags ^ new_type) & spawn->mask)
152 continue;
153
154 list_move(&spawn->list, &top);
155 }
156
157 /*
158 * Perform a depth-first walk starting from alg through
159 * the cra_users tree. The list stack records the path
160 * from alg to the current spawn.
161 */
162 spawns = &top;
163 do {
164 while (!list_empty(spawns)) {
165 struct crypto_instance *inst;
166
167 spawn = list_first_entry(spawns, struct crypto_spawn,
168 list);
169 inst = spawn->inst;
170
171 list_move(&spawn->list, &stack);
172 spawn->dead = !spawn->registered || &inst->alg != nalg;
173
174 if (!spawn->registered)
175 break;
176
177 BUG_ON(&inst->alg == alg);
178
179 if (&inst->alg == nalg)
180 break;
181
182 spawns = &inst->alg.cra_users;
183
184 /*
185 * Even if spawn->registered is true, the
186 * instance itself may still be unregistered.
187 * This is because it may have failed during
188 * registration. Therefore we still need to
189 * make the following test.
190 *
191 * We may encounter an unregistered instance here, since
192 * an instance's spawns are set up prior to the instance
193 * being registered. An unregistered instance will have
194 * NULL ->cra_users.next, since ->cra_users isn't
195 * properly initialized until registration. But an
196 * unregistered instance cannot have any users, so treat
197 * it the same as ->cra_users being empty.
198 */
199 if (spawns->next == NULL)
200 break;
201 }
202 } while ((spawns = crypto_more_spawns(alg, &stack, &top,
203 &secondary_spawns)));
204
205 /*
206 * Remove all instances that are marked as dead. Also
207 * complete the resurrection of the others by moving them
208 * back to the cra_users list.
209 */
210 list_for_each_entry_safe(spawn, n, &secondary_spawns, list) {
211 if (!spawn->dead)
212 list_move(&spawn->list, &spawn->alg->cra_users);
213 else if (spawn->registered)
214 crypto_remove_instance(spawn->inst, list);
215 }
216 }
217 EXPORT_SYMBOL_GPL(crypto_remove_spawns);
218
219 static void crypto_alg_finish_registration(struct crypto_alg *alg,
220 bool fulfill_requests,
221 struct list_head *algs_to_put)
222 {
223 struct crypto_alg *q;
224
225 list_for_each_entry(q, &crypto_alg_list, cra_list) {
226 if (q == alg)
227 continue;
228
229 if (crypto_is_moribund(q))
230 continue;
231
232 if (crypto_is_larval(q)) {
233 struct crypto_larval *larval = (void *)q;
234
235 /*
236 * Check to see if either our generic name or
237 * specific name can satisfy the name requested
238 * by the larval entry q.
239 */
240 if (strcmp(alg->cra_name, q->cra_name) &&
241 strcmp(alg->cra_driver_name, q->cra_name))
242 continue;
243
244 if (larval->adult)
245 continue;
246 if ((q->cra_flags ^ alg->cra_flags) & larval->mask)
247 continue;
248
249 if (fulfill_requests && crypto_mod_get(alg))
250 larval->adult = alg;
251 else
252 larval->adult = ERR_PTR(-EAGAIN);
253
254 continue;
255 }
256
257 if (strcmp(alg->cra_name, q->cra_name))
258 continue;
259
260 if (strcmp(alg->cra_driver_name, q->cra_driver_name) &&
261 q->cra_priority > alg->cra_priority)
262 continue;
263
264 crypto_remove_spawns(q, algs_to_put, alg);
265 }
266
267 crypto_notify(CRYPTO_MSG_ALG_LOADED, alg);
268 }
269
270 static struct crypto_larval *crypto_alloc_test_larval(struct crypto_alg *alg)
271 {
272 struct crypto_larval *larval;
273
274 if (!IS_ENABLED(CONFIG_CRYPTO_MANAGER) ||
275 IS_ENABLED(CONFIG_CRYPTO_MANAGER_DISABLE_TESTS) ||
276 (alg->cra_flags & CRYPTO_ALG_INTERNAL))
277 return NULL; /* No self-test needed */
278
279 larval = crypto_larval_alloc(alg->cra_name,
280 alg->cra_flags | CRYPTO_ALG_TESTED, 0);
281 if (IS_ERR(larval))
282 return larval;
283
284 larval->adult = crypto_mod_get(alg);
285 if (!larval->adult) {
286 kfree(larval);
287 return ERR_PTR(-ENOENT);
288 }
289
290 refcount_set(&larval->alg.cra_refcnt, 1);
291 memcpy(larval->alg.cra_driver_name, alg->cra_driver_name,
292 CRYPTO_MAX_ALG_NAME);
293 larval->alg.cra_priority = alg->cra_priority;
294
295 return larval;
296 }
297
298 static struct crypto_larval *
299 __crypto_register_alg(struct crypto_alg *alg, struct list_head *algs_to_put)
300 {
301 struct crypto_alg *q;
302 struct crypto_larval *larval;
303 int ret = -EAGAIN;
304
305 if (crypto_is_dead(alg))
306 goto err;
307
308 INIT_LIST_HEAD(&alg->cra_users);
309
310 ret = -EEXIST;
311
312 list_for_each_entry(q, &crypto_alg_list, cra_list) {
313 if (q == alg)
314 goto err;
315
316 if (crypto_is_moribund(q))
317 continue;
318
319 if (crypto_is_larval(q)) {
320 if (!strcmp(alg->cra_driver_name, q->cra_driver_name))
321 goto err;
322 continue;
323 }
324
325 if (!strcmp(q->cra_driver_name, alg->cra_name) ||
326 !strcmp(q->cra_name, alg->cra_driver_name))
327 goto err;
328 }
329
330 larval = crypto_alloc_test_larval(alg);
331 if (IS_ERR(larval))
332 goto out;
333
334 list_add(&alg->cra_list, &crypto_alg_list);
335
336 crypto_stats_init(alg);
337
338 if (larval) {
339 /* No cheating! */
340 alg->cra_flags &= ~CRYPTO_ALG_TESTED;
341
342 list_add(&larval->alg.cra_list, &crypto_alg_list);
343 } else {
344 alg->cra_flags |= CRYPTO_ALG_TESTED;
345 crypto_alg_finish_registration(alg, true, algs_to_put);
346 }
347
348 out:
349 return larval;
350
351 err:
352 larval = ERR_PTR(ret);
353 goto out;
354 }
355
356 void crypto_alg_tested(const char *name, int err)
357 {
358 struct crypto_larval *test;
359 struct crypto_alg *alg;
360 struct crypto_alg *q;
361 LIST_HEAD(list);
362 bool best;
363
364 down_write(&crypto_alg_sem);
365 list_for_each_entry(q, &crypto_alg_list, cra_list) {
366 if (crypto_is_moribund(q) || !crypto_is_larval(q))
367 continue;
368
369 test = (struct crypto_larval *)q;
370
371 if (!strcmp(q->cra_driver_name, name))
372 goto found;
373 }
374
375 pr_err("alg: Unexpected test result for %s: %d\n", name, err);
376 goto unlock;
377
378 found:
379 q->cra_flags |= CRYPTO_ALG_DEAD;
380 alg = test->adult;
381 if (err || list_empty(&alg->cra_list))
382 goto complete;
383
384 alg->cra_flags |= CRYPTO_ALG_TESTED;
385
386 /*
387 * If a higher-priority implementation of the same algorithm is
388 * currently being tested, then don't fulfill request larvals.
389 */
390 best = true;
391 list_for_each_entry(q, &crypto_alg_list, cra_list) {
392 if (crypto_is_moribund(q) || !crypto_is_larval(q))
393 continue;
394
395 if (strcmp(alg->cra_name, q->cra_name))
396 continue;
397
398 if (q->cra_priority > alg->cra_priority) {
399 best = false;
400 break;
401 }
402 }
403
404 crypto_alg_finish_registration(alg, best, &list);
405
406 complete:
407 complete_all(&test->completion);
408
409 unlock:
410 up_write(&crypto_alg_sem);
411
412 crypto_remove_final(&list);
413 }
414 EXPORT_SYMBOL_GPL(crypto_alg_tested);
415
416 void crypto_remove_final(struct list_head *list)
417 {
418 struct crypto_alg *alg;
419 struct crypto_alg *n;
420
421 list_for_each_entry_safe(alg, n, list, cra_list) {
422 list_del_init(&alg->cra_list);
423 crypto_alg_put(alg);
424 }
425 }
426 EXPORT_SYMBOL_GPL(crypto_remove_final);
427
428 int crypto_register_alg(struct crypto_alg *alg)
429 {
430 struct crypto_larval *larval;
431 LIST_HEAD(algs_to_put);
432 bool test_started = false;
433 int err;
434
435 alg->cra_flags &= ~CRYPTO_ALG_DEAD;
436 err = crypto_check_alg(alg);
437 if (err)
438 return err;
439
440 down_write(&crypto_alg_sem);
441 larval = __crypto_register_alg(alg, &algs_to_put);
442 if (!IS_ERR_OR_NULL(larval)) {
443 test_started = crypto_boot_test_finished();
444 larval->test_started = test_started;
445 }
446 up_write(&crypto_alg_sem);
447
448 if (IS_ERR(larval))
449 return PTR_ERR(larval);
450 if (test_started)
451 crypto_wait_for_test(larval);
452 crypto_remove_final(&algs_to_put);
453 return 0;
454 }
455 EXPORT_SYMBOL_GPL(crypto_register_alg);
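/*
 * Typical usage, sketched here for context (the exact pattern varies by
 * driver; the names below are hypothetical): an algorithm is registered
 * from a module init function and unregistered on exit.
 * crypto_register_alg() returns 0 on success, or a negative errno such as
 * -EEXIST when a conflicting name is already registered.
 *
 *	static int __init example_mod_init(void)
 *	{
 *		return crypto_register_alg(&example_alg);
 *	}
 *
 *	static void __exit example_mod_exit(void)
 *	{
 *		crypto_unregister_alg(&example_alg);
 *	}
 */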
456
457 static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list)
458 {
459 if (unlikely(list_empty(&alg->cra_list)))
460 return -ENOENT;
461
462 alg->cra_flags |= CRYPTO_ALG_DEAD;
463
464 list_del_init(&alg->cra_list);
465 crypto_remove_spawns(alg, list, NULL);
466
467 return 0;
468 }
469
470 void crypto_unregister_alg(struct crypto_alg *alg)
471 {
472 int ret;
473 LIST_HEAD(list);
474
475 down_write(&crypto_alg_sem);
476 ret = crypto_remove_alg(alg, &list);
477 up_write(&crypto_alg_sem);
478
479 if (WARN(ret, "Algorithm %s is not registered", alg->cra_driver_name))
480 return;
481
482 if (WARN_ON(refcount_read(&alg->cra_refcnt) != 1))
483 return;
484
485 if (alg->cra_destroy)
486 alg->cra_destroy(alg);
487
488 crypto_remove_final(&list);
489 }
490 EXPORT_SYMBOL_GPL(crypto_unregister_alg);
491
492 int crypto_register_algs(struct crypto_alg *algs, int count)
493 {
494 int i, ret;
495
496 for (i = 0; i < count; i++) {
497 ret = crypto_register_alg(&algs[i]);
498 if (ret)
499 goto err;
500 }
501
502 return 0;
503
504 err:
505 for (--i; i >= 0; --i)
506 crypto_unregister_alg(&algs[i]);
507
508 return ret;
509 }
510 EXPORT_SYMBOL_GPL(crypto_register_algs);
511
512 void crypto_unregister_algs(struct crypto_alg *algs, int count)
513 {
514 int i;
515
516 for (i = 0; i < count; i++)
517 crypto_unregister_alg(&algs[i]);
518 }
519 EXPORT_SYMBOL_GPL(crypto_unregister_algs);
520
521 int crypto_register_template(struct crypto_template *tmpl)
522 {
523 struct crypto_template *q;
524 int err = -EEXIST;
525
526 down_write(&crypto_alg_sem);
527
528 crypto_check_module_sig(tmpl->module);
529
530 list_for_each_entry(q, &crypto_template_list, list) {
531 if (q == tmpl)
532 goto out;
533 }
534
535 list_add(&tmpl->list, &crypto_template_list);
536 err = 0;
537 out:
538 up_write(&crypto_alg_sem);
539 return err;
540 }
541 EXPORT_SYMBOL_GPL(crypto_register_template);
542
543 int crypto_register_templates(struct crypto_template *tmpls, int count)
544 {
545 int i, err;
546
547 for (i = 0; i < count; i++) {
548 err = crypto_register_template(&tmpls[i]);
549 if (err)
550 goto out;
551 }
552 return 0;
553
554 out:
555 for (--i; i >= 0; --i)
556 crypto_unregister_template(&tmpls[i]);
557 return err;
558 }
559 EXPORT_SYMBOL_GPL(crypto_register_templates);
560
561 void crypto_unregister_template(struct crypto_template *tmpl)
562 {
563 struct crypto_instance *inst;
564 struct hlist_node *n;
565 struct hlist_head *list;
566 LIST_HEAD(users);
567
568 down_write(&crypto_alg_sem);
569
570 BUG_ON(list_empty(&tmpl->list));
571 list_del_init(&tmpl->list);
572
573 list = &tmpl->instances;
574 hlist_for_each_entry(inst, list, list) {
575 int err = crypto_remove_alg(&inst->alg, &users);
576
577 BUG_ON(err);
578 }
579
580 up_write(&crypto_alg_sem);
581
582 hlist_for_each_entry_safe(inst, n, list, list) {
583 BUG_ON(refcount_read(&inst->alg.cra_refcnt) != 1);
584 crypto_free_instance(inst);
585 }
586 crypto_remove_final(&users);
587 }
588 EXPORT_SYMBOL_GPL(crypto_unregister_template);
589
590 void crypto_unregister_templates(struct crypto_template *tmpls, int count)
591 {
592 int i;
593
594 for (i = count - 1; i >= 0; --i)
595 crypto_unregister_template(&tmpls[i]);
596 }
597 EXPORT_SYMBOL_GPL(crypto_unregister_templates);
598
599 static struct crypto_template *__crypto_lookup_template(const char *name)
600 {
601 struct crypto_template *q, *tmpl = NULL;
602
603 down_read(&crypto_alg_sem);
604 list_for_each_entry(q, &crypto_template_list, list) {
605 if (strcmp(q->name, name))
606 continue;
607 if (unlikely(!crypto_tmpl_get(q)))
608 continue;
609
610 tmpl = q;
611 break;
612 }
613 up_read(&crypto_alg_sem);
614
615 return tmpl;
616 }
617
618 struct crypto_template *crypto_lookup_template(const char *name)
619 {
620 return try_then_request_module(__crypto_lookup_template(name),
621 "crypto-%s", name);
622 }
623 EXPORT_SYMBOL_GPL(crypto_lookup_template);
624
625 int crypto_register_instance(struct crypto_template *tmpl,
626 struct crypto_instance *inst)
627 {
628 struct crypto_larval *larval;
629 struct crypto_spawn *spawn;
630 LIST_HEAD(algs_to_put);
631 int err;
632
633 err = crypto_check_alg(&inst->alg);
634 if (err)
635 return err;
636
637 inst->alg.cra_module = tmpl->module;
638 inst->alg.cra_flags |= CRYPTO_ALG_INSTANCE;
639
640 down_write(&crypto_alg_sem);
641
642 larval = ERR_PTR(-EAGAIN);
643 for (spawn = inst->spawns; spawn;) {
644 struct crypto_spawn *next;
645
646 if (spawn->dead)
647 goto unlock;
648
649 next = spawn->next;
650 spawn->inst = inst;
651 spawn->registered = true;
652
653 crypto_mod_put(spawn->alg);
654
655 spawn = next;
656 }
657
658 larval = __crypto_register_alg(&inst->alg, &algs_to_put);
659 if (IS_ERR(larval))
660 goto unlock;
661 else if (larval)
662 larval->test_started = true;
663
664 hlist_add_head(&inst->list, &tmpl->instances);
665 inst->tmpl = tmpl;
666
667 unlock:
668 up_write(&crypto_alg_sem);
669
670 if (IS_ERR(larval))
671 return PTR_ERR(larval);
672 if (larval)
673 crypto_wait_for_test(larval);
674 crypto_remove_final(&algs_to_put);
675 return 0;
676 }
677 EXPORT_SYMBOL_GPL(crypto_register_instance);
678
679 void crypto_unregister_instance(struct crypto_instance *inst)
680 {
681 LIST_HEAD(list);
682
683 down_write(&crypto_alg_sem);
684
685 crypto_remove_spawns(&inst->alg, &list, NULL);
686 crypto_remove_instance(inst, &list);
687
688 up_write(&crypto_alg_sem);
689
690 crypto_remove_final(&list);
691 }
692 EXPORT_SYMBOL_GPL(crypto_unregister_instance);
693
694 int crypto_grab_spawn(struct crypto_spawn *spawn, struct crypto_instance *inst,
695 const char *name, u32 type, u32 mask)
696 {
697 struct crypto_alg *alg;
698 int err = -EAGAIN;
699
700 if (WARN_ON_ONCE(inst == NULL))
701 return -EINVAL;
702
703 /* Allow the result of crypto_attr_alg_name() to be passed directly */
704 if (IS_ERR(name))
705 return PTR_ERR(name);
706
707 alg = crypto_find_alg(name, spawn->frontend, type, mask);
708 if (IS_ERR(alg))
709 return PTR_ERR(alg);
710
711 down_write(&crypto_alg_sem);
712 if (!crypto_is_moribund(alg)) {
713 list_add(&spawn->list, &alg->cra_users);
714 spawn->alg = alg;
715 spawn->mask = mask;
716 spawn->next = inst->spawns;
717 inst->spawns = spawn;
718 inst->alg.cra_flags |=
719 (alg->cra_flags & CRYPTO_ALG_INHERITED_FLAGS);
720 err = 0;
721 }
722 up_write(&crypto_alg_sem);
723 if (err)
724 crypto_mod_put(alg);
725 return err;
726 }
727 EXPORT_SYMBOL_GPL(crypto_grab_spawn);
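/*
 * Sketch of how a template's ->create() callback might use this together
 * with crypto_attr_alg_name(), whose result may be passed in directly as
 * noted above (the ctx/spawn layout and error label here are hypothetical):
 *
 *	err = crypto_grab_spawn(&ctx->spawn, inst,
 *				crypto_attr_alg_name(tb[1]), type, mask);
 *	if (err)
 *		goto err_free_inst;
 */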
728
729 void crypto_drop_spawn(struct crypto_spawn *spawn)
730 {
731 if (!spawn->alg) /* not yet initialized? */
732 return;
733
734 down_write(&crypto_alg_sem);
735 if (!spawn->dead)
736 list_del(&spawn->list);
737 up_write(&crypto_alg_sem);
738
739 if (!spawn->registered)
740 crypto_mod_put(spawn->alg);
741 }
742 EXPORT_SYMBOL_GPL(crypto_drop_spawn);
743
744 static struct crypto_alg *crypto_spawn_alg(struct crypto_spawn *spawn)
745 {
746 struct crypto_alg *alg = ERR_PTR(-EAGAIN);
747 struct crypto_alg *target;
748 bool shoot = false;
749
750 down_read(&crypto_alg_sem);
751 if (!spawn->dead) {
752 alg = spawn->alg;
753 if (!crypto_mod_get(alg)) {
754 target = crypto_alg_get(alg);
755 shoot = true;
756 alg = ERR_PTR(-EAGAIN);
757 }
758 }
759 up_read(&crypto_alg_sem);
760
761 if (shoot) {
762 crypto_shoot_alg(target);
763 crypto_alg_put(target);
764 }
765
766 return alg;
767 }
768
769 struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
770 u32 mask)
771 {
772 struct crypto_alg *alg;
773 struct crypto_tfm *tfm;
774
775 alg = crypto_spawn_alg(spawn);
776 if (IS_ERR(alg))
777 return ERR_CAST(alg);
778
779 tfm = ERR_PTR(-EINVAL);
780 if (unlikely((alg->cra_flags ^ type) & mask))
781 goto out_put_alg;
782
783 tfm = __crypto_alloc_tfm(alg, type, mask);
784 if (IS_ERR(tfm))
785 goto out_put_alg;
786
787 return tfm;
788
789 out_put_alg:
790 crypto_mod_put(alg);
791 return tfm;
792 }
793 EXPORT_SYMBOL_GPL(crypto_spawn_tfm);
794
795 void *crypto_spawn_tfm2(struct crypto_spawn *spawn)
796 {
797 struct crypto_alg *alg;
798 struct crypto_tfm *tfm;
799
800 alg = crypto_spawn_alg(spawn);
801 if (IS_ERR(alg))
802 return ERR_CAST(alg);
803
804 tfm = crypto_create_tfm(alg, spawn->frontend);
805 if (IS_ERR(tfm))
806 goto out_put_alg;
807
808 return tfm;
809
810 out_put_alg:
811 crypto_mod_put(alg);
812 return tfm;
813 }
814 EXPORT_SYMBOL_GPL(crypto_spawn_tfm2);
815
816 int crypto_register_notifier(struct notifier_block *nb)
817 {
818 return blocking_notifier_chain_register(&crypto_chain, nb);
819 }
820 EXPORT_SYMBOL_GPL(crypto_register_notifier);
821
822 int crypto_unregister_notifier(struct notifier_block *nb)
823 {
824 return blocking_notifier_chain_unregister(&crypto_chain, nb);
825 }
826 EXPORT_SYMBOL_GPL(crypto_unregister_notifier);
827
828 struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb)
829 {
830 struct rtattr *rta = tb[0];
831 struct crypto_attr_type *algt;
832
833 if (!rta)
834 return ERR_PTR(-ENOENT);
835 if (RTA_PAYLOAD(rta) < sizeof(*algt))
836 return ERR_PTR(-EINVAL);
837 if (rta->rta_type != CRYPTOA_TYPE)
838 return ERR_PTR(-EINVAL);
839
840 algt = RTA_DATA(rta);
841
842 return algt;
843 }
844 EXPORT_SYMBOL_GPL(crypto_get_attr_type);
845
846 /**
847 * crypto_check_attr_type() - check algorithm type and compute inherited mask
848 * @tb: the template parameters
849 * @type: the algorithm type the template would be instantiated as
850 * @mask_ret: (output) the mask that should be passed to crypto_grab_*()
851 * to restrict the flags of any inner algorithms
852 *
853 * Validate that the algorithm type the user requested is compatible with the
854 * one the template would actually be instantiated as. E.g., if the user is
855 * doing crypto_alloc_shash("cbc(aes)", ...), this would return an error because
856 * the "cbc" template creates an "skcipher" algorithm, not an "shash" algorithm.
857 *
858 * Also compute the mask to use to restrict the flags of any inner algorithms.
859 *
860 * Return: 0 on success; -errno on failure
861 */
862 int crypto_check_attr_type(struct rtattr **tb, u32 type, u32 *mask_ret)
863 {
864 struct crypto_attr_type *algt;
865
866 algt = crypto_get_attr_type(tb);
867 if (IS_ERR(algt))
868 return PTR_ERR(algt);
869
870 if ((algt->type ^ type) & algt->mask)
871 return -EINVAL;
872
873 *mask_ret = crypto_algt_inherited_mask(algt);
874 return 0;
875 }
876 EXPORT_SYMBOL_GPL(crypto_check_attr_type);
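/*
 * For instance, a template that instantiates skcipher algorithms would
 * typically start its ->create() callback roughly like this (sketch only):
 *
 *	u32 mask;
 *	int err;
 *
 *	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER, &mask);
 *	if (err)
 *		return err;
 *
 * The returned mask is then passed on to crypto_grab_*() when binding the
 * inner algorithm(s).
 */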
877
878 const char *crypto_attr_alg_name(struct rtattr *rta)
879 {
880 struct crypto_attr_alg *alga;
881
882 if (!rta)
883 return ERR_PTR(-ENOENT);
884 if (RTA_PAYLOAD(rta) < sizeof(*alga))
885 return ERR_PTR(-EINVAL);
886 if (rta->rta_type != CRYPTOA_ALG)
887 return ERR_PTR(-EINVAL);
888
889 alga = RTA_DATA(rta);
890 alga->name[CRYPTO_MAX_ALG_NAME - 1] = 0;
891
892 return alga->name;
893 }
894 EXPORT_SYMBOL_GPL(crypto_attr_alg_name);
895
896 int crypto_attr_u32(struct rtattr *rta, u32 *num)
897 {
898 struct crypto_attr_u32 *nu32;
899
900 if (!rta)
901 return -ENOENT;
902 if (RTA_PAYLOAD(rta) < sizeof(*nu32))
903 return -EINVAL;
904 if (rta->rta_type != CRYPTOA_U32)
905 return -EINVAL;
906
907 nu32 = RTA_DATA(rta);
908 *num = nu32->num;
909
910 return 0;
911 }
912 EXPORT_SYMBOL_GPL(crypto_attr_u32);
913
914 int crypto_inst_setname(struct crypto_instance *inst, const char *name,
915 struct crypto_alg *alg)
916 {
917 if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name,
918 alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
919 return -ENAMETOOLONG;
920
921 if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s(%s)",
922 name, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
923 return -ENAMETOOLONG;
924
925 return 0;
926 }
927 EXPORT_SYMBOL_GPL(crypto_inst_setname);
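/*
 * Example of the names this produces: with name "hmac" and an underlying
 * algorithm whose cra_name is "sha256" and cra_driver_name is
 * "sha256-generic", the instance ends up with cra_name "hmac(sha256)" and
 * cra_driver_name "hmac(sha256-generic)".  -ENAMETOOLONG is returned if
 * either composed name would exceed CRYPTO_MAX_ALG_NAME.
 */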
928
929 void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen)
930 {
931 INIT_LIST_HEAD(&queue->list);
932 queue->backlog = &queue->list;
933 queue->qlen = 0;
934 queue->max_qlen = max_qlen;
935 }
936 EXPORT_SYMBOL_GPL(crypto_init_queue);
937
938 int crypto_enqueue_request(struct crypto_queue *queue,
939 struct crypto_async_request *request)
940 {
941 int err = -EINPROGRESS;
942
943 if (unlikely(queue->qlen >= queue->max_qlen)) {
944 if (!(request->flags & CRYPTO_TFM_REQ_MAY_BACKLOG)) {
945 err = -ENOSPC;
946 goto out;
947 }
948 err = -EBUSY;
949 if (queue->backlog == &queue->list)
950 queue->backlog = &request->list;
951 }
952
953 queue->qlen++;
954 list_add_tail(&request->list, &queue->list);
955
956 out:
957 return err;
958 }
959 EXPORT_SYMBOL_GPL(crypto_enqueue_request);
960
961 void crypto_enqueue_request_head(struct crypto_queue *queue,
962 struct crypto_async_request *request)
963 {
964 queue->qlen++;
965 list_add(&request->list, &queue->list);
966 }
967 EXPORT_SYMBOL_GPL(crypto_enqueue_request_head);
968
969 struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue)
970 {
971 struct list_head *request;
972
973 if (unlikely(!queue->qlen))
974 return NULL;
975
976 queue->qlen--;
977
978 if (queue->backlog != &queue->list)
979 queue->backlog = queue->backlog->next;
980
981 request = queue->list.next;
982 list_del(request);
983
984 return list_entry(request, struct crypto_async_request, list);
985 }
986 EXPORT_SYMBOL_GPL(crypto_dequeue_request);
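/*
 * A driver-side sketch of how these queue helpers are commonly combined
 * (the "priv"/"req" names, locking choice and hardware submission are
 * hypothetical and omitted): requests are enqueued with
 * crypto_enqueue_request(), which returns -EINPROGRESS or -EBUSY to the
 * caller, and the processing context later pulls work off the queue:
 *
 *	spin_lock_bh(&priv->lock);
 *	err = crypto_enqueue_request(&priv->queue, &req->base);
 *	backlog = crypto_get_backlog(&priv->queue);
 *	async_req = crypto_dequeue_request(&priv->queue);
 *	spin_unlock_bh(&priv->lock);
 *
 *	if (backlog)
 *		backlog->complete(backlog, -EINPROGRESS);
 */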
987
988 static inline void crypto_inc_byte(u8 *a, unsigned int size)
989 {
990 u8 *b = (a + size);
991 u8 c;
992
993 for (; size; size--) {
994 c = *--b + 1;
995 *b = c;
996 if (c)
997 break;
998 }
999 }
1000
1001 void crypto_inc(u8 *a, unsigned int size)
1002 {
1003 __be32 *b = (__be32 *)(a + size);
1004 u32 c;
1005
1006 if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) ||
1007 IS_ALIGNED((unsigned long)b, __alignof__(*b)))
1008 for (; size >= 4; size -= 4) {
1009 c = be32_to_cpu(*--b) + 1;
1010 *b = cpu_to_be32(c);
1011 if (likely(c))
1012 return;
1013 }
1014
1015 crypto_inc_byte(a, size);
1016 }
1017 EXPORT_SYMBOL_GPL(crypto_inc);
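/*
 * crypto_inc() treats the buffer as one big-endian integer, so for example
 * an 8-byte counter 00 00 00 00 00 00 00 ff increments to
 * 00 00 00 00 00 00 01 00: a carry out of the lowest 32-bit word continues
 * into the next higher word, and buffers that are unaligned (without
 * efficient unaligned access) or not a multiple of 4 bytes fall back to
 * the byte-by-byte crypto_inc_byte() path.
 */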
1018
1019 void __crypto_xor(u8 *dst, const u8 *src1, const u8 *src2, unsigned int len)
1020 {
1021 int relalign = 0;
1022
1023 if (!IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)) {
1024 int size = sizeof(unsigned long);
1025 int d = (((unsigned long)dst ^ (unsigned long)src1) |
1026 ((unsigned long)dst ^ (unsigned long)src2)) &
1027 (size - 1);
1028
1029 relalign = d ? 1 << __ffs(d) : size;
1030
1031 /*
1032 * If we care about alignment, process as many bytes as
1033 * needed to advance dst and src to values whose alignments
1034 * equal their relative alignment. This will allow us to
1035 * process the remainder of the input using optimal strides.
1036 */
1037 while (((unsigned long)dst & (relalign - 1)) && len > 0) {
1038 *dst++ = *src1++ ^ *src2++;
1039 len--;
1040 }
1041 }
1042
1043 while (IS_ENABLED(CONFIG_64BIT) && len >= 8 && !(relalign & 7)) {
1044 *(u64 *)dst = *(u64 *)src1 ^ *(u64 *)src2;
1045 dst += 8;
1046 src1 += 8;
1047 src2 += 8;
1048 len -= 8;
1049 }
1050
1051 while (len >= 4 && !(relalign & 3)) {
1052 *(u32 *)dst = *(u32 *)src1 ^ *(u32 *)src2;
1053 dst += 4;
1054 src1 += 4;
1055 src2 += 4;
1056 len -= 4;
1057 }
1058
1059 while (len >= 2 && !(relalign & 1)) {
1060 *(u16 *)dst = *(u16 *)src1 ^ *(u16 *)src2;
1061 dst += 2;
1062 src1 += 2;
1063 src2 += 2;
1064 len -= 2;
1065 }
1066
1067 while (len--)
1068 *dst++ = *src1++ ^ *src2++;
1069 }
1070 EXPORT_SYMBOL_GPL(__crypto_xor);
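/*
 * For example, XOR-ing 13 bytes on a 64-bit kernel with efficient unaligned
 * access is done as one u64 word, one u32 word, and one trailing byte.  On
 * architectures without efficient unaligned access, mutually misaligned
 * buffers drop back to whatever stride their relative alignment allows.
 */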
1071
1072 unsigned int crypto_alg_extsize(struct crypto_alg *alg)
1073 {
1074 return alg->cra_ctxsize +
1075 (alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1));
1076 }
1077 EXPORT_SYMBOL_GPL(crypto_alg_extsize);
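/*
 * Worked example (assuming crypto_tfm_ctx_alignment() is 8 on the target
 * architecture): with cra_ctxsize = 64 and cra_alignmask = 15, the extra
 * size reserved is 64 + (15 & ~7) = 72 bytes, leaving room to align the
 * context up to the 16-byte boundary the algorithm asked for.
 */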
1078
1079 int crypto_type_has_alg(const char *name, const struct crypto_type *frontend,
1080 u32 type, u32 mask)
1081 {
1082 int ret = 0;
1083 struct crypto_alg *alg = crypto_find_alg(name, frontend, type, mask);
1084
1085 if (!IS_ERR(alg)) {
1086 crypto_mod_put(alg);
1087 ret = 1;
1088 }
1089
1090 return ret;
1091 }
1092 EXPORT_SYMBOL_GPL(crypto_type_has_alg);
1093
1094 #ifdef CONFIG_CRYPTO_STATS
1095 void crypto_stats_init(struct crypto_alg *alg)
1096 {
1097 memset(&alg->stats, 0, sizeof(alg->stats));
1098 }
1099 EXPORT_SYMBOL_GPL(crypto_stats_init);
1100
1101 void crypto_stats_get(struct crypto_alg *alg)
1102 {
1103 crypto_alg_get(alg);
1104 }
1105 EXPORT_SYMBOL_GPL(crypto_stats_get);
1106
1107 void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg,
1108 int ret)
1109 {
1110 if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1111 atomic64_inc(&alg->stats.aead.err_cnt);
1112 } else {
1113 atomic64_inc(&alg->stats.aead.encrypt_cnt);
1114 atomic64_add(cryptlen, &alg->stats.aead.encrypt_tlen);
1115 }
1116 crypto_alg_put(alg);
1117 }
1118 EXPORT_SYMBOL_GPL(crypto_stats_aead_encrypt);
1119
1120 void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg,
1121 int ret)
1122 {
1123 if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1124 atomic64_inc(&alg->stats.aead.err_cnt);
1125 } else {
1126 atomic64_inc(&alg->stats.aead.decrypt_cnt);
1127 atomic64_add(cryptlen, &alg->stats.aead.decrypt_tlen);
1128 }
1129 crypto_alg_put(alg);
1130 }
1131 EXPORT_SYMBOL_GPL(crypto_stats_aead_decrypt);
1132
1133 void crypto_stats_akcipher_encrypt(unsigned int src_len, int ret,
1134 struct crypto_alg *alg)
1135 {
1136 if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1137 atomic64_inc(&alg->stats.akcipher.err_cnt);
1138 } else {
1139 atomic64_inc(&alg->stats.akcipher.encrypt_cnt);
1140 atomic64_add(src_len, &alg->stats.akcipher.encrypt_tlen);
1141 }
1142 crypto_alg_put(alg);
1143 }
1144 EXPORT_SYMBOL_GPL(crypto_stats_akcipher_encrypt);
1145
1146 void crypto_stats_akcipher_decrypt(unsigned int src_len, int ret,
1147 struct crypto_alg *alg)
1148 {
1149 if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1150 atomic64_inc(&alg->stats.akcipher.err_cnt);
1151 } else {
1152 atomic64_inc(&alg->stats.akcipher.decrypt_cnt);
1153 atomic64_add(src_len, &alg->stats.akcipher.decrypt_tlen);
1154 }
1155 crypto_alg_put(alg);
1156 }
1157 EXPORT_SYMBOL_GPL(crypto_stats_akcipher_decrypt);
1158
1159 void crypto_stats_akcipher_sign(int ret, struct crypto_alg *alg)
1160 {
1161 if (ret && ret != -EINPROGRESS && ret != -EBUSY)
1162 atomic64_inc(&alg->stats.akcipher.err_cnt);
1163 else
1164 atomic64_inc(&alg->stats.akcipher.sign_cnt);
1165 crypto_alg_put(alg);
1166 }
1167 EXPORT_SYMBOL_GPL(crypto_stats_akcipher_sign);
1168
1169 void crypto_stats_akcipher_verify(int ret, struct crypto_alg *alg)
1170 {
1171 if (ret && ret != -EINPROGRESS && ret != -EBUSY)
1172 atomic64_inc(&alg->stats.akcipher.err_cnt);
1173 else
1174 atomic64_inc(&alg->stats.akcipher.verify_cnt);
1175 crypto_alg_put(alg);
1176 }
1177 EXPORT_SYMBOL_GPL(crypto_stats_akcipher_verify);
1178
1179 void crypto_stats_compress(unsigned int slen, int ret, struct crypto_alg *alg)
1180 {
1181 if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1182 atomic64_inc(&alg->stats.compress.err_cnt);
1183 } else {
1184 atomic64_inc(&alg->stats.compress.compress_cnt);
1185 atomic64_add(slen, &alg->stats.compress.compress_tlen);
1186 }
1187 crypto_alg_put(alg);
1188 }
1189 EXPORT_SYMBOL_GPL(crypto_stats_compress);
1190
1191 void crypto_stats_decompress(unsigned int slen, int ret, struct crypto_alg *alg)
1192 {
1193 if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1194 atomic64_inc(&alg->stats.compress.err_cnt);
1195 } else {
1196 atomic64_inc(&alg->stats.compress.decompress_cnt);
1197 atomic64_add(slen, &alg->stats.compress.decompress_tlen);
1198 }
1199 crypto_alg_put(alg);
1200 }
1201 EXPORT_SYMBOL_GPL(crypto_stats_decompress);
1202
1203 void crypto_stats_ahash_update(unsigned int nbytes, int ret,
1204 struct crypto_alg *alg)
1205 {
1206 if (ret && ret != -EINPROGRESS && ret != -EBUSY)
1207 atomic64_inc(&alg->stats.hash.err_cnt);
1208 else
1209 atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
1210 crypto_alg_put(alg);
1211 }
1212 EXPORT_SYMBOL_GPL(crypto_stats_ahash_update);
1213
1214 void crypto_stats_ahash_final(unsigned int nbytes, int ret,
1215 struct crypto_alg *alg)
1216 {
1217 if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1218 atomic64_inc(&alg->stats.hash.err_cnt);
1219 } else {
1220 atomic64_inc(&alg->stats.hash.hash_cnt);
1221 atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
1222 }
1223 crypto_alg_put(alg);
1224 }
1225 EXPORT_SYMBOL_GPL(crypto_stats_ahash_final);
1226
1227 void crypto_stats_kpp_set_secret(struct crypto_alg *alg, int ret)
1228 {
1229 if (ret)
1230 atomic64_inc(&alg->stats.kpp.err_cnt);
1231 else
1232 atomic64_inc(&alg->stats.kpp.setsecret_cnt);
1233 crypto_alg_put(alg);
1234 }
1235 EXPORT_SYMBOL_GPL(crypto_stats_kpp_set_secret);
1236
1237 void crypto_stats_kpp_generate_public_key(struct crypto_alg *alg, int ret)
1238 {
1239 if (ret)
1240 atomic64_inc(&alg->stats.kpp.err_cnt);
1241 else
1242 atomic64_inc(&alg->stats.kpp.generate_public_key_cnt);
1243 crypto_alg_put(alg);
1244 }
1245 EXPORT_SYMBOL_GPL(crypto_stats_kpp_generate_public_key);
1246
1247 void crypto_stats_kpp_compute_shared_secret(struct crypto_alg *alg, int ret)
1248 {
1249 if (ret)
1250 atomic64_inc(&alg->stats.kpp.err_cnt);
1251 else
1252 atomic64_inc(&alg->stats.kpp.compute_shared_secret_cnt);
1253 crypto_alg_put(alg);
1254 }
1255 EXPORT_SYMBOL_GPL(crypto_stats_kpp_compute_shared_secret);
1256
1257 void crypto_stats_rng_seed(struct crypto_alg *alg, int ret)
1258 {
1259 if (ret && ret != -EINPROGRESS && ret != -EBUSY)
1260 atomic64_inc(&alg->stats.rng.err_cnt);
1261 else
1262 atomic64_inc(&alg->stats.rng.seed_cnt);
1263 crypto_alg_put(alg);
1264 }
1265 EXPORT_SYMBOL_GPL(crypto_stats_rng_seed);
1266
1267 void crypto_stats_rng_generate(struct crypto_alg *alg, unsigned int dlen,
1268 int ret)
1269 {
1270 if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1271 atomic64_inc(&alg->stats.rng.err_cnt);
1272 } else {
1273 atomic64_inc(&alg->stats.rng.generate_cnt);
1274 atomic64_add(dlen, &alg->stats.rng.generate_tlen);
1275 }
1276 crypto_alg_put(alg);
1277 }
1278 EXPORT_SYMBOL_GPL(crypto_stats_rng_generate);
1279
1280 void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret,
1281 struct crypto_alg *alg)
1282 {
1283 if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1284 atomic64_inc(&alg->stats.cipher.err_cnt);
1285 } else {
1286 atomic64_inc(&alg->stats.cipher.encrypt_cnt);
1287 atomic64_add(cryptlen, &alg->stats.cipher.encrypt_tlen);
1288 }
1289 crypto_alg_put(alg);
1290 }
1291 EXPORT_SYMBOL_GPL(crypto_stats_skcipher_encrypt);
1292
1293 void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret,
1294 struct crypto_alg *alg)
1295 {
1296 if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1297 atomic64_inc(&alg->stats.cipher.err_cnt);
1298 } else {
1299 atomic64_inc(&alg->stats.cipher.decrypt_cnt);
1300 atomic64_add(cryptlen, &alg->stats.cipher.decrypt_tlen);
1301 }
1302 crypto_alg_put(alg);
1303 }
1304 EXPORT_SYMBOL_GPL(crypto_stats_skcipher_decrypt);
1305 #endif
1306
1307 static void __init crypto_start_tests(void)
1308 {
1309 if (IS_ENABLED(CONFIG_CRYPTO_MANAGER_DISABLE_TESTS))
1310 return;
1311
1312 for (;;) {
1313 struct crypto_larval *larval = NULL;
1314 struct crypto_alg *q;
1315
1316 down_write(&crypto_alg_sem);
1317
1318 list_for_each_entry(q, &crypto_alg_list, cra_list) {
1319 struct crypto_larval *l;
1320
1321 if (!crypto_is_larval(q))
1322 continue;
1323
1324 l = (void *)q;
1325
1326 if (!crypto_is_test_larval(l))
1327 continue;
1328
1329 if (l->test_started)
1330 continue;
1331
1332 l->test_started = true;
1333 larval = l;
1334 break;
1335 }
1336
1337 up_write(&crypto_alg_sem);
1338
1339 if (!larval)
1340 break;
1341
1342 crypto_wait_for_test(larval);
1343 }
1344
1345 set_crypto_boot_test_finished();
1346 }
1347
1348 static int __init crypto_algapi_init(void)
1349 {
1350 crypto_init_proc();
1351 crypto_start_tests();
1352 return 0;
1353 }
1354
1355 static void __exit crypto_algapi_exit(void)
1356 {
1357 crypto_exit_proc();
1358 }
1359
1360 /*
1361 * We run this at late_initcall so that all the built-in algorithms
1362 * have had a chance to register themselves first.
1363 */
1364 late_initcall(crypto_algapi_init);
1365 module_exit(crypto_algapi_exit);
1366
1367 MODULE_LICENSE("GPL");
1368 MODULE_DESCRIPTION("Cryptographic algorithms API");
1369 MODULE_SOFTDEP("pre: cryptomgr");
1370