1 // SPDX-License-Identifier: GPL-2.0-or-later
2 /*
3 * Cryptographic API for algorithms (i.e., low-level API).
4 *
5 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
6 */
7
8 #include <crypto/algapi.h>
9 #include <linux/err.h>
10 #include <linux/errno.h>
11 #include <linux/fips.h>
12 #include <linux/init.h>
13 #include <linux/kernel.h>
14 #include <linux/list.h>
15 #include <linux/module.h>
16 #include <linux/rtnetlink.h>
17 #include <linux/slab.h>
18 #include <linux/string.h>
19
20 #include "internal.h"
21
22 static LIST_HEAD(crypto_template_list);
23
crypto_check_module_sig(struct module * mod)24 static inline void crypto_check_module_sig(struct module *mod)
25 {
26 if (fips_enabled && mod && !module_sig_ok(mod))
27 panic("Module %s signature verification failed in FIPS mode\n",
28 module_name(mod));
29 }
30
crypto_check_alg(struct crypto_alg * alg)31 static int crypto_check_alg(struct crypto_alg *alg)
32 {
33 crypto_check_module_sig(alg->cra_module);
34
35 if (!alg->cra_name[0] || !alg->cra_driver_name[0])
36 return -EINVAL;
37
38 if (alg->cra_alignmask & (alg->cra_alignmask + 1))
39 return -EINVAL;
40
41 /* General maximums for all algs. */
42 if (alg->cra_alignmask > MAX_ALGAPI_ALIGNMASK)
43 return -EINVAL;
44
45 if (alg->cra_blocksize > MAX_ALGAPI_BLOCKSIZE)
46 return -EINVAL;
47
48 /* Lower maximums for specific alg types. */
49 if (!alg->cra_type && (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
50 CRYPTO_ALG_TYPE_CIPHER) {
51 if (alg->cra_alignmask > MAX_CIPHER_ALIGNMASK)
52 return -EINVAL;
53
54 if (alg->cra_blocksize > MAX_CIPHER_BLOCKSIZE)
55 return -EINVAL;
56 }
57
58 if (alg->cra_priority < 0)
59 return -EINVAL;
60
61 refcount_set(&alg->cra_refcnt, 1);
62
63 return 0;
64 }
65
crypto_free_instance(struct crypto_instance * inst)66 static void crypto_free_instance(struct crypto_instance *inst)
67 {
68 if (!inst->alg.cra_type->free) {
69 inst->tmpl->free(inst);
70 return;
71 }
72
73 inst->alg.cra_type->free(inst);
74 }
75
crypto_destroy_instance(struct crypto_alg * alg)76 static void crypto_destroy_instance(struct crypto_alg *alg)
77 {
78 struct crypto_instance *inst = (void *)alg;
79 struct crypto_template *tmpl = inst->tmpl;
80
81 crypto_free_instance(inst);
82 crypto_tmpl_put(tmpl);
83 }
84
/*
 * Helper for the depth-first walk in crypto_remove_spawns(): pop the
 * next spawn off @stack, move it to @secondary_spawns, and return the
 * users list the walk should continue with — @top once this level of
 * the stack is exhausted, or NULL when the stack is empty.
 */
static struct list_head *crypto_more_spawns(struct crypto_alg *alg,
					    struct list_head *stack,
					    struct list_head *top,
					    struct list_head *secondary_spawns)
{
	struct crypto_spawn *spawn, *n;

	spawn = list_first_entry_or_null(stack, struct crypto_spawn, list);
	if (!spawn)
		return NULL;

	n = list_next_entry(spawn, list);

	/*
	 * NOTE(review): if the popped spawn survives (->alg still set),
	 * this appears to record on the next stack entry which algorithm
	 * it belongs to, so it can be restored later — confirm against
	 * the consumer loop in crypto_remove_spawns().
	 */
	if (spawn->alg && &n->list != stack && !n->alg)
		n->alg = (n->list.next == stack) ? alg :
			 &list_next_entry(n, list)->inst->alg;

	list_move(&spawn->list, secondary_spawns);

	return &n->list == stack ? top : &n->inst->alg.cra_users;
}
106
/*
 * Mark an instance dead, unhash it from its template and queue its
 * algorithm on @list for destruction by crypto_remove_final().
 */
static void crypto_remove_instance(struct crypto_instance *inst,
				   struct list_head *list)
{
	struct crypto_template *tmpl = inst->tmpl;

	/* Already torn down by an earlier pass. */
	if (crypto_is_dead(&inst->alg))
		return;

	inst->alg.cra_flags |= CRYPTO_ALG_DEAD;
	/* Never hashed onto a template => nothing to unlink. */
	if (hlist_unhashed(&inst->list))
		return;

	/* Pin the template; crypto_destroy_instance() drops this ref. */
	if (!tmpl || !crypto_tmpl_get(tmpl))
		return;

	list_move(&inst->alg.cra_list, list);
	hlist_del(&inst->list);
	/* crypto_remove_final() will invoke this to free the instance. */
	inst->alg.cra_destroy = crypto_destroy_instance;

	BUG_ON(!list_empty(&inst->alg.cra_users));
}
128
/*
 * Remove the spawns (template-instance references) hanging off @alg,
 * collecting dead instances on @list for later destruction.  When
 * @nalg is non-NULL it is the replacement algorithm; spawns that remain
 * compatible with it are kept.  Callers hold crypto_alg_sem for
 * writing.
 */
void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list,
			  struct crypto_alg *nalg)
{
	u32 new_type = (nalg ?: alg)->cra_flags;
	struct crypto_spawn *spawn, *n;
	LIST_HEAD(secondary_spawns);
	struct list_head *spawns;
	LIST_HEAD(stack);
	LIST_HEAD(top);

	/* Direct users whose type/mask still matches the new flags. */
	spawns = &alg->cra_users;
	list_for_each_entry_safe(spawn, n, spawns, list) {
		if ((spawn->alg->cra_flags ^ new_type) & spawn->mask)
			continue;

		list_move(&spawn->list, &top);
	}

	/* Depth-first walk over the transitive users of @alg. */
	spawns = &top;
	do {
		while (!list_empty(spawns)) {
			struct crypto_instance *inst;

			spawn = list_first_entry(spawns, struct crypto_spawn,
						 list);
			inst = spawn->inst;

			BUG_ON(&inst->alg == alg);

			list_move(&spawn->list, &stack);

			/* Stop descending at the replacement algorithm. */
			if (&inst->alg == nalg)
				break;

			spawn->alg = NULL;
			spawns = &inst->alg.cra_users;

			/*
			 * We may encounter an unregistered instance here, since
			 * an instance's spawns are set up prior to the instance
			 * being registered. An unregistered instance will have
			 * NULL ->cra_users.next, since ->cra_users isn't
			 * properly initialized until registration. But an
			 * unregistered instance cannot have any users, so treat
			 * it the same as ->cra_users being empty.
			 */
			if (spawns->next == NULL)
				break;
		}
	} while ((spawns = crypto_more_spawns(alg, &stack, &top,
					      &secondary_spawns)));

	/*
	 * Surviving spawns go back on their algorithm's users list; the
	 * rest take their instances down with them.
	 */
	list_for_each_entry_safe(spawn, n, &secondary_spawns, list) {
		if (spawn->alg)
			list_move(&spawn->list, &spawn->alg->cra_users);
		else
			crypto_remove_instance(spawn->inst, list);
	}
}
EXPORT_SYMBOL_GPL(crypto_remove_spawns);
189
/*
 * Add @alg to crypto_alg_list together with a test larval that will
 * resolve to it once self-testing completes.  Callers hold
 * crypto_alg_sem for writing.  Returns the larval, or an ERR_PTR.
 */
static struct crypto_larval *__crypto_register_alg(struct crypto_alg *alg)
{
	struct crypto_alg *q;
	struct crypto_larval *larval;
	int ret = -EAGAIN;

	if (crypto_is_dead(alg))
		goto err;

	INIT_LIST_HEAD(&alg->cra_users);

	/* No cheating! */
	alg->cra_flags &= ~CRYPTO_ALG_TESTED;

	ret = -EEXIST;

	/* Reject duplicates against every live algorithm and larval. */
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			goto err;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			if (!strcmp(alg->cra_driver_name, q->cra_driver_name))
				goto err;
			continue;
		}

		if (!strcmp(q->cra_driver_name, alg->cra_name) ||
		    !strcmp(q->cra_driver_name, alg->cra_driver_name) ||
		    !strcmp(q->cra_name, alg->cra_driver_name))
			goto err;
	}

	/* The larval announces @alg's name until the test verdict lands. */
	larval = crypto_larval_alloc(alg->cra_name,
				     alg->cra_flags | CRYPTO_ALG_TESTED, 0);
	if (IS_ERR(larval))
		goto out;

	ret = -ENOENT;
	larval->adult = crypto_mod_get(alg);
	if (!larval->adult)
		goto free_larval;

	refcount_set(&larval->alg.cra_refcnt, 1);
	memcpy(larval->alg.cra_driver_name, alg->cra_driver_name,
	       CRYPTO_MAX_ALG_NAME);
	larval->alg.cra_priority = alg->cra_priority;

	list_add(&alg->cra_list, &crypto_alg_list);
	list_add(&larval->alg.cra_list, &crypto_alg_list);

	crypto_stats_init(alg);

out:
	return larval;

free_larval:
	kfree(larval);
err:
	larval = ERR_PTR(ret);
	goto out;
}
254
/*
 * Deliver the self-test verdict @err for the algorithm whose test
 * larval carries driver name @name.  On success the adult algorithm is
 * marked tested, waiting larvals are satisfied, and outclassed
 * competitors have their spawns removed.
 */
void crypto_alg_tested(const char *name, int err)
{
	struct crypto_larval *test;
	struct crypto_alg *alg;
	struct crypto_alg *q;
	LIST_HEAD(list);
	bool best;

	down_write(&crypto_alg_sem);
	/* Locate the test larval created by __crypto_register_alg(). */
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (crypto_is_moribund(q) || !crypto_is_larval(q))
			continue;

		test = (struct crypto_larval *)q;

		if (!strcmp(q->cra_driver_name, name))
			goto found;
	}

	pr_err("alg: Unexpected test result for %s: %d\n", name, err);
	goto unlock;

found:
	/* The test larval has served its purpose either way. */
	q->cra_flags |= CRYPTO_ALG_DEAD;
	alg = test->adult;
	/* Test failed, or the adult was unregistered in the meantime. */
	if (err || list_empty(&alg->cra_list))
		goto complete;

	alg->cra_flags |= CRYPTO_ALG_TESTED;

	/* Only satisfy larval waiters if we are the best. */
	best = true;
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (crypto_is_moribund(q) || !crypto_is_larval(q))
			continue;

		if (strcmp(alg->cra_name, q->cra_name))
			continue;

		if (q->cra_priority > alg->cra_priority) {
			best = false;
			break;
		}
	}

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			continue;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			struct crypto_larval *larval = (void *)q;

			/*
			 * Check to see if either our generic name or
			 * specific name can satisfy the name requested
			 * by the larval entry q.
			 */
			if (strcmp(alg->cra_name, q->cra_name) &&
			    strcmp(alg->cra_driver_name, q->cra_name))
				continue;

			if (larval->adult)
				continue;
			if ((q->cra_flags ^ alg->cra_flags) & larval->mask)
				continue;

			if (best && crypto_mod_get(alg))
				larval->adult = alg;
			else
				larval->adult = ERR_PTR(-EAGAIN);

			continue;
		}

		if (strcmp(alg->cra_name, q->cra_name))
			continue;

		/* Leave distinct, higher-priority drivers alone. */
		if (strcmp(alg->cra_driver_name, q->cra_driver_name) &&
		    q->cra_priority > alg->cra_priority)
			continue;

		crypto_remove_spawns(q, &list, alg);
	}

complete:
	complete_all(&test->completion);

unlock:
	up_write(&crypto_alg_sem);

	/* Destroy the collected casualties outside the semaphore. */
	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_alg_tested);
351
crypto_remove_final(struct list_head * list)352 void crypto_remove_final(struct list_head *list)
353 {
354 struct crypto_alg *alg;
355 struct crypto_alg *n;
356
357 list_for_each_entry_safe(alg, n, list, cra_list) {
358 list_del_init(&alg->cra_list);
359 crypto_alg_put(alg);
360 }
361 }
362 EXPORT_SYMBOL_GPL(crypto_remove_final);
363
crypto_wait_for_test(struct crypto_larval * larval)364 static void crypto_wait_for_test(struct crypto_larval *larval)
365 {
366 int err;
367
368 err = crypto_probing_notify(CRYPTO_MSG_ALG_REGISTER, larval->adult);
369 if (err != NOTIFY_STOP) {
370 if (WARN_ON(err != NOTIFY_DONE))
371 goto out;
372 crypto_alg_tested(larval->alg.cra_driver_name, 0);
373 }
374
375 err = wait_for_completion_killable(&larval->completion);
376 WARN_ON(err);
377 if (!err)
378 crypto_notify(CRYPTO_MSG_ALG_LOADED, larval);
379
380 out:
381 crypto_larval_kill(&larval->alg);
382 }
383
crypto_register_alg(struct crypto_alg * alg)384 int crypto_register_alg(struct crypto_alg *alg)
385 {
386 struct crypto_larval *larval;
387 int err;
388
389 alg->cra_flags &= ~CRYPTO_ALG_DEAD;
390 err = crypto_check_alg(alg);
391 if (err)
392 return err;
393
394 down_write(&crypto_alg_sem);
395 larval = __crypto_register_alg(alg);
396 up_write(&crypto_alg_sem);
397
398 if (IS_ERR(larval))
399 return PTR_ERR(larval);
400
401 crypto_wait_for_test(larval);
402 return 0;
403 }
404 EXPORT_SYMBOL_GPL(crypto_register_alg);
405
crypto_remove_alg(struct crypto_alg * alg,struct list_head * list)406 static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list)
407 {
408 if (unlikely(list_empty(&alg->cra_list)))
409 return -ENOENT;
410
411 alg->cra_flags |= CRYPTO_ALG_DEAD;
412
413 list_del_init(&alg->cra_list);
414 crypto_remove_spawns(alg, list, NULL);
415
416 return 0;
417 }
418
crypto_unregister_alg(struct crypto_alg * alg)419 int crypto_unregister_alg(struct crypto_alg *alg)
420 {
421 int ret;
422 LIST_HEAD(list);
423
424 down_write(&crypto_alg_sem);
425 ret = crypto_remove_alg(alg, &list);
426 up_write(&crypto_alg_sem);
427
428 if (ret)
429 return ret;
430
431 BUG_ON(refcount_read(&alg->cra_refcnt) != 1);
432 if (alg->cra_destroy)
433 alg->cra_destroy(alg);
434
435 crypto_remove_final(&list);
436 return 0;
437 }
438 EXPORT_SYMBOL_GPL(crypto_unregister_alg);
439
crypto_register_algs(struct crypto_alg * algs,int count)440 int crypto_register_algs(struct crypto_alg *algs, int count)
441 {
442 int i, ret;
443
444 for (i = 0; i < count; i++) {
445 ret = crypto_register_alg(&algs[i]);
446 if (ret)
447 goto err;
448 }
449
450 return 0;
451
452 err:
453 for (--i; i >= 0; --i)
454 crypto_unregister_alg(&algs[i]);
455
456 return ret;
457 }
458 EXPORT_SYMBOL_GPL(crypto_register_algs);
459
crypto_unregister_algs(struct crypto_alg * algs,int count)460 int crypto_unregister_algs(struct crypto_alg *algs, int count)
461 {
462 int i, ret;
463
464 for (i = 0; i < count; i++) {
465 ret = crypto_unregister_alg(&algs[i]);
466 if (ret)
467 pr_err("Failed to unregister %s %s: %d\n",
468 algs[i].cra_driver_name, algs[i].cra_name, ret);
469 }
470
471 return 0;
472 }
473 EXPORT_SYMBOL_GPL(crypto_unregister_algs);
474
crypto_register_template(struct crypto_template * tmpl)475 int crypto_register_template(struct crypto_template *tmpl)
476 {
477 struct crypto_template *q;
478 int err = -EEXIST;
479
480 down_write(&crypto_alg_sem);
481
482 crypto_check_module_sig(tmpl->module);
483
484 list_for_each_entry(q, &crypto_template_list, list) {
485 if (q == tmpl)
486 goto out;
487 }
488
489 list_add(&tmpl->list, &crypto_template_list);
490 err = 0;
491 out:
492 up_write(&crypto_alg_sem);
493 return err;
494 }
495 EXPORT_SYMBOL_GPL(crypto_register_template);
496
crypto_register_templates(struct crypto_template * tmpls,int count)497 int crypto_register_templates(struct crypto_template *tmpls, int count)
498 {
499 int i, err;
500
501 for (i = 0; i < count; i++) {
502 err = crypto_register_template(&tmpls[i]);
503 if (err)
504 goto out;
505 }
506 return 0;
507
508 out:
509 for (--i; i >= 0; --i)
510 crypto_unregister_template(&tmpls[i]);
511 return err;
512 }
513 EXPORT_SYMBOL_GPL(crypto_register_templates);
514
/*
 * Unregister @tmpl and destroy every instance created from it.  The
 * instances are unlinked under crypto_alg_sem, then freed after the
 * semaphore is dropped.
 */
void crypto_unregister_template(struct crypto_template *tmpl)
{
	struct crypto_instance *inst;
	struct hlist_node *n;
	struct hlist_head *list;
	LIST_HEAD(users);

	down_write(&crypto_alg_sem);

	BUG_ON(list_empty(&tmpl->list));
	list_del_init(&tmpl->list);

	/* Remove each instance's algorithm, collecting users on @users. */
	list = &tmpl->instances;
	hlist_for_each_entry(inst, list, list) {
		int err = crypto_remove_alg(&inst->alg, &users);

		BUG_ON(err);
	}

	up_write(&crypto_alg_sem);

	/* _safe iteration: crypto_free_instance() frees the entries. */
	hlist_for_each_entry_safe(inst, n, list, list) {
		BUG_ON(refcount_read(&inst->alg.cra_refcnt) != 1);
		crypto_free_instance(inst);
	}
	crypto_remove_final(&users);
}
EXPORT_SYMBOL_GPL(crypto_unregister_template);
543
crypto_unregister_templates(struct crypto_template * tmpls,int count)544 void crypto_unregister_templates(struct crypto_template *tmpls, int count)
545 {
546 int i;
547
548 for (i = count - 1; i >= 0; --i)
549 crypto_unregister_template(&tmpls[i]);
550 }
551 EXPORT_SYMBOL_GPL(crypto_unregister_templates);
552
__crypto_lookup_template(const char * name)553 static struct crypto_template *__crypto_lookup_template(const char *name)
554 {
555 struct crypto_template *q, *tmpl = NULL;
556
557 down_read(&crypto_alg_sem);
558 list_for_each_entry(q, &crypto_template_list, list) {
559 if (strcmp(q->name, name))
560 continue;
561 if (unlikely(!crypto_tmpl_get(q)))
562 continue;
563
564 tmpl = q;
565 break;
566 }
567 up_read(&crypto_alg_sem);
568
569 return tmpl;
570 }
571
/*
 * Look up a template by name, requesting the "crypto-<name>" module on
 * demand if the first lookup fails.
 */
struct crypto_template *crypto_lookup_template(const char *name)
{
	return try_then_request_module(__crypto_lookup_template(name),
				       "crypto-%s", name);
}
EXPORT_SYMBOL_GPL(crypto_lookup_template);
578
/*
 * Register an instance created by template @tmpl.  On success the
 * instance is hashed onto the template and its self-test is awaited.
 * Returns 0 or a negative errno.
 */
int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst)
{
	struct crypto_larval *larval;
	int err;

	err = crypto_check_alg(&inst->alg);
	if (err)
		return err;

	inst->alg.cra_module = tmpl->module;
	inst->alg.cra_flags |= CRYPTO_ALG_INSTANCE;

	down_write(&crypto_alg_sem);

	larval = __crypto_register_alg(&inst->alg);
	if (IS_ERR(larval))
		goto unlock;

	/* Link the instance to its template only after registration. */
	hlist_add_head(&inst->list, &tmpl->instances);
	inst->tmpl = tmpl;

unlock:
	up_write(&crypto_alg_sem);

	err = PTR_ERR(larval);
	if (IS_ERR(larval))
		goto err;

	/* Testing must happen outside the semaphore. */
	crypto_wait_for_test(larval);
	err = 0;

err:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_instance);
615
/*
 * Unregister a template instance: detach its users first, then the
 * instance itself, and destroy everything once the semaphore is
 * released.  Always returns 0.
 */
int crypto_unregister_instance(struct crypto_instance *inst)
{
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);

	crypto_remove_spawns(&inst->alg, &list, NULL);
	crypto_remove_instance(inst, &list);

	up_write(&crypto_alg_sem);

	crypto_remove_final(&list);

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_unregister_instance);
632
/*
 * Attach @spawn (owned by instance @inst) to @alg by linking it onto
 * the algorithm's users list.  Returns -EAGAIN if @alg is going away,
 * -EINVAL if no instance was supplied.
 */
int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg,
		      struct crypto_instance *inst, u32 mask)
{
	int err;

	if (WARN_ON_ONCE(inst == NULL))
		return -EINVAL;

	spawn->inst = inst;
	spawn->mask = mask;

	down_write(&crypto_alg_sem);
	if (crypto_is_moribund(alg)) {
		err = -EAGAIN;
	} else {
		list_add(&spawn->list, &alg->cra_users);
		spawn->alg = alg;
		err = 0;
	}
	up_write(&crypto_alg_sem);

	return err;
}
EXPORT_SYMBOL_GPL(crypto_init_spawn);
655
crypto_init_spawn2(struct crypto_spawn * spawn,struct crypto_alg * alg,struct crypto_instance * inst,const struct crypto_type * frontend)656 int crypto_init_spawn2(struct crypto_spawn *spawn, struct crypto_alg *alg,
657 struct crypto_instance *inst,
658 const struct crypto_type *frontend)
659 {
660 int err = -EINVAL;
661
662 if ((alg->cra_flags ^ frontend->type) & frontend->maskset)
663 goto out;
664
665 spawn->frontend = frontend;
666 err = crypto_init_spawn(spawn, alg, inst, frontend->maskset);
667
668 out:
669 return err;
670 }
671 EXPORT_SYMBOL_GPL(crypto_init_spawn2);
672
/*
 * Look up algorithm @name and attach @spawn to it.  The lookup
 * reference is dropped again: the spawn is tracked via the algorithm's
 * cra_users list instead.
 */
int crypto_grab_spawn(struct crypto_spawn *spawn, const char *name,
		      u32 type, u32 mask)
{
	struct crypto_alg *alg;
	int err;

	alg = crypto_find_alg(name, spawn->frontend, type, mask);
	if (IS_ERR(alg))
		return PTR_ERR(alg);

	err = crypto_init_spawn(spawn, alg, spawn->inst, mask);
	crypto_mod_put(alg);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_grab_spawn);
688
crypto_drop_spawn(struct crypto_spawn * spawn)689 void crypto_drop_spawn(struct crypto_spawn *spawn)
690 {
691 down_write(&crypto_alg_sem);
692 if (spawn->alg)
693 list_del(&spawn->list);
694 up_write(&crypto_alg_sem);
695 }
696 EXPORT_SYMBOL_GPL(crypto_drop_spawn);
697
crypto_spawn_alg(struct crypto_spawn * spawn)698 static struct crypto_alg *crypto_spawn_alg(struct crypto_spawn *spawn)
699 {
700 struct crypto_alg *alg;
701
702 down_read(&crypto_alg_sem);
703 alg = spawn->alg;
704 if (alg && !crypto_mod_get(alg)) {
705 alg->cra_flags |= CRYPTO_ALG_DYING;
706 alg = NULL;
707 }
708 up_read(&crypto_alg_sem);
709
710 return alg ?: ERR_PTR(-EAGAIN);
711 }
712
/*
 * Allocate a transform from @spawn after checking it against the
 * requested @type/@mask.  The module reference taken on the algorithm
 * is dropped again on failure.
 */
struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_tfm *tfm;

	alg = crypto_spawn_alg(spawn);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	if (unlikely((alg->cra_flags ^ type) & mask)) {
		tfm = ERR_PTR(-EINVAL);
		goto put_alg;
	}

	tfm = __crypto_alloc_tfm(alg, type, mask);
	if (!IS_ERR(tfm))
		return tfm;

put_alg:
	crypto_mod_put(alg);
	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm);
738
crypto_spawn_tfm2(struct crypto_spawn * spawn)739 void *crypto_spawn_tfm2(struct crypto_spawn *spawn)
740 {
741 struct crypto_alg *alg;
742 struct crypto_tfm *tfm;
743
744 alg = crypto_spawn_alg(spawn);
745 if (IS_ERR(alg))
746 return ERR_CAST(alg);
747
748 tfm = crypto_create_tfm(alg, spawn->frontend);
749 if (IS_ERR(tfm))
750 goto out_put_alg;
751
752 return tfm;
753
754 out_put_alg:
755 crypto_mod_put(alg);
756 return tfm;
757 }
758 EXPORT_SYMBOL_GPL(crypto_spawn_tfm2);
759
/* Subscribe @nb to crypto events published on crypto_chain. */
int crypto_register_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_register(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_register_notifier);
765
/* Unsubscribe @nb from crypto events on crypto_chain. */
int crypto_unregister_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_unregister(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_unregister_notifier);
771
crypto_get_attr_type(struct rtattr ** tb)772 struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb)
773 {
774 struct rtattr *rta = tb[0];
775 struct crypto_attr_type *algt;
776
777 if (!rta)
778 return ERR_PTR(-ENOENT);
779 if (RTA_PAYLOAD(rta) < sizeof(*algt))
780 return ERR_PTR(-EINVAL);
781 if (rta->rta_type != CRYPTOA_TYPE)
782 return ERR_PTR(-EINVAL);
783
784 algt = RTA_DATA(rta);
785
786 return algt;
787 }
788 EXPORT_SYMBOL_GPL(crypto_get_attr_type);
789
/*
 * Verify that the type requested in the template arguments is
 * compatible with @type under the attribute's own mask.
 */
int crypto_check_attr_type(struct rtattr **tb, u32 type)
{
	struct crypto_attr_type *algt = crypto_get_attr_type(tb);

	if (IS_ERR(algt))
		return PTR_ERR(algt);

	return ((algt->type ^ type) & algt->mask) ? -EINVAL : 0;
}
EXPORT_SYMBOL_GPL(crypto_check_attr_type);
804
crypto_attr_alg_name(struct rtattr * rta)805 const char *crypto_attr_alg_name(struct rtattr *rta)
806 {
807 struct crypto_attr_alg *alga;
808
809 if (!rta)
810 return ERR_PTR(-ENOENT);
811 if (RTA_PAYLOAD(rta) < sizeof(*alga))
812 return ERR_PTR(-EINVAL);
813 if (rta->rta_type != CRYPTOA_ALG)
814 return ERR_PTR(-EINVAL);
815
816 alga = RTA_DATA(rta);
817 alga->name[CRYPTO_MAX_ALG_NAME - 1] = 0;
818
819 return alga->name;
820 }
821 EXPORT_SYMBOL_GPL(crypto_attr_alg_name);
822
/*
 * Resolve a CRYPTOA_ALG attribute to an algorithm matching the given
 * frontend, type and mask.
 */
struct crypto_alg *crypto_attr_alg2(struct rtattr *rta,
				    const struct crypto_type *frontend,
				    u32 type, u32 mask)
{
	const char *name = crypto_attr_alg_name(rta);

	if (IS_ERR(name))
		return ERR_CAST(name);

	return crypto_find_alg(name, frontend, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_attr_alg2);
836
/* Extract a u32 value from a CRYPTOA_U32 attribute into *@num. */
int crypto_attr_u32(struct rtattr *rta, u32 *num)
{
	struct crypto_attr_u32 *nu32;

	if (!rta)
		return -ENOENT;
	if (RTA_PAYLOAD(rta) < sizeof(*nu32) || rta->rta_type != CRYPTOA_U32)
		return -EINVAL;

	nu32 = RTA_DATA(rta);
	*num = nu32->num;

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_attr_u32);
854
crypto_inst_setname(struct crypto_instance * inst,const char * name,struct crypto_alg * alg)855 int crypto_inst_setname(struct crypto_instance *inst, const char *name,
856 struct crypto_alg *alg)
857 {
858 if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name,
859 alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
860 return -ENAMETOOLONG;
861
862 if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s(%s)",
863 name, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
864 return -ENAMETOOLONG;
865
866 return 0;
867 }
868 EXPORT_SYMBOL_GPL(crypto_inst_setname);
869
crypto_alloc_instance(const char * name,struct crypto_alg * alg,unsigned int head)870 void *crypto_alloc_instance(const char *name, struct crypto_alg *alg,
871 unsigned int head)
872 {
873 struct crypto_instance *inst;
874 char *p;
875 int err;
876
877 p = kzalloc(head + sizeof(*inst) + sizeof(struct crypto_spawn),
878 GFP_KERNEL);
879 if (!p)
880 return ERR_PTR(-ENOMEM);
881
882 inst = (void *)(p + head);
883
884 err = crypto_inst_setname(inst, name, alg);
885 if (err)
886 goto err_free_inst;
887
888 return p;
889
890 err_free_inst:
891 kfree(p);
892 return ERR_PTR(err);
893 }
894 EXPORT_SYMBOL_GPL(crypto_alloc_instance);
895
/*
 * Initialize an empty request queue limited to @max_qlen entries.
 * An empty backlog is represented by ->backlog pointing at the list
 * head itself.
 */
void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen)
{
	INIT_LIST_HEAD(&queue->list);
	queue->backlog = &queue->list;
	queue->qlen = 0;
	queue->max_qlen = max_qlen;
}
EXPORT_SYMBOL_GPL(crypto_init_queue);
904
crypto_enqueue_request(struct crypto_queue * queue,struct crypto_async_request * request)905 int crypto_enqueue_request(struct crypto_queue *queue,
906 struct crypto_async_request *request)
907 {
908 int err = -EINPROGRESS;
909
910 if (unlikely(queue->qlen >= queue->max_qlen)) {
911 if (!(request->flags & CRYPTO_TFM_REQ_MAY_BACKLOG)) {
912 err = -ENOSPC;
913 goto out;
914 }
915 err = -EBUSY;
916 if (queue->backlog == &queue->list)
917 queue->backlog = &request->list;
918 }
919
920 queue->qlen++;
921 list_add_tail(&request->list, &queue->list);
922
923 out:
924 return err;
925 }
926 EXPORT_SYMBOL_GPL(crypto_enqueue_request);
927
crypto_dequeue_request(struct crypto_queue * queue)928 struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue)
929 {
930 struct list_head *request;
931
932 if (unlikely(!queue->qlen))
933 return NULL;
934
935 queue->qlen--;
936
937 if (queue->backlog != &queue->list)
938 queue->backlog = queue->backlog->next;
939
940 request = queue->list.next;
941 list_del(request);
942
943 return list_entry(request, struct crypto_async_request, list);
944 }
945 EXPORT_SYMBOL_GPL(crypto_dequeue_request);
946
/*
 * Bytewise big-endian increment of the @size-byte buffer at @a:
 * start at the least significant (last) byte and stop as soon as a
 * byte does not wrap to zero.
 */
static inline void crypto_inc_byte(u8 *a, unsigned int size)
{
	u8 *p = a + size;

	while (size--) {
		if (++*--p)
			break;
	}
}
959
/*
 * Increment a @size-byte big-endian counter at @a.  Processes 32 bits
 * at a time when the buffer end is suitably aligned (or unaligned
 * accesses are cheap), falling back to bytewise carry propagation for
 * the remainder.
 */
void crypto_inc(u8 *a, unsigned int size)
{
	__be32 *b = (__be32 *)(a + size);
	u32 c;

	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) ||
	    IS_ALIGNED((unsigned long)b, __alignof__(*b)))
		for (; size >= 4; size -= 4) {
			c = be32_to_cpu(*--b) + 1;
			*b = cpu_to_be32(c);
			if (likely(c))
				return;	/* no carry out of this word */
		}

	/* Carry continues through the leading (possibly unaligned) bytes. */
	crypto_inc_byte(a, size);
}
EXPORT_SYMBOL_GPL(crypto_inc);
977
/*
 * dst[i] = src1[i] ^ src2[i] for @len bytes.  On platforms without
 * cheap unaligned accesses, first advance bytewise until the pointers
 * reach their mutual ("relative") alignment, then XOR in the widest
 * stride that alignment permits.
 */
void __crypto_xor(u8 *dst, const u8 *src1, const u8 *src2, unsigned int len)
{
	int relalign = 0;

	if (!IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)) {
		int size = sizeof(unsigned long);
		/* Bits where the three pointers disagree, mod word size. */
		int d = (((unsigned long)dst ^ (unsigned long)src1) |
			 ((unsigned long)dst ^ (unsigned long)src2)) &
			(size - 1);

		relalign = d ? 1 << __ffs(d) : size;

		/*
		 * If we care about alignment, process as many bytes as
		 * needed to advance dst and src to values whose alignments
		 * equal their relative alignment. This will allow us to
		 * process the remainder of the input using optimal strides.
		 */
		while (((unsigned long)dst & (relalign - 1)) && len > 0) {
			*dst++ = *src1++ ^ *src2++;
			len--;
		}
	}

	/* 8-byte stride (64-bit kernels only). */
	while (IS_ENABLED(CONFIG_64BIT) && len >= 8 && !(relalign & 7)) {
		*(u64 *)dst = *(u64 *)src1 ^ *(u64 *)src2;
		dst += 8;
		src1 += 8;
		src2 += 8;
		len -= 8;
	}

	while (len >= 4 && !(relalign & 3)) {
		*(u32 *)dst = *(u32 *)src1 ^ *(u32 *)src2;
		dst += 4;
		src1 += 4;
		src2 += 4;
		len -= 4;
	}

	while (len >= 2 && !(relalign & 1)) {
		*(u16 *)dst = *(u16 *)src1 ^ *(u16 *)src2;
		dst += 2;
		src1 += 2;
		src2 += 2;
		len -= 2;
	}

	/* Trailing bytes (or everything, if nothing was aligned). */
	while (len--)
		*dst++ = *src1++ ^ *src2++;
}
EXPORT_SYMBOL_GPL(__crypto_xor);
1030
crypto_alg_extsize(struct crypto_alg * alg)1031 unsigned int crypto_alg_extsize(struct crypto_alg *alg)
1032 {
1033 return alg->cra_ctxsize +
1034 (alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1));
1035 }
1036 EXPORT_SYMBOL_GPL(crypto_alg_extsize);
1037
/*
 * Return 1 if an algorithm matching @name/@frontend/@type/@mask
 * exists (dropping the lookup reference immediately), 0 otherwise.
 */
int crypto_type_has_alg(const char *name, const struct crypto_type *frontend,
			u32 type, u32 mask)
{
	struct crypto_alg *alg = crypto_find_alg(name, frontend, type, mask);

	if (IS_ERR(alg))
		return 0;

	crypto_mod_put(alg);
	return 1;
}
EXPORT_SYMBOL_GPL(crypto_type_has_alg);
1052
1053 #ifdef CONFIG_CRYPTO_STATS
/* Zero all per-algorithm statistics counters. */
void crypto_stats_init(struct crypto_alg *alg)
{
	memset(&alg->stats, 0, sizeof(alg->stats));
}
EXPORT_SYMBOL_GPL(crypto_stats_init);
1059
/*
 * Pin @alg while an operation is in flight; the matching
 * crypto_alg_put() happens in the per-operation stats helpers below.
 */
void crypto_stats_get(struct crypto_alg *alg)
{
	crypto_alg_get(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_get);
1065
crypto_stats_ablkcipher_encrypt(unsigned int nbytes,int ret,struct crypto_alg * alg)1066 void crypto_stats_ablkcipher_encrypt(unsigned int nbytes, int ret,
1067 struct crypto_alg *alg)
1068 {
1069 if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1070 atomic64_inc(&alg->stats.cipher.err_cnt);
1071 } else {
1072 atomic64_inc(&alg->stats.cipher.encrypt_cnt);
1073 atomic64_add(nbytes, &alg->stats.cipher.encrypt_tlen);
1074 }
1075 crypto_alg_put(alg);
1076 }
1077 EXPORT_SYMBOL_GPL(crypto_stats_ablkcipher_encrypt);
1078
crypto_stats_ablkcipher_decrypt(unsigned int nbytes,int ret,struct crypto_alg * alg)1079 void crypto_stats_ablkcipher_decrypt(unsigned int nbytes, int ret,
1080 struct crypto_alg *alg)
1081 {
1082 if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1083 atomic64_inc(&alg->stats.cipher.err_cnt);
1084 } else {
1085 atomic64_inc(&alg->stats.cipher.decrypt_cnt);
1086 atomic64_add(nbytes, &alg->stats.cipher.decrypt_tlen);
1087 }
1088 crypto_alg_put(alg);
1089 }
1090 EXPORT_SYMBOL_GPL(crypto_stats_ablkcipher_decrypt);
1091
crypto_stats_aead_encrypt(unsigned int cryptlen,struct crypto_alg * alg,int ret)1092 void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg,
1093 int ret)
1094 {
1095 if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1096 atomic64_inc(&alg->stats.aead.err_cnt);
1097 } else {
1098 atomic64_inc(&alg->stats.aead.encrypt_cnt);
1099 atomic64_add(cryptlen, &alg->stats.aead.encrypt_tlen);
1100 }
1101 crypto_alg_put(alg);
1102 }
1103 EXPORT_SYMBOL_GPL(crypto_stats_aead_encrypt);
1104
crypto_stats_aead_decrypt(unsigned int cryptlen,struct crypto_alg * alg,int ret)1105 void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg,
1106 int ret)
1107 {
1108 if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1109 atomic64_inc(&alg->stats.aead.err_cnt);
1110 } else {
1111 atomic64_inc(&alg->stats.aead.decrypt_cnt);
1112 atomic64_add(cryptlen, &alg->stats.aead.decrypt_tlen);
1113 }
1114 crypto_alg_put(alg);
1115 }
1116 EXPORT_SYMBOL_GPL(crypto_stats_aead_decrypt);
1117
crypto_stats_akcipher_encrypt(unsigned int src_len,int ret,struct crypto_alg * alg)1118 void crypto_stats_akcipher_encrypt(unsigned int src_len, int ret,
1119 struct crypto_alg *alg)
1120 {
1121 if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1122 atomic64_inc(&alg->stats.akcipher.err_cnt);
1123 } else {
1124 atomic64_inc(&alg->stats.akcipher.encrypt_cnt);
1125 atomic64_add(src_len, &alg->stats.akcipher.encrypt_tlen);
1126 }
1127 crypto_alg_put(alg);
1128 }
1129 EXPORT_SYMBOL_GPL(crypto_stats_akcipher_encrypt);
1130
crypto_stats_akcipher_decrypt(unsigned int src_len,int ret,struct crypto_alg * alg)1131 void crypto_stats_akcipher_decrypt(unsigned int src_len, int ret,
1132 struct crypto_alg *alg)
1133 {
1134 if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1135 atomic64_inc(&alg->stats.akcipher.err_cnt);
1136 } else {
1137 atomic64_inc(&alg->stats.akcipher.decrypt_cnt);
1138 atomic64_add(src_len, &alg->stats.akcipher.decrypt_tlen);
1139 }
1140 crypto_alg_put(alg);
1141 }
1142 EXPORT_SYMBOL_GPL(crypto_stats_akcipher_decrypt);
1143
crypto_stats_akcipher_sign(int ret,struct crypto_alg * alg)1144 void crypto_stats_akcipher_sign(int ret, struct crypto_alg *alg)
1145 {
1146 if (ret && ret != -EINPROGRESS && ret != -EBUSY)
1147 atomic64_inc(&alg->stats.akcipher.err_cnt);
1148 else
1149 atomic64_inc(&alg->stats.akcipher.sign_cnt);
1150 crypto_alg_put(alg);
1151 }
1152 EXPORT_SYMBOL_GPL(crypto_stats_akcipher_sign);
1153
crypto_stats_akcipher_verify(int ret,struct crypto_alg * alg)1154 void crypto_stats_akcipher_verify(int ret, struct crypto_alg *alg)
1155 {
1156 if (ret && ret != -EINPROGRESS && ret != -EBUSY)
1157 atomic64_inc(&alg->stats.akcipher.err_cnt);
1158 else
1159 atomic64_inc(&alg->stats.akcipher.verify_cnt);
1160 crypto_alg_put(alg);
1161 }
1162 EXPORT_SYMBOL_GPL(crypto_stats_akcipher_verify);
1163
crypto_stats_compress(unsigned int slen,int ret,struct crypto_alg * alg)1164 void crypto_stats_compress(unsigned int slen, int ret, struct crypto_alg *alg)
1165 {
1166 if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1167 atomic64_inc(&alg->stats.compress.err_cnt);
1168 } else {
1169 atomic64_inc(&alg->stats.compress.compress_cnt);
1170 atomic64_add(slen, &alg->stats.compress.compress_tlen);
1171 }
1172 crypto_alg_put(alg);
1173 }
1174 EXPORT_SYMBOL_GPL(crypto_stats_compress);
1175
crypto_stats_decompress(unsigned int slen,int ret,struct crypto_alg * alg)1176 void crypto_stats_decompress(unsigned int slen, int ret, struct crypto_alg *alg)
1177 {
1178 if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1179 atomic64_inc(&alg->stats.compress.err_cnt);
1180 } else {
1181 atomic64_inc(&alg->stats.compress.decompress_cnt);
1182 atomic64_add(slen, &alg->stats.compress.decompress_tlen);
1183 }
1184 crypto_alg_put(alg);
1185 }
1186 EXPORT_SYMBOL_GPL(crypto_stats_decompress);
1187
crypto_stats_ahash_update(unsigned int nbytes,int ret,struct crypto_alg * alg)1188 void crypto_stats_ahash_update(unsigned int nbytes, int ret,
1189 struct crypto_alg *alg)
1190 {
1191 if (ret && ret != -EINPROGRESS && ret != -EBUSY)
1192 atomic64_inc(&alg->stats.hash.err_cnt);
1193 else
1194 atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
1195 crypto_alg_put(alg);
1196 }
1197 EXPORT_SYMBOL_GPL(crypto_stats_ahash_update);
1198
crypto_stats_ahash_final(unsigned int nbytes,int ret,struct crypto_alg * alg)1199 void crypto_stats_ahash_final(unsigned int nbytes, int ret,
1200 struct crypto_alg *alg)
1201 {
1202 if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1203 atomic64_inc(&alg->stats.hash.err_cnt);
1204 } else {
1205 atomic64_inc(&alg->stats.hash.hash_cnt);
1206 atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
1207 }
1208 crypto_alg_put(alg);
1209 }
1210 EXPORT_SYMBOL_GPL(crypto_stats_ahash_final);
1211
crypto_stats_kpp_set_secret(struct crypto_alg * alg,int ret)1212 void crypto_stats_kpp_set_secret(struct crypto_alg *alg, int ret)
1213 {
1214 if (ret)
1215 atomic64_inc(&alg->stats.kpp.err_cnt);
1216 else
1217 atomic64_inc(&alg->stats.kpp.setsecret_cnt);
1218 crypto_alg_put(alg);
1219 }
1220 EXPORT_SYMBOL_GPL(crypto_stats_kpp_set_secret);
1221
crypto_stats_kpp_generate_public_key(struct crypto_alg * alg,int ret)1222 void crypto_stats_kpp_generate_public_key(struct crypto_alg *alg, int ret)
1223 {
1224 if (ret)
1225 atomic64_inc(&alg->stats.kpp.err_cnt);
1226 else
1227 atomic64_inc(&alg->stats.kpp.generate_public_key_cnt);
1228 crypto_alg_put(alg);
1229 }
1230 EXPORT_SYMBOL_GPL(crypto_stats_kpp_generate_public_key);
1231
crypto_stats_kpp_compute_shared_secret(struct crypto_alg * alg,int ret)1232 void crypto_stats_kpp_compute_shared_secret(struct crypto_alg *alg, int ret)
1233 {
1234 if (ret)
1235 atomic64_inc(&alg->stats.kpp.err_cnt);
1236 else
1237 atomic64_inc(&alg->stats.kpp.compute_shared_secret_cnt);
1238 crypto_alg_put(alg);
1239 }
1240 EXPORT_SYMBOL_GPL(crypto_stats_kpp_compute_shared_secret);
1241
crypto_stats_rng_seed(struct crypto_alg * alg,int ret)1242 void crypto_stats_rng_seed(struct crypto_alg *alg, int ret)
1243 {
1244 if (ret && ret != -EINPROGRESS && ret != -EBUSY)
1245 atomic64_inc(&alg->stats.rng.err_cnt);
1246 else
1247 atomic64_inc(&alg->stats.rng.seed_cnt);
1248 crypto_alg_put(alg);
1249 }
1250 EXPORT_SYMBOL_GPL(crypto_stats_rng_seed);
1251
crypto_stats_rng_generate(struct crypto_alg * alg,unsigned int dlen,int ret)1252 void crypto_stats_rng_generate(struct crypto_alg *alg, unsigned int dlen,
1253 int ret)
1254 {
1255 if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1256 atomic64_inc(&alg->stats.rng.err_cnt);
1257 } else {
1258 atomic64_inc(&alg->stats.rng.generate_cnt);
1259 atomic64_add(dlen, &alg->stats.rng.generate_tlen);
1260 }
1261 crypto_alg_put(alg);
1262 }
1263 EXPORT_SYMBOL_GPL(crypto_stats_rng_generate);
1264
crypto_stats_skcipher_encrypt(unsigned int cryptlen,int ret,struct crypto_alg * alg)1265 void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret,
1266 struct crypto_alg *alg)
1267 {
1268 if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1269 atomic64_inc(&alg->stats.cipher.err_cnt);
1270 } else {
1271 atomic64_inc(&alg->stats.cipher.encrypt_cnt);
1272 atomic64_add(cryptlen, &alg->stats.cipher.encrypt_tlen);
1273 }
1274 crypto_alg_put(alg);
1275 }
1276 EXPORT_SYMBOL_GPL(crypto_stats_skcipher_encrypt);
1277
crypto_stats_skcipher_decrypt(unsigned int cryptlen,int ret,struct crypto_alg * alg)1278 void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret,
1279 struct crypto_alg *alg)
1280 {
1281 if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1282 atomic64_inc(&alg->stats.cipher.err_cnt);
1283 } else {
1284 atomic64_inc(&alg->stats.cipher.decrypt_cnt);
1285 atomic64_add(cryptlen, &alg->stats.cipher.decrypt_tlen);
1286 }
1287 crypto_alg_put(alg);
1288 }
1289 EXPORT_SYMBOL_GPL(crypto_stats_skcipher_decrypt);
1290 #endif
1291
/* Module init: register the crypto /proc interface. Cannot fail. */
static int __init crypto_algapi_init(void)
{
	crypto_init_proc();
	return 0;
}
1297
/* Module exit: tear down the crypto /proc interface. */
static void __exit crypto_algapi_exit(void)
{
	crypto_exit_proc();
}
1302
1303 module_init(crypto_algapi_init);
1304 module_exit(crypto_algapi_exit);
1305
1306 MODULE_LICENSE("GPL");
1307 MODULE_DESCRIPTION("Cryptographic algorithms API");
1308