// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/algapi.h>
#include <linux/err.h>
#include <linux/errno.h>
#include <linux/fips.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/rtnetlink.h>
#include <linux/slab.h>
#include <linux/string.h>

#include "internal.h"

static LIST_HEAD(crypto_template_list);

static inline void crypto_check_module_sig(struct module *mod)
{
	if (fips_enabled && mod && !module_sig_ok(mod))
		panic("Module %s signature verification failed in FIPS mode\n",
		      module_name(mod));
}

static int crypto_check_alg(struct crypto_alg *alg)
{
	crypto_check_module_sig(alg->cra_module);

	if (!alg->cra_name[0] || !alg->cra_driver_name[0])
		return -EINVAL;

	if (alg->cra_alignmask & (alg->cra_alignmask + 1))
		return -EINVAL;

	/* General maximums for all algs. */
	if (alg->cra_alignmask > MAX_ALGAPI_ALIGNMASK)
		return -EINVAL;

	if (alg->cra_blocksize > MAX_ALGAPI_BLOCKSIZE)
		return -EINVAL;

	/* Lower maximums for specific alg types. */
	if (!alg->cra_type && (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
			       CRYPTO_ALG_TYPE_CIPHER) {
		if (alg->cra_alignmask > MAX_CIPHER_ALIGNMASK)
			return -EINVAL;

		if (alg->cra_blocksize > MAX_CIPHER_BLOCKSIZE)
			return -EINVAL;
	}

	if (alg->cra_priority < 0)
		return -EINVAL;

	refcount_set(&alg->cra_refcnt, 1);

	return 0;
}

static void crypto_free_instance(struct crypto_instance *inst)
{
	inst->alg.cra_type->free(inst);
}

static void crypto_destroy_instance(struct crypto_alg *alg)
{
	struct crypto_instance *inst = (void *)alg;
	struct crypto_template *tmpl = inst->tmpl;

	crypto_free_instance(inst);
	crypto_tmpl_put(tmpl);
}

/*
 * This function adds a spawn to the list secondary_spawns which
 * will be used at the end of crypto_remove_spawns to unregister
 * instances, unless the spawn happens to be one that is depended
 * on by the new algorithm (nalg in crypto_remove_spawns).
 *
 * This function is also responsible for resurrecting any algorithms
 * in the dependency chain of nalg by unsetting n->dead.
 */
static struct list_head *crypto_more_spawns(struct crypto_alg *alg,
					    struct list_head *stack,
					    struct list_head *top,
					    struct list_head *secondary_spawns)
{
	struct crypto_spawn *spawn, *n;

	spawn = list_first_entry_or_null(stack, struct crypto_spawn, list);
	if (!spawn)
		return NULL;

	n = list_prev_entry(spawn, list);
	list_move(&spawn->list, secondary_spawns);

	if (list_is_last(&n->list, stack))
		return top;

	n = list_next_entry(n, list);
	if (!spawn->dead)
		n->dead = false;

	return &n->inst->alg.cra_users;
}

static void crypto_remove_instance(struct crypto_instance *inst,
				   struct list_head *list)
{
	struct crypto_template *tmpl = inst->tmpl;

	if (crypto_is_dead(&inst->alg))
		return;

	inst->alg.cra_flags |= CRYPTO_ALG_DEAD;

	if (!tmpl || !crypto_tmpl_get(tmpl))
		return;

	list_move(&inst->alg.cra_list, list);
	hlist_del(&inst->list);
	inst->alg.cra_destroy = crypto_destroy_instance;

	BUG_ON(!list_empty(&inst->alg.cra_users));
}

/*
 * Given an algorithm alg, remove all algorithms that depend on it
 * through spawns.  If nalg is not null, then exempt any algorithms
 * that are depended on by nalg.  This is useful when nalg itself
 * depends on alg.
 */
void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list,
			  struct crypto_alg *nalg)
{
	u32 new_type = (nalg ?: alg)->cra_flags;
	struct crypto_spawn *spawn, *n;
	LIST_HEAD(secondary_spawns);
	struct list_head *spawns;
	LIST_HEAD(stack);
	LIST_HEAD(top);

	spawns = &alg->cra_users;
	list_for_each_entry_safe(spawn, n, spawns, list) {
		if ((spawn->alg->cra_flags ^ new_type) & spawn->mask)
			continue;

		list_move(&spawn->list, &top);
	}

	/*
	 * Perform a depth-first walk starting from alg through
	 * the cra_users tree.  The list stack records the path
	 * from alg to the current spawn.
	 */
	spawns = &top;
	do {
		while (!list_empty(spawns)) {
			struct crypto_instance *inst;

			spawn = list_first_entry(spawns, struct crypto_spawn,
						 list);
			inst = spawn->inst;

			list_move(&spawn->list, &stack);
			spawn->dead = !spawn->registered || &inst->alg != nalg;

			if (!spawn->registered)
				break;

			BUG_ON(&inst->alg == alg);

			if (&inst->alg == nalg)
				break;

			spawns = &inst->alg.cra_users;

			/*
			 * Even if spawn->registered is true, the instance
			 * itself may still be unregistered: an instance's
			 * spawns are set up prior to the instance being
			 * registered, and registration may also fail.  An
			 * unregistered instance will have NULL
			 * ->cra_users.next, since ->cra_users isn't properly
			 * initialized until registration.  But an
			 * unregistered instance cannot have any users, so
			 * treat it the same as ->cra_users being empty.
			 */
			if (spawns->next == NULL)
				break;
		}
	} while ((spawns = crypto_more_spawns(alg, &stack, &top,
					      &secondary_spawns)));

	/*
	 * Remove all instances that are marked as dead.  Also
	 * complete the resurrection of the others by moving them
	 * back to the cra_users list.
	 */
	list_for_each_entry_safe(spawn, n, &secondary_spawns, list) {
		if (!spawn->dead)
			list_move(&spawn->list, &spawn->alg->cra_users);
		else if (spawn->registered)
			crypto_remove_instance(spawn->inst, list);
	}
}
EXPORT_SYMBOL_GPL(crypto_remove_spawns);

static struct crypto_larval *__crypto_register_alg(struct crypto_alg *alg)
{
	struct crypto_alg *q;
	struct crypto_larval *larval;
	int ret = -EAGAIN;

	if (crypto_is_dead(alg))
		goto err;

	INIT_LIST_HEAD(&alg->cra_users);

	/* No cheating! */
	alg->cra_flags &= ~CRYPTO_ALG_TESTED;

	ret = -EEXIST;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			goto err;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			if (!strcmp(alg->cra_driver_name, q->cra_driver_name))
				goto err;
			continue;
		}

		if (!strcmp(q->cra_driver_name, alg->cra_name) ||
		    !strcmp(q->cra_name, alg->cra_driver_name))
			goto err;
	}

	larval = crypto_larval_alloc(alg->cra_name,
				     alg->cra_flags | CRYPTO_ALG_TESTED, 0);
	if (IS_ERR(larval))
		goto out;

	ret = -ENOENT;
	larval->adult = crypto_mod_get(alg);
	if (!larval->adult)
		goto free_larval;

	refcount_set(&larval->alg.cra_refcnt, 1);
	memcpy(larval->alg.cra_driver_name, alg->cra_driver_name,
	       CRYPTO_MAX_ALG_NAME);
	larval->alg.cra_priority = alg->cra_priority;

	list_add(&alg->cra_list, &crypto_alg_list);
	list_add(&larval->alg.cra_list, &crypto_alg_list);

	crypto_stats_init(alg);

out:
	return larval;

free_larval:
	kfree(larval);
err:
	larval = ERR_PTR(ret);
	goto out;
}

void crypto_alg_tested(const char *name, int err)
{
	struct crypto_larval *test;
	struct crypto_alg *alg;
	struct crypto_alg *q;
	LIST_HEAD(list);
	bool best;

	down_write(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (crypto_is_moribund(q) || !crypto_is_larval(q))
			continue;

		test = (struct crypto_larval *)q;

		if (!strcmp(q->cra_driver_name, name))
			goto found;
	}

	pr_err("alg: Unexpected test result for %s: %d\n", name, err);
	goto unlock;

found:
	q->cra_flags |= CRYPTO_ALG_DEAD;
	alg = test->adult;
	if (err || list_empty(&alg->cra_list))
		goto complete;

	alg->cra_flags |= CRYPTO_ALG_TESTED;

	/* Only satisfy larval waiters if we are the best. */
	best = true;
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (crypto_is_moribund(q) || !crypto_is_larval(q))
			continue;

		if (strcmp(alg->cra_name, q->cra_name))
			continue;

		if (q->cra_priority > alg->cra_priority) {
			best = false;
			break;
		}
	}

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			continue;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			struct crypto_larval *larval = (void *)q;

			/*
			 * Check to see if either our generic name or
			 * specific name can satisfy the name requested
			 * by the larval entry q.
			 */
			if (strcmp(alg->cra_name, q->cra_name) &&
			    strcmp(alg->cra_driver_name, q->cra_name))
				continue;

			if (larval->adult)
				continue;
			if ((q->cra_flags ^ alg->cra_flags) & larval->mask)
				continue;

			if (best && crypto_mod_get(alg))
				larval->adult = alg;
			else
				larval->adult = ERR_PTR(-EAGAIN);

			continue;
		}

		if (strcmp(alg->cra_name, q->cra_name))
			continue;

		if (strcmp(alg->cra_driver_name, q->cra_driver_name) &&
		    q->cra_priority > alg->cra_priority)
			continue;

		crypto_remove_spawns(q, &list, alg);
	}

complete:
	complete_all(&test->completion);

unlock:
	up_write(&crypto_alg_sem);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_alg_tested);

void crypto_remove_final(struct list_head *list)
{
	struct crypto_alg *alg;
	struct crypto_alg *n;

	list_for_each_entry_safe(alg, n, list, cra_list) {
		list_del_init(&alg->cra_list);
		crypto_alg_put(alg);
	}
}
EXPORT_SYMBOL_GPL(crypto_remove_final);

static void crypto_wait_for_test(struct crypto_larval *larval)
{
	int err;

	err = crypto_probing_notify(CRYPTO_MSG_ALG_REGISTER, larval->adult);
	if (err != NOTIFY_STOP) {
		if (WARN_ON(err != NOTIFY_DONE))
			goto out;
		crypto_alg_tested(larval->alg.cra_driver_name, 0);
	}

	err = wait_for_completion_killable(&larval->completion);
	WARN_ON(err);
	if (!err)
		crypto_notify(CRYPTO_MSG_ALG_LOADED, larval);

out:
	crypto_larval_kill(&larval->alg);
}

int crypto_register_alg(struct crypto_alg *alg)
{
	struct crypto_larval *larval;
	int err;

	alg->cra_flags &= ~CRYPTO_ALG_DEAD;
	err = crypto_check_alg(alg);
	if (err)
		return err;

	down_write(&crypto_alg_sem);
	larval = __crypto_register_alg(alg);
	up_write(&crypto_alg_sem);

	if (IS_ERR(larval))
		return PTR_ERR(larval);

	crypto_wait_for_test(larval);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_register_alg);

static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list)
{
	if (unlikely(list_empty(&alg->cra_list)))
		return -ENOENT;

	alg->cra_flags |= CRYPTO_ALG_DEAD;

	list_del_init(&alg->cra_list);
	crypto_remove_spawns(alg, list, NULL);

	return 0;
}

void crypto_unregister_alg(struct crypto_alg *alg)
{
	int ret;
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);
	ret = crypto_remove_alg(alg, &list);
	up_write(&crypto_alg_sem);

	if (WARN(ret, "Algorithm %s is not registered", alg->cra_driver_name))
		return;

	if (WARN_ON(refcount_read(&alg->cra_refcnt) != 1))
		return;

	if (alg->cra_destroy)
		alg->cra_destroy(alg);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_unregister_alg);

int crypto_register_algs(struct crypto_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_alg(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_alg(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_algs);
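
/*
 * Illustrative sketch (not part of this file; the names below are
 * hypothetical): a driver typically registers a static array of algorithms
 * from its module init function and unregisters them on exit:
 *
 *	static struct crypto_alg my_algs[2] = { ... };
 *
 *	static int __init my_module_init(void)
 *	{
 *		return crypto_register_algs(my_algs, ARRAY_SIZE(my_algs));
 *	}
 *
 *	static void __exit my_module_exit(void)
 *	{
 *		crypto_unregister_algs(my_algs, ARRAY_SIZE(my_algs));
 *	}
 *
 * On failure, crypto_register_algs() has already unregistered the entries
 * that did register, so the caller only needs to propagate the error.
 */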

void crypto_unregister_algs(struct crypto_alg *algs, int count)
{
	int i;

	for (i = 0; i < count; i++)
		crypto_unregister_alg(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_algs);

int crypto_register_template(struct crypto_template *tmpl)
{
	struct crypto_template *q;
	int err = -EEXIST;

	down_write(&crypto_alg_sem);

	crypto_check_module_sig(tmpl->module);

	list_for_each_entry(q, &crypto_template_list, list) {
		if (q == tmpl)
			goto out;
	}

	list_add(&tmpl->list, &crypto_template_list);
	err = 0;
out:
	up_write(&crypto_alg_sem);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_template);

int crypto_register_templates(struct crypto_template *tmpls, int count)
{
	int i, err;

	for (i = 0; i < count; i++) {
		err = crypto_register_template(&tmpls[i]);
		if (err)
			goto out;
	}
	return 0;

out:
	for (--i; i >= 0; --i)
		crypto_unregister_template(&tmpls[i]);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_templates);

void crypto_unregister_template(struct crypto_template *tmpl)
{
	struct crypto_instance *inst;
	struct hlist_node *n;
	struct hlist_head *list;
	LIST_HEAD(users);

	down_write(&crypto_alg_sem);

	BUG_ON(list_empty(&tmpl->list));
	list_del_init(&tmpl->list);

	list = &tmpl->instances;
	hlist_for_each_entry(inst, list, list) {
		int err = crypto_remove_alg(&inst->alg, &users);

		BUG_ON(err);
	}

	up_write(&crypto_alg_sem);

	hlist_for_each_entry_safe(inst, n, list, list) {
		BUG_ON(refcount_read(&inst->alg.cra_refcnt) != 1);
		crypto_free_instance(inst);
	}
	crypto_remove_final(&users);
}
EXPORT_SYMBOL_GPL(crypto_unregister_template);

void crypto_unregister_templates(struct crypto_template *tmpls, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_template(&tmpls[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_templates);

static struct crypto_template *__crypto_lookup_template(const char *name)
{
	struct crypto_template *q, *tmpl = NULL;

	down_read(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_template_list, list) {
		if (strcmp(q->name, name))
			continue;
		if (unlikely(!crypto_tmpl_get(q)))
			continue;

		tmpl = q;
		break;
	}
	up_read(&crypto_alg_sem);

	return tmpl;
}

struct crypto_template *crypto_lookup_template(const char *name)
{
	return try_then_request_module(__crypto_lookup_template(name),
				       "crypto-%s", name);
}
EXPORT_SYMBOL_GPL(crypto_lookup_template);

int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst)
{
	struct crypto_larval *larval;
	struct crypto_spawn *spawn;
	int err;

	err = crypto_check_alg(&inst->alg);
	if (err)
		return err;

	inst->alg.cra_module = tmpl->module;
	inst->alg.cra_flags |= CRYPTO_ALG_INSTANCE;

	down_write(&crypto_alg_sem);

	larval = ERR_PTR(-EAGAIN);
	for (spawn = inst->spawns; spawn;) {
		struct crypto_spawn *next;

		if (spawn->dead)
			goto unlock;

		next = spawn->next;
		spawn->inst = inst;
		spawn->registered = true;

		crypto_mod_put(spawn->alg);

		spawn = next;
	}

	larval = __crypto_register_alg(&inst->alg);
	if (IS_ERR(larval))
		goto unlock;

	hlist_add_head(&inst->list, &tmpl->instances);
	inst->tmpl = tmpl;

unlock:
	up_write(&crypto_alg_sem);

	err = PTR_ERR(larval);
	if (IS_ERR(larval))
		goto err;

	crypto_wait_for_test(larval);
	err = 0;

err:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_instance);

void crypto_unregister_instance(struct crypto_instance *inst)
{
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);

	crypto_remove_spawns(&inst->alg, &list, NULL);
	crypto_remove_instance(inst, &list);

	up_write(&crypto_alg_sem);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_unregister_instance);

int crypto_grab_spawn(struct crypto_spawn *spawn, struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	int err = -EAGAIN;

	if (WARN_ON_ONCE(inst == NULL))
		return -EINVAL;

	/* Allow the result of crypto_attr_alg_name() to be passed directly */
	if (IS_ERR(name))
		return PTR_ERR(name);

	alg = crypto_find_alg(name, spawn->frontend, type, mask);
	if (IS_ERR(alg))
		return PTR_ERR(alg);

	down_write(&crypto_alg_sem);
	if (!crypto_is_moribund(alg)) {
		list_add(&spawn->list, &alg->cra_users);
		spawn->alg = alg;
		spawn->mask = mask;
		spawn->next = inst->spawns;
		inst->spawns = spawn;
		inst->alg.cra_flags |=
			(alg->cra_flags & CRYPTO_ALG_INHERITED_FLAGS);
		err = 0;
	}
	up_write(&crypto_alg_sem);
	if (err)
		crypto_mod_put(alg);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_grab_spawn);
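
/*
 * Note (informational sketch, not stated in this file): template
 * implementations normally do not call crypto_grab_spawn() directly but use
 * the type-specific wrappers such as crypto_grab_skcipher(),
 * crypto_grab_aead() or crypto_grab_ahash(), which embed a struct
 * crypto_spawn and supply the matching frontend.  The spawn links the new
 * instance into the underlying algorithm's cra_users list, which is what
 * allows crypto_remove_spawns() to tear the instance down if that algorithm
 * later goes away.
 */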

void crypto_drop_spawn(struct crypto_spawn *spawn)
{
	if (!spawn->alg) /* not yet initialized? */
		return;

	down_write(&crypto_alg_sem);
	if (!spawn->dead)
		list_del(&spawn->list);
	up_write(&crypto_alg_sem);

	if (!spawn->registered)
		crypto_mod_put(spawn->alg);
}
EXPORT_SYMBOL_GPL(crypto_drop_spawn);

static struct crypto_alg *crypto_spawn_alg(struct crypto_spawn *spawn)
{
	struct crypto_alg *alg = ERR_PTR(-EAGAIN);
	struct crypto_alg *target;
	bool shoot = false;

	down_read(&crypto_alg_sem);
	if (!spawn->dead) {
		alg = spawn->alg;
		if (!crypto_mod_get(alg)) {
			target = crypto_alg_get(alg);
			shoot = true;
			alg = ERR_PTR(-EAGAIN);
		}
	}
	up_read(&crypto_alg_sem);

	if (shoot) {
		crypto_shoot_alg(target);
		crypto_alg_put(target);
	}

	return alg;
}

struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_tfm *tfm;

	alg = crypto_spawn_alg(spawn);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	tfm = ERR_PTR(-EINVAL);
	if (unlikely((alg->cra_flags ^ type) & mask))
		goto out_put_alg;

	tfm = __crypto_alloc_tfm(alg, type, mask);
	if (IS_ERR(tfm))
		goto out_put_alg;

	return tfm;

out_put_alg:
	crypto_mod_put(alg);
	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm);

void *crypto_spawn_tfm2(struct crypto_spawn *spawn)
{
	struct crypto_alg *alg;
	struct crypto_tfm *tfm;

	alg = crypto_spawn_alg(spawn);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	tfm = crypto_create_tfm(alg, spawn->frontend);
	if (IS_ERR(tfm))
		goto out_put_alg;

	return tfm;

out_put_alg:
	crypto_mod_put(alg);
	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm2);

int crypto_register_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_register(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_register_notifier);

int crypto_unregister_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_unregister(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_unregister_notifier);

struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb)
{
	struct rtattr *rta = tb[0];
	struct crypto_attr_type *algt;

	if (!rta)
		return ERR_PTR(-ENOENT);
	if (RTA_PAYLOAD(rta) < sizeof(*algt))
		return ERR_PTR(-EINVAL);
	if (rta->rta_type != CRYPTOA_TYPE)
		return ERR_PTR(-EINVAL);

	algt = RTA_DATA(rta);

	return algt;
}
EXPORT_SYMBOL_GPL(crypto_get_attr_type);

/**
 * crypto_check_attr_type() - check algorithm type and compute inherited mask
 * @tb: the template parameters
 * @type: the algorithm type the template would be instantiated as
 * @mask_ret: (output) the mask that should be passed to crypto_grab_*()
 *	      to restrict the flags of any inner algorithms
 *
 * Validate that the algorithm type the user requested is compatible with the
 * one the template would actually be instantiated as.  E.g., if the user is
 * doing crypto_alloc_shash("cbc(aes)", ...), this would return an error because
 * the "cbc" template creates an "skcipher" algorithm, not an "shash" algorithm.
 *
 * Also compute the mask to use to restrict the flags of any inner algorithms.
 *
 * Return: 0 on success; -errno on failure
 */
int crypto_check_attr_type(struct rtattr **tb, u32 type, u32 *mask_ret)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ type) & algt->mask)
		return -EINVAL;

	*mask_ret = crypto_algt_inherited_mask(algt);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_check_attr_type);
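
/*
 * Illustrative sketch (hypothetical template, not part of this file): a
 * template's ->create() callback typically validates the requested type and
 * obtains the inherited-flags mask before grabbing its inner algorithm:
 *
 *	static int my_tmpl_create(struct crypto_template *tmpl,
 *				  struct rtattr **tb)
 *	{
 *		u32 mask;
 *		int err;
 *
 *		err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER,
 *					     &mask);
 *		if (err)
 *			return err;
 *		// ... allocate the instance, then crypto_grab_*() using mask
 *	}
 */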

const char *crypto_attr_alg_name(struct rtattr *rta)
{
	struct crypto_attr_alg *alga;

	if (!rta)
		return ERR_PTR(-ENOENT);
	if (RTA_PAYLOAD(rta) < sizeof(*alga))
		return ERR_PTR(-EINVAL);
	if (rta->rta_type != CRYPTOA_ALG)
		return ERR_PTR(-EINVAL);

	alga = RTA_DATA(rta);
	alga->name[CRYPTO_MAX_ALG_NAME - 1] = 0;

	return alga->name;
}
EXPORT_SYMBOL_GPL(crypto_attr_alg_name);

int crypto_attr_u32(struct rtattr *rta, u32 *num)
{
	struct crypto_attr_u32 *nu32;

	if (!rta)
		return -ENOENT;
	if (RTA_PAYLOAD(rta) < sizeof(*nu32))
		return -EINVAL;
	if (rta->rta_type != CRYPTOA_U32)
		return -EINVAL;

	nu32 = RTA_DATA(rta);
	*num = nu32->num;

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_attr_u32);

int crypto_inst_setname(struct crypto_instance *inst, const char *name,
			struct crypto_alg *alg)
{
	if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name,
		     alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s(%s)",
		     name, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_inst_setname);
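
/*
 * Example of the resulting names (values are illustrative): for a template
 * named "hmac" wrapping an algorithm with cra_name "sha256" and
 * cra_driver_name "sha256-generic", this yields
 *
 *	inst->alg.cra_name        = "hmac(sha256)"
 *	inst->alg.cra_driver_name = "hmac(sha256-generic)"
 *
 * Both strings must fit within CRYPTO_MAX_ALG_NAME, otherwise
 * -ENAMETOOLONG is returned.
 */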

void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen)
{
	INIT_LIST_HEAD(&queue->list);
	queue->backlog = &queue->list;
	queue->qlen = 0;
	queue->max_qlen = max_qlen;
}
EXPORT_SYMBOL_GPL(crypto_init_queue);

int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request)
{
	int err = -EINPROGRESS;

	if (unlikely(queue->qlen >= queue->max_qlen)) {
		if (!(request->flags & CRYPTO_TFM_REQ_MAY_BACKLOG)) {
			err = -ENOSPC;
			goto out;
		}
		err = -EBUSY;
		if (queue->backlog == &queue->list)
			queue->backlog = &request->list;
	}

	queue->qlen++;
	list_add_tail(&request->list, &queue->list);

out:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_enqueue_request);

void crypto_enqueue_request_head(struct crypto_queue *queue,
				 struct crypto_async_request *request)
{
	queue->qlen++;
	list_add(&request->list, &queue->list);
}
EXPORT_SYMBOL_GPL(crypto_enqueue_request_head);

struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue)
{
	struct list_head *request;

	if (unlikely(!queue->qlen))
		return NULL;

	queue->qlen--;

	if (queue->backlog != &queue->list)
		queue->backlog = queue->backlog->next;

	request = queue->list.next;
	list_del(request);

	return list_entry(request, struct crypto_async_request, list);
}
EXPORT_SYMBOL_GPL(crypto_dequeue_request);
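
/*
 * Summary of the queue semantics above (informational sketch; the driver
 * behaviour described here is an assumption about typical usage, not defined
 * in this file): crypto_enqueue_request() returns -EINPROGRESS when the
 * request was queued normally, -EBUSY when the queue was already full but the
 * request carried CRYPTO_TFM_REQ_MAY_BACKLOG and was queued as backlog, and
 * -ENOSPC when the request was dropped.  A driver usually pulls work with
 * crypto_dequeue_request() and, once a backlogged request reaches the front,
 * signals its owner by completing it with -EINPROGRESS before actually
 * processing it.
 */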

static inline void crypto_inc_byte(u8 *a, unsigned int size)
{
	u8 *b = (a + size);
	u8 c;

	for (; size; size--) {
		c = *--b + 1;
		*b = c;
		if (c)
			break;
	}
}

void crypto_inc(u8 *a, unsigned int size)
{
	__be32 *b = (__be32 *)(a + size);
	u32 c;

	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) ||
	    IS_ALIGNED((unsigned long)b, __alignof__(*b)))
		for (; size >= 4; size -= 4) {
			c = be32_to_cpu(*--b) + 1;
			*b = cpu_to_be32(c);
			if (likely(c))
				return;
		}

	crypto_inc_byte(a, size);
}
EXPORT_SYMBOL_GPL(crypto_inc);
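
/*
 * Behavioural example (informational, not from this file): crypto_inc()
 * treats the buffer as a single big-endian counter of "size" bytes and adds
 * one with carry propagation, e.g. for a 4-byte counter:
 *
 *	u8 ctr[4] = { 0x00, 0x00, 0x00, 0xff };
 *	crypto_inc(ctr, sizeof(ctr));	// ctr is now 00 00 01 00
 *
 * This is how CTR-style modes step their IV/counter block between cipher
 * blocks.
 */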

void __crypto_xor(u8 *dst, const u8 *src1, const u8 *src2, unsigned int len)
{
	int relalign = 0;

	if (!IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)) {
		int size = sizeof(unsigned long);
		int d = (((unsigned long)dst ^ (unsigned long)src1) |
			 ((unsigned long)dst ^ (unsigned long)src2)) &
			(size - 1);

		relalign = d ? 1 << __ffs(d) : size;

		/*
		 * If we care about alignment, process as many bytes as
		 * needed to advance dst and src to values whose alignments
		 * equal their relative alignment. This will allow us to
		 * process the remainder of the input using optimal strides.
		 */
		while (((unsigned long)dst & (relalign - 1)) && len > 0) {
			*dst++ = *src1++ ^ *src2++;
			len--;
		}
	}

	while (IS_ENABLED(CONFIG_64BIT) && len >= 8 && !(relalign & 7)) {
		*(u64 *)dst = *(u64 *)src1 ^ *(u64 *)src2;
		dst += 8;
		src1 += 8;
		src2 += 8;
		len -= 8;
	}

	while (len >= 4 && !(relalign & 3)) {
		*(u32 *)dst = *(u32 *)src1 ^ *(u32 *)src2;
		dst += 4;
		src1 += 4;
		src2 += 4;
		len -= 4;
	}

	while (len >= 2 && !(relalign & 1)) {
		*(u16 *)dst = *(u16 *)src1 ^ *(u16 *)src2;
		dst += 2;
		src1 += 2;
		src2 += 2;
		len -= 2;
	}

	while (len--)
		*dst++ = *src1++ ^ *src2++;
}
EXPORT_SYMBOL_GPL(__crypto_xor);
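
/*
 * Note (assumption about the callers, not stated in this file): most users
 * reach this helper through the crypto_xor() and crypto_xor_cpy() inlines in
 * <crypto/algapi.h>, which handle constant, word-multiple sizes inline and
 * fall back to __crypto_xor() for the general case, e.g.
 *
 *	crypto_xor(buf, keystream, bsize);	// buf ^= keystream
 */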

unsigned int crypto_alg_extsize(struct crypto_alg *alg)
{
	return alg->cra_ctxsize +
	       (alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1));
}
EXPORT_SYMBOL_GPL(crypto_alg_extsize);

int crypto_type_has_alg(const char *name, const struct crypto_type *frontend,
			u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_find_alg(name, frontend, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_type_has_alg);

#ifdef CONFIG_CRYPTO_STATS
void crypto_stats_init(struct crypto_alg *alg)
{
	memset(&alg->stats, 0, sizeof(alg->stats));
}
EXPORT_SYMBOL_GPL(crypto_stats_init);

void crypto_stats_get(struct crypto_alg *alg)
{
	crypto_alg_get(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_get);

void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.aead.err_cnt);
	} else {
		atomic64_inc(&alg->stats.aead.encrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.aead.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_aead_encrypt);

void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.aead.err_cnt);
	} else {
		atomic64_inc(&alg->stats.aead.decrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.aead.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_aead_decrypt);

void crypto_stats_akcipher_encrypt(unsigned int src_len, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.akcipher.encrypt_cnt);
		atomic64_add(src_len, &alg->stats.akcipher.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_encrypt);

void crypto_stats_akcipher_decrypt(unsigned int src_len, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.akcipher.decrypt_cnt);
		atomic64_add(src_len, &alg->stats.akcipher.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_decrypt);

void crypto_stats_akcipher_sign(int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	else
		atomic64_inc(&alg->stats.akcipher.sign_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_sign);

void crypto_stats_akcipher_verify(int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.akcipher.err_cnt);
	else
		atomic64_inc(&alg->stats.akcipher.verify_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_akcipher_verify);

void crypto_stats_compress(unsigned int slen, int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.compress.err_cnt);
	} else {
		atomic64_inc(&alg->stats.compress.compress_cnt);
		atomic64_add(slen, &alg->stats.compress.compress_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_compress);

void crypto_stats_decompress(unsigned int slen, int ret, struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.compress.err_cnt);
	} else {
		atomic64_inc(&alg->stats.compress.decompress_cnt);
		atomic64_add(slen, &alg->stats.compress.decompress_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_decompress);

void crypto_stats_ahash_update(unsigned int nbytes, int ret,
			       struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.hash.err_cnt);
	else
		atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_ahash_update);

void crypto_stats_ahash_final(unsigned int nbytes, int ret,
			      struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.hash.err_cnt);
	} else {
		atomic64_inc(&alg->stats.hash.hash_cnt);
		atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_ahash_final);

void crypto_stats_kpp_set_secret(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.setsecret_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_set_secret);

void crypto_stats_kpp_generate_public_key(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.generate_public_key_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_generate_public_key);

void crypto_stats_kpp_compute_shared_secret(struct crypto_alg *alg, int ret)
{
	if (ret)
		atomic64_inc(&alg->stats.kpp.err_cnt);
	else
		atomic64_inc(&alg->stats.kpp.compute_shared_secret_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_kpp_compute_shared_secret);

void crypto_stats_rng_seed(struct crypto_alg *alg, int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
		atomic64_inc(&alg->stats.rng.err_cnt);
	else
		atomic64_inc(&alg->stats.rng.seed_cnt);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_rng_seed);

void crypto_stats_rng_generate(struct crypto_alg *alg, unsigned int dlen,
			       int ret)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.rng.err_cnt);
	} else {
		atomic64_inc(&alg->stats.rng.generate_cnt);
		atomic64_add(dlen, &alg->stats.rng.generate_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_rng_generate);

void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.cipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.cipher.encrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.cipher.encrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_skcipher_encrypt);

void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret,
				   struct crypto_alg *alg)
{
	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
		atomic64_inc(&alg->stats.cipher.err_cnt);
	} else {
		atomic64_inc(&alg->stats.cipher.decrypt_cnt);
		atomic64_add(cryptlen, &alg->stats.cipher.decrypt_tlen);
	}
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_stats_skcipher_decrypt);
#endif

static int __init crypto_algapi_init(void)
{
	crypto_init_proc();
	return 0;
}

static void __exit crypto_algapi_exit(void)
{
	crypto_exit_proc();
}

module_init(crypto_algapi_init);
module_exit(crypto_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Cryptographic algorithms API");
MODULE_SOFTDEP("pre: cryptomgr");