// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/jump_label.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched/signal.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/completion.h>
#include "internal.h"

LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);

#ifndef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS
DEFINE_STATIC_KEY_FALSE(__crypto_boot_test_finished);
EXPORT_SYMBOL_GPL(__crypto_boot_test_finished);
#endif

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg);
struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
	return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);

void crypto_mod_put(struct crypto_alg *alg)
{
	struct module *module = alg->cra_module;

	crypto_alg_put(alg);
	module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);

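/*
 * Find the best matching registered algorithm.  An exact match on
 * cra_driver_name wins outright; otherwise the highest-priority
 * algorithm whose cra_name matches is chosen.  Must be called with
 * crypto_alg_sem held.
 */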
static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
					      u32 mask)
{
	struct crypto_alg *q, *alg = NULL;
	int best = -2;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		int exact, fuzzy;

		if (crypto_is_moribund(q))
			continue;

		if ((q->cra_flags ^ type) & mask)
			continue;

		if (crypto_is_larval(q) &&
		    !crypto_is_test_larval((struct crypto_larval *)q) &&
		    ((struct crypto_larval *)q)->mask != mask)
			continue;

		exact = !strcmp(q->cra_driver_name, name);
		fuzzy = !strcmp(q->cra_name, name);
		if (!exact && !(fuzzy && q->cra_priority > best))
			continue;

		if (unlikely(!crypto_mod_get(q)))
			continue;

		best = q->cra_priority;
		if (alg)
			crypto_mod_put(alg);
		alg = q;

		if (exact)
			break;
	}

	return alg;
}

static void crypto_larval_destroy(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	BUG_ON(!crypto_is_larval(alg));
	if (!IS_ERR_OR_NULL(larval->adult))
		crypto_mod_put(larval->adult);
	kfree(larval);
}

struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
{
	struct crypto_larval *larval;

	larval = kzalloc(sizeof(*larval), GFP_KERNEL);
	if (!larval)
		return ERR_PTR(-ENOMEM);

	larval->mask = mask;
	larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
	larval->alg.cra_priority = -1;
	larval->alg.cra_destroy = crypto_larval_destroy;

	strlcpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
	init_completion(&larval->completion);

	return larval;
}
EXPORT_SYMBOL_GPL(crypto_larval_alloc);

static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
					    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_larval *larval;

	larval = crypto_larval_alloc(name, type, mask);
	if (IS_ERR(larval))
		return ERR_CAST(larval);

	refcount_set(&larval->alg.cra_refcnt, 2);

	down_write(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	if (!alg) {
		alg = &larval->alg;
		list_add(&alg->cra_list, &crypto_alg_list);
	}
	up_write(&crypto_alg_sem);

	if (alg != &larval->alg) {
		kfree(larval);
		if (crypto_is_larval(alg))
			alg = crypto_larval_wait(alg);
	}

	return alg;
}

void crypto_larval_kill(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	down_write(&crypto_alg_sem);
	list_del(&alg->cra_list);
	up_write(&crypto_alg_sem);
	complete_all(&larval->completion);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_larval_kill);

void crypto_wait_for_test(struct crypto_larval *larval)
{
	int err;

	err = crypto_probing_notify(CRYPTO_MSG_ALG_REGISTER, larval->adult);
	if (WARN_ON_ONCE(err != NOTIFY_STOP))
		goto out;

	err = wait_for_completion_killable(&larval->completion);
	WARN_ON(err);
out:
	crypto_larval_kill(&larval->alg);
}
EXPORT_SYMBOL_GPL(crypto_wait_for_test);

static void crypto_start_test(struct crypto_larval *larval)
{
	if (!crypto_is_test_larval(larval))
		return;

	if (larval->test_started)
		return;

	down_write(&crypto_alg_sem);
	if (larval->test_started) {
		up_write(&crypto_alg_sem);
		return;
	}

	larval->test_started = true;
	up_write(&crypto_alg_sem);

	crypto_wait_for_test(larval);
}

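/*
 * Wait for a larval to mature into a full algorithm, giving up after
 * 60 seconds.  On success the adult algorithm is returned with a
 * fresh reference; the caller's larval reference is always dropped.
 */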
static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;
	long timeout;

	if (!crypto_boot_test_finished())
		crypto_start_test(larval);

	timeout = wait_for_completion_killable_timeout(
		&larval->completion, 60 * HZ);

	alg = larval->adult;
	if (timeout < 0)
		alg = ERR_PTR(-EINTR);
	else if (!timeout)
		alg = ERR_PTR(-ETIMEDOUT);
	else if (!alg)
		alg = ERR_PTR(-ENOENT);
	else if (IS_ERR(alg))
		;
	else if (crypto_is_test_larval(larval) &&
		 !(alg->cra_flags & CRYPTO_ALG_TESTED))
		alg = ERR_PTR(-EAGAIN);
	else if (!crypto_mod_get(alg))
		alg = ERR_PTR(-EAGAIN);
	crypto_mod_put(&larval->alg);

	return alg;
}

static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
					    u32 mask)
{
	struct crypto_alg *alg;
	u32 test = 0;

	if (!((type | mask) & CRYPTO_ALG_TESTED))
		test |= CRYPTO_ALG_TESTED;

	down_read(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type | test, mask | test);
	if (!alg && test) {
		alg = __crypto_alg_lookup(name, type, mask);
		if (alg && !crypto_is_larval(alg)) {
			/* Test failed */
			crypto_mod_put(alg);
			alg = ERR_PTR(-ELIBBAD);
		}
	}
	up_read(&crypto_alg_sem);

	return alg;
}

static struct crypto_alg *crypto_larval_lookup(const char *name, u32 type,
					       u32 mask)
{
	struct crypto_alg *alg;

	if (!name)
		return ERR_PTR(-ENOENT);

	type &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
	mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);

	alg = crypto_alg_lookup(name, type, mask);
	if (!alg && !(mask & CRYPTO_NOLOAD)) {
		request_module("crypto-%s", name);

		if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask &
		      CRYPTO_ALG_NEED_FALLBACK))
			request_module("crypto-%s-all", name);

		alg = crypto_alg_lookup(name, type, mask);
	}

	if (!IS_ERR_OR_NULL(alg) && crypto_is_larval(alg))
		alg = crypto_larval_wait(alg);
	else if (!alg)
		alg = crypto_larval_add(name, type, mask);

	return alg;
}

int crypto_probing_notify(unsigned long val, void *v)
{
	int ok;

	ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	if (ok == NOTIFY_DONE) {
		request_module("cryptomgr");
		ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	}

	return ok;
}
EXPORT_SYMBOL_GPL(crypto_probing_notify);

struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_alg *larval;
	int ok;

	/*
	 * If the internal flag is set for a cipher, require a caller to
	 * invoke the cipher with the internal flag to use that cipher.
	 * Also, if a caller wants to allocate a cipher that may or may
	 * not be an internal cipher, use type | CRYPTO_ALG_INTERNAL and
	 * !(mask & CRYPTO_ALG_INTERNAL).
	 */
	if (!((type | mask) & CRYPTO_ALG_INTERNAL))
		mask |= CRYPTO_ALG_INTERNAL;

	larval = crypto_larval_lookup(name, type, mask);
	if (IS_ERR(larval) || !crypto_is_larval(larval))
		return larval;

	ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);

	if (ok == NOTIFY_STOP)
		alg = crypto_larval_wait(larval);
	else {
		crypto_mod_put(larval);
		alg = ERR_PTR(-ENOENT);
	}
	crypto_larval_kill(larval);
	return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);
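
/*
 * Illustrative sketch (not part of this file): per the comment above,
 * a caller willing to accept either an internal or a non-internal
 * implementation passes type | CRYPTO_ALG_INTERNAL with the mask bit
 * clear, while a plain lookup gets CRYPTO_ALG_INTERNAL added to the
 * mask so internal-only implementations are filtered out.  "aes" is
 * just an example name; the reference obtained must be dropped with
 * crypto_mod_put().
 *
 *	alg = crypto_alg_mod_lookup("aes", CRYPTO_ALG_INTERNAL, 0);
 *	alg = crypto_alg_mod_lookup("aes", 0, 0);
 */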

static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = tfm->__crt_alg->cra_type;

	if (type_obj)
		return type_obj->init(tfm, type, mask);
	return 0;
}

static void crypto_exit_ops(struct crypto_tfm *tfm)
{
	const struct crypto_type *type = tfm->__crt_alg->cra_type;

	if (type && tfm->exit)
		tfm->exit(tfm);
}

static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = alg->cra_type;
	unsigned int len;

	len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
	if (type_obj)
		return len + type_obj->ctxsize(alg, type, mask);

	switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
	default:
		BUG();

	case CRYPTO_ALG_TYPE_CIPHER:
		len += crypto_cipher_ctxsize(alg);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		len += crypto_compress_ctxsize(alg);
		break;
	}

	return len;
}

void crypto_shoot_alg(struct crypto_alg *alg)
{
	down_write(&crypto_alg_sem);
	alg->cra_flags |= CRYPTO_ALG_DYING;
	up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_shoot_alg);

struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
				      u32 mask)
{
	struct crypto_tfm *tfm = NULL;
	unsigned int tfm_size;
	int err = -ENOMEM;

	tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
	tfm = kzalloc(tfm_size, GFP_KERNEL);
	if (tfm == NULL)
		goto out_err;

	tfm->__crt_alg = alg;

	err = crypto_init_ops(tfm, type, mask);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(tfm);
out_err:
	tfm = ERR_PTR(err);
out:
	return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);

/*
 * crypto_alloc_base - Locate algorithm and allocate transform
 * @alg_name: Name of algorithm
 * @type: Type of algorithm
 * @mask: Mask for type comparison
 *
 * This function should not be used by new algorithm types.
 * Please use crypto_alloc_tfm instead.
 *
 * crypto_alloc_base() will first attempt to locate an already loaded
 * algorithm. If that fails and the kernel supports dynamically loadable
 * modules, it will then attempt to load a module of the same name or
 * alias. If that fails it will send a query to any loaded crypto manager
 * to construct an algorithm on the fly. A refcount is grabbed on the
 * algorithm which is then associated with the new transform.
 *
 * The returned transform is of a non-determinate type. Most people
 * should use one of the more specific allocation functions such as
 * crypto_alloc_skcipher().
 *
 * In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
	struct crypto_tfm *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_alg_mod_lookup(alg_name, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = __crypto_alloc_tfm(alg, type, mask);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);
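
/*
 * Illustrative sketch (not part of this file): a legacy caller pairing
 * crypto_alloc_base() with crypto_free_tfm().  The name "cbc(aes)" is
 * just an example; errors follow the usual ERR_PTR convention.
 *
 *	struct crypto_tfm *tfm;
 *
 *	tfm = crypto_alloc_base("cbc(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	...
 *	crypto_free_tfm(tfm);
 */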

void *crypto_create_tfm_node(struct crypto_alg *alg,
			     const struct crypto_type *frontend,
			     int node)
{
	char *mem;
	struct crypto_tfm *tfm = NULL;
	unsigned int tfmsize;
	unsigned int total;
	int err = -ENOMEM;

	tfmsize = frontend->tfmsize;
	total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);

	mem = kzalloc_node(total, GFP_KERNEL, node);
	if (mem == NULL)
		goto out_err;

	tfm = (struct crypto_tfm *)(mem + tfmsize);
	tfm->__crt_alg = alg;
	tfm->node = node;

	err = frontend->init_tfm(tfm);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(mem);
out_err:
	mem = ERR_PTR(err);
out:
	return mem;
}
EXPORT_SYMBOL_GPL(crypto_create_tfm_node);

struct crypto_alg *crypto_find_alg(const char *alg_name,
				   const struct crypto_type *frontend,
				   u32 type, u32 mask)
{
	if (frontend) {
		type &= frontend->maskclear;
		mask &= frontend->maskclear;
		type |= frontend->type;
		mask |= frontend->maskset;
	}

	return crypto_alg_mod_lookup(alg_name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_find_alg);

/*
 * crypto_alloc_tfm_node - Locate algorithm and allocate transform
 * @alg_name: Name of algorithm
 * @frontend: Frontend algorithm type
 * @type: Type of algorithm
 * @mask: Mask for type comparison
 * @node: NUMA node in which users desire to put requests, if node is
 *	  NUMA_NO_NODE, it means users have no special requirement.
 *
 * crypto_alloc_tfm() will first attempt to locate an already loaded
 * algorithm. If that fails and the kernel supports dynamically loadable
 * modules, it will then attempt to load a module of the same name or
 * alias. If that fails it will send a query to any loaded crypto manager
 * to construct an algorithm on the fly. A refcount is grabbed on the
 * algorithm which is then associated with the new transform.
 *
 * The returned transform is of a non-determinate type. Most people
 * should use one of the more specific allocation functions such as
 * crypto_alloc_skcipher().
 *
 * In case of error the return value is an error pointer.
 */
void *crypto_alloc_tfm_node(const char *alg_name,
			    const struct crypto_type *frontend, u32 type,
			    u32 mask, int node)
{
	void *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_find_alg(alg_name, frontend, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = crypto_create_tfm_node(alg, frontend, node);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_tfm_node);
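
/*
 * Illustrative sketch (not part of this file): most users reach this
 * function through a type-specific wrapper rather than calling it
 * directly.  For example, assuming the shash frontend:
 *
 *	struct crypto_shash *tfm;
 *
 *	tfm = crypto_alloc_shash("sha256", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	...
 *	crypto_free_shash(tfm);
 */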

/*
 * crypto_destroy_tfm - Free crypto transform
 * @mem: Start of tfm slab
 * @tfm: Transform to free
 *
 * This function frees up the transform and any associated resources,
 * then drops the refcount on the associated algorithm.
 */
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
{
	struct crypto_alg *alg;

	if (IS_ERR_OR_NULL(mem))
		return;

	alg = tfm->__crt_alg;

	if (!tfm->exit && alg->cra_exit)
		alg->cra_exit(tfm);
	crypto_exit_ops(tfm);
	crypto_mod_put(alg);
	kfree_sensitive(mem);
}
EXPORT_SYMBOL_GPL(crypto_destroy_tfm);
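
/*
 * Illustrative note (not part of this file): the type-specific free
 * helpers funnel into crypto_destroy_tfm() with the appropriate slab
 * start.  crypto_free_tfm() in <linux/crypto.h> is essentially:
 *
 *	static inline void crypto_free_tfm(struct crypto_tfm *tfm)
 *	{
 *		return crypto_destroy_tfm(tfm, tfm);
 *	}
 */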

int crypto_has_alg(const char *name, u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);
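
/*
 * Illustrative sketch (not part of this file): probing for an
 * algorithm before committing to use it.  "gcm(aes)" is just an
 * example name:
 *
 *	if (!crypto_has_alg("gcm(aes)", 0, 0))
 *		return -ENOENT;
 */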

void crypto_req_done(struct crypto_async_request *req, int err)
{
	struct crypto_wait *wait = req->data;

	if (err == -EINPROGRESS)
		return;

	wait->err = err;
	complete(&wait->completion);
}
EXPORT_SYMBOL_GPL(crypto_req_done);
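
/*
 * Illustrative sketch (not part of this file): crypto_req_done() is
 * the completion callback behind the crypto_wait helpers in
 * <linux/crypto.h>, used to drive an async request synchronously:
 *
 *	DECLARE_CRYPTO_WAIT(wait);
 *
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				      crypto_req_done, &wait);
 *	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 */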

MODULE_DESCRIPTION("Cryptographic core API");
MODULE_LICENSE("GPL");