/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/completion.h>
#include "internal.h"

LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg);

struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
	return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);

void crypto_mod_put(struct crypto_alg *alg)
{
	struct module *module = alg->cra_module;

	crypto_alg_put(alg);
	module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);

static inline int crypto_is_test_larval(struct crypto_larval *larval)
{
	return larval->alg.cra_driver_name[0];
}

static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
					      u32 mask)
{
	struct crypto_alg *q, *alg = NULL;
	int best = -2;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		int exact, fuzzy;

		if (crypto_is_moribund(q))
			continue;

		if ((q->cra_flags ^ type) & mask)
			continue;

		if (crypto_is_larval(q) &&
		    !crypto_is_test_larval((struct crypto_larval *)q) &&
		    ((struct crypto_larval *)q)->mask != mask)
			continue;

		exact = !strcmp(q->cra_driver_name, name);
		fuzzy = !strcmp(q->cra_name, name);
		if (!exact && !(fuzzy && q->cra_priority > best))
			continue;

		if (unlikely(!crypto_mod_get(q)))
			continue;

		best = q->cra_priority;
		if (alg)
			crypto_mod_put(alg);
		alg = q;

		if (exact)
			break;
	}

	return alg;
}

static void crypto_larval_destroy(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	BUG_ON(!crypto_is_larval(alg));
	if (larval->adult)
		crypto_mod_put(larval->adult);
	kfree(larval);
}

struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
{
	struct crypto_larval *larval;

	larval = kzalloc(sizeof(*larval), GFP_KERNEL);
	if (!larval)
		return ERR_PTR(-ENOMEM);

	larval->mask = mask;
	larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
	larval->alg.cra_priority = -1;
	larval->alg.cra_destroy = crypto_larval_destroy;

	strlcpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
	init_completion(&larval->completion);

	return larval;
}
EXPORT_SYMBOL_GPL(crypto_larval_alloc);

static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
					    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_larval *larval;

	larval = crypto_larval_alloc(name, type, mask);
	if (IS_ERR(larval))
		return ERR_CAST(larval);

	atomic_set(&larval->alg.cra_refcnt, 2);

	down_write(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	if (!alg) {
		alg = &larval->alg;
		list_add(&alg->cra_list, &crypto_alg_list);
	}
	up_write(&crypto_alg_sem);

	if (alg != &larval->alg) {
		kfree(larval);
		if (crypto_is_larval(alg))
			alg = crypto_larval_wait(alg);
	}

	return alg;
}

void crypto_larval_kill(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	down_write(&crypto_alg_sem);
	list_del(&alg->cra_list);
	up_write(&crypto_alg_sem);
	complete_all(&larval->completion);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_larval_kill);

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;
	long timeout;

	timeout = wait_for_completion_killable_timeout(
		&larval->completion, 60 * HZ);

	alg = larval->adult;
	if (timeout < 0)
		alg = ERR_PTR(-EINTR);
	else if (!timeout)
		alg = ERR_PTR(-ETIMEDOUT);
	else if (!alg)
		alg = ERR_PTR(-ENOENT);
	else if (crypto_is_test_larval(larval) &&
		 !(alg->cra_flags & CRYPTO_ALG_TESTED))
		alg = ERR_PTR(-EAGAIN);
	else if (!crypto_mod_get(alg))
		alg = ERR_PTR(-EAGAIN);
	crypto_mod_put(&larval->alg);

	return alg;
}

struct crypto_alg *crypto_alg_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	down_read(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	up_read(&crypto_alg_sem);

	return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_lookup);

struct crypto_alg *crypto_larval_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	if (!name)
		return ERR_PTR(-ENOENT);

	mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
	type &= mask;

	alg = crypto_alg_lookup(name, type, mask);
	if (!alg) {
		request_module("crypto-%s", name);

		if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask &
		      CRYPTO_ALG_NEED_FALLBACK))
			request_module("crypto-%s-all", name);

		alg = crypto_alg_lookup(name, type, mask);
	}

	if (alg)
		return crypto_is_larval(alg) ? crypto_larval_wait(alg) : alg;

	return crypto_larval_add(name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_larval_lookup);

int crypto_probing_notify(unsigned long val, void *v)
{
	int ok;

	ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	if (ok == NOTIFY_DONE) {
		request_module("cryptomgr");
		ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	}

	return ok;
}
EXPORT_SYMBOL_GPL(crypto_probing_notify);

struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_alg *larval;
	int ok;

	if (!((type | mask) & CRYPTO_ALG_TESTED)) {
		type |= CRYPTO_ALG_TESTED;
		mask |= CRYPTO_ALG_TESTED;
	}
	/*
	 * If the internal flag is set for a cipher, require a caller to
	 * invoke the cipher with the internal flag to use that cipher.
	 * Also, if a caller wants to allocate a cipher that may or may
	 * not be an internal cipher, use type | CRYPTO_ALG_INTERNAL and
	 * !(mask & CRYPTO_ALG_INTERNAL).
	 */
	if (!((type | mask) & CRYPTO_ALG_INTERNAL))
		mask |= CRYPTO_ALG_INTERNAL;

	larval = crypto_larval_lookup(name, type, mask);
	if (IS_ERR(larval) || !crypto_is_larval(larval))
		return larval;

	ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);

	if (ok == NOTIFY_STOP)
		alg = crypto_larval_wait(larval);
	else {
		crypto_mod_put(larval);
		alg = ERR_PTR(-ENOENT);
	}
	crypto_larval_kill(larval);
	return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);
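
/*
 * Illustrative sketch of the flag adjustments above (the algorithm name
 * "aes" is only an example): a caller passing type == 0 and mask == 0
 * ends up requiring an algorithm that has passed its self-tests
 * (CRYPTO_ALG_TESTED set in both type and mask) and that is not marked
 * internal (CRYPTO_ALG_INTERNAL set in mask only):
 *
 *	struct crypto_alg *alg;
 *
 *	alg = crypto_alg_mod_lookup("aes", 0, 0);
 *	if (IS_ERR(alg))
 *		return PTR_ERR(alg);
 *	...
 *	crypto_mod_put(alg);
 */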

static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = tfm->__crt_alg->cra_type;

	if (type_obj)
		return type_obj->init(tfm, type, mask);

	switch (crypto_tfm_alg_type(tfm)) {
	case CRYPTO_ALG_TYPE_CIPHER:
		return crypto_init_cipher_ops(tfm);

	case CRYPTO_ALG_TYPE_COMPRESS:
		return crypto_init_compress_ops(tfm);

	default:
		break;
	}

	BUG();
	return -EINVAL;
}

static void crypto_exit_ops(struct crypto_tfm *tfm)
{
	const struct crypto_type *type = tfm->__crt_alg->cra_type;

	if (type) {
		if (tfm->exit)
			tfm->exit(tfm);
		return;
	}

	switch (crypto_tfm_alg_type(tfm)) {
	case CRYPTO_ALG_TYPE_CIPHER:
		crypto_exit_cipher_ops(tfm);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		crypto_exit_compress_ops(tfm);
		break;

	default:
		BUG();
	}
}

static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = alg->cra_type;
	unsigned int len;

	len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
	if (type_obj)
		return len + type_obj->ctxsize(alg, type, mask);

	switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
	default:
		BUG();

	case CRYPTO_ALG_TYPE_CIPHER:
		len += crypto_cipher_ctxsize(alg);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		len += crypto_compress_ctxsize(alg);
		break;
	}

	return len;
}

static void crypto_shoot_alg(struct crypto_alg *alg)
{
	down_write(&crypto_alg_sem);
	alg->cra_flags |= CRYPTO_ALG_DYING;
	up_write(&crypto_alg_sem);
}

struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
				      u32 mask)
{
	struct crypto_tfm *tfm = NULL;
	unsigned int tfm_size;
	int err = -ENOMEM;

	tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
	tfm = kzalloc(tfm_size, GFP_KERNEL);
	if (tfm == NULL)
		goto out_err;

	tfm->__crt_alg = alg;

	err = crypto_init_ops(tfm, type, mask);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(tfm);
out_err:
	tfm = ERR_PTR(err);
out:
	return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);

/*
 *	crypto_alloc_base - Locate algorithm and allocate transform
 *	@alg_name: Name of algorithm
 *	@type: Type of algorithm
 *	@mask: Mask for type comparison
 *
 *	This function should not be used by new algorithm types.
 *	Please use crypto_alloc_tfm instead.
 *
 *	crypto_alloc_base() will first attempt to locate an already loaded
 *	algorithm.  If that fails and the kernel supports dynamically loadable
 *	modules, it will then attempt to load a module of the same name or
 *	alias.  If that fails it will send a query to any loaded crypto manager
 *	to construct an algorithm on the fly.  A refcount is grabbed on the
 *	algorithm which is then associated with the new transform.
 *
 *	The returned transform is of a non-determinate type.  Most people
 *	should use one of the more specific allocation functions such as
 *	crypto_alloc_blkcipher.
 *
 *	In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
	struct crypto_tfm *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_alg_mod_lookup(alg_name, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = __crypto_alloc_tfm(alg, type, mask);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);
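
/*
 * A minimal usage sketch (assuming "cbc(aes)" is an available algorithm;
 * error handling is abbreviated):
 *
 *	struct crypto_tfm *tfm;
 *
 *	tfm = crypto_alloc_base("cbc(aes)", CRYPTO_ALG_TYPE_BLKCIPHER,
 *				CRYPTO_ALG_TYPE_MASK);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	...
 *	crypto_free_tfm(tfm);
 */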

void *crypto_create_tfm(struct crypto_alg *alg,
			const struct crypto_type *frontend)
{
	char *mem;
	struct crypto_tfm *tfm = NULL;
	unsigned int tfmsize;
	unsigned int total;
	int err = -ENOMEM;

	tfmsize = frontend->tfmsize;
	total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);

	mem = kzalloc(total, GFP_KERNEL);
	if (mem == NULL)
		goto out_err;

	tfm = (struct crypto_tfm *)(mem + tfmsize);
	tfm->__crt_alg = alg;

	err = frontend->init_tfm(tfm);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(mem);
out_err:
	mem = ERR_PTR(err);
out:
	return mem;
}
EXPORT_SYMBOL_GPL(crypto_create_tfm);

struct crypto_alg *crypto_find_alg(const char *alg_name,
				   const struct crypto_type *frontend,
				   u32 type, u32 mask)
{
	struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask) =
		crypto_alg_mod_lookup;

	if (frontend) {
		type &= frontend->maskclear;
		mask &= frontend->maskclear;
		type |= frontend->type;
		mask |= frontend->maskset;

		if (frontend->lookup)
			lookup = frontend->lookup;
	}

	return lookup(alg_name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_find_alg);

/*
 *	crypto_alloc_tfm - Locate algorithm and allocate transform
 *	@alg_name: Name of algorithm
 *	@frontend: Frontend algorithm type
 *	@type: Type of algorithm
 *	@mask: Mask for type comparison
 *
 *	crypto_alloc_tfm() will first attempt to locate an already loaded
 *	algorithm.  If that fails and the kernel supports dynamically loadable
 *	modules, it will then attempt to load a module of the same name or
 *	alias.  If that fails it will send a query to any loaded crypto manager
 *	to construct an algorithm on the fly.  A refcount is grabbed on the
 *	algorithm which is then associated with the new transform.
 *
 *	The returned transform is of a non-determinate type.  Most people
 *	should use one of the more specific allocation functions such as
 *	crypto_alloc_blkcipher.
 *
 *	In case of error the return value is an error pointer.
 */
void *crypto_alloc_tfm(const char *alg_name,
		       const struct crypto_type *frontend, u32 type, u32 mask)
{
	void *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_find_alg(alg_name, frontend, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = crypto_create_tfm(alg, frontend);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_tfm);
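
/*
 * Sketch of how a type frontend typically wraps crypto_alloc_tfm()
 * (modelled on the shash wrapper; treat the details as illustrative):
 *
 *	struct crypto_shash *crypto_alloc_shash(const char *alg_name,
 *						u32 type, u32 mask)
 *	{
 *		return crypto_alloc_tfm(alg_name, &crypto_shash_type,
 *					type, mask);
 *	}
 */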

/*
 *	crypto_destroy_tfm - Free crypto transform
 *	@mem: Start of tfm slab
 *	@tfm: Transform to free
 *
 *	This function frees up the transform and any associated resources,
 *	then drops the refcount on the associated algorithm.
 */
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
{
	struct crypto_alg *alg;

	if (unlikely(!mem))
		return;

	alg = tfm->__crt_alg;

	if (!tfm->exit && alg->cra_exit)
		alg->cra_exit(tfm);
	crypto_exit_ops(tfm);
	crypto_mod_put(alg);
	kzfree(mem);
}
EXPORT_SYMBOL_GPL(crypto_destroy_tfm);
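
/*
 * Callers normally reach crypto_destroy_tfm() through a type-specific
 * free helper; a sketch of the basic wrapper (mirroring crypto_free_tfm()
 * in <linux/crypto.h>; details illustrative):
 *
 *	static inline void crypto_free_tfm(struct crypto_tfm *tfm)
 *	{
 *		return crypto_destroy_tfm(tfm, tfm);
 *	}
 */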

int crypto_has_alg(const char *name, u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);

void crypto_req_done(struct crypto_async_request *req, int err)
{
	struct crypto_wait *wait = req->data;

	if (err == -EINPROGRESS)
		return;

	wait->err = err;
	complete(&wait->completion);
}
EXPORT_SYMBOL_GPL(crypto_req_done);
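
/*
 * crypto_req_done() is the completion callback used with the crypto_wait
 * helpers to drive an asynchronous request synchronously.  A minimal
 * sketch (assuming an skcipher request "req" has already been set up):
 *
 *	DECLARE_CRYPTO_WAIT(wait);
 *
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
 *				      CRYPTO_TFM_REQ_MAY_SLEEP,
 *				      crypto_req_done, &wait);
 *	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 */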

MODULE_DESCRIPTION("Cryptographic core API");
MODULE_LICENSE("GPL");