// SPDX-License-Identifier: GPL-2.0
/*
 * ESSIV skcipher and aead template for block encryption
 *
 * This template encapsulates the ESSIV IV generation algorithm used by
 * dm-crypt and fscrypt, which converts the initial vector for the skcipher
 * used for block encryption, by encrypting it using the hash of the
 * skcipher key as encryption key. Usually, the input IV is a 64-bit sector
 * number in LE representation zero-padded to the size of the IV, but this
 * is not assumed by this driver.
 *
 * The typical use of this template is to instantiate the skcipher
 * 'essiv(cbc(aes),sha256)', which is the only instantiation used by
 * fscrypt, and the most relevant one for dm-crypt. However, dm-crypt
 * also permits ESSIV to be used in combination with the authenc template,
 * e.g., 'essiv(authenc(hmac(sha256),cbc(aes)),sha256)', in which case
 * we need to instantiate an aead that accepts the same special key format
 * as the authenc template, and deals with the way the encrypted IV is
 * embedded into the AAD area of the aead request. This means the AEAD
 * flavor produced by this template is tightly coupled to the way dm-crypt
 * happens to use it.
 *
 * Copyright (c) 2019 Linaro, Ltd. <ard.biesheuvel@linaro.org>
 *
 * Heavily based on:
 * adiantum length-preserving encryption mode
 *
 * Copyright 2018 Google LLC
 */
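
/*
 * In pseudocode, the transformation implemented below amounts to (a sketch
 * of the scheme described above, not a literal helper in this file):
 *
 *	salt = Hash(key)		(e.g. sha256 of the skcipher key)
 *	IV'  = Encrypt(salt, IV)	(one invocation of the block cipher)
 *
 * IV' then replaces the caller-supplied IV before the inner cbc(aes) (or
 * authenc) request is issued.
 */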

#include <crypto/authenc.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/module.h>

#include "internal.h"

struct essiv_instance_ctx {
	union {
		struct crypto_skcipher_spawn	skcipher_spawn;
		struct crypto_aead_spawn	aead_spawn;
	} u;
	char	essiv_cipher_name[CRYPTO_MAX_ALG_NAME];
	char	shash_driver_name[CRYPTO_MAX_ALG_NAME];
};

struct essiv_tfm_ctx {
	union {
		struct crypto_skcipher	*skcipher;
		struct crypto_aead	*aead;
	} u;
	struct crypto_cipher		*essiv_cipher;
	struct crypto_shash		*hash;
	int				ivoffset;
};

struct essiv_aead_request_ctx {
	struct scatterlist		sg[4];
	u8				*assoc;
	struct aead_request		aead_req;
};

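/*
 * Key the inner skcipher with the user-supplied key, then key the ESSIV
 * block cipher with the digest of that key, as described in the header
 * comment above.
 */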
static int essiv_skcipher_setkey(struct crypto_skcipher *tfm,
				 const u8 *key, unsigned int keylen)
{
	struct essiv_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	SHASH_DESC_ON_STACK(desc, tctx->hash);
	u8 salt[HASH_MAX_DIGESTSIZE];
	int err;

	crypto_skcipher_clear_flags(tctx->u.skcipher, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(tctx->u.skcipher,
				  crypto_skcipher_get_flags(tfm) &
				  CRYPTO_TFM_REQ_MASK);
	err = crypto_skcipher_setkey(tctx->u.skcipher, key, keylen);
	crypto_skcipher_set_flags(tfm,
				  crypto_skcipher_get_flags(tctx->u.skcipher) &
				  CRYPTO_TFM_RES_MASK);
	if (err)
		return err;

	desc->tfm = tctx->hash;
	err = crypto_shash_digest(desc, key, keylen, salt);
	if (err)
		return err;

	crypto_cipher_clear_flags(tctx->essiv_cipher, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(tctx->essiv_cipher,
				crypto_skcipher_get_flags(tfm) &
				CRYPTO_TFM_REQ_MASK);
	err = crypto_cipher_setkey(tctx->essiv_cipher, salt,
				   crypto_shash_digestsize(tctx->hash));
	crypto_skcipher_set_flags(tfm,
				  crypto_cipher_get_flags(tctx->essiv_cipher) &
				  CRYPTO_TFM_RES_MASK);

	return err;
}

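/*
 * The AEAD flavor takes its key in authenc() format, carrying both an
 * authentication and an encryption key; the ESSIV salt is the digest taken
 * over the encryption key followed by the authentication key.
 */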
static int essiv_aead_setkey(struct crypto_aead *tfm, const u8 *key,
			     unsigned int keylen)
{
	struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);
	SHASH_DESC_ON_STACK(desc, tctx->hash);
	struct crypto_authenc_keys keys;
	u8 salt[HASH_MAX_DIGESTSIZE];
	int err;

	crypto_aead_clear_flags(tctx->u.aead, CRYPTO_TFM_REQ_MASK);
	crypto_aead_set_flags(tctx->u.aead, crypto_aead_get_flags(tfm) &
					    CRYPTO_TFM_REQ_MASK);
	err = crypto_aead_setkey(tctx->u.aead, key, keylen);
	crypto_aead_set_flags(tfm, crypto_aead_get_flags(tctx->u.aead) &
				   CRYPTO_TFM_RES_MASK);
	if (err)
		return err;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0) {
		crypto_aead_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	desc->tfm = tctx->hash;
	err = crypto_shash_init(desc) ?:
	      crypto_shash_update(desc, keys.enckey, keys.enckeylen) ?:
	      crypto_shash_finup(desc, keys.authkey, keys.authkeylen, salt);
	if (err)
		return err;

	crypto_cipher_clear_flags(tctx->essiv_cipher, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(tctx->essiv_cipher, crypto_aead_get_flags(tfm) &
						    CRYPTO_TFM_REQ_MASK);
	err = crypto_cipher_setkey(tctx->essiv_cipher, salt,
				   crypto_shash_digestsize(tctx->hash));
	crypto_aead_set_flags(tfm, crypto_cipher_get_flags(tctx->essiv_cipher) &
				   CRYPTO_TFM_RES_MASK);

	return err;
}

static int essiv_aead_setauthsize(struct crypto_aead *tfm,
				  unsigned int authsize)
{
	struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);

	return crypto_aead_setauthsize(tctx->u.aead, authsize);
}

static void essiv_skcipher_done(struct crypto_async_request *areq, int err)
{
	struct skcipher_request *req = areq->data;

	skcipher_request_complete(req, err);
}

static int essiv_skcipher_crypt(struct skcipher_request *req, bool enc)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const struct essiv_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct skcipher_request *subreq = skcipher_request_ctx(req);

	crypto_cipher_encrypt_one(tctx->essiv_cipher, req->iv, req->iv);

	skcipher_request_set_tfm(subreq, tctx->u.skcipher);
	skcipher_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
				   req->iv);
	skcipher_request_set_callback(subreq, skcipher_request_flags(req),
				      essiv_skcipher_done, req);

	return enc ? crypto_skcipher_encrypt(subreq) :
		     crypto_skcipher_decrypt(subreq);
}

static int essiv_skcipher_encrypt(struct skcipher_request *req)
{
	return essiv_skcipher_crypt(req, true);
}

static int essiv_skcipher_decrypt(struct skcipher_request *req)
{
	return essiv_skcipher_crypt(req, false);
}

static void essiv_aead_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;
	struct essiv_aead_request_ctx *rctx = aead_request_ctx(req);

	if (err == -EINPROGRESS)
		goto out;

	kfree(rctx->assoc);

out:
	aead_request_complete(req, err);
}

static int essiv_aead_crypt(struct aead_request *req, bool enc)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	const struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);
	struct essiv_aead_request_ctx *rctx = aead_request_ctx(req);
	struct aead_request *subreq = &rctx->aead_req;
	struct scatterlist *src = req->src;
	int err;

	crypto_cipher_encrypt_one(tctx->essiv_cipher, req->iv, req->iv);

	/*
	 * dm-crypt embeds the sector number and the IV in the AAD region, so
	 * we have to copy the converted IV into the right scatterlist before
	 * we pass it on.
	 */
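	/*
	 * Assumed dm-crypt AAD layout, with the IV occupying the last ivsize
	 * bytes of the associated data:
	 *
	 *	| sector number / other assoc data | IV | plaintext ... |
	 *	|<------------ assoclen --------------->|
	 */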
	rctx->assoc = NULL;
	if (req->src == req->dst || !enc) {
		scatterwalk_map_and_copy(req->iv, req->dst,
					 req->assoclen - crypto_aead_ivsize(tfm),
					 crypto_aead_ivsize(tfm), 1);
	} else {
		u8 *iv = (u8 *)aead_request_ctx(req) + tctx->ivoffset;
		int ivsize = crypto_aead_ivsize(tfm);
		int ssize = req->assoclen - ivsize;
		struct scatterlist *sg;
		int nents;

		if (ssize < 0)
			return -EINVAL;

		nents = sg_nents_for_len(req->src, ssize);
		if (nents < 0)
			return -EINVAL;

		memcpy(iv, req->iv, ivsize);
		sg_init_table(rctx->sg, 4);

		if (unlikely(nents > 1)) {
			/*
			 * This is a case that rarely occurs in practice, but
			 * for correctness, we have to deal with it nonetheless.
			 */
			rctx->assoc = kmalloc(ssize, GFP_ATOMIC);
			if (!rctx->assoc)
				return -ENOMEM;

			scatterwalk_map_and_copy(rctx->assoc, req->src, 0,
						 ssize, 0);
			sg_set_buf(rctx->sg, rctx->assoc, ssize);
		} else {
			sg_set_page(rctx->sg, sg_page(req->src), ssize,
				    req->src->offset);
		}

		sg_set_buf(rctx->sg + 1, iv, ivsize);
		sg = scatterwalk_ffwd(rctx->sg + 2, req->src, req->assoclen);
		if (sg != rctx->sg + 2)
			sg_chain(rctx->sg, 3, sg);

		src = rctx->sg;
	}

	aead_request_set_tfm(subreq, tctx->u.aead);
	aead_request_set_ad(subreq, req->assoclen);
	aead_request_set_callback(subreq, aead_request_flags(req),
				  essiv_aead_done, req);
	aead_request_set_crypt(subreq, src, req->dst, req->cryptlen, req->iv);

	err = enc ? crypto_aead_encrypt(subreq) :
		    crypto_aead_decrypt(subreq);

	if (rctx->assoc && err != -EINPROGRESS && err != -EBUSY)
		kfree(rctx->assoc);
	return err;
}

static int essiv_aead_encrypt(struct aead_request *req)
{
	return essiv_aead_crypt(req, true);
}

static int essiv_aead_decrypt(struct aead_request *req)
{
	return essiv_aead_crypt(req, false);
}

static int essiv_init_tfm(struct essiv_instance_ctx *ictx,
			  struct essiv_tfm_ctx *tctx)
{
	struct crypto_cipher *essiv_cipher;
	struct crypto_shash *hash;
	int err;

	essiv_cipher = crypto_alloc_cipher(ictx->essiv_cipher_name, 0, 0);
	if (IS_ERR(essiv_cipher))
		return PTR_ERR(essiv_cipher);

	hash = crypto_alloc_shash(ictx->shash_driver_name, 0, 0);
	if (IS_ERR(hash)) {
		err = PTR_ERR(hash);
		goto err_free_essiv_cipher;
	}

	tctx->essiv_cipher = essiv_cipher;
	tctx->hash = hash;

	return 0;

err_free_essiv_cipher:
	crypto_free_cipher(essiv_cipher);
	return err;
}

static int essiv_skcipher_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct essiv_instance_ctx *ictx = skcipher_instance_ctx(inst);
	struct essiv_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *skcipher;
	int err;

	skcipher = crypto_spawn_skcipher(&ictx->u.skcipher_spawn);
	if (IS_ERR(skcipher))
		return PTR_ERR(skcipher);

	crypto_skcipher_set_reqsize(tfm, sizeof(struct skcipher_request) +
				         crypto_skcipher_reqsize(skcipher));

	err = essiv_init_tfm(ictx, tctx);
	if (err) {
		crypto_free_skcipher(skcipher);
		return err;
	}

	tctx->u.skcipher = skcipher;
	return 0;
}

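/*
 * The AEAD request context produced by this template is laid out as
 *
 *	struct essiv_aead_request_ctx | inner aead request ctx | encrypted IV
 *
 * and ivoffset records where the IV copy lives, so that essiv_aead_crypt()
 * can reference it from a scatterlist entry.
 */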
static int essiv_aead_init_tfm(struct crypto_aead *tfm)
{
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct essiv_instance_ctx *ictx = aead_instance_ctx(inst);
	struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);
	struct crypto_aead *aead;
	unsigned int subreq_size;
	int err;

	BUILD_BUG_ON(offsetofend(struct essiv_aead_request_ctx, aead_req) !=
		     sizeof(struct essiv_aead_request_ctx));

	aead = crypto_spawn_aead(&ictx->u.aead_spawn);
	if (IS_ERR(aead))
		return PTR_ERR(aead);

	subreq_size = FIELD_SIZEOF(struct essiv_aead_request_ctx, aead_req) +
		      crypto_aead_reqsize(aead);

	tctx->ivoffset = offsetof(struct essiv_aead_request_ctx, aead_req) +
			 subreq_size;
	crypto_aead_set_reqsize(tfm, tctx->ivoffset + crypto_aead_ivsize(aead));

	err = essiv_init_tfm(ictx, tctx);
	if (err) {
		crypto_free_aead(aead);
		return err;
	}

	tctx->u.aead = aead;
	return 0;
}

static void essiv_skcipher_exit_tfm(struct crypto_skcipher *tfm)
{
	struct essiv_tfm_ctx *tctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(tctx->u.skcipher);
	crypto_free_cipher(tctx->essiv_cipher);
	crypto_free_shash(tctx->hash);
}

static void essiv_aead_exit_tfm(struct crypto_aead *tfm)
{
	struct essiv_tfm_ctx *tctx = crypto_aead_ctx(tfm);

	crypto_free_aead(tctx->u.aead);
	crypto_free_cipher(tctx->essiv_cipher);
	crypto_free_shash(tctx->hash);
}

static void essiv_skcipher_free_instance(struct skcipher_instance *inst)
{
	struct essiv_instance_ctx *ictx = skcipher_instance_ctx(inst);

	crypto_drop_skcipher(&ictx->u.skcipher_spawn);
	kfree(inst);
}

static void essiv_aead_free_instance(struct aead_instance *inst)
{
	struct essiv_instance_ctx *ictx = aead_instance_ctx(inst);

	crypto_drop_aead(&ictx->u.aead_spawn);
	kfree(inst);
}

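/*
 * Extract the name of the innermost block cipher from the full cra_name,
 * e.g. "cbc(aes)" -> "aes" and "authenc(hmac(sha256),cbc(aes))" -> "aes".
 */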
static bool parse_cipher_name(char *essiv_cipher_name, const char *cra_name)
{
	const char *p, *q;
	int len;

	/* find the last opening parens */
	p = strrchr(cra_name, '(');
	if (!p++)
		return false;

	/* find the first closing parens in the tail of the string */
	q = strchr(p, ')');
	if (!q)
		return false;

	len = q - p;
	if (len >= CRYPTO_MAX_ALG_NAME)
		return false;

	memcpy(essiv_cipher_name, p, len);
	essiv_cipher_name[len] = '\0';
	return true;
}

static bool essiv_supported_algorithms(const char *essiv_cipher_name,
				       struct shash_alg *hash_alg,
				       int ivsize)
{
	struct crypto_alg *alg;
	bool ret = false;

	alg = crypto_alg_mod_lookup(essiv_cipher_name,
				    CRYPTO_ALG_TYPE_CIPHER,
				    CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(alg))
		return false;

	if (hash_alg->digestsize < alg->cra_cipher.cia_min_keysize ||
	    hash_alg->digestsize > alg->cra_cipher.cia_max_keysize)
		goto out;

	if (ivsize != alg->cra_blocksize)
		goto out;

	if (crypto_shash_alg_has_setkey(hash_alg))
		goto out;

	ret = true;

out:
	crypto_mod_put(alg);
	return ret;
}

static int essiv_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_attr_type *algt;
	const char *inner_cipher_name;
	const char *shash_name;
	struct skcipher_instance *skcipher_inst = NULL;
	struct aead_instance *aead_inst = NULL;
	struct crypto_instance *inst;
	struct crypto_alg *base, *block_base;
	struct essiv_instance_ctx *ictx;
	struct skcipher_alg *skcipher_alg = NULL;
	struct aead_alg *aead_alg = NULL;
	struct crypto_alg *_hash_alg;
	struct shash_alg *hash_alg;
	int ivsize;
	u32 type;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	inner_cipher_name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(inner_cipher_name))
		return PTR_ERR(inner_cipher_name);

	shash_name = crypto_attr_alg_name(tb[2]);
	if (IS_ERR(shash_name))
		return PTR_ERR(shash_name);

	type = algt->type & algt->mask;

	switch (type) {
	case CRYPTO_ALG_TYPE_BLKCIPHER:
		skcipher_inst = kzalloc(sizeof(*skcipher_inst) +
					sizeof(*ictx), GFP_KERNEL);
		if (!skcipher_inst)
			return -ENOMEM;
		inst = skcipher_crypto_instance(skcipher_inst);
		base = &skcipher_inst->alg.base;
		ictx = crypto_instance_ctx(inst);

		/* Symmetric cipher, e.g., "cbc(aes)" */
		crypto_set_skcipher_spawn(&ictx->u.skcipher_spawn, inst);
		err = crypto_grab_skcipher(&ictx->u.skcipher_spawn,
					   inner_cipher_name, 0,
					   crypto_requires_sync(algt->type,
								algt->mask));
		if (err)
			goto out_free_inst;
		skcipher_alg = crypto_spawn_skcipher_alg(&ictx->u.skcipher_spawn);
		block_base = &skcipher_alg->base;
		ivsize = crypto_skcipher_alg_ivsize(skcipher_alg);
		break;

	case CRYPTO_ALG_TYPE_AEAD:
		aead_inst = kzalloc(sizeof(*aead_inst) +
				    sizeof(*ictx), GFP_KERNEL);
		if (!aead_inst)
			return -ENOMEM;
		inst = aead_crypto_instance(aead_inst);
		base = &aead_inst->alg.base;
		ictx = crypto_instance_ctx(inst);

		/* AEAD cipher, e.g., "authenc(hmac(sha256),cbc(aes))" */
		crypto_set_aead_spawn(&ictx->u.aead_spawn, inst);
		err = crypto_grab_aead(&ictx->u.aead_spawn,
				       inner_cipher_name, 0,
				       crypto_requires_sync(algt->type,
							    algt->mask));
		if (err)
			goto out_free_inst;
		aead_alg = crypto_spawn_aead_alg(&ictx->u.aead_spawn);
		block_base = &aead_alg->base;
		if (!strstarts(block_base->cra_name, "authenc(")) {
			pr_warn("Only authenc() type AEADs are supported by ESSIV\n");
			err = -EINVAL;
			goto out_drop_skcipher;
		}
		ivsize = aead_alg->ivsize;
		break;

	default:
		return -EINVAL;
	}

	if (!parse_cipher_name(ictx->essiv_cipher_name, block_base->cra_name)) {
		pr_warn("Failed to parse ESSIV cipher name from skcipher cra_name\n");
		err = -EINVAL;
		goto out_drop_skcipher;
	}

	/* Synchronous hash, e.g., "sha256" */
	_hash_alg = crypto_alg_mod_lookup(shash_name,
					  CRYPTO_ALG_TYPE_SHASH,
					  CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(_hash_alg)) {
		err = PTR_ERR(_hash_alg);
		goto out_drop_skcipher;
	}
	hash_alg = __crypto_shash_alg(_hash_alg);

	/* Check the set of algorithms */
	if (!essiv_supported_algorithms(ictx->essiv_cipher_name, hash_alg,
					ivsize)) {
		pr_warn("Unsupported essiv instantiation: essiv(%s,%s)\n",
			block_base->cra_name, hash_alg->base.cra_name);
		err = -EINVAL;
		goto out_free_hash;
	}

	/* record the driver name so we can instantiate this exact algo later */
	strlcpy(ictx->shash_driver_name, hash_alg->base.cra_driver_name,
		CRYPTO_MAX_ALG_NAME);

	/* Instance fields */

	err = -ENAMETOOLONG;
	if (snprintf(base->cra_name, CRYPTO_MAX_ALG_NAME,
		     "essiv(%s,%s)", block_base->cra_name,
		     hash_alg->base.cra_name) >= CRYPTO_MAX_ALG_NAME)
		goto out_free_hash;
	if (snprintf(base->cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "essiv(%s,%s)", block_base->cra_driver_name,
		     hash_alg->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		goto out_free_hash;

	base->cra_flags		= block_base->cra_flags & CRYPTO_ALG_ASYNC;
	base->cra_blocksize	= block_base->cra_blocksize;
	base->cra_ctxsize	= sizeof(struct essiv_tfm_ctx);
	base->cra_alignmask	= block_base->cra_alignmask;
	base->cra_priority	= block_base->cra_priority;

	if (type == CRYPTO_ALG_TYPE_BLKCIPHER) {
		skcipher_inst->alg.setkey	= essiv_skcipher_setkey;
		skcipher_inst->alg.encrypt	= essiv_skcipher_encrypt;
		skcipher_inst->alg.decrypt	= essiv_skcipher_decrypt;
		skcipher_inst->alg.init		= essiv_skcipher_init_tfm;
		skcipher_inst->alg.exit		= essiv_skcipher_exit_tfm;

		skcipher_inst->alg.min_keysize	= crypto_skcipher_alg_min_keysize(skcipher_alg);
		skcipher_inst->alg.max_keysize	= crypto_skcipher_alg_max_keysize(skcipher_alg);
		skcipher_inst->alg.ivsize	= ivsize;
		skcipher_inst->alg.chunksize	= crypto_skcipher_alg_chunksize(skcipher_alg);
		skcipher_inst->alg.walksize	= crypto_skcipher_alg_walksize(skcipher_alg);

		skcipher_inst->free		= essiv_skcipher_free_instance;

		err = skcipher_register_instance(tmpl, skcipher_inst);
	} else {
		aead_inst->alg.setkey		= essiv_aead_setkey;
		aead_inst->alg.setauthsize	= essiv_aead_setauthsize;
		aead_inst->alg.encrypt		= essiv_aead_encrypt;
		aead_inst->alg.decrypt		= essiv_aead_decrypt;
		aead_inst->alg.init		= essiv_aead_init_tfm;
		aead_inst->alg.exit		= essiv_aead_exit_tfm;

		aead_inst->alg.ivsize		= ivsize;
		aead_inst->alg.maxauthsize	= crypto_aead_alg_maxauthsize(aead_alg);
		aead_inst->alg.chunksize	= crypto_aead_alg_chunksize(aead_alg);

		aead_inst->free			= essiv_aead_free_instance;

		err = aead_register_instance(tmpl, aead_inst);
	}

	if (err)
		goto out_free_hash;

	crypto_mod_put(_hash_alg);
	return 0;

out_free_hash:
	crypto_mod_put(_hash_alg);
out_drop_skcipher:
	if (type == CRYPTO_ALG_TYPE_BLKCIPHER)
		crypto_drop_skcipher(&ictx->u.skcipher_spawn);
	else
		crypto_drop_aead(&ictx->u.aead_spawn);
out_free_inst:
	kfree(skcipher_inst);
	kfree(aead_inst);
	return err;
}

/* essiv(cipher_name, shash_name) */
static struct crypto_template essiv_tmpl = {
	.name	= "essiv",
	.create	= essiv_create,
	.module	= THIS_MODULE,
};
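
/*
 * A user of this template (e.g. dm-crypt or fscrypt) would typically
 * allocate an instance along the lines of (an illustrative sketch, not a
 * call made in this file):
 *
 *	tfm = crypto_alloc_skcipher("essiv(cbc(aes),sha256)", 0, 0);
 *
 * which ends up invoking essiv_create() above with "cbc(aes)" and "sha256"
 * as the template parameters.
 */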

static int __init essiv_module_init(void)
{
	return crypto_register_template(&essiv_tmpl);
}

static void __exit essiv_module_exit(void)
{
	crypto_unregister_template(&essiv_tmpl);
}

subsys_initcall(essiv_module_init);
module_exit(essiv_module_exit);

MODULE_DESCRIPTION("ESSIV skcipher/aead wrapper for block encryption");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("essiv");