// SPDX-License-Identifier: GPL-2.0-only
/*
 * pcrypt - Parallel crypto wrapper.
 *
 * Copyright (C) 2009 secunet Security Networks AG
 * Copyright (C) 2009 Steffen Klassert <steffen.klassert@secunet.com>
 */

#include <crypto/algapi.h>
#include <crypto/internal/aead.h>
#include <linux/atomic.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/notifier.h>
#include <linux/kobject.h>
#include <linux/cpu.h>
#include <crypto/pcrypt.h>

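/*
 * One padata instance each for the encryption and the decryption path,
 * plus the kset that exposes them under /sys/kernel/pcrypt.
 */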
static struct padata_instance *pencrypt;
static struct padata_instance *pdecrypt;
static struct kset           *pcrypt_kset;

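/*
 * Per-instance context: the spawn holding the wrapped AEAD algorithm, one
 * padata shell per direction, and a counter used to spread transforms
 * across callback CPUs.
 */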
struct pcrypt_instance_ctx {
	struct crypto_aead_spawn spawn;
	struct padata_shell *psenc;
	struct padata_shell *psdec;
	atomic_t tfm_count;
};

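/* Per-transform context: the child AEAD transform and its callback CPU. */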
struct pcrypt_aead_ctx {
	struct crypto_aead *child;
	unsigned int cb_cpu;
};

static inline struct pcrypt_instance_ctx *pcrypt_tfm_ictx(
	struct crypto_aead *tfm)
{
	return aead_instance_ctx(aead_alg_instance(tfm));
}

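/* Key and authsize handling is simply forwarded to the child transform. */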
static int pcrypt_aead_setkey(struct crypto_aead *parent,
			      const u8 *key, unsigned int keylen)
{
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(parent);

	return crypto_aead_setkey(ctx->child, key, keylen);
}

static int pcrypt_aead_setauthsize(struct crypto_aead *parent,
				   unsigned int authsize)
{
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(parent);

	return crypto_aead_setauthsize(ctx->child, authsize);
}

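/*
 * Serialization callback, invoked once padata has restored the original
 * request order: complete the original request with the recorded status.
 */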
static void pcrypt_aead_serial(struct padata_priv *padata)
{
	struct pcrypt_request *preq = pcrypt_padata_request(padata);
	struct aead_request *req = pcrypt_request_ctx(preq);

	aead_request_complete(req->base.data, padata->info);
}

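/*
 * Completion callback of the child request: record the result and hand
 * the job back to padata for in-order serialization.
 */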
static void pcrypt_aead_done(struct crypto_async_request *areq, int err)
{
	struct aead_request *req = areq->data;
	struct pcrypt_request *preq = aead_request_ctx(req);
	struct padata_priv *padata = pcrypt_request_padata(preq);

	padata->info = err;

	padata_do_serial(padata);
}

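/*
 * Parallel worker for encryption: run the child encryption and, unless it
 * completes asynchronously, queue the result for serialization.
 */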
static void pcrypt_aead_enc(struct padata_priv *padata)
{
	struct pcrypt_request *preq = pcrypt_padata_request(padata);
	struct aead_request *req = pcrypt_request_ctx(preq);
	int ret;

	ret = crypto_aead_encrypt(req);

	if (ret == -EINPROGRESS)
		return;

	padata->info = ret;
	padata_do_serial(padata);
}

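/*
 * Encryption entry point: set up the child request and the padata job,
 * then dispatch it via the pencrypt instance. A successful dispatch
 * returns -EINPROGRESS; -EBUSY from padata is mapped to -EAGAIN.
 */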
static int pcrypt_aead_encrypt(struct aead_request *req)
{
	int err;
	struct pcrypt_request *preq = aead_request_ctx(req);
	struct aead_request *creq = pcrypt_request_ctx(preq);
	struct padata_priv *padata = pcrypt_request_padata(preq);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(aead);
	u32 flags = aead_request_flags(req);
	struct pcrypt_instance_ctx *ictx;

	ictx = pcrypt_tfm_ictx(aead);

	memset(padata, 0, sizeof(struct padata_priv));

	padata->parallel = pcrypt_aead_enc;
	padata->serial = pcrypt_aead_serial;

	aead_request_set_tfm(creq, ctx->child);
	aead_request_set_callback(creq, flags & ~CRYPTO_TFM_REQ_MAY_SLEEP,
				  pcrypt_aead_done, req);
	aead_request_set_crypt(creq, req->src, req->dst,
			       req->cryptlen, req->iv);
	aead_request_set_ad(creq, req->assoclen);

	err = padata_do_parallel(ictx->psenc, padata, &ctx->cb_cpu);
	if (!err)
		return -EINPROGRESS;
	if (err == -EBUSY)
		return -EAGAIN;

	return err;
}

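/* Parallel worker for decryption; mirrors pcrypt_aead_enc(). */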
static void pcrypt_aead_dec(struct padata_priv *padata)
{
	struct pcrypt_request *preq = pcrypt_padata_request(padata);
	struct aead_request *req = pcrypt_request_ctx(preq);
	int ret;

	ret = crypto_aead_decrypt(req);

	if (ret == -EINPROGRESS)
		return;

	padata->info = ret;
	padata_do_serial(padata);
}

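/*
 * Decryption entry point; mirrors pcrypt_aead_encrypt() but dispatches
 * via the pdecrypt instance.
 */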
static int pcrypt_aead_decrypt(struct aead_request *req)
{
	int err;
	struct pcrypt_request *preq = aead_request_ctx(req);
	struct aead_request *creq = pcrypt_request_ctx(preq);
	struct padata_priv *padata = pcrypt_request_padata(preq);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(aead);
	u32 flags = aead_request_flags(req);
	struct pcrypt_instance_ctx *ictx;

	ictx = pcrypt_tfm_ictx(aead);

	memset(padata, 0, sizeof(struct padata_priv));

	padata->parallel = pcrypt_aead_dec;
	padata->serial = pcrypt_aead_serial;

	aead_request_set_tfm(creq, ctx->child);
	aead_request_set_callback(creq, flags & ~CRYPTO_TFM_REQ_MAY_SLEEP,
				  pcrypt_aead_done, req);
	aead_request_set_crypt(creq, req->src, req->dst,
			       req->cryptlen, req->iv);
	aead_request_set_ad(creq, req->assoclen);

	err = padata_do_parallel(ictx->psdec, padata, &ctx->cb_cpu);
	if (!err)
		return -EINPROGRESS;
	if (err == -EBUSY)
		return -EAGAIN;

	return err;
}

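/*
 * Transform init: pick a callback CPU round-robin over the online CPUs,
 * instantiate the child transform and size the request context to hold
 * the padata bookkeeping plus the child request.
 */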
static int pcrypt_aead_init_tfm(struct crypto_aead *tfm)
{
	int cpu, cpu_index;
	struct aead_instance *inst = aead_alg_instance(tfm);
	struct pcrypt_instance_ctx *ictx = aead_instance_ctx(inst);
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_aead *cipher;

	cpu_index = (unsigned int)atomic_inc_return(&ictx->tfm_count) %
		    cpumask_weight(cpu_online_mask);

	ctx->cb_cpu = cpumask_first(cpu_online_mask);
	for (cpu = 0; cpu < cpu_index; cpu++)
		ctx->cb_cpu = cpumask_next(ctx->cb_cpu, cpu_online_mask);

	cipher = crypto_spawn_aead(&ictx->spawn);

	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	crypto_aead_set_reqsize(tfm, sizeof(struct pcrypt_request) +
				     sizeof(struct aead_request) +
				     crypto_aead_reqsize(cipher));

	return 0;
}

static void pcrypt_aead_exit_tfm(struct crypto_aead *tfm)
{
	struct pcrypt_aead_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_free_aead(ctx->child);
}

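/*
 * Instance destructor: drop the spawn and free both padata shells along
 * with the instance itself.
 */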
static void pcrypt_free(struct aead_instance *inst)
{
	struct pcrypt_instance_ctx *ctx = aead_instance_ctx(inst);

	crypto_drop_aead(&ctx->spawn);
	padata_free_shell(ctx->psdec);
	padata_free_shell(ctx->psenc);
	kfree(inst);
}

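/*
 * Copy the basic algorithm parameters from the wrapped algorithm and give
 * the pcrypt instance a higher priority so it is preferred over it.
 */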
static int pcrypt_init_instance(struct crypto_instance *inst,
				struct crypto_alg *alg)
{
	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "pcrypt(%s)", alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	memcpy(inst->alg.cra_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);

	inst->alg.cra_priority = alg->cra_priority + 100;
	inst->alg.cra_blocksize = alg->cra_blocksize;
	inst->alg.cra_alignmask = alg->cra_alignmask;

	return 0;
}

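/*
 * Build a pcrypt AEAD instance around the algorithm named in the template
 * parameters: allocate one padata shell per direction, grab the underlying
 * AEAD and register the wrapping instance.
 */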
static int pcrypt_create_aead(struct crypto_template *tmpl, struct rtattr **tb,
			      u32 type, u32 mask)
{
	struct pcrypt_instance_ctx *ctx;
	struct crypto_attr_type *algt;
	struct aead_instance *inst;
	struct aead_alg *alg;
	const char *name;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	name = crypto_attr_alg_name(tb[1]);
	if (IS_ERR(name))
		return PTR_ERR(name);

	inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	err = -ENOMEM;

	ctx = aead_instance_ctx(inst);
	ctx->psenc = padata_alloc_shell(pencrypt);
	if (!ctx->psenc)
		goto out_free_inst;

	ctx->psdec = padata_alloc_shell(pdecrypt);
	if (!ctx->psdec)
		goto out_free_psenc;

	crypto_set_aead_spawn(&ctx->spawn, aead_crypto_instance(inst));

	err = crypto_grab_aead(&ctx->spawn, name, 0, 0);
	if (err)
		goto out_free_psdec;

	alg = crypto_spawn_aead_alg(&ctx->spawn);
	err = pcrypt_init_instance(aead_crypto_instance(inst), &alg->base);
	if (err)
		goto out_drop_aead;

	inst->alg.base.cra_flags = CRYPTO_ALG_ASYNC;

	inst->alg.ivsize = crypto_aead_alg_ivsize(alg);
	inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg);

	inst->alg.base.cra_ctxsize = sizeof(struct pcrypt_aead_ctx);

	inst->alg.init = pcrypt_aead_init_tfm;
	inst->alg.exit = pcrypt_aead_exit_tfm;

	inst->alg.setkey = pcrypt_aead_setkey;
	inst->alg.setauthsize = pcrypt_aead_setauthsize;
	inst->alg.encrypt = pcrypt_aead_encrypt;
	inst->alg.decrypt = pcrypt_aead_decrypt;

	inst->free = pcrypt_free;

	err = aead_register_instance(tmpl, inst);
	if (err)
		goto out_drop_aead;

out:
	return err;

out_drop_aead:
	crypto_drop_aead(&ctx->spawn);
out_free_psdec:
	padata_free_shell(ctx->psdec);
out_free_psenc:
	padata_free_shell(ctx->psenc);
out_free_inst:
	kfree(inst);
	goto out;
}

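/* Template entry point: only AEAD algorithms are supported. */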
static int pcrypt_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) {
	case CRYPTO_ALG_TYPE_AEAD:
		return pcrypt_create_aead(tmpl, tb, algt->type, algt->mask);
	}

	return -EINVAL;
}

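/* Add a padata instance to the pcrypt kset and emit a KOBJ_ADD uevent. */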
static int pcrypt_sysfs_add(struct padata_instance *pinst, const char *name)
{
	int ret;

	pinst->kobj.kset = pcrypt_kset;
	ret = kobject_add(&pinst->kobj, NULL, "%s", name);
	if (!ret)
		kobject_uevent(&pinst->kobj, KOBJ_ADD);

	return ret;
}

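/* Allocate a padata instance and hook it up in sysfs. */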
static int pcrypt_init_padata(struct padata_instance **pinst, const char *name)
{
	int ret = -ENOMEM;

	*pinst = padata_alloc_possible(name);
	if (!*pinst)
		return ret;

	ret = pcrypt_sysfs_add(*pinst, name);
	if (ret)
		padata_free(*pinst);

	return ret;
}

static void pcrypt_fini_padata(struct padata_instance *pinst)
{
	padata_stop(pinst);
	padata_free(pinst);
}

static struct crypto_template pcrypt_tmpl = {
	.name = "pcrypt",
	.create = pcrypt_create,
	.module = THIS_MODULE,
};

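/*
 * Module init: create the sysfs kset, set up the pencrypt and pdecrypt
 * padata instances, start them and register the "pcrypt" template.
 */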
static int __init pcrypt_init(void)
{
	int err = -ENOMEM;

	pcrypt_kset = kset_create_and_add("pcrypt", NULL, kernel_kobj);
	if (!pcrypt_kset)
		goto err;

	err = pcrypt_init_padata(&pencrypt, "pencrypt");
	if (err)
		goto err_unreg_kset;

	err = pcrypt_init_padata(&pdecrypt, "pdecrypt");
	if (err)
		goto err_deinit_pencrypt;

	padata_start(pencrypt);
	padata_start(pdecrypt);

	return crypto_register_template(&pcrypt_tmpl);

err_deinit_pencrypt:
	pcrypt_fini_padata(pencrypt);
err_unreg_kset:
	kset_unregister(pcrypt_kset);
err:
	return err;
}

static void __exit pcrypt_exit(void)
{
	crypto_unregister_template(&pcrypt_tmpl);

	pcrypt_fini_padata(pencrypt);
	pcrypt_fini_padata(pdecrypt);

	kset_unregister(pcrypt_kset);
}

subsys_initcall(pcrypt_init);
module_exit(pcrypt_exit);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Steffen Klassert <steffen.klassert@secunet.com>");
MODULE_DESCRIPTION("Parallel crypto wrapper");
MODULE_ALIAS_CRYPTO("pcrypt");