/*
 * aes-ce-ccm-glue.c - AES-CCM transform for ARMv8 with Crypto Extensions
 *
 * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <asm/neon.h>
#include <asm/simd.h>
#include <asm/unaligned.h>
#include <crypto/aes.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>
#include <linux/module.h>

#include "aes-ce-setkey.h"

static int num_rounds(struct crypto_aes_ctx *ctx)
{
	/*
	 * # of rounds specified by AES:
	 * 128 bit key		10 rounds
	 * 192 bit key		12 rounds
	 * 256 bit key		14 rounds
	 * => n byte key	=> 6 + (n/4) rounds
	 */
	return 6 + ctx->key_length / 4;
}
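
/*
 * Worked example of the formula above: AES-128 uses a 16-byte key, so
 * num_rounds() returns 6 + 16/4 = 10; AES-256 (32-byte key) gives
 * 6 + 32/4 = 14, matching FIPS-197.
 */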

asmlinkage void ce_aes_ccm_auth_data(u8 mac[], u8 const in[], u32 abytes,
				     u32 *macp, u32 const rk[], u32 rounds);

asmlinkage void ce_aes_ccm_encrypt(u8 out[], u8 const in[], u32 cbytes,
				   u32 const rk[], u32 rounds, u8 mac[],
				   u8 ctr[]);

asmlinkage void ce_aes_ccm_decrypt(u8 out[], u8 const in[], u32 cbytes,
				   u32 const rk[], u32 rounds, u8 mac[],
				   u8 ctr[]);

asmlinkage void ce_aes_ccm_final(u8 mac[], u8 const ctr[], u32 const rk[],
				 u32 rounds);

asmlinkage void __aes_arm64_encrypt(u32 *rk, u8 *out, const u8 *in, int rounds);
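
/*
 * The ce_aes_ccm_* helpers above are implemented in assembly using the
 * ARMv8 Crypto Extensions (in mainline, aes-ce-ccm-core.S), while
 * __aes_arm64_encrypt is the generic scalar AES cipher used as a
 * fallback when the NEON unit cannot be used.
 */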

static int ccm_setkey(struct crypto_aead *tfm, const u8 *in_key,
		      unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(tfm);
	int ret;

	ret = ce_aes_expandkey(ctx, in_key, key_len);
	if (!ret)
		return 0;

	tfm->base.crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
	return -EINVAL;
}

static int ccm_setauthsize(struct crypto_aead *tfm, unsigned int authsize)
{
	if ((authsize & 1) || authsize < 4)
		return -EINVAL;
	return 0;
}
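
/*
 * Together with .maxauthsize = AES_BLOCK_SIZE below, this restricts the
 * tag to the even sizes 4, 6, 8, 10, 12, 14 or 16 bytes that CCM
 * (RFC 3610/NIST SP 800-38C) permits.
 */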

static int ccm_init_mac(struct aead_request *req, u8 maciv[], u32 msglen)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	__be32 *n = (__be32 *)&maciv[AES_BLOCK_SIZE - 8];
	u32 l = req->iv[0] + 1;

	/* verify that CCM dimension 'L' is set correctly in the IV */
	if (l < 2 || l > 8)
		return -EINVAL;

	/* verify that msglen can in fact be represented in L bytes */
	if (l < 4 && msglen >> (8 * l))
		return -EOVERFLOW;

	/*
	 * Even if the CCM spec allows L values of up to 8, the Linux cryptoapi
	 * uses a u32 type to represent msglen so the top 4 bytes are always 0.
	 */
	n[0] = 0;
	n[1] = cpu_to_be32(msglen);

	memcpy(maciv, req->iv, AES_BLOCK_SIZE - l);

	/*
	 * Meaning of byte 0 according to CCM spec (RFC 3610/NIST 800-38C)
	 * - bits 0..2	: max # of bytes required to represent msglen, minus 1
	 *                (already set by caller)
	 * - bits 3..5	: size of auth tag (1 => 4 bytes, 2 => 6 bytes, etc)
	 * - bit 6	: indicates presence of authenticate-only data
	 */
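	/*
	 * Worked example: an 8-byte tag stores (8 - 2) / 2 = 3 in bits 3..5;
	 * (authsize - 2) << 2 below computes the same value shifted into
	 * place, since ((authsize - 2) / 2) << 3 == (authsize - 2) << 2 for
	 * the even tag sizes enforced by ccm_setauthsize().
	 */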
	maciv[0] |= (crypto_aead_authsize(aead) - 2) << 2;
	if (req->assoclen)
		maciv[0] |= 0x40;

	memset(&req->iv[AES_BLOCK_SIZE - l], 0, l);
	return 0;
}

static void ccm_update_mac(struct crypto_aes_ctx *key, u8 mac[], u8 const in[],
			   u32 abytes, u32 *macp, bool use_neon)
{
	if (likely(use_neon)) {
		ce_aes_ccm_auth_data(mac, in, abytes, macp, key->key_enc,
				     num_rounds(key));
	} else {
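		/*
		 * Scalar fallback: fold the data into the CBC-MAC by hand,
		 * carrying a partially filled MAC block across calls via
		 * *macp.
		 */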
		if (*macp > 0 && *macp < AES_BLOCK_SIZE) {
			int added = min(abytes, AES_BLOCK_SIZE - *macp);

			crypto_xor(&mac[*macp], in, added);

			*macp += added;
			in += added;
			abytes -= added;
		}

		while (abytes >= AES_BLOCK_SIZE) {
			__aes_arm64_encrypt(key->key_enc, mac, mac,
					    num_rounds(key));
			crypto_xor(mac, in, AES_BLOCK_SIZE);

			in += AES_BLOCK_SIZE;
			abytes -= AES_BLOCK_SIZE;
		}

		if (abytes > 0) {
			__aes_arm64_encrypt(key->key_enc, mac, mac,
					    num_rounds(key));
			crypto_xor(mac, in, abytes);
			*macp = abytes;
		}
	}
}

static void ccm_calculate_auth_mac(struct aead_request *req, u8 mac[],
				   bool use_neon)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	struct __packed { __be16 l; __be32 h; u16 len; } ltag;
	struct scatter_walk walk;
	u32 len = req->assoclen;
	u32 macp = 0;

	/* prepend the AAD with a length tag */
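	/*
	 * Per RFC 3610 section 2.2: AAD lengths below 0xff00 are encoded in
	 * two big-endian bytes; larger ones use the marker 0xfffe followed
	 * by a four-byte length. The eight-byte form (marker 0xffff) cannot
	 * occur here because assoclen is a u32.
	 */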
	if (len < 0xff00) {
		ltag.l = cpu_to_be16(len);
		ltag.len = 2;
	} else {
		ltag.l = cpu_to_be16(0xfffe);
		put_unaligned_be32(len, &ltag.h);
		ltag.len = 6;
	}

	ccm_update_mac(ctx, mac, (u8 *)&ltag, ltag.len, &macp, use_neon);
	scatterwalk_start(&walk, req->src);

	do {
		u32 n = scatterwalk_clamp(&walk, len);
		u8 *p;

		if (!n) {
			scatterwalk_start(&walk, sg_next(walk.sg));
			n = scatterwalk_clamp(&walk, len);
		}
		p = scatterwalk_map(&walk);
		ccm_update_mac(ctx, mac, p, n, &macp, use_neon);
		len -= n;

		scatterwalk_unmap(p);
		scatterwalk_advance(&walk, n);
		scatterwalk_done(&walk, 0, len);
	} while (len);
}

static int ccm_crypt_fallback(struct skcipher_walk *walk, u8 mac[], u8 iv0[],
			      struct crypto_aes_ctx *ctx, bool enc)
{
	u8 buf[AES_BLOCK_SIZE];
	int err = 0;

	while (walk->nbytes) {
		int blocks = walk->nbytes / AES_BLOCK_SIZE;
		u32 tail = walk->nbytes % AES_BLOCK_SIZE;
		u8 *dst = walk->dst.virt.addr;
		u8 *src = walk->src.virt.addr;
		u32 nbytes = walk->nbytes;

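		/*
		 * On the final chunk, absorb the trailing partial block in
		 * this pass (with a short bsize) instead of handing it back
		 * to the walker.
		 */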
		if (nbytes == walk->total && tail > 0) {
			blocks++;
			tail = 0;
		}

		do {
			u32 bsize = AES_BLOCK_SIZE;

			if (nbytes < AES_BLOCK_SIZE)
				bsize = nbytes;

			crypto_inc(walk->iv, AES_BLOCK_SIZE);
			__aes_arm64_encrypt(ctx->key_enc, buf, walk->iv,
					    num_rounds(ctx));
			__aes_arm64_encrypt(ctx->key_enc, mac, mac,
					    num_rounds(ctx));
			if (enc)
				crypto_xor(mac, src, bsize);
			crypto_xor_cpy(dst, src, buf, bsize);
			if (!enc)
				crypto_xor(mac, dst, bsize);
			dst += bsize;
			src += bsize;
			nbytes -= bsize;
		} while (--blocks);

		err = skcipher_walk_done(walk, tail);
	}

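	/*
	 * Final CCM step: run the last CBC-MAC round, then XOR in S0, the
	 * encryption of the initial counter block preserved in iv0.
	 */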
	if (!err) {
		__aes_arm64_encrypt(ctx->key_enc, buf, iv0, num_rounds(ctx));
		__aes_arm64_encrypt(ctx->key_enc, mac, mac, num_rounds(ctx));
		crypto_xor(mac, buf, AES_BLOCK_SIZE);
	}
	return err;
}

static int ccm_encrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	struct skcipher_walk walk;
	u8 __aligned(8) mac[AES_BLOCK_SIZE];
	u8 buf[AES_BLOCK_SIZE];
	u32 len = req->cryptlen;
	bool use_neon = may_use_simd();
	int err;

	err = ccm_init_mac(req, mac, len);
	if (err)
		return err;

	if (likely(use_neon))
		kernel_neon_begin();

	if (req->assoclen)
		ccm_calculate_auth_mac(req, mac, use_neon);

	/* preserve the original iv for the final round */
	memcpy(buf, req->iv, AES_BLOCK_SIZE);

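	/*
	 * The walk is started with atomic == true: when NEON is in use we
	 * are inside a kernel_neon_begin()/end() section, so the walk must
	 * not sleep.
	 */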
	err = skcipher_walk_aead_encrypt(&walk, req, true);

	if (likely(use_neon)) {
		while (walk.nbytes) {
			u32 tail = walk.nbytes % AES_BLOCK_SIZE;

			if (walk.nbytes == walk.total)
				tail = 0;

			ce_aes_ccm_encrypt(walk.dst.virt.addr,
					   walk.src.virt.addr,
					   walk.nbytes - tail, ctx->key_enc,
					   num_rounds(ctx), mac, walk.iv);

			err = skcipher_walk_done(&walk, tail);
		}
		if (!err)
			ce_aes_ccm_final(mac, buf, ctx->key_enc,
					 num_rounds(ctx));

		kernel_neon_end();
	} else {
		err = ccm_crypt_fallback(&walk, mac, buf, ctx, true);
	}
	if (err)
		return err;

	/* copy authtag to end of dst */
	scatterwalk_map_and_copy(mac, req->dst, req->assoclen + req->cryptlen,
				 crypto_aead_authsize(aead), 1);

	return 0;
}

static int ccm_decrypt(struct aead_request *req)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int authsize = crypto_aead_authsize(aead);
	struct skcipher_walk walk;
	u8 __aligned(8) mac[AES_BLOCK_SIZE];
	u8 buf[AES_BLOCK_SIZE];
	u32 len = req->cryptlen - authsize;
	bool use_neon = may_use_simd();
	int err;

	err = ccm_init_mac(req, mac, len);
	if (err)
		return err;

	if (likely(use_neon))
		kernel_neon_begin();

	if (req->assoclen)
		ccm_calculate_auth_mac(req, mac, use_neon);

	/* preserve the original iv for the final round */
	memcpy(buf, req->iv, AES_BLOCK_SIZE);

	err = skcipher_walk_aead_decrypt(&walk, req, true);

	if (likely(use_neon)) {
		while (walk.nbytes) {
			u32 tail = walk.nbytes % AES_BLOCK_SIZE;

			if (walk.nbytes == walk.total)
				tail = 0;

			ce_aes_ccm_decrypt(walk.dst.virt.addr,
					   walk.src.virt.addr,
					   walk.nbytes - tail, ctx->key_enc,
					   num_rounds(ctx), mac, walk.iv);

			err = skcipher_walk_done(&walk, tail);
		}
		if (!err)
			ce_aes_ccm_final(mac, buf, ctx->key_enc,
					 num_rounds(ctx));

		kernel_neon_end();
	} else {
		err = ccm_crypt_fallback(&walk, mac, buf, ctx, false);
	}

	if (err)
		return err;

	/* compare calculated auth tag with the stored one */
	scatterwalk_map_and_copy(buf, req->src,
				 req->assoclen + req->cryptlen - authsize,
				 authsize, 0);

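	/*
	 * crypto_memneq() compares the tags in constant time, so a mismatch
	 * does not leak the position of the first differing byte through
	 * timing.
	 */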
	if (crypto_memneq(mac, buf, authsize))
		return -EBADMSG;
	return 0;
}

static struct aead_alg ccm_aes_alg = {
	.base = {
		.cra_name		= "ccm(aes)",
		.cra_driver_name	= "ccm-aes-ce",
		.cra_priority		= 300,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.ivsize		= AES_BLOCK_SIZE,
	.chunksize	= AES_BLOCK_SIZE,
	.maxauthsize	= AES_BLOCK_SIZE,
	.setkey		= ccm_setkey,
	.setauthsize	= ccm_setauthsize,
	.encrypt	= ccm_encrypt,
	.decrypt	= ccm_decrypt,
};
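
/*
 * cra_priority 300 makes the crypto API prefer this driver over
 * lower-priority generic implementations of ccm(aes) when both are
 * available.
 */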

static int __init aes_mod_init(void)
{
	if (!(elf_hwcap & HWCAP_AES))
		return -ENODEV;
	return crypto_register_aead(&ccm_aes_alg);
}

static void __exit aes_mod_exit(void)
{
	crypto_unregister_aead(&ccm_aes_alg);
}

module_init(aes_mod_init);
module_exit(aes_mod_exit);

MODULE_DESCRIPTION("Synchronous AES in CCM mode using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("ccm(aes)");