// SPDX-License-Identifier: GPL-2.0
/*
 * NEON-accelerated implementation of Speck128-XTS and Speck64-XTS
 *
 * Copyright (c) 2018 Google, Inc
 *
 * Note: the NIST recommendation for XTS only specifies a 128-bit block size,
 * but a 64-bit version (needed for Speck64) is fairly straightforward; the math
 * is just done in GF(2^64) instead of GF(2^128), with the reducing polynomial
 * x^64 + x^4 + x^3 + x + 1 from the original XEX paper (Rogaway, 2004:
 * "Efficient Instantiations of Tweakable Blockciphers and Refinements to Modes
 * OCB and PMAC"), represented as 0x1B.
 */

#include <asm/hwcap.h>
#include <asm/neon.h>
#include <asm/simd.h>
#include <crypto/algapi.h>
#include <crypto/gf128mul.h>
#include <crypto/speck.h>
#include <crypto/xts.h>
#include <linux/kernel.h>
#include <linux/module.h>

/* The assembly functions only handle multiples of 128 bytes */
#define SPECK_NEON_CHUNK_SIZE	128

/* Speck128 */

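/*
 * As required by XTS, two independent keys are used: the "main" key encrypts
 * the data itself, while the "tweak" key encrypts the IV to produce the
 * initial tweak.
 */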
struct speck128_xts_tfm_ctx {
	struct speck128_tfm_ctx main_key;
	struct speck128_tfm_ctx tweak_key;
};

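/*
 * Bulk encryption/decryption entry points, implemented in NEON assembly
 * (speck-neon-core.S).  Each call processes 'nbytes' (a multiple of 128
 * bytes) and advances the tweak in place.
 */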
asmlinkage void speck128_xts_encrypt_neon(const u64 *round_keys, int nrounds,
					  void *dst, const void *src,
					  unsigned int nbytes, void *tweak);

asmlinkage void speck128_xts_decrypt_neon(const u64 *round_keys, int nrounds,
					  void *dst, const void *src,
					  unsigned int nbytes, void *tweak);

typedef void (*speck128_crypt_one_t)(const struct speck128_tfm_ctx *,
				     u8 *, const u8 *);
typedef void (*speck128_xts_crypt_many_t)(const u64 *, int, void *,
					  const void *, unsigned int, void *);

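/*
 * Shared by the encrypt and decrypt paths: encrypt the IV with the tweak key
 * to get the initial tweak, then walk the scatterlists, handing 128-byte
 * chunks to the NEON code and falling back to one block at a time otherwise.
 */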
static __always_inline int
__speck128_xts_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		     struct scatterlist *src, unsigned int nbytes,
		     speck128_crypt_one_t crypt_one,
		     speck128_xts_crypt_many_t crypt_many)
{
	struct crypto_blkcipher *tfm = desc->tfm;
	const struct speck128_xts_tfm_ctx *ctx = crypto_blkcipher_ctx(tfm);
	struct blkcipher_walk walk;
	le128 tweak;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt_block(desc, &walk, SPECK_NEON_CHUNK_SIZE);

	crypto_speck128_encrypt(&ctx->tweak_key, (u8 *)&tweak, walk.iv);

	while (walk.nbytes > 0) {
		unsigned int nbytes = walk.nbytes;
		u8 *dst = walk.dst.virt.addr;
		const u8 *src = walk.src.virt.addr;

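		/*
		 * Process as many full 128-byte chunks as possible with the
		 * NEON code.  may_use_simd() checks that NEON is usable in
		 * the current context, and kernel_neon_begin()/
		 * kernel_neon_end() bracket the region in which the kernel
		 * may touch the NEON registers.
		 */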
		if (nbytes >= SPECK_NEON_CHUNK_SIZE && may_use_simd()) {
			unsigned int count;

			count = round_down(nbytes, SPECK_NEON_CHUNK_SIZE);
			kernel_neon_begin();
			(*crypt_many)(ctx->main_key.round_keys,
				      ctx->main_key.nrounds,
				      dst, src, count, &tweak);
			kernel_neon_end();
			dst += count;
			src += count;
			nbytes -= count;
		}

		/* Handle any remainder with generic code */
		while (nbytes >= sizeof(tweak)) {
			le128_xor((le128 *)dst, (const le128 *)src, &tweak);
			(*crypt_one)(&ctx->main_key, dst, dst);
			le128_xor((le128 *)dst, (const le128 *)dst, &tweak);
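			/* Advance the tweak: multiply by x in GF(2^128) */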
			gf128mul_x_ble(&tweak, &tweak);

			dst += sizeof(tweak);
			src += sizeof(tweak);
			nbytes -= sizeof(tweak);
		}
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}

	return err;
}

static int speck128_xts_encrypt(struct blkcipher_desc *desc,
				struct scatterlist *dst,
				struct scatterlist *src,
				unsigned int nbytes)
{
	return __speck128_xts_crypt(desc, dst, src, nbytes,
				    crypto_speck128_encrypt,
				    speck128_xts_encrypt_neon);
}

static int speck128_xts_decrypt(struct blkcipher_desc *desc,
				struct scatterlist *dst,
				struct scatterlist *src,
				unsigned int nbytes)
{
	return __speck128_xts_crypt(desc, dst, src, nbytes,
				    crypto_speck128_decrypt,
				    speck128_xts_decrypt_neon);
}

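/*
 * The XTS key is the concatenation of two equal-length Speck keys: the first
 * half becomes the main key and the second half the tweak key, so odd key
 * lengths are rejected.
 */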
static int speck128_xts_setkey(struct crypto_tfm *tfm, const u8 *key,
			       unsigned int keylen)
{
	struct speck128_xts_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
	int err;

	if (keylen % 2)
		return -EINVAL;

	keylen /= 2;

	err = crypto_speck128_setkey(&ctx->main_key, key, keylen);
	if (err)
		return err;

	return crypto_speck128_setkey(&ctx->tweak_key, key + keylen, keylen);
}

/* Speck64 */

struct speck64_xts_tfm_ctx {
	struct speck64_tfm_ctx main_key;
	struct speck64_tfm_ctx tweak_key;
};

asmlinkage void speck64_xts_encrypt_neon(const u32 *round_keys, int nrounds,
					 void *dst, const void *src,
					 unsigned int nbytes, void *tweak);

asmlinkage void speck64_xts_decrypt_neon(const u32 *round_keys, int nrounds,
					 void *dst, const void *src,
					 unsigned int nbytes, void *tweak);

typedef void (*speck64_crypt_one_t)(const struct speck64_tfm_ctx *,
				    u8 *, const u8 *);
typedef void (*speck64_xts_crypt_many_t)(const u32 *, int, void *,
					 const void *, unsigned int, void *);

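/*
 * Identical in structure to __speck128_xts_crypt() above, but with 64-bit
 * blocks: the tweak is a single __le64 and the tweak arithmetic is done in
 * GF(2^64).
 */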
static __always_inline int
__speck64_xts_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		    struct scatterlist *src, unsigned int nbytes,
		    speck64_crypt_one_t crypt_one,
		    speck64_xts_crypt_many_t crypt_many)
{
	struct crypto_blkcipher *tfm = desc->tfm;
	const struct speck64_xts_tfm_ctx *ctx = crypto_blkcipher_ctx(tfm);
	struct blkcipher_walk walk;
	__le64 tweak;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt_block(desc, &walk, SPECK_NEON_CHUNK_SIZE);

	crypto_speck64_encrypt(&ctx->tweak_key, (u8 *)&tweak, walk.iv);

	while (walk.nbytes > 0) {
		unsigned int nbytes = walk.nbytes;
		u8 *dst = walk.dst.virt.addr;
		const u8 *src = walk.src.virt.addr;

		if (nbytes >= SPECK_NEON_CHUNK_SIZE && may_use_simd()) {
			unsigned int count;

			count = round_down(nbytes, SPECK_NEON_CHUNK_SIZE);
			kernel_neon_begin();
			(*crypt_many)(ctx->main_key.round_keys,
				      ctx->main_key.nrounds,
				      dst, src, count, &tweak);
			kernel_neon_end();
			dst += count;
			src += count;
			nbytes -= count;
		}

		/* Handle any remainder with generic code */
		while (nbytes >= sizeof(tweak)) {
			*(__le64 *)dst = *(__le64 *)src ^ tweak;
			(*crypt_one)(&ctx->main_key, dst, dst);
			*(__le64 *)dst ^= tweak;
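			/*
			 * Advance the tweak: multiply by x in GF(2^64),
			 * reducing by x^64 + x^4 + x^3 + x + 1 (0x1B) when
			 * the high bit shifts out.
			 */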
			tweak = cpu_to_le64((le64_to_cpu(tweak) << 1) ^
					    ((tweak & cpu_to_le64(1ULL << 63)) ?
					     0x1B : 0));
			dst += sizeof(tweak);
			src += sizeof(tweak);
			nbytes -= sizeof(tweak);
		}
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}

	return err;
}

static int speck64_xts_encrypt(struct blkcipher_desc *desc,
			       struct scatterlist *dst, struct scatterlist *src,
			       unsigned int nbytes)
{
	return __speck64_xts_crypt(desc, dst, src, nbytes,
				   crypto_speck64_encrypt,
				   speck64_xts_encrypt_neon);
}

static int speck64_xts_decrypt(struct blkcipher_desc *desc,
			       struct scatterlist *dst, struct scatterlist *src,
			       unsigned int nbytes)
{
	return __speck64_xts_crypt(desc, dst, src, nbytes,
				   crypto_speck64_decrypt,
				   speck64_xts_decrypt_neon);
}

static int speck64_xts_setkey(struct crypto_tfm *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct speck64_xts_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
	int err;

	if (keylen % 2)
		return -EINVAL;

	keylen /= 2;

	err = crypto_speck64_setkey(&ctx->main_key, key, keylen);
	if (err)
		return err;

	return crypto_speck64_setkey(&ctx->tweak_key, key + keylen, keylen);
}

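/*
 * Both XTS algorithms are registered with the crypto API.  The priority of
 * 300 makes these implementations preferred over any lower-priority
 * implementation of the same algorithms, such as one built from the generic
 * xts template.
 */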
static struct crypto_alg speck_algs[] = {
	{
		.cra_name		= "xts(speck128)",
		.cra_driver_name	= "xts-speck128-neon",
		.cra_priority		= 300,
		.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
		.cra_blocksize		= SPECK128_BLOCK_SIZE,
		.cra_type		= &crypto_blkcipher_type,
		.cra_ctxsize		= sizeof(struct speck128_xts_tfm_ctx),
		.cra_alignmask		= 7,
		.cra_module		= THIS_MODULE,
		.cra_u = {
			.blkcipher = {
				.min_keysize		= 2 * SPECK128_128_KEY_SIZE,
				.max_keysize		= 2 * SPECK128_256_KEY_SIZE,
				.ivsize			= SPECK128_BLOCK_SIZE,
				.setkey			= speck128_xts_setkey,
				.encrypt		= speck128_xts_encrypt,
				.decrypt		= speck128_xts_decrypt,
			}
		}
	}, {
		.cra_name		= "xts(speck64)",
		.cra_driver_name	= "xts-speck64-neon",
		.cra_priority		= 300,
		.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
		.cra_blocksize		= SPECK64_BLOCK_SIZE,
		.cra_type		= &crypto_blkcipher_type,
		.cra_ctxsize		= sizeof(struct speck64_xts_tfm_ctx),
		.cra_alignmask		= 7,
		.cra_module		= THIS_MODULE,
		.cra_u = {
			.blkcipher = {
				.min_keysize		= 2 * SPECK64_96_KEY_SIZE,
				.max_keysize		= 2 * SPECK64_128_KEY_SIZE,
				.ivsize			= SPECK64_BLOCK_SIZE,
				.setkey			= speck64_xts_setkey,
				.encrypt		= speck64_xts_encrypt,
				.decrypt		= speck64_xts_decrypt,
			}
		}
	}
};

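/*
 * Register the algorithms only if the CPU actually supports NEON, as
 * reported by the ELF hwcaps.
 */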
static int __init speck_neon_module_init(void)
{
	if (!(elf_hwcap & HWCAP_NEON))
		return -ENODEV;
	return crypto_register_algs(speck_algs, ARRAY_SIZE(speck_algs));
}

static void __exit speck_neon_module_exit(void)
{
	crypto_unregister_algs(speck_algs, ARRAY_SIZE(speck_algs));
}

module_init(speck_neon_module_init);
module_exit(speck_neon_module_exit);

MODULE_DESCRIPTION("Speck block cipher (NEON-accelerated)");
MODULE_LICENSE("GPL");
MODULE_AUTHOR("Eric Biggers <ebiggers@google.com>");
MODULE_ALIAS_CRYPTO("xts(speck128)");
MODULE_ALIAS_CRYPTO("xts-speck128-neon");
MODULE_ALIAS_CRYPTO("xts(speck64)");
MODULE_ALIAS_CRYPTO("xts-speck64-neon");