/*
 * Fallback for sync aes(ctr) in contexts where kernel mode NEON
 * is not allowed
 *
 * Copyright (C) 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <crypto/aes.h>
#include <crypto/internal/skcipher.h>

asmlinkage void __aes_arm64_encrypt(u32 *rk, u8 *out, const u8 *in, int rounds);
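/*
 * Note: __aes_arm64_encrypt() above is the generic scalar AES core
 * (arm64 assembly, aes-cipher-core.S). It never touches the NEON
 * register file, which is what makes this fallback safe in contexts
 * where kernel mode NEON is not allowed.
 */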

static inline int aes_ctr_encrypt_fallback(struct crypto_aes_ctx *ctx,
					   struct skcipher_request *req)
{
	struct skcipher_walk walk;
	u8 buf[AES_BLOCK_SIZE];
	int err;

	err = skcipher_walk_virt(&walk, req, true);
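
	/*
	 * skcipher_walk_virt() exposes the request as a series of
	 * virtually mapped chunks: each pass of this loop handles
	 * walk.nbytes bytes out of the walk.total still outstanding.
	 */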
	while (walk.nbytes > 0) {
		u8 *dst = walk.dst.virt.addr;
		u8 *src = walk.src.virt.addr;
		int nbytes = walk.nbytes;
		int tail = 0;
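
		/*
		 * All but the last chunk must be processed in whole
		 * blocks; round down and give the sub-block remainder
		 * back to the walk (as 'tail') so it reappears at the
		 * start of the next chunk.
		 */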
		if (nbytes < walk.total) {
			nbytes = round_down(nbytes, AES_BLOCK_SIZE);
			tail = walk.nbytes % AES_BLOCK_SIZE;
		}
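
		/*
		 * CTR keystream: encrypt the counter in walk.iv into
		 * buf, XOR it into the data, then advance the
		 * big-endian counter for the next block.
		 */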
		do {
			int bsize = min(nbytes, AES_BLOCK_SIZE);
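
			/*
			 * 6 + key_length / 4 yields the AES round
			 * count: 10/12/14 for 128/192/256-bit keys.
			 */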
			__aes_arm64_encrypt(ctx->key_enc, buf, walk.iv,
					    6 + ctx->key_length / 4);
			crypto_xor_cpy(dst, src, buf, bsize);
			crypto_inc(walk.iv, AES_BLOCK_SIZE);

			dst += AES_BLOCK_SIZE;
			src += AES_BLOCK_SIZE;
			nbytes -= AES_BLOCK_SIZE;
		} while (nbytes > 0);
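
		/*
		 * Report 'tail' bytes as not yet processed so the
		 * walk hands them to us again in the next iteration.
		 */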
		err = skcipher_walk_done(&walk, tail);
	}
	return err;
}
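
/*
 * Usage sketch (not part of this header): a synchronous skcipher
 * would typically take its NEON path when allowed and divert here
 * otherwise. ctr_encrypt() below stands in for the caller's own
 * NEON-accelerated routine and is assumed to exist.
 *
 *	static int ctr_encrypt_sync(struct skcipher_request *req)
 *	{
 *		struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
 *		struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
 *
 *		if (!may_use_simd())
 *			return aes_ctr_encrypt_fallback(ctx, req);
 *
 *		return ctr_encrypt(req);
 *	}
 */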