1 /* SPDX-License-Identifier: GPL-2.0 */
2 /*
3 * Shared glue code for 128bit block ciphers
4 */
5
6 #ifndef _CRYPTO_GLUE_HELPER_H
7 #define _CRYPTO_GLUE_HELPER_H
8
9 #include <crypto/internal/skcipher.h>
10 #include <linux/kernel.h>
11 #include <asm/fpu/api.h>
12 #include <crypto/b128ops.h>
13
14 typedef void (*common_glue_func_t)(void *ctx, u8 *dst, const u8 *src);
15 typedef void (*common_glue_cbc_func_t)(void *ctx, u128 *dst, const u128 *src);
16 typedef void (*common_glue_ctr_func_t)(void *ctx, u128 *dst, const u128 *src,
17 le128 *iv);
18 typedef void (*common_glue_xts_func_t)(void *ctx, u128 *dst, const u128 *src,
19 le128 *iv);
20
21 #define GLUE_FUNC_CAST(fn) ((common_glue_func_t)(fn))
22 #define GLUE_CBC_FUNC_CAST(fn) ((common_glue_cbc_func_t)(fn))
23 #define GLUE_CTR_FUNC_CAST(fn) ((common_glue_ctr_func_t)(fn))
24 #define GLUE_XTS_FUNC_CAST(fn) ((common_glue_xts_func_t)(fn))
25
26 struct common_glue_func_entry {
27 unsigned int num_blocks; /* number of blocks that @fn will process */
28 union {
29 common_glue_func_t ecb;
30 common_glue_cbc_func_t cbc;
31 common_glue_ctr_func_t ctr;
32 common_glue_xts_func_t xts;
33 } fn_u;
34 };
35
36 struct common_glue_ctx {
37 unsigned int num_funcs;
38 int fpu_blocks_limit; /* -1 means fpu not needed at all */
39
40 /*
41 * First funcs entry must have largest num_blocks and last funcs entry
42 * must have num_blocks == 1!
43 */
44 struct common_glue_func_entry funcs[];
45 };
46
/*
 * Enable the FPU for SIMD processing when the remaining data is large enough
 * to amortize the kernel_fpu_begin()/kernel_fpu_end() overhead.
 *
 * @bsize:		cipher block size in bytes
 * @fpu_blocks_limit:	minimum number of blocks that justifies the FPU;
 *			negative means this cipher never needs the FPU
 * @walk:		current skcipher walk (made atomic while FPU is held)
 * @fpu_enabled:	current FPU state, as returned by a previous call
 * @nbytes:		bytes remaining in this walk step
 *
 * Returns the new fpu_enabled state; feed it back in on subsequent calls and
 * hand the final value to glue_fpu_end().
 */
static inline bool glue_fpu_begin(unsigned int bsize, int fpu_blocks_limit,
				  struct skcipher_walk *walk,
				  bool fpu_enabled, unsigned int nbytes)
{
	if (likely(fpu_blocks_limit < 0))
		return false;

	if (fpu_enabled)
		return true;

	/*
	 * Vector-registers are only used when chunk to be processed is large
	 * enough, so do not enable FPU until it is necessary.
	 */
	/* Cast is safe: fpu_blocks_limit is known non-negative here. */
	if (nbytes < bsize * (unsigned int)fpu_blocks_limit)
		return false;

	/* prevent sleeping if FPU is in use */
	skcipher_walk_atomise(walk);

	kernel_fpu_begin();
	return true;
}
70
/* Release the FPU if glue_fpu_begin() enabled it. */
static inline void glue_fpu_end(bool fpu_enabled)
{
	if (fpu_enabled)
		kernel_fpu_end();
}
76
/* Convert a 128-bit value from little-endian to big-endian representation. */
static inline void le128_to_be128(be128 *dst, const le128 *src)
{
	dst->a = cpu_to_be64(le64_to_cpu(src->a));
	dst->b = cpu_to_be64(le64_to_cpu(src->b));
}
82
/* Convert a 128-bit value from big-endian to little-endian representation. */
static inline void be128_to_le128(le128 *dst, const be128 *src)
{
	dst->a = cpu_to_le64(be64_to_cpu(src->a));
	dst->b = cpu_to_le64(be64_to_cpu(src->b));
}
88
/*
 * Increment a little-endian 128-bit counter by one, with carry from the
 * low 64-bit half (@b) into the high half (@a).
 */
static inline void le128_inc(le128 *i)
{
	u64 a = le64_to_cpu(i->a);
	u64 b = le64_to_cpu(i->b);

	b++;
	if (!b)		/* low half wrapped to zero: propagate carry */
		a++;

	i->a = cpu_to_le64(a);
	i->b = cpu_to_le64(b);
}
101
102 extern int glue_ecb_req_128bit(const struct common_glue_ctx *gctx,
103 struct skcipher_request *req);
104
105 extern int glue_cbc_encrypt_req_128bit(const common_glue_func_t fn,
106 struct skcipher_request *req);
107
108 extern int glue_cbc_decrypt_req_128bit(const struct common_glue_ctx *gctx,
109 struct skcipher_request *req);
110
111 extern int glue_ctr_req_128bit(const struct common_glue_ctx *gctx,
112 struct skcipher_request *req);
113
114 extern int glue_xts_req_128bit(const struct common_glue_ctx *gctx,
115 struct skcipher_request *req,
116 common_glue_func_t tweak_fn, void *tweak_ctx,
117 void *crypt_ctx, bool decrypt);
118
119 extern void glue_xts_crypt_128bit_one(void *ctx, u128 *dst, const u128 *src,
120 le128 *iv, common_glue_func_t fn);
121
122 #endif /* _CRYPTO_GLUE_HELPER_H */
123