// SPDX-License-Identifier: GPL-2.0-only
/*
 * sha2-ce-glue.c - SHA-224/SHA-256 using ARMv8 Crypto Extensions
 *
 * Copyright (C) 2014 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#include <asm/neon.h>
#include <asm/simd.h>
#include <asm/unaligned.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <crypto/sha2.h>
#include <crypto/sha256_base.h>
#include <linux/cpufeature.h>
#include <linux/crypto.h>
#include <linux/module.h>

MODULE_DESCRIPTION("SHA-224/SHA-256 secure hash using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("sha224");
MODULE_ALIAS_CRYPTO("sha256");

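/*
 * Driver-private descriptor state: the generic sha256_state plus a
 * 'finalize' flag telling the assembly code whether it should perform
 * the final padding and length encoding itself.
 */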
struct sha256_ce_state {
	struct sha256_state	sst;
	u32			finalize;
};

extern const u32 sha256_ce_offsetof_count;
extern const u32 sha256_ce_offsetof_finalize;

asmlinkage int __sha256_ce_transform(struct sha256_ce_state *sst, u8 const *src,
				     int blocks);

asmlinkage void __sha256_ce_finup2x(const struct sha256_state *sctx,
				    const u8 *data1, const u8 *data2, int len,
				    u8 out1[SHA256_DIGEST_SIZE],
				    u8 out2[SHA256_DIGEST_SIZE]);

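/*
 * Run 'blocks' blocks of input through the Crypto Extensions transform.
 * The assembly routine may stop early and return the number of blocks
 * still outstanding; the loop resubmits them, which keeps each
 * kernel-mode NEON section short.
 */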
static void sha256_ce_transform(struct sha256_state *sst, u8 const *src,
				int blocks)
{
	while (blocks) {
		int rem;

		kernel_neon_begin();
		rem = __sha256_ce_transform(container_of(sst,
							 struct sha256_ce_state,
							 sst), src, blocks);
		kernel_neon_end();
		src += (blocks - rem) * SHA256_BLOCK_SIZE;
		blocks = rem;
	}
}

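/*
 * Field offsets exported so the assembly code can locate the byte count
 * and the 'finalize' flag inside struct sha256_ce_state without
 * hardcoding them.
 */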
const u32 sha256_ce_offsetof_count = offsetof(struct sha256_ce_state,
					      sst.count);
const u32 sha256_ce_offsetof_finalize = offsetof(struct sha256_ce_state,
						 finalize);

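/*
 * Scalar (non-NEON) fallback, used when crypto_simd_usable() reports
 * that kernel-mode NEON cannot be used in the current context.
 */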
asmlinkage void sha256_block_data_order(u32 *digest, u8 const *src, int blocks);

static void sha256_arm64_transform(struct sha256_state *sst, u8 const *src,
				   int blocks)
{
	sha256_block_data_order(sst->state, src, blocks);
}

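/*
 * .update hook: absorb more input. Clears the 'finalize' flag, since an
 * update is by definition not the final operation.
 */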
static int sha256_ce_update(struct shash_desc *desc, const u8 *data,
			    unsigned int len)
{
	struct sha256_ce_state *sctx = shash_desc_ctx(desc);

	if (!crypto_simd_usable())
		return sha256_base_do_update(desc, data, len,
					     sha256_arm64_transform);

	sctx->finalize = 0;
	sha256_base_do_update(desc, data, len, sha256_ce_transform);

	return 0;
}

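/*
 * .finup hook: absorb the last chunk of input and produce the digest.
 * When no data has been absorbed yet and 'len' is a nonzero multiple of
 * the block size, the assembly code can perform the padding itself.
 */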
static int sha256_ce_finup(struct shash_desc *desc, const u8 *data,
			   unsigned int len, u8 *out)
{
	struct sha256_ce_state *sctx = shash_desc_ctx(desc);
	bool finalize = !sctx->sst.count && !(len % SHA256_BLOCK_SIZE) && len;

	if (!crypto_simd_usable()) {
		if (len)
			sha256_base_do_update(desc, data, len,
					      sha256_arm64_transform);
		sha256_base_do_finalize(desc, sha256_arm64_transform);
		return sha256_base_finish(desc, out);
	}

	/*
	 * Allow the asm code to perform the finalization if there is no
	 * partial data and the input is a round multiple of the block size.
	 */
	sctx->finalize = finalize;

	sha256_base_do_update(desc, data, len, sha256_ce_transform);
	if (!finalize)
		sha256_base_do_finalize(desc, sha256_ce_transform);
	return sha256_base_finish(desc, out);
}

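/* .final hook: emit the digest with no further input */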
static int sha256_ce_final(struct shash_desc *desc, u8 *out)
{
	struct sha256_ce_state *sctx = shash_desc_ctx(desc);

	if (!crypto_simd_usable()) {
		sha256_base_do_finalize(desc, sha256_arm64_transform);
		return sha256_base_finish(desc, out);
	}

	sctx->finalize = 0;
	sha256_base_do_finalize(desc, sha256_ce_transform);
	return sha256_base_finish(desc, out);
}

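/* .digest hook: one-shot hash, equivalent to init + finup */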
static int sha256_ce_digest(struct shash_desc *desc, const u8 *data,
			    unsigned int len, u8 *out)
{
	sha256_base_init(desc);
	return sha256_ce_finup(desc, data, len, out);
}

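/*
 * .finup_mb hook: finish hashing two equal-length messages at once.
 * Presumably the assembly code interleaves the two computations to make
 * better use of the pipeline than two sequential hashes would.
 */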
static int sha256_ce_finup_mb(struct shash_desc *desc,
			      const u8 * const data[], unsigned int len,
			      u8 * const outs[], unsigned int num_msgs)
{
	struct sha256_ce_state *sctx = shash_desc_ctx(desc);

	/*
	 * num_msgs != 2 should not happen here, since this algorithm sets
	 * mb_max_msgs=2, and the crypto API handles num_msgs <= 1 before
	 * calling into the algorithm's finup_mb method.
	 */
	if (WARN_ON_ONCE(num_msgs != 2))
		return -EOPNOTSUPP;

	if (unlikely(!crypto_simd_usable()))
		return -EOPNOTSUPP;

	/* __sha256_ce_finup2x() assumes SHA256_BLOCK_SIZE <= len <= INT_MAX. */
	if (unlikely(len < SHA256_BLOCK_SIZE || len > INT_MAX))
		return -EOPNOTSUPP;

	/* __sha256_ce_finup2x() assumes the following offsets. */
	BUILD_BUG_ON(offsetof(struct sha256_state, state) != 0);
	BUILD_BUG_ON(offsetof(struct sha256_state, count) != 32);
	BUILD_BUG_ON(offsetof(struct sha256_state, buf) != 40);

	kernel_neon_begin();
	__sha256_ce_finup2x(&sctx->sst, data[0], data[1], len, outs[0],
			    outs[1]);
	kernel_neon_end();
	return 0;
}

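/*
 * Export/import the partial state as a plain struct sha256_state, so it
 * stays interchangeable with other sha256 implementations. The
 * 'finalize' flag is driver-private and is reset on import.
 */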
static int sha256_ce_export(struct shash_desc *desc, void *out)
{
	struct sha256_ce_state *sctx = shash_desc_ctx(desc);

	memcpy(out, &sctx->sst, sizeof(struct sha256_state));
	return 0;
}

static int sha256_ce_import(struct shash_desc *desc, const void *in)
{
	struct sha256_ce_state *sctx = shash_desc_ctx(desc);

	memcpy(&sctx->sst, in, sizeof(struct sha256_state));
	sctx->finalize = 0;
	return 0;
}

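/*
 * The two algorithms exposed by this module. cra_priority 200 is meant
 * to rank these above lower-priority sha224/sha256 implementations,
 * such as the generic C one.
 */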
static struct shash_alg algs[] = { {
	.init			= sha224_base_init,
	.update			= sha256_ce_update,
	.final			= sha256_ce_final,
	.finup			= sha256_ce_finup,
	.export			= sha256_ce_export,
	.import			= sha256_ce_import,
	.descsize		= sizeof(struct sha256_ce_state),
	.statesize		= sizeof(struct sha256_state),
	.digestsize		= SHA224_DIGEST_SIZE,
	.base			= {
		.cra_name		= "sha224",
		.cra_driver_name	= "sha224-ce",
		.cra_priority		= 200,
		.cra_blocksize		= SHA256_BLOCK_SIZE,
		.cra_module		= THIS_MODULE,
	}
}, {
	.init			= sha256_base_init,
	.update			= sha256_ce_update,
	.final			= sha256_ce_final,
	.finup			= sha256_ce_finup,
	.digest			= sha256_ce_digest,
	.finup_mb		= sha256_ce_finup_mb,
	.export			= sha256_ce_export,
	.import			= sha256_ce_import,
	.descsize		= sizeof(struct sha256_ce_state),
	.mb_max_msgs		= 2,
	.statesize		= sizeof(struct sha256_state),
	.digestsize		= SHA256_DIGEST_SIZE,
	.base			= {
		.cra_name		= "sha256",
		.cra_driver_name	= "sha256-ce",
		.cra_priority		= 200,
		.cra_blocksize		= SHA256_BLOCK_SIZE,
		.cra_module		= THIS_MODULE,
	}
} };

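/*
 * Register the algorithms only on CPUs that advertise the SHA2 feature;
 * module_cpu_feature_match() also makes the module auto-loadable on
 * such systems.
 */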
static int __init sha2_ce_mod_init(void)
{
	return crypto_register_shashes(algs, ARRAY_SIZE(algs));
}

static void __exit sha2_ce_mod_fini(void)
{
	crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
}

module_cpu_feature_match(SHA2, sha2_ce_mod_init);
module_exit(sha2_ce_mod_fini);