// SPDX-License-Identifier: GPL-2.0-only
/*
 * sha2-ce-glue.c - SHA-224/SHA-256 using ARMv8 Crypto Extensions
 *
 * Copyright (C) 2014 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#include <asm/neon.h>
#include <asm/simd.h>
#include <asm/unaligned.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <crypto/sha.h>
#include <crypto/sha256_base.h>
#include <linux/cpufeature.h>
#include <linux/crypto.h>
#include <linux/module.h>

MODULE_DESCRIPTION("SHA-224/SHA-256 secure hash using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("sha224");
MODULE_ALIAS_CRYPTO("sha256");

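/*
 * Wrap the generic sha256_state in a structure that carries an additional
 * 'finalize' flag, which tells the assembly code whether it should append
 * the final padding block itself.
 */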
struct sha256_ce_state {
	struct sha256_state	sst;
	u32			finalize;
};

asmlinkage void sha2_ce_transform(struct sha256_ce_state *sst, u8 const *src,
				  int blocks);

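/*
 * Adapt the prototype used by the sha256_base helpers, which pass a plain
 * struct sha256_state, to that of the assembly routine, which expects the
 * enclosing sha256_ce_state.
 */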
static void __sha2_ce_transform(struct sha256_state *sst, u8 const *src,
				int blocks)
{
	return sha2_ce_transform(container_of(sst, struct sha256_ce_state, sst),
				 src, blocks);
}

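/*
 * Byte offsets of the count and finalize fields, for use by the assembly
 * code.
 */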
const u32 sha256_ce_offsetof_count = offsetof(struct sha256_ce_state,
					      sst.count);
const u32 sha256_ce_offsetof_finalize = offsetof(struct sha256_ce_state,
						 finalize);

asmlinkage void sha256_block_data_order(u32 *digest, u8 const *src, int blocks);

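/*
 * Fallback wrapper around the plain (non-Crypto Extensions) assembly
 * implementation, used when the SIMD unit may not be used in the current
 * context.
 */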
static void __sha256_block_data_order(struct sha256_state *sst, u8 const *src,
				      int blocks)
{
	return sha256_block_data_order(sst->state, src, blocks);
}

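/*
 * If the SIMD unit is not usable in the current context, fall back to the
 * plain assembly implementation; otherwise run the Crypto Extensions
 * transform with the NEON unit enabled for kernel use.
 */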
static int sha256_ce_update(struct shash_desc *desc, const u8 *data,
			    unsigned int len)
{
	struct sha256_ce_state *sctx = shash_desc_ctx(desc);

	if (!crypto_simd_usable())
		return sha256_base_do_update(desc, data, len,
				__sha256_block_data_order);

	sctx->finalize = 0;
	kernel_neon_begin();
	sha256_base_do_update(desc, data, len, __sha2_ce_transform);
	kernel_neon_end();

	return 0;
}

static int sha256_ce_finup(struct shash_desc *desc, const u8 *data,
			   unsigned int len, u8 *out)
{
	struct sha256_ce_state *sctx = shash_desc_ctx(desc);
	bool finalize = !sctx->sst.count && !(len % SHA256_BLOCK_SIZE) && len;

	if (!crypto_simd_usable()) {
		if (len)
			sha256_base_do_update(desc, data, len,
				__sha256_block_data_order);
		sha256_base_do_finalize(desc, __sha256_block_data_order);
		return sha256_base_finish(desc, out);
	}

	/*
	 * Allow the asm code to perform the finalization if there is no
	 * partial data and the input is a round multiple of the block size.
	 */
	sctx->finalize = finalize;

	kernel_neon_begin();
	sha256_base_do_update(desc, data, len, __sha2_ce_transform);
	if (!finalize)
		sha256_base_do_finalize(desc, __sha2_ce_transform);
	kernel_neon_end();
	return sha256_base_finish(desc, out);
}

static int sha256_ce_final(struct shash_desc *desc, u8 *out)
{
	struct sha256_ce_state *sctx = shash_desc_ctx(desc);

	if (!crypto_simd_usable()) {
		sha256_base_do_finalize(desc, __sha256_block_data_order);
		return sha256_base_finish(desc, out);
	}

	sctx->finalize = 0;
	kernel_neon_begin();
	sha256_base_do_finalize(desc, __sha2_ce_transform);
	kernel_neon_end();
	return sha256_base_finish(desc, out);
}

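/*
 * SHA-224 and SHA-256 share the same block transform; the two algorithms
 * differ only in their initial state and digest size.
 */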
static struct shash_alg algs[] = { {
	.init			= sha224_base_init,
	.update			= sha256_ce_update,
	.final			= sha256_ce_final,
	.finup			= sha256_ce_finup,
	.descsize		= sizeof(struct sha256_ce_state),
	.digestsize		= SHA224_DIGEST_SIZE,
	.base			= {
		.cra_name		= "sha224",
		.cra_driver_name	= "sha224-ce",
		.cra_priority		= 200,
		.cra_blocksize		= SHA256_BLOCK_SIZE,
		.cra_module		= THIS_MODULE,
	}
}, {
	.init			= sha256_base_init,
	.update			= sha256_ce_update,
	.final			= sha256_ce_final,
	.finup			= sha256_ce_finup,
	.descsize		= sizeof(struct sha256_ce_state),
	.digestsize		= SHA256_DIGEST_SIZE,
	.base			= {
		.cra_name		= "sha256",
		.cra_driver_name	= "sha256-ce",
		.cra_priority		= 200,
		.cra_blocksize		= SHA256_BLOCK_SIZE,
		.cra_module		= THIS_MODULE,
	}
} };

static int __init sha2_ce_mod_init(void)
{
	return crypto_register_shashes(algs, ARRAY_SIZE(algs));
}

static void __exit sha2_ce_mod_fini(void)
{
	crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
}

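/*
 * Register the algorithms only on CPUs that implement the SHA-2 Crypto
 * Extensions.
 */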
module_cpu_feature_match(SHA2, sha2_ce_mod_init);
module_exit(sha2_ce_mod_fini);