/*
 * sha2-ce-glue.c - SHA-224/SHA-256 using ARMv8 Crypto Extensions
 *
 * Copyright (C) 2015 Linaro Ltd <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <crypto/internal/hash.h>
#include <crypto/sha.h>
#include <linux/crypto.h>
#include <linux/module.h>

#include <asm/hwcap.h>
#include <asm/simd.h>
#include <asm/neon.h>
#include <asm/unaligned.h>

MODULE_DESCRIPTION("SHA-224/SHA-256 secure hash using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");

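/*
 * Core transform, implemented in the companion assembly file
 * (sha2-ce-core.S): hashes 'blocks' full blocks from 'src' into 'state'
 * using the ARMv8 SHA-256 instructions. When 'head' is non-NULL, the
 * buffered partial block it points to is consumed before 'src'.
 */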
asmlinkage void sha2_ce_transform(int blocks, u8 const *src, u32 *state,
				  u8 *head);

static int sha224_init(struct shash_desc *desc)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);

	*sctx = (struct sha256_state){
		.state = {
			SHA224_H0, SHA224_H1, SHA224_H2, SHA224_H3,
			SHA224_H4, SHA224_H5, SHA224_H6, SHA224_H7,
		}
	};
	return 0;
}

static int sha256_init(struct shash_desc *desc)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);

	*sctx = (struct sha256_state){
		.state = {
			SHA256_H0, SHA256_H1, SHA256_H2, SHA256_H3,
			SHA256_H4, SHA256_H5, SHA256_H6, SHA256_H7,
		}
	};
	return 0;
}

static int sha2_update(struct shash_desc *desc, const u8 *data,
		       unsigned int len)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);
	unsigned int partial;

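	/*
	 * Kernel-mode NEON cannot be used in every calling context (e.g.
	 * from interrupt context); fall back to the generic C
	 * implementation when it is unavailable.
	 */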
	if (!may_use_simd())
		return crypto_sha256_update(desc, data, len);

	partial = sctx->count % SHA256_BLOCK_SIZE;
	sctx->count += len;

	if ((partial + len) >= SHA256_BLOCK_SIZE) {
		int blocks;

		if (partial) {
			int p = SHA256_BLOCK_SIZE - partial;

			memcpy(sctx->buf + partial, data, p);
			data += p;
			len -= p;
		}

		blocks = len / SHA256_BLOCK_SIZE;
		len %= SHA256_BLOCK_SIZE;

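		/*
		 * Kernel-mode NEON must be bracketed by kernel_neon_begin()/
		 * kernel_neon_end(); the assembly routine hashes the buffered
		 * head block (if any) followed by the full blocks in 'data'.
		 */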
		kernel_neon_begin();
		sha2_ce_transform(blocks, data, sctx->state,
				  partial ? sctx->buf : NULL);
		kernel_neon_end();

		data += blocks * SHA256_BLOCK_SIZE;
		partial = 0;
	}
	if (len)
		memcpy(sctx->buf + partial, data, len);
	return 0;
}

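/*
 * Finalize by appending the standard SHA-256 padding: a 0x80 byte, zeroes,
 * and the message length in bits as a 64-bit big-endian value, so that the
 * padded message is a whole number of blocks.
 */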
static void sha2_final(struct shash_desc *desc)
{
	static const u8 padding[SHA256_BLOCK_SIZE] = { 0x80, };

	struct sha256_state *sctx = shash_desc_ctx(desc);
	__be64 bits = cpu_to_be64(sctx->count << 3);
	u32 padlen = SHA256_BLOCK_SIZE
		     - ((sctx->count + sizeof(bits)) % SHA256_BLOCK_SIZE);

	sha2_update(desc, padding, padlen);
	sha2_update(desc, (const u8 *)&bits, sizeof(bits));
}

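/*
 * Write out the digest as big-endian words and wipe the descriptor state;
 * SHA-224 emits only the first seven state words (28 bytes).
 */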
static int sha224_final(struct shash_desc *desc, u8 *out)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);
	__be32 *dst = (__be32 *)out;
	int i;

	sha2_final(desc);

	for (i = 0; i < SHA224_DIGEST_SIZE / sizeof(__be32); i++)
		put_unaligned_be32(sctx->state[i], dst++);

	*sctx = (struct sha256_state){};
	return 0;
}

static int sha256_final(struct shash_desc *desc, u8 *out)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);
	__be32 *dst = (__be32 *)out;
	int i;

	sha2_final(desc);

	for (i = 0; i < SHA256_DIGEST_SIZE / sizeof(__be32); i++)
		put_unaligned_be32(sctx->state[i], dst++);

	*sctx = (struct sha256_state){};
	return 0;
}

static int sha2_export(struct shash_desc *desc, void *out)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);
	struct sha256_state *dst = out;

	*dst = *sctx;
	return 0;
}

static int sha2_import(struct shash_desc *desc, const void *in)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);
	struct sha256_state const *src = in;

	*sctx = *src;
	return 0;
}

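/*
 * Both algorithms share the update/export/import helpers and the sha256
 * state layout; a cra_priority of 300 makes these drivers preferred over
 * the lower-priority generic C implementations.
 */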
static struct shash_alg algs[] = { {
	.init			= sha224_init,
	.update			= sha2_update,
	.final			= sha224_final,
	.export			= sha2_export,
	.import			= sha2_import,
	.descsize		= sizeof(struct sha256_state),
	.digestsize		= SHA224_DIGEST_SIZE,
	.statesize		= sizeof(struct sha256_state),
	.base			= {
		.cra_name		= "sha224",
		.cra_driver_name	= "sha224-ce",
		.cra_priority		= 300,
		.cra_flags		= CRYPTO_ALG_TYPE_SHASH,
		.cra_blocksize		= SHA256_BLOCK_SIZE,
		.cra_module		= THIS_MODULE,
	}
}, {
	.init			= sha256_init,
	.update			= sha2_update,
	.final			= sha256_final,
	.export			= sha2_export,
	.import			= sha2_import,
	.descsize		= sizeof(struct sha256_state),
	.digestsize		= SHA256_DIGEST_SIZE,
	.statesize		= sizeof(struct sha256_state),
	.base			= {
		.cra_name		= "sha256",
		.cra_driver_name	= "sha256-ce",
		.cra_priority		= 300,
		.cra_flags		= CRYPTO_ALG_TYPE_SHASH,
		.cra_blocksize		= SHA256_BLOCK_SIZE,
		.cra_module		= THIS_MODULE,
	}
} };

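/*
 * Only register the algorithms when the CPU advertises the SHA-2 Crypto
 * Extensions via the HWCAP2_SHA2 hwcap.
 */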
static int __init sha2_ce_mod_init(void)
{
	if (!(elf_hwcap2 & HWCAP2_SHA2))
		return -ENODEV;
	return crypto_register_shashes(algs, ARRAY_SIZE(algs));
}

static void __exit sha2_ce_mod_fini(void)
{
	crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
}

module_init(sha2_ce_mod_init);
module_exit(sha2_ce_mod_fini);