• Home
  • Line#
  • Scopes#
  • Navigate#
  • Raw
  • Download
1 /*
2  * Glue code for the SHA256 Secure Hash Algorithm assembly implementation
3  * using optimized ARM assembler and NEON instructions.
4  *
 * Copyright © 2015 Google Inc.
6  *
7  * This file is based on sha256_ssse3_glue.c:
8  *   Copyright (C) 2013 Intel Corporation
9  *   Author: Tim Chen <tim.c.chen@linux.intel.com>
10  *
11  * This program is free software; you can redistribute it and/or modify it
12  * under the terms of the GNU General Public License as published by the Free
13  * Software Foundation; either version 2 of the License, or (at your option)
14  * any later version.
15  *
16  */
17 
18 #include <crypto/internal/hash.h>
19 #include <linux/crypto.h>
20 #include <linux/init.h>
21 #include <linux/module.h>
22 #include <linux/mm.h>
23 #include <linux/cryptohash.h>
24 #include <linux/types.h>
25 #include <linux/string.h>
26 #include <crypto/sha.h>
27 #include <asm/byteorder.h>
28 #include <asm/simd.h>
29 #include <asm/neon.h>
30 #include "sha256_glue.h"
31 
32 asmlinkage void sha256_block_data_order(u32 *digest, const void *data,
33 				      unsigned int num_blks);
34 
35 
sha256_init(struct shash_desc * desc)36 int sha256_init(struct shash_desc *desc)
37 {
38 	struct sha256_state *sctx = shash_desc_ctx(desc);
39 
40 	sctx->state[0] = SHA256_H0;
41 	sctx->state[1] = SHA256_H1;
42 	sctx->state[2] = SHA256_H2;
43 	sctx->state[3] = SHA256_H3;
44 	sctx->state[4] = SHA256_H4;
45 	sctx->state[5] = SHA256_H5;
46 	sctx->state[6] = SHA256_H6;
47 	sctx->state[7] = SHA256_H7;
48 	sctx->count = 0;
49 
50 	return 0;
51 }
52 
sha224_init(struct shash_desc * desc)53 int sha224_init(struct shash_desc *desc)
54 {
55 	struct sha256_state *sctx = shash_desc_ctx(desc);
56 
57 	sctx->state[0] = SHA224_H0;
58 	sctx->state[1] = SHA224_H1;
59 	sctx->state[2] = SHA224_H2;
60 	sctx->state[3] = SHA224_H3;
61 	sctx->state[4] = SHA224_H4;
62 	sctx->state[5] = SHA224_H5;
63 	sctx->state[6] = SHA224_H6;
64 	sctx->state[7] = SHA224_H7;
65 	sctx->count = 0;
66 
67 	return 0;
68 }
69 
__sha256_update(struct shash_desc * desc,const u8 * data,unsigned int len,unsigned int partial)70 int __sha256_update(struct shash_desc *desc, const u8 *data, unsigned int len,
71 		    unsigned int partial)
72 {
73 	struct sha256_state *sctx = shash_desc_ctx(desc);
74 	unsigned int done = 0;
75 
76 	sctx->count += len;
77 
78 	if (partial) {
79 		done = SHA256_BLOCK_SIZE - partial;
80 		memcpy(sctx->buf + partial, data, done);
81 		sha256_block_data_order(sctx->state, sctx->buf, 1);
82 	}
83 
84 	if (len - done >= SHA256_BLOCK_SIZE) {
85 		const unsigned int rounds = (len - done) / SHA256_BLOCK_SIZE;
86 
87 		sha256_block_data_order(sctx->state, data + done, rounds);
88 		done += rounds * SHA256_BLOCK_SIZE;
89 	}
90 
91 	memcpy(sctx->buf, data + done, len - done);
92 
93 	return 0;
94 }
95 
sha256_update(struct shash_desc * desc,const u8 * data,unsigned int len)96 int sha256_update(struct shash_desc *desc, const u8 *data, unsigned int len)
97 {
98 	struct sha256_state *sctx = shash_desc_ctx(desc);
99 	unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;
100 
101 	/* Handle the fast case right here */
102 	if (partial + len < SHA256_BLOCK_SIZE) {
103 		sctx->count += len;
104 		memcpy(sctx->buf + partial, data, len);
105 
106 		return 0;
107 	}
108 
109 	return __sha256_update(desc, data, len, partial);
110 }
111 
112 /* Add padding and return the message digest. */
/* Add padding and return the message digest. */
static int sha256_final(struct shash_desc *desc, u8 *out)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);
	unsigned int i, index, padlen;
	__be32 *dst = (__be32 *)out;
	__be64 bits;
	/* FIPS 180-4 padding: a single 0x80 byte followed by zeroes. */
	static const u8 padding[SHA256_BLOCK_SIZE] = { 0x80, };

	/* save number of bits */
	bits = cpu_to_be64(sctx->count << 3);

	/* Pad out to 56 mod 64 and append length */
	index = sctx->count % SHA256_BLOCK_SIZE;
	padlen = (index < 56) ? (56 - index) : ((SHA256_BLOCK_SIZE+56)-index);

	/* We need to fill a whole block for __sha256_update */
	if (padlen <= 56) {
		/*
		 * Padding fits in the current block: just buffer it and bump
		 * the count; the block is hashed below when the length field
		 * completes it.
		 */
		sctx->count += padlen;
		memcpy(sctx->buf + index, padding, padlen);
	} else {
		/*
		 * Padding spills into a second block: let __sha256_update
		 * hash the first block now, leaving 56 bytes buffered.
		 */
		__sha256_update(desc, padding, padlen, index);
	}
	/* Append the big-endian bit count, completing the final block. */
	__sha256_update(desc, (const u8 *)&bits, sizeof(bits), 56);

	/* Store state in digest */
	for (i = 0; i < 8; i++)
		dst[i] = cpu_to_be32(sctx->state[i]);

	/* Wipe context */
	memset(sctx, 0, sizeof(*sctx));

	return 0;
}
146 
/*
 * SHA-224 finalization: run the shared SHA-256 finalizer (the state was
 * seeded with the SHA-224 IVs) and emit only the first 28 digest bytes.
 */
static int sha224_final(struct shash_desc *desc, u8 *out)
{
	u8 full_digest[SHA256_DIGEST_SIZE];

	sha256_final(desc, full_digest);
	memcpy(out, full_digest, SHA224_DIGEST_SIZE);
	/* Scrub the truncated tail from the stack. */
	memzero_explicit(full_digest, SHA256_DIGEST_SIZE);

	return 0;
}
158 
sha256_export(struct shash_desc * desc,void * out)159 int sha256_export(struct shash_desc *desc, void *out)
160 {
161 	struct sha256_state *sctx = shash_desc_ctx(desc);
162 
163 	memcpy(out, sctx, sizeof(*sctx));
164 
165 	return 0;
166 }
167 
sha256_import(struct shash_desc * desc,const void * in)168 int sha256_import(struct shash_desc *desc, const void *in)
169 {
170 	struct sha256_state *sctx = shash_desc_ctx(desc);
171 
172 	memcpy(sctx, in, sizeof(*sctx));
173 
174 	return 0;
175 }
176 
/*
 * shash descriptors for the plain ARM assembler implementations.
 * Priority 150 places these above the generic C code; the NEON variants
 * (sha256_neon_algs, registered in sha256_mod_init() when NEON is
 * present) are expected to outrank them.
 */
static struct shash_alg algs[] = { {
	.digestsize	=	SHA256_DIGEST_SIZE,
	.init		=	sha256_init,
	.update		=	sha256_update,
	.final		=	sha256_final,
	.export		=	sha256_export,
	.import		=	sha256_import,
	.descsize	=	sizeof(struct sha256_state),
	.statesize	=	sizeof(struct sha256_state),
	.base		=	{
		.cra_name	=	"sha256",
		.cra_driver_name =	"sha256-asm",
		.cra_priority	=	150,
		.cra_flags	=	CRYPTO_ALG_TYPE_SHASH,
		.cra_blocksize	=	SHA256_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
}, {
	.digestsize	=	SHA224_DIGEST_SIZE,
	.init		=	sha224_init,
	.update		=	sha256_update,
	.final		=	sha224_final,
	.export		=	sha256_export,
	.import		=	sha256_import,
	.descsize	=	sizeof(struct sha256_state),
	.statesize	=	sizeof(struct sha256_state),
	.base		=	{
		.cra_name	=	"sha224",
		.cra_driver_name =	"sha224-asm",
		.cra_priority	=	150,
		.cra_flags	=	CRYPTO_ALG_TYPE_SHASH,
		.cra_blocksize	=	SHA224_BLOCK_SIZE,
		.cra_module	=	THIS_MODULE,
	}
} };
212 
sha256_mod_init(void)213 static int __init sha256_mod_init(void)
214 {
215 	int res = crypto_register_shashes(algs, ARRAY_SIZE(algs));
216 
217 	if (res < 0)
218 		return res;
219 
220 	if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) && cpu_has_neon()) {
221 		res = crypto_register_shashes(sha256_neon_algs,
222 					      ARRAY_SIZE(sha256_neon_algs));
223 
224 		if (res < 0)
225 			crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
226 	}
227 
228 	return res;
229 }
230 
sha256_mod_fini(void)231 static void __exit sha256_mod_fini(void)
232 {
233 	crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
234 
235 	if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) && cpu_has_neon())
236 		crypto_unregister_shashes(sha256_neon_algs,
237 					  ARRAY_SIZE(sha256_neon_algs));
238 }
239 
module_init(sha256_mod_init);
module_exit(sha256_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("SHA256 Secure Hash Algorithm (ARM), including NEON");

/* Allow the module to be auto-loaded by algorithm name. */
MODULE_ALIAS("sha256");