// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/scatterwalk.h>
#include <linux/cryptouser.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/seq_file.h>
#include <linux/string.h>
#include <net/netlink.h>

#include "hash.h"

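/*
 * Default ->setkey stub installed by shash_prepare_alg() for algorithms that
 * do not provide their own setkey.  Returning -ENOSYS is also how the core
 * recognizes an algorithm as unkeyed (see crypto_shash_alg_has_setkey()),
 * which is why this symbol is exported rather than kept static.
 */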
int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
		    unsigned int keylen)
{
	return -ENOSYS;
}
EXPORT_SYMBOL_GPL(shash_no_setkey);

static void shash_set_needkey(struct crypto_shash *tfm, struct shash_alg *alg)
{
	if (crypto_shash_alg_needs_key(alg))
		crypto_shash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
}

int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
			unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	int err;

	err = shash->setkey(tfm, key, keylen);
	if (unlikely(err)) {
		shash_set_needkey(tfm, shash);
		return err;
	}

	crypto_shash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);
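
/*
 * Note the error path in crypto_shash_setkey() above: if ->setkey() fails,
 * CRYPTO_TFM_NEED_KEY is set again so the tfm cannot be used to compute
 * digests with a stale or partially installed key; a later successful
 * setkey clears the flag.
 */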

int crypto_shash_update(struct shash_desc *desc, const u8 *data,
			unsigned int len)
{
	return crypto_shash_alg(desc->tfm)->update(desc, data, len);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);

int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
	return crypto_shash_alg(desc->tfm)->final(desc, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);

static int shash_default_finup(struct shash_desc *desc, const u8 *data,
			       unsigned int len, u8 *out)
{
	struct shash_alg *shash = crypto_shash_alg(desc->tfm);

	return shash->update(desc, data, len) ?:
	       shash->final(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
		       unsigned int len, u8 *out)
{
	return crypto_shash_alg(desc->tfm)->finup(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);

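/*
 * Fallback for ->finup_mb(): finish each of the num_msgs equal-length
 * messages one at a time.  Every message starts from the same intermediate
 * state, so the descriptor context is copied onto an on-stack descriptor for
 * all but the last message, which can consume the caller's descriptor
 * directly.
 */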
static noinline_for_stack int
shash_finup_mb_fallback(struct shash_desc *desc, const u8 * const data[],
			unsigned int len, u8 * const outs[],
			unsigned int num_msgs)
{
	struct crypto_shash *tfm = desc->tfm;
	SHASH_DESC_ON_STACK(desc2, tfm);
	unsigned int i;
	int err;

	for (i = 0; i < num_msgs - 1; i++) {
		desc2->tfm = tfm;
		memcpy(shash_desc_ctx(desc2), shash_desc_ctx(desc),
		       crypto_shash_descsize(tfm));
		err = crypto_shash_finup(desc2, data[i], len, outs[i]);
		if (err)
			return err;
	}
	return crypto_shash_finup(desc, data[i], len, outs[i]);
}

int crypto_shash_finup_mb(struct shash_desc *desc, const u8 * const data[],
			  unsigned int len, u8 * const outs[],
			  unsigned int num_msgs)
{
	struct shash_alg *alg = crypto_shash_alg(desc->tfm);
	int err;

	if (num_msgs == 1)
		return crypto_shash_finup(desc, data[0], len, outs[0]);

	if (num_msgs == 0)
		return 0;

	if (WARN_ON_ONCE(num_msgs > alg->mb_max_msgs))
		goto fallback;

	err = alg->finup_mb(desc, data, len, outs, num_msgs);
	if (unlikely(err == -EOPNOTSUPP))
		goto fallback;
	return err;

fallback:
	return shash_finup_mb_fallback(desc, data, len, outs, num_msgs);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup_mb);
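
/*
 * Sketch of how a caller might use crypto_shash_finup_mb() (illustrative
 * only; the buffer names are hypothetical).  All messages share the state
 * already accumulated in the descriptor and must have the same length:
 *
 *	const u8 *data[2] = { buf_a, buf_b };
 *	u8 *outs[2] = { digest_a, digest_b };
 *
 *	err = crypto_shash_init(desc);
 *	if (!err)
 *		err = crypto_shash_finup_mb(desc, data, buf_len, outs, 2);
 *
 * Algorithms advertise support via ->finup_mb and ->mb_max_msgs; otherwise
 * the serial fallback above is used.
 */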

static int shash_default_digest(struct shash_desc *desc, const u8 *data,
				unsigned int len, u8 *out)
{
	struct shash_alg *shash = crypto_shash_alg(desc->tfm);

	return shash->init(desc) ?:
	       shash->finup(desc, data, len, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;

	if (crypto_shash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;

	return crypto_shash_alg(tfm)->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);

int crypto_shash_tfm_digest(struct crypto_shash *tfm, const u8 *data,
			    unsigned int len, u8 *out)
{
	SHASH_DESC_ON_STACK(desc, tfm);
	int err;

	desc->tfm = tfm;

	err = crypto_shash_digest(desc, data, len, out);

	shash_desc_zero(desc);

	return err;
}
EXPORT_SYMBOL_GPL(crypto_shash_tfm_digest);
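
/*
 * Minimal usage sketch for the one-shot helper above (illustrative only;
 * assumes "sha256" is available and elides error handling):
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("sha256", 0, 0);
 *	u8 digest[SHA256_DIGEST_SIZE];
 *
 *	if (!IS_ERR(tfm)) {
 *		crypto_shash_tfm_digest(tfm, buf, buf_len, digest);
 *		crypto_free_shash(tfm);
 *	}
 *
 * The on-stack descriptor is zeroed before returning so no hash state is
 * left behind on the kernel stack.
 */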

int crypto_shash_export(struct shash_desc *desc, void *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);

	if (shash->export)
		return shash->export(desc, out);

	memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(tfm));
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_shash_export);

int crypto_shash_import(struct shash_desc *desc, const void *in)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);

	if (crypto_shash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;

	if (shash->import)
		return shash->import(desc, in);

	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(tfm));
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_shash_import);
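
/*
 * When an algorithm provides neither ->export nor ->import, the exported
 * state is simply the raw descriptor context; shash_prepare_alg() sets
 * statesize to descsize in that case so callers size their state buffers
 * correctly.
 */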

static void crypto_shash_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);
	struct shash_alg *alg = crypto_shash_alg(hash);

	alg->exit_tfm(hash);
}

static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);
	struct shash_alg *alg = crypto_shash_alg(hash);
	int err;

	hash->descsize = alg->descsize;

	shash_set_needkey(hash, alg);

	if (alg->exit_tfm)
		tfm->exit = crypto_shash_exit_tfm;

	if (!alg->init_tfm)
		return 0;

	err = alg->init_tfm(hash);
	if (err)
		return err;

	/* ->init_tfm() may have increased the descsize. */
	if (WARN_ON_ONCE(hash->descsize > HASH_MAX_DESCSIZE)) {
		if (alg->exit_tfm)
			alg->exit_tfm(hash);
		return -EINVAL;
	}

	return 0;
}

static void crypto_shash_free_instance(struct crypto_instance *inst)
{
	struct shash_instance *shash = shash_instance(inst);

	shash->free(shash);
}

static int __maybe_unused crypto_shash_report(
	struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_hash rhash;
	struct shash_alg *salg = __crypto_shash_alg(alg);

	memset(&rhash, 0, sizeof(rhash));

	strscpy(rhash.type, "shash", sizeof(rhash.type));

	rhash.blocksize = alg->cra_blocksize;
	rhash.digestsize = salg->digestsize;

	return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash);
}

static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
	__maybe_unused;
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	seq_printf(m, "type         : shash\n");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
}

const struct crypto_type crypto_shash_type = {
	.extsize = crypto_alg_extsize,
	.init_tfm = crypto_shash_init_tfm,
	.free = crypto_shash_free_instance,
#ifdef CONFIG_PROC_FS
	.show = crypto_shash_show,
#endif
#if IS_ENABLED(CONFIG_CRYPTO_USER)
	.report = crypto_shash_report,
#endif
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_SHASH,
	.tfmsize = offsetof(struct crypto_shash, base),
};

int crypto_grab_shash(struct crypto_shash_spawn *spawn,
		      struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask)
{
	spawn->base.frontend = &crypto_shash_type;
	return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_grab_shash);

struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);

int crypto_has_shash(const char *alg_name, u32 type, u32 mask)
{
	return crypto_type_has_alg(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_has_shash);

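/*
 * Clone a shash transform.  An unkeyed algorithm has no per-tfm secrets, so
 * cloning just takes another reference on the existing tfm.  Keyed
 * algorithms need either a ->clone_tfm() hook or no tfm context at all;
 * otherwise cloning is refused with -ENOSYS.
 */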
struct crypto_shash *crypto_clone_shash(struct crypto_shash *hash)
{
	struct crypto_tfm *tfm = crypto_shash_tfm(hash);
	struct shash_alg *alg = crypto_shash_alg(hash);
	struct crypto_shash *nhash;
	int err;

	if (!crypto_shash_alg_has_setkey(alg)) {
		tfm = crypto_tfm_get(tfm);
		if (IS_ERR(tfm))
			return ERR_CAST(tfm);

		return hash;
	}

	if (!alg->clone_tfm && (alg->init_tfm || alg->base.cra_init))
		return ERR_PTR(-ENOSYS);

	nhash = crypto_clone_tfm(&crypto_shash_type, tfm);
	if (IS_ERR(nhash))
		return nhash;

	nhash->descsize = hash->descsize;

	if (alg->clone_tfm) {
		err = alg->clone_tfm(nhash, hash);
		if (err) {
			crypto_free_shash(nhash);
			return ERR_PTR(err);
		}
	}

	return nhash;
}
EXPORT_SYMBOL_GPL(crypto_clone_shash);

int hash_prepare_alg(struct hash_alg_common *alg)
{
	struct crypto_alg *base = &alg->base;

	if (alg->digestsize > HASH_MAX_DIGESTSIZE)
		return -EINVAL;

	/* alignmask is not useful for hashes, so it is not supported. */
	if (base->cra_alignmask)
		return -EINVAL;

	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;

	return 0;
}

static int shash_prepare_alg(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->halg.base;
	int err;

	if (alg->descsize > HASH_MAX_DESCSIZE)
		return -EINVAL;

	if ((alg->export && !alg->import) || (alg->import && !alg->export))
		return -EINVAL;

	if (alg->mb_max_msgs > 1) {
		if (alg->mb_max_msgs > HASH_MAX_MB_MSGS)
			return -EINVAL;
		if (!alg->finup_mb)
			return -EINVAL;
	} else {
		if (alg->finup_mb)
			return -EINVAL;
		alg->mb_max_msgs = 1;
	}

	err = hash_prepare_alg(&alg->halg);
	if (err)
		return err;

	base->cra_type = &crypto_shash_type;
	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

	/*
	 * Handle missing optional functions.  For each one we can either
	 * install a default here, or we can leave the pointer as NULL and check
	 * the pointer for NULL in crypto_shash_*(), avoiding an indirect call
	 * when the default behavior is desired.  For ->finup and ->digest we
	 * install defaults, since for optimal performance algorithms should
	 * implement these anyway.  On the other hand, for ->import and
	 * ->export the common case and best performance comes from the simple
	 * memcpy of the shash_desc_ctx, so when those pointers are NULL we
	 * leave them NULL and provide the memcpy with no indirect call.
	 */
	if (!alg->finup)
		alg->finup = shash_default_finup;
	if (!alg->digest)
		alg->digest = shash_default_digest;
	if (!alg->export)
		alg->halg.statesize = alg->descsize;
	if (!alg->setkey)
		alg->setkey = shash_no_setkey;

	return 0;
}

int crypto_register_shash(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;
	int err;

	err = shash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);

void crypto_unregister_shash(struct shash_alg *alg)
{
	crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);

int crypto_register_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_shash(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_shash(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_shashes);

void crypto_unregister_shashes(struct shash_alg *algs, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_shash(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shashes);

int shash_register_instance(struct crypto_template *tmpl,
			    struct shash_instance *inst)
{
	int err;

	if (WARN_ON(!inst->free))
		return -EINVAL;

	err = shash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, shash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_register_instance);

void shash_free_singlespawn_instance(struct shash_instance *inst)
{
	crypto_drop_spawn(shash_instance_ctx(inst));
	kfree(inst);
}
EXPORT_SYMBOL_GPL(shash_free_singlespawn_instance);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");