// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Synchronous Cryptographic Hash operations.
 *
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/cryptouser.h>
#include <net/netlink.h>
#include <linux/compiler.h>

#include "internal.h"

static const struct crypto_type crypto_shash_type;

/* The default ->setkey() for algorithms that take no key. */
int shash_no_setkey(struct crypto_shash *tfm, const u8 *key,
		    unsigned int keylen)
{
	return -ENOSYS;
}
EXPORT_SYMBOL_GPL(shash_no_setkey);

static int shash_setkey_unaligned(struct crypto_shash *tfm, const u8 *key,
				  unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned long absize;
	u8 *buffer, *alignbuffer;
	int err;

	/* Bounce the key through a buffer aligned for the algorithm. */
	absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
	buffer = kmalloc(absize, GFP_ATOMIC);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	err = shash->setkey(tfm, alignbuffer, keylen);
	kfree_sensitive(buffer);
	return err;
}

static void shash_set_needkey(struct crypto_shash *tfm, struct shash_alg *alg)
{
	if (crypto_shash_alg_needs_key(alg))
		crypto_shash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
}

int crypto_shash_setkey(struct crypto_shash *tfm, const u8 *key,
			unsigned int keylen)
{
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	int err;

	if ((unsigned long)key & alignmask)
		err = shash_setkey_unaligned(tfm, key, keylen);
	else
		err = shash->setkey(tfm, key, keylen);

	if (unlikely(err)) {
		shash_set_needkey(tfm, shash);
		return err;
	}

	crypto_shash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_shash_setkey);

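/*
 * Example (an illustrative sketch, not part of the original file): keying
 * a keyed shash such as HMAC before use. "hmac(sha256)" assumes the hmac
 * template and a sha256 implementation are available.
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("hmac(sha256)", 0, 0);
 *	int err;
 *
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	err = crypto_shash_setkey(tfm, key, keylen);
 *
 * Until a setkey succeeds, CRYPTO_TFM_NEED_KEY stays set and digest
 * operations on the transform fail with -ENOKEY.
 */
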
static int shash_update_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	unsigned int unaligned_len = alignmask + 1 -
				     ((unsigned long)data & alignmask);
	/*
	 * We cannot count on __aligned() working for large values:
	 * https://patchwork.kernel.org/patch/9507697/
	 */
	u8 ubuf[MAX_ALGAPI_ALIGNMASK * 2];
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	if (WARN_ON(buf + unaligned_len > ubuf + sizeof(ubuf)))
		return -EINVAL;

	if (unaligned_len > len)
		unaligned_len = len;

	/* Hash the misaligned head from a bounce buffer, then the rest. */
	memcpy(buf, data, unaligned_len);
	err = shash->update(desc, buf, unaligned_len);
	memset(buf, 0, unaligned_len);

	return err ?:
	       shash->update(desc, data + unaligned_len, len - unaligned_len);
}

int crypto_shash_update(struct shash_desc *desc, const u8 *data,
			unsigned int len)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)data & alignmask)
		return shash_update_unaligned(desc, data, len);

	return shash->update(desc, data, len);
}
EXPORT_SYMBOL_GPL(crypto_shash_update);

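/*
 * Example (an illustrative sketch, not part of the original file):
 * incremental hashing over multiple buffers with an on-stack descriptor.
 * SHASH_DESC_ON_STACK declares a shash_desc large enough for any shash.
 *
 *	SHASH_DESC_ON_STACK(desc, tfm);
 *	int err;
 *
 *	desc->tfm = tfm;
 *	err = crypto_shash_init(desc) ?:
 *	      crypto_shash_update(desc, buf1, len1) ?:
 *	      crypto_shash_update(desc, buf2, len2) ?:
 *	      crypto_shash_final(desc, out);
 *	shash_desc_zero(desc);
 */
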
static int shash_final_unaligned(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	unsigned long alignmask = crypto_shash_alignmask(tfm);
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned int ds = crypto_shash_digestsize(tfm);
	/*
	 * We cannot count on __aligned() working for large values:
	 * https://patchwork.kernel.org/patch/9507697/
	 */
	u8 ubuf[MAX_ALGAPI_ALIGNMASK + HASH_MAX_DIGESTSIZE];
	u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1);
	int err;

	if (WARN_ON(buf + ds > ubuf + sizeof(ubuf)))
		return -EINVAL;

	err = shash->final(desc, buf);
	if (err)
		goto out;

	memcpy(out, buf, ds);

out:
	memset(buf, 0, ds);
	return err;
}

int crypto_shash_final(struct shash_desc *desc, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if ((unsigned long)out & alignmask)
		return shash_final_unaligned(desc, out);

	return shash->final(desc, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_final);

static int shash_finup_unaligned(struct shash_desc *desc, const u8 *data,
				 unsigned int len, u8 *out)
{
	return crypto_shash_update(desc, data, len) ?:
	       crypto_shash_final(desc, out);
}

int crypto_shash_finup(struct shash_desc *desc, const u8 *data,
		       unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_finup_unaligned(desc, data, len, out);

	return shash->finup(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_finup);

static int shash_digest_unaligned(struct shash_desc *desc, const u8 *data,
				  unsigned int len, u8 *out)
{
	return crypto_shash_init(desc) ?:
	       crypto_shash_finup(desc, data, len, out);
}

int crypto_shash_digest(struct shash_desc *desc, const u8 *data,
			unsigned int len, u8 *out)
{
	struct crypto_shash *tfm = desc->tfm;
	struct shash_alg *shash = crypto_shash_alg(tfm);
	unsigned long alignmask = crypto_shash_alignmask(tfm);

	if (crypto_shash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		return -ENOKEY;

	if (((unsigned long)data | (unsigned long)out) & alignmask)
		return shash_digest_unaligned(desc, data, len, out);

	return shash->digest(desc, data, len, out);
}
EXPORT_SYMBOL_GPL(crypto_shash_digest);

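/*
 * Example (an illustrative sketch, not part of the original file):
 * one-shot digest of a linear buffer with an already-allocated tfm.
 *
 *	SHASH_DESC_ON_STACK(desc, tfm);
 *	int err;
 *
 *	desc->tfm = tfm;
 *	err = crypto_shash_digest(desc, data, len, out);
 *	shash_desc_zero(desc);
 *
 * For keyed algorithms the key must already have been set, otherwise
 * -ENOKEY is returned per the CRYPTO_TFM_NEED_KEY check above.
 */
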
static int shash_default_export(struct shash_desc *desc, void *out)
{
	memcpy(out, shash_desc_ctx(desc), crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_default_import(struct shash_desc *desc, const void *in)
{
	memcpy(shash_desc_ctx(desc), in, crypto_shash_descsize(desc->tfm));
	return 0;
}

static int shash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(tfm);

	return crypto_shash_setkey(*ctx, key, keylen);
}

static int shash_async_init(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return crypto_shash_init(desc);
}

int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = crypto_shash_update(desc, walk.data, nbytes);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_update);

static int shash_async_update(struct ahash_request *req)
{
	return shash_ahash_update(req, ahash_request_ctx(req));
}

static int shash_async_final(struct ahash_request *req)
{
	return crypto_shash_final(ahash_request_ctx(req), req->result);
}

int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc)
{
	struct crypto_hash_walk walk;
	int nbytes;

	nbytes = crypto_hash_walk_first(req, &walk);
	if (!nbytes)
		return crypto_shash_final(desc, req->result);

	do {
		nbytes = crypto_hash_walk_last(&walk) ?
			 crypto_shash_finup(desc, walk.data, nbytes,
					    req->result) :
			 crypto_shash_update(desc, walk.data, nbytes);
		nbytes = crypto_hash_walk_done(&walk, nbytes);
	} while (nbytes > 0);

	return nbytes;
}
EXPORT_SYMBOL_GPL(shash_ahash_finup);

static int shash_async_finup(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return shash_ahash_finup(req, desc);
}

int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc)
{
	unsigned int nbytes = req->nbytes;
	struct scatterlist *sg;
	unsigned int offset;
	int err;

	/* Fast path: hash single-page, linear data directly from the map. */
	if (nbytes &&
	    (sg = req->src, offset = sg->offset,
	     nbytes <= min(sg->length, ((unsigned int)(PAGE_SIZE)) - offset))) {
		void *data;

		data = kmap_atomic(sg_page(sg));
		err = crypto_shash_digest(desc, data + offset, nbytes,
					  req->result);
		kunmap_atomic(data);
	} else
		err = crypto_shash_init(desc) ?:
		      shash_ahash_finup(req, desc);

	return err;
}
EXPORT_SYMBOL_GPL(shash_ahash_digest);

static int shash_async_digest(struct ahash_request *req)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return shash_ahash_digest(req, desc);
}

static int shash_async_export(struct ahash_request *req, void *out)
{
	return crypto_shash_export(ahash_request_ctx(req), out);
}

static int shash_async_import(struct ahash_request *req, const void *in)
{
	struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req));
	struct shash_desc *desc = ahash_request_ctx(req);

	desc->tfm = *ctx;

	return crypto_shash_import(desc, in);
}

static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);

	crypto_free_shash(*ctx);
}

int crypto_init_shash_ops_async(struct crypto_tfm *tfm)
{
	struct crypto_alg *calg = tfm->__crt_alg;
	struct shash_alg *alg = __crypto_shash_alg(calg);
	struct crypto_ahash *crt = __crypto_ahash_cast(tfm);
	struct crypto_shash **ctx = crypto_tfm_ctx(tfm);
	struct crypto_shash *shash;

	if (!crypto_mod_get(calg))
		return -EAGAIN;

	shash = crypto_create_tfm(calg, &crypto_shash_type);
	if (IS_ERR(shash)) {
		crypto_mod_put(calg);
		return PTR_ERR(shash);
	}

	*ctx = shash;
	tfm->exit = crypto_exit_shash_ops_async;

	crt->init = shash_async_init;
	crt->update = shash_async_update;
	crt->final = shash_async_final;
	crt->finup = shash_async_finup;
	crt->digest = shash_async_digest;
	if (crypto_shash_alg_has_setkey(alg))
		crt->setkey = shash_async_setkey;

	crypto_ahash_set_flags(crt, crypto_shash_get_flags(shash) &
				    CRYPTO_TFM_NEED_KEY);

	crt->export = shash_async_export;
	crt->import = shash_async_import;

	crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash);

	return 0;
}

static void crypto_shash_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);
	struct shash_alg *alg = crypto_shash_alg(hash);

	alg->exit_tfm(hash);
}

static int crypto_shash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_shash *hash = __crypto_shash_cast(tfm);
	struct shash_alg *alg = crypto_shash_alg(hash);
	int err;

	hash->descsize = alg->descsize;

	shash_set_needkey(hash, alg);

	if (alg->exit_tfm)
		tfm->exit = crypto_shash_exit_tfm;

	if (!alg->init_tfm)
		return 0;

	err = alg->init_tfm(hash);
	if (err)
		return err;

	/* ->init_tfm() may have increased the descsize. */
	if (WARN_ON_ONCE(hash->descsize > HASH_MAX_DESCSIZE)) {
		if (alg->exit_tfm)
			alg->exit_tfm(hash);
		return -EINVAL;
	}

	return 0;
}

static void crypto_shash_free_instance(struct crypto_instance *inst)
{
	struct shash_instance *shash = shash_instance(inst);

	shash->free(shash);
}

#ifdef CONFIG_NET
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_hash rhash;
	struct shash_alg *salg = __crypto_shash_alg(alg);

	memset(&rhash, 0, sizeof(rhash));

	strscpy(rhash.type, "shash", sizeof(rhash.type));

	rhash.blocksize = alg->cra_blocksize;
	rhash.digestsize = salg->digestsize;

	return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash);
}
#else
static int crypto_shash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	return -ENOSYS;
}
#endif

static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
	__maybe_unused;
static void crypto_shash_show(struct seq_file *m, struct crypto_alg *alg)
{
	struct shash_alg *salg = __crypto_shash_alg(alg);

	seq_printf(m, "type         : shash\n");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n", salg->digestsize);
}

static const struct crypto_type crypto_shash_type = {
	.extsize = crypto_alg_extsize,
	.init_tfm = crypto_shash_init_tfm,
	.free = crypto_shash_free_instance,
#ifdef CONFIG_PROC_FS
	.show = crypto_shash_show,
#endif
	.report = crypto_shash_report,
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_MASK,
	.type = CRYPTO_ALG_TYPE_SHASH,
	.tfmsize = offsetof(struct crypto_shash, base),
};

int crypto_grab_shash(struct crypto_shash_spawn *spawn,
		      struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask)
{
	spawn->base.frontend = &crypto_shash_type;
	return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_grab_shash);

struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_shash);

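/*
 * Example (an illustrative sketch, not part of the original file):
 * allocation returns an ERR_PTR on failure, so check with IS_ERR()
 * rather than against NULL, and release with crypto_free_shash().
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("sha256", 0, 0);
 *
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	...
 *	crypto_free_shash(tfm);
 */
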
static int shash_prepare_alg(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;

	if (alg->digestsize > HASH_MAX_DIGESTSIZE ||
	    alg->descsize > HASH_MAX_DESCSIZE ||
	    alg->statesize > HASH_MAX_STATESIZE)
		return -EINVAL;

	if ((alg->export && !alg->import) || (alg->import && !alg->export))
		return -EINVAL;

	base->cra_type = &crypto_shash_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_SHASH;

	/* Fill in default ops for anything the algorithm left unset. */
	if (!alg->finup)
		alg->finup = shash_finup_unaligned;
	if (!alg->digest)
		alg->digest = shash_digest_unaligned;
	if (!alg->export) {
		alg->export = shash_default_export;
		alg->import = shash_default_import;
		alg->statesize = alg->descsize;
	}
	if (!alg->setkey)
		alg->setkey = shash_no_setkey;

	return 0;
}

int crypto_register_shash(struct shash_alg *alg)
{
	struct crypto_alg *base = &alg->base;
	int err;

	err = shash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_shash);

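/*
 * Example (an illustrative sketch, not part of the original file) of a
 * driver registering a synchronous hash; "foo" and its ops/constants are
 * hypothetical:
 *
 *	static struct shash_alg foo_alg = {
 *		.digestsize	= FOO_DIGEST_SIZE,
 *		.descsize	= sizeof(struct foo_desc_ctx),
 *		.init		= foo_init,
 *		.update		= foo_update,
 *		.final		= foo_final,
 *		.base		= {
 *			.cra_name	 = "foo",
 *			.cra_driver_name = "foo-generic",
 *			.cra_blocksize	 = FOO_BLOCK_SIZE,
 *			.cra_module	 = THIS_MODULE,
 *		},
 *	};
 *
 *	err = crypto_register_shash(&foo_alg);
 */
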
void crypto_unregister_shash(struct shash_alg *alg)
{
	crypto_unregister_alg(&alg->base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shash);

int crypto_register_shashes(struct shash_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_shash(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	/* Unwind any registrations that already succeeded. */
	for (--i; i >= 0; --i)
		crypto_unregister_shash(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_shashes);

void crypto_unregister_shashes(struct shash_alg *algs, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_shash(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_shashes);

int shash_register_instance(struct crypto_template *tmpl,
			    struct shash_instance *inst)
{
	int err;

	if (WARN_ON(!inst->free))
		return -EINVAL;

	err = shash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, shash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(shash_register_instance);

void shash_free_singlespawn_instance(struct shash_instance *inst)
{
	crypto_drop_spawn(shash_instance_ctx(inst));
	kfree(inst);
}
EXPORT_SYMBOL_GPL(shash_free_singlespawn_instance);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Synchronous cryptographic hash type");