// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Asynchronous Cryptographic Hash operations.
 *
 * This is the asynchronous version of hash.c with notification of
 * completion via a callback.
 *
 * Copyright (c) 2008 Loc Ho <lho@amcc.com>
 */

#include <crypto/internal/hash.h>
#include <crypto/scatterwalk.h>
#include <linux/err.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/seq_file.h>
#include <linux/cryptouser.h>
#include <linux/compiler.h>
#include <net/netlink.h>

#include "internal.h"

static const struct crypto_type crypto_ahash_type;

struct ahash_request_priv {
	crypto_completion_t complete;
	void *data;
	u8 *result;
	u32 flags;
	void *ubuf[] CRYPTO_MINALIGN_ATTR;
};

static inline struct ahash_alg *crypto_ahash_alg(struct crypto_ahash *hash)
{
	return container_of(crypto_hash_alg_common(hash), struct ahash_alg,
			    halg);
}

static int hash_walk_next(struct crypto_hash_walk *walk)
{
	unsigned int alignmask = walk->alignmask;
	unsigned int offset = walk->offset;
	unsigned int nbytes = min(walk->entrylen,
				  ((unsigned int)(PAGE_SIZE)) - offset);

	walk->data = kmap_local_page(walk->pg);
	walk->data += offset;

	if (offset & alignmask) {
		unsigned int unaligned = alignmask + 1 - (offset & alignmask);

		if (nbytes > unaligned)
			nbytes = unaligned;
	}

	walk->entrylen -= nbytes;
	return nbytes;
}

static int hash_walk_new_entry(struct crypto_hash_walk *walk)
{
	struct scatterlist *sg;

	sg = walk->sg;
	walk->offset = sg->offset;
	walk->pg = sg_page(walk->sg) + (walk->offset >> PAGE_SHIFT);
	walk->offset = offset_in_page(walk->offset);
	walk->entrylen = sg->length;

	if (walk->entrylen > walk->total)
		walk->entrylen = walk->total;
	walk->total -= walk->entrylen;

	return hash_walk_next(walk);
}

int crypto_hash_walk_done(struct crypto_hash_walk *walk, int err)
{
	unsigned int alignmask = walk->alignmask;

	walk->data -= walk->offset;

	if (walk->entrylen && (walk->offset & alignmask) && !err) {
		unsigned int nbytes;

		walk->offset = ALIGN(walk->offset, alignmask + 1);
		nbytes = min(walk->entrylen,
			     (unsigned int)(PAGE_SIZE - walk->offset));
		if (nbytes) {
			walk->entrylen -= nbytes;
			walk->data += walk->offset;
			return nbytes;
		}
	}

	kunmap_local(walk->data);
	crypto_yield(walk->flags);

	if (err)
		return err;

	if (walk->entrylen) {
		walk->offset = 0;
		walk->pg++;
		return hash_walk_next(walk);
	}

	if (!walk->total)
		return 0;

	walk->sg = sg_next(walk->sg);

	return hash_walk_new_entry(walk);
}
EXPORT_SYMBOL_GPL(crypto_hash_walk_done);

int crypto_hash_walk_first(struct ahash_request *req,
			   struct crypto_hash_walk *walk)
{
	walk->total = req->nbytes;

	if (!walk->total) {
		walk->entrylen = 0;
		return 0;
	}

	walk->alignmask = crypto_ahash_alignmask(crypto_ahash_reqtfm(req));
	walk->sg = req->src;
	walk->flags = req->base.flags;

	return hash_walk_new_entry(walk);
}
EXPORT_SYMBOL_GPL(crypto_hash_walk_first);

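/*
 * Example (an illustrative sketch, not code from this file): a driver's
 * update() implementation typically consumes the request data through the
 * walk helpers above. Here "process_block()" and "ctx" are made-up
 * stand-ins for the driver's own data-processing routine and context:
 *
 *	struct crypto_hash_walk walk;
 *	int nbytes;
 *
 *	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
 *	     nbytes = crypto_hash_walk_done(&walk, 0))
 *		process_block(ctx, walk.data, nbytes);
 */
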
static int ahash_setkey_unaligned(struct crypto_ahash *tfm, const u8 *key,
				  unsigned int keylen)
{
	unsigned long alignmask = crypto_ahash_alignmask(tfm);
	int ret;
	u8 *buffer, *alignbuffer;
	unsigned long absize;

	absize = keylen + alignmask;
	buffer = kmalloc(absize, GFP_KERNEL);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	ret = tfm->setkey(tfm, alignbuffer, keylen);
	kfree_sensitive(buffer);
	return ret;
}

static int ahash_nosetkey(struct crypto_ahash *tfm, const u8 *key,
			  unsigned int keylen)
{
	return -ENOSYS;
}

static void ahash_set_needkey(struct crypto_ahash *tfm)
{
	const struct hash_alg_common *alg = crypto_hash_alg_common(tfm);

	if (tfm->setkey != ahash_nosetkey &&
	    !(alg->base.cra_flags & CRYPTO_ALG_OPTIONAL_KEY))
		crypto_ahash_set_flags(tfm, CRYPTO_TFM_NEED_KEY);
}

int crypto_ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
			unsigned int keylen)
{
	unsigned long alignmask = crypto_ahash_alignmask(tfm);
	int err;

	if ((unsigned long)key & alignmask)
		err = ahash_setkey_unaligned(tfm, key, keylen);
	else
		err = tfm->setkey(tfm, key, keylen);

	if (unlikely(err)) {
		ahash_set_needkey(tfm);
		return err;
	}

	crypto_ahash_clear_flags(tfm, CRYPTO_TFM_NEED_KEY);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_ahash_setkey);

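/*
 * Example (an illustrative sketch, not code from this file): a keyed hash
 * such as HMAC must be keyed before a digest can be computed. The
 * algorithm name and the "key"/"keylen" buffer below are assumptions made
 * up for the example:
 *
 *	struct crypto_ahash *tfm;
 *
 *	tfm = crypto_alloc_ahash("hmac(sha256)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	err = crypto_ahash_setkey(tfm, key, keylen);
 *
 * Until setkey() succeeds, CRYPTO_TFM_NEED_KEY stays set and
 * crypto_ahash_digest() below fails with -ENOKEY.
 */

/*
 * ahash_save_req() clones the caller's request into a private sub-request
 * with a suitably aligned result buffer, optionally carrying the hash
 * state over via export/import; ahash_restore_req() later copies the
 * digest back into the original request and frees the clone.
 */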
static int ahash_save_req(struct ahash_request *req, crypto_completion_t cplt,
			  bool has_state)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	unsigned long alignmask = crypto_ahash_alignmask(tfm);
	unsigned int ds = crypto_ahash_digestsize(tfm);
	struct ahash_request *subreq;
	unsigned int subreq_size;
	unsigned int reqsize;
	u8 *result;
	gfp_t gfp;
	u32 flags;

	subreq_size = sizeof(*subreq);
	reqsize = crypto_ahash_reqsize(tfm);
	reqsize = ALIGN(reqsize, crypto_tfm_ctx_alignment());
	subreq_size += reqsize;
	subreq_size += ds;
	subreq_size += alignmask & ~(crypto_tfm_ctx_alignment() - 1);

	flags = ahash_request_flags(req);
	gfp = (flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? GFP_KERNEL : GFP_ATOMIC;
	subreq = kmalloc(subreq_size, gfp);
	if (!subreq)
		return -ENOMEM;

	ahash_request_set_tfm(subreq, tfm);
	ahash_request_set_callback(subreq, flags, cplt, req);

	result = (u8 *)(subreq + 1) + reqsize;
	result = PTR_ALIGN(result, alignmask + 1);

	ahash_request_set_crypt(subreq, req->src, result, req->nbytes);

	if (has_state) {
		void *state;

		state = kmalloc(crypto_ahash_statesize(tfm), gfp);
		if (!state) {
			kfree(subreq);
			return -ENOMEM;
		}

		crypto_ahash_export(req, state);
		crypto_ahash_import(subreq, state);
		kfree_sensitive(state);
	}

	req->priv = subreq;

	return 0;
}

static void ahash_restore_req(struct ahash_request *req, int err)
{
	struct ahash_request *subreq = req->priv;

	if (!err)
		memcpy(req->result, subreq->result,
		       crypto_ahash_digestsize(crypto_ahash_reqtfm(req)));

	req->priv = NULL;

	kfree_sensitive(subreq);
}

static void ahash_op_unaligned_done(void *data, int err)
{
	struct ahash_request *areq = data;

	if (err == -EINPROGRESS)
		goto out;

	/* First copy req->result into req->priv.result */
	ahash_restore_req(areq, err);

out:
	/* Complete the ORIGINAL request. */
	ahash_request_complete(areq, err);
}

static int ahash_op_unaligned(struct ahash_request *req,
			      int (*op)(struct ahash_request *),
			      bool has_state)
{
	int err;

	err = ahash_save_req(req, ahash_op_unaligned_done, has_state);
	if (err)
		return err;

	err = op(req->priv);
	if (err == -EINPROGRESS || err == -EBUSY)
		return err;

	ahash_restore_req(req, err);

	return err;
}

static int crypto_ahash_op(struct ahash_request *req,
			   int (*op)(struct ahash_request *),
			   bool has_state)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	unsigned long alignmask = crypto_ahash_alignmask(tfm);

	if ((unsigned long)req->result & alignmask)
		return ahash_op_unaligned(req, op, has_state);

	return op(req);
}

int crypto_ahash_final(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct crypto_alg *alg = tfm->base.__crt_alg;
	unsigned int nbytes = req->nbytes;
	int ret;

	crypto_stats_get(alg);
	ret = crypto_ahash_op(req, crypto_ahash_reqtfm(req)->final, true);
	crypto_stats_ahash_final(nbytes, ret, alg);
	return ret;
}
EXPORT_SYMBOL_GPL(crypto_ahash_final);

int crypto_ahash_finup(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct crypto_alg *alg = tfm->base.__crt_alg;
	unsigned int nbytes = req->nbytes;
	int ret;

	crypto_stats_get(alg);
	ret = crypto_ahash_op(req, crypto_ahash_reqtfm(req)->finup, true);
	crypto_stats_ahash_final(nbytes, ret, alg);
	return ret;
}
EXPORT_SYMBOL_GPL(crypto_ahash_finup);

int crypto_ahash_digest(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct crypto_alg *alg = tfm->base.__crt_alg;
	unsigned int nbytes = req->nbytes;
	int ret;

	crypto_stats_get(alg);
	if (crypto_ahash_get_flags(tfm) & CRYPTO_TFM_NEED_KEY)
		ret = -ENOKEY;
	else
		ret = crypto_ahash_op(req, tfm->digest, false);
	crypto_stats_ahash_final(nbytes, ret, alg);
	return ret;
}
EXPORT_SYMBOL_GPL(crypto_ahash_digest);

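/*
 * Example (an illustrative sketch, not code from this file): a caller
 * that wants a synchronous one-shot digest can wrap the asynchronous API
 * with the crypto_wait helpers. The algorithm name "sha256" and the
 * "data"/"len" buffer names are assumptions made up for the example:
 *
 *	struct crypto_ahash *tfm;
 *	struct ahash_request *req;
 *	struct scatterlist sg;
 *	DECLARE_CRYPTO_WAIT(wait);
 *	u8 out[32];
 *	int err;
 *
 *	tfm = crypto_alloc_ahash("sha256", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	req = ahash_request_alloc(tfm, GFP_KERNEL);
 *	sg_init_one(&sg, data, len);
 *	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
 *				   CRYPTO_TFM_REQ_MAY_SLEEP,
 *				   crypto_req_done, &wait);
 *	ahash_request_set_crypt(req, &sg, out, len);
 *
 *	err = crypto_wait_req(crypto_ahash_digest(req), &wait);
 *
 *	ahash_request_free(req);
 *	crypto_free_ahash(tfm);
 */
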
static void ahash_def_finup_done2(void *data, int err)
{
	struct ahash_request *areq = data;

	if (err == -EINPROGRESS)
		return;

	ahash_restore_req(areq, err);

	ahash_request_complete(areq, err);
}

static int ahash_def_finup_finish1(struct ahash_request *req, int err)
{
	struct ahash_request *subreq = req->priv;

	if (err)
		goto out;

	subreq->base.complete = ahash_def_finup_done2;

	err = crypto_ahash_reqtfm(req)->final(subreq);
	if (err == -EINPROGRESS || err == -EBUSY)
		return err;

out:
	ahash_restore_req(req, err);
	return err;
}

static void ahash_def_finup_done1(void *data, int err)
{
	struct ahash_request *areq = data;
	struct ahash_request *subreq;

	if (err == -EINPROGRESS)
		goto out;

	subreq = areq->priv;
	subreq->base.flags &= CRYPTO_TFM_REQ_MAY_BACKLOG;

	err = ahash_def_finup_finish1(areq, err);
	if (err == -EINPROGRESS || err == -EBUSY)
		return;

out:
	ahash_request_complete(areq, err);
}

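/*
 * The default finup() implementation: drivers that do not provide a
 * native finup() get it emulated as an update() on the saved sub-request
 * followed by a final(), with the two completion callbacks above chaining
 * the asynchronous case.
 */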
static int ahash_def_finup(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	int err;

	err = ahash_save_req(req, ahash_def_finup_done1, true);
	if (err)
		return err;

	err = tfm->update(req->priv);
	if (err == -EINPROGRESS || err == -EBUSY)
		return err;

	return ahash_def_finup_finish1(req, err);
}

static void crypto_ahash_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_ahash *hash = __crypto_ahash_cast(tfm);
	struct ahash_alg *alg = crypto_ahash_alg(hash);

	alg->exit_tfm(hash);
}

static int crypto_ahash_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_ahash *hash = __crypto_ahash_cast(tfm);
	struct ahash_alg *alg = crypto_ahash_alg(hash);

	hash->setkey = ahash_nosetkey;

	if (tfm->__crt_alg->cra_type != &crypto_ahash_type)
		return crypto_init_shash_ops_async(tfm);

	hash->init = alg->init;
	hash->update = alg->update;
	hash->final = alg->final;
	hash->finup = alg->finup ?: ahash_def_finup;
	hash->digest = alg->digest;
	hash->export = alg->export;
	hash->import = alg->import;

	if (alg->setkey) {
		hash->setkey = alg->setkey;
		ahash_set_needkey(hash);
	}

	if (alg->exit_tfm)
		tfm->exit = crypto_ahash_exit_tfm;

	return alg->init_tfm ? alg->init_tfm(hash) : 0;
}

static unsigned int crypto_ahash_extsize(struct crypto_alg *alg)
{
	if (alg->cra_type != &crypto_ahash_type)
		return sizeof(struct crypto_shash *);

	return crypto_alg_extsize(alg);
}

static void crypto_ahash_free_instance(struct crypto_instance *inst)
{
	struct ahash_instance *ahash = ahash_instance(inst);

	ahash->free(ahash);
}

#ifdef CONFIG_NET
static int crypto_ahash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	struct crypto_report_hash rhash;

	memset(&rhash, 0, sizeof(rhash));

	strscpy(rhash.type, "ahash", sizeof(rhash.type));

	rhash.blocksize = alg->cra_blocksize;
	rhash.digestsize = __crypto_hash_alg_common(alg)->digestsize;

	return nla_put(skb, CRYPTOCFGA_REPORT_HASH, sizeof(rhash), &rhash);
}
#else
static int crypto_ahash_report(struct sk_buff *skb, struct crypto_alg *alg)
{
	return -ENOSYS;
}
#endif

static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
	__maybe_unused;
static void crypto_ahash_show(struct seq_file *m, struct crypto_alg *alg)
{
	seq_printf(m, "type         : ahash\n");
	seq_printf(m, "async        : %s\n", alg->cra_flags & CRYPTO_ALG_ASYNC ?
					     "yes" : "no");
	seq_printf(m, "blocksize    : %u\n", alg->cra_blocksize);
	seq_printf(m, "digestsize   : %u\n",
		   __crypto_hash_alg_common(alg)->digestsize);
}

static const struct crypto_type crypto_ahash_type = {
	.extsize = crypto_ahash_extsize,
	.init_tfm = crypto_ahash_init_tfm,
	.free = crypto_ahash_free_instance,
#ifdef CONFIG_PROC_FS
	.show = crypto_ahash_show,
#endif
	.report = crypto_ahash_report,
	.maskclear = ~CRYPTO_ALG_TYPE_MASK,
	.maskset = CRYPTO_ALG_TYPE_AHASH_MASK,
	.type = CRYPTO_ALG_TYPE_AHASH,
	.tfmsize = offsetof(struct crypto_ahash, base),
};

int crypto_grab_ahash(struct crypto_ahash_spawn *spawn,
		      struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask)
{
	spawn->base.frontend = &crypto_ahash_type;
	return crypto_grab_spawn(&spawn->base, inst, name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_grab_ahash);

struct crypto_ahash *crypto_alloc_ahash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_ahash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_alloc_ahash);

int crypto_has_ahash(const char *alg_name, u32 type, u32 mask)
{
	return crypto_type_has_alg(alg_name, &crypto_ahash_type, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_has_ahash);

static int ahash_prepare_alg(struct ahash_alg *alg)
{
	struct crypto_alg *base = &alg->halg.base;

	if (alg->halg.digestsize > HASH_MAX_DIGESTSIZE ||
	    alg->halg.statesize > HASH_MAX_STATESIZE ||
	    alg->halg.statesize == 0)
		return -EINVAL;

	base->cra_type = &crypto_ahash_type;
	base->cra_flags &= ~CRYPTO_ALG_TYPE_MASK;
	base->cra_flags |= CRYPTO_ALG_TYPE_AHASH;

	return 0;
}

int crypto_register_ahash(struct ahash_alg *alg)
{
	struct crypto_alg *base = &alg->halg.base;
	int err;

	err = ahash_prepare_alg(alg);
	if (err)
		return err;

	return crypto_register_alg(base);
}
EXPORT_SYMBOL_GPL(crypto_register_ahash);

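/*
 * Example (an illustrative sketch, not code from this file): a driver
 * registers its implementation by filling in a struct ahash_alg. Every
 * name, size and callback below is a made-up assumption, not a real
 * driver:
 *
 *	static struct ahash_alg example_alg = {
 *		.init	= example_init,
 *		.update	= example_update,
 *		.final	= example_final,
 *		.digest	= example_digest,
 *		.export	= example_export,
 *		.import	= example_import,
 *		.halg	= {
 *			.digestsize	= 32,
 *			.statesize	= sizeof(struct example_state),
 *			.base		= {
 *				.cra_name	 = "sha256",
 *				.cra_driver_name = "sha256-example",
 *				.cra_priority	 = 300,
 *				.cra_flags	 = CRYPTO_ALG_ASYNC,
 *				.cra_blocksize	 = 64,
 *				.cra_module	 = THIS_MODULE,
 *			},
 *		},
 *	};
 *
 *	err = crypto_register_ahash(&example_alg);
 *
 * Note that ahash_prepare_alg() rejects a zero statesize, so export and
 * import must be meaningful.
 */
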
void crypto_unregister_ahash(struct ahash_alg *alg)
{
	crypto_unregister_alg(&alg->halg.base);
}
EXPORT_SYMBOL_GPL(crypto_unregister_ahash);

int crypto_register_ahashes(struct ahash_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_ahash(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_ahash(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_ahashes);

void crypto_unregister_ahashes(struct ahash_alg *algs, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_ahash(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_ahashes);

int ahash_register_instance(struct crypto_template *tmpl,
			    struct ahash_instance *inst)
{
	int err;

	if (WARN_ON(!inst->free))
		return -EINVAL;

	err = ahash_prepare_alg(&inst->alg);
	if (err)
		return err;

	return crypto_register_instance(tmpl, ahash_crypto_instance(inst));
}
EXPORT_SYMBOL_GPL(ahash_register_instance);

bool crypto_hash_alg_has_setkey(struct hash_alg_common *halg)
{
	struct crypto_alg *alg = &halg->base;

	if (alg->cra_type != &crypto_ahash_type)
		return crypto_shash_alg_has_setkey(__crypto_shash_alg(alg));

	return __crypto_ahash_alg(alg)->setkey != NULL;
}
EXPORT_SYMBOL_GPL(crypto_hash_alg_has_setkey);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Asynchronous cryptographic hash type");