// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/jump_label.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched/signal.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/completion.h>
#include "internal.h"

LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);

#ifndef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS
DEFINE_STATIC_KEY_FALSE(__crypto_boot_test_finished);
EXPORT_SYMBOL_GPL(__crypto_boot_test_finished);
#endif

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg);

struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
	return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);

void crypto_mod_put(struct crypto_alg *alg)
{
	struct module *module = alg->cra_module;

	crypto_alg_put(alg);
	module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);

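/*
 * Usage note (editor's sketch, not part of the original source): every
 * successful crypto_mod_get() must be balanced by a crypto_mod_put().
 * The put drops the algorithm refcount before the module refcount, so
 * the owning module cannot be unloaded while the final crypto_alg_put()
 * may still be running the algorithm's destroy callback:
 *
 *	struct crypto_alg *alg = crypto_mod_get(candidate);
 *
 *	if (alg) {
 *		inspect_alg(alg);	// hypothetical helper
 *		crypto_mod_put(alg);
 *	}
 */
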
static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
					      u32 mask)
{
	struct crypto_alg *q, *alg = NULL;
	int best = -2;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		int exact, fuzzy;

		if (crypto_is_moribund(q))
			continue;

		if ((q->cra_flags ^ type) & mask)
			continue;

		if (crypto_is_larval(q) &&
		    !crypto_is_test_larval((struct crypto_larval *)q) &&
		    ((struct crypto_larval *)q)->mask != mask)
			continue;

		exact = !strcmp(q->cra_driver_name, name);
		fuzzy = !strcmp(q->cra_name, name);
		if (!exact && !(fuzzy && q->cra_priority > best))
			continue;

		if (unlikely(!crypto_mod_get(q)))
			continue;

		best = q->cra_priority;
		if (alg)
			crypto_mod_put(alg);
		alg = q;

		if (exact)
			break;
	}

	return alg;
}

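/*
 * Worked example (editor's illustration; the driver names and
 * priorities are hypothetical, not taken from this file): if "cbc(aes)"
 * is registered both by a generic implementation (cra_driver_name
 * "cbc(aes-generic)", priority 100) and by an accelerated one
 * ("cbc-aes-aesni", priority 400), a lookup for the name "cbc(aes)" is
 * a fuzzy cra_name match and keeps the highest-priority candidate,
 * while a lookup for "cbc-aes-aesni" matches cra_driver_name exactly
 * and ends the scan at once.
 */
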
static void crypto_larval_destroy(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	BUG_ON(!crypto_is_larval(alg));
	if (!IS_ERR_OR_NULL(larval->adult))
		crypto_mod_put(larval->adult);
	kfree(larval);
}

struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
{
	struct crypto_larval *larval;

	larval = kzalloc(sizeof(*larval), GFP_KERNEL);
	if (!larval)
		return ERR_PTR(-ENOMEM);

	larval->mask = mask;
	larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
	larval->alg.cra_priority = -1;
	larval->alg.cra_destroy = crypto_larval_destroy;

	strscpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
	init_completion(&larval->completion);

	return larval;
}
EXPORT_SYMBOL_GPL(crypto_larval_alloc);

static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
					    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_larval *larval;

	larval = crypto_larval_alloc(name, type, mask);
	if (IS_ERR(larval))
		return ERR_CAST(larval);

	refcount_set(&larval->alg.cra_refcnt, 2);

	down_write(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	if (!alg) {
		alg = &larval->alg;
		list_add(&alg->cra_list, &crypto_alg_list);
	}
	up_write(&crypto_alg_sem);

	if (alg != &larval->alg) {
		kfree(larval);
		if (crypto_is_larval(alg))
			alg = crypto_larval_wait(alg);
	}

	return alg;
}

void crypto_larval_kill(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	down_write(&crypto_alg_sem);
	list_del(&alg->cra_list);
	up_write(&crypto_alg_sem);
	complete_all(&larval->completion);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_larval_kill);

void crypto_wait_for_test(struct crypto_larval *larval)
{
	int err;

	err = crypto_probing_notify(CRYPTO_MSG_ALG_REGISTER, larval->adult);
	if (WARN_ON_ONCE(err != NOTIFY_STOP))
		goto out;

	err = wait_for_completion_killable(&larval->completion);
	WARN_ON(err);
out:
	crypto_larval_kill(&larval->alg);
}
EXPORT_SYMBOL_GPL(crypto_wait_for_test);

static void crypto_start_test(struct crypto_larval *larval)
{
	if (!crypto_is_test_larval(larval))
		return;

	if (larval->test_started)
		return;

	down_write(&crypto_alg_sem);
	if (larval->test_started) {
		up_write(&crypto_alg_sem);
		return;
	}

	larval->test_started = true;
	up_write(&crypto_alg_sem);

	crypto_wait_for_test(larval);
}

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;
	long timeout;

	if (!crypto_boot_test_finished())
		crypto_start_test(larval);

	timeout = wait_for_completion_killable_timeout(
		&larval->completion, 60 * HZ);

	alg = larval->adult;
	if (timeout < 0)
		alg = ERR_PTR(-EINTR);
	else if (!timeout)
		alg = ERR_PTR(-ETIMEDOUT);
	else if (!alg)
		alg = ERR_PTR(-ENOENT);
	else if (IS_ERR(alg))
		;
	else if (crypto_is_test_larval(larval) &&
		 !(alg->cra_flags & CRYPTO_ALG_TESTED))
		alg = ERR_PTR(-EAGAIN);
	else if (alg->cra_flags & CRYPTO_ALG_FIPS_INTERNAL)
		alg = ERR_PTR(-EAGAIN);
	else if (!crypto_mod_get(alg))
		alg = ERR_PTR(-EAGAIN);
	crypto_mod_put(&larval->alg);

	return alg;
}

static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
					    u32 mask)
{
	const u32 fips = CRYPTO_ALG_FIPS_INTERNAL;
	struct crypto_alg *alg;
	u32 test = 0;

	if (!((type | mask) & CRYPTO_ALG_TESTED))
		test |= CRYPTO_ALG_TESTED;

	down_read(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, (type | test) & ~fips,
				  (mask | test) & ~fips);
	if (alg) {
		if (((type | mask) ^ fips) & fips)
			goto unlock;

		if (!crypto_is_larval(alg) &&
		    ((type ^ alg->cra_flags) & mask)) {
			/* Algorithm is disallowed in FIPS mode. */
			crypto_mod_put(alg);
			alg = ERR_PTR(-ENOENT);
		}
	} else if (test) {
		alg = __crypto_alg_lookup(name, type, mask);
		if (alg && !crypto_is_larval(alg)) {
			/* Test failed */
			crypto_mod_put(alg);
			alg = ERR_PTR(-ELIBBAD);
		}
	}

unlock:
	up_read(&crypto_alg_sem);

	return alg;
}

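/*
 * Worked example (editor's illustration) for the flag test
 * (q->cra_flags ^ type) & mask used by __crypto_alg_lookup(): with
 * type = 0 and mask = CRYPTO_ALG_ASYNC, only algorithms whose
 * CRYPTO_ALG_ASYNC bit is clear, i.e. synchronous implementations, can
 * match.  When the caller says nothing about CRYPTO_ALG_TESTED,
 * crypto_alg_lookup() above ORs that bit into both type and mask, so
 * the first pass only accepts algorithms that have already passed
 * their self-tests.
 */
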
static struct crypto_alg *crypto_larval_lookup(const char *name, u32 type,
					       u32 mask)
{
	struct crypto_alg *alg;

	if (!name)
		return ERR_PTR(-ENOENT);

	type &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
	mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);

	alg = crypto_alg_lookup(name, type, mask);
	if (!alg && !(mask & CRYPTO_NOLOAD)) {
		request_module("crypto-%s", name);

		if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask &
		      CRYPTO_ALG_NEED_FALLBACK))
			request_module("crypto-%s-all", name);

		alg = crypto_alg_lookup(name, type, mask);
	}

	if (!IS_ERR_OR_NULL(alg) && crypto_is_larval(alg))
		alg = crypto_larval_wait(alg);
	else if (!alg)
		alg = crypto_larval_add(name, type, mask);

	return alg;
}

int crypto_probing_notify(unsigned long val, void *v)
{
	int ok;

	ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	if (ok == NOTIFY_DONE) {
		request_module("cryptomgr");
		ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	}

	return ok;
}
EXPORT_SYMBOL_GPL(crypto_probing_notify);

struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_alg *larval;
	int ok;

	/*
	 * If the internal flag is set for a cipher, require a caller to
	 * invoke the cipher with the internal flag to use that cipher.
	 * Also, if a caller wants to allocate a cipher that may or may
	 * not be an internal cipher, use type | CRYPTO_ALG_INTERNAL and
	 * !(mask & CRYPTO_ALG_INTERNAL).
	 */
	if (!((type | mask) & CRYPTO_ALG_INTERNAL))
		mask |= CRYPTO_ALG_INTERNAL;

	larval = crypto_larval_lookup(name, type, mask);
	if (IS_ERR(larval) || !crypto_is_larval(larval))
		return larval;

	ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);

	if (ok == NOTIFY_STOP)
		alg = crypto_larval_wait(larval);
	else {
		crypto_mod_put(larval);
		alg = ERR_PTR(-ENOENT);
	}
	crypto_larval_kill(larval);
	return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);

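/*
 * Example for the CRYPTO_ALG_INTERNAL convention described above
 * (editor's sketch; the algorithm name is illustrative):
 *
 *	// Never matches an implementation marked CRYPTO_ALG_INTERNAL:
 *	alg = crypto_alg_mod_lookup("aes", 0, 0);
 *
 *	// May match internal and non-internal implementations alike,
 *	// e.g. when instantiating a template around a raw cipher:
 *	alg = crypto_alg_mod_lookup("aes", CRYPTO_ALG_INTERNAL, 0);
 */
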
static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = tfm->__crt_alg->cra_type;

	if (type_obj)
		return type_obj->init(tfm, type, mask);
	return 0;
}

static void crypto_exit_ops(struct crypto_tfm *tfm)
{
	const struct crypto_type *type = tfm->__crt_alg->cra_type;

	if (type && tfm->exit)
		tfm->exit(tfm);
}

static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = alg->cra_type;
	unsigned int len;

	len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
	if (type_obj)
		return len + type_obj->ctxsize(alg, type, mask);

	switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
	default:
		BUG();

	case CRYPTO_ALG_TYPE_CIPHER:
		len += crypto_cipher_ctxsize(alg);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		len += crypto_compress_ctxsize(alg);
		break;
	}

	return len;
}

void crypto_shoot_alg(struct crypto_alg *alg)
{
	down_write(&crypto_alg_sem);
	alg->cra_flags |= CRYPTO_ALG_DYING;
	up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_shoot_alg);

struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
				      u32 mask)
{
	struct crypto_tfm *tfm = NULL;
	unsigned int tfm_size;
	int err = -ENOMEM;

	tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
	tfm = kzalloc(tfm_size, GFP_KERNEL);
	if (tfm == NULL)
		goto out_err;

	tfm->__crt_alg = alg;
	refcount_set(&tfm->refcnt, 1);

	err = crypto_init_ops(tfm, type, mask);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(tfm);
out_err:
	tfm = ERR_PTR(err);
out:
	return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);

/**
 * crypto_alloc_base - Locate algorithm and allocate transform
 * @alg_name: Name of algorithm
 * @type: Type of algorithm
 * @mask: Mask for type comparison
 *
 * This function should not be used by new algorithm types.
 * Please use crypto_alloc_tfm instead.
 *
 * crypto_alloc_base() will first attempt to locate an already loaded
 * algorithm.  If that fails and the kernel supports dynamically loadable
 * modules, it will then attempt to load a module of the same name or
 * alias.  If that fails it will send a query to any loaded crypto manager
 * to construct an algorithm on the fly.  A refcount is grabbed on the
 * algorithm which is then associated with the new transform.
 *
 * The returned transform is of a non-determinate type.  Most people
 * should use one of the more specific allocation functions such as
 * crypto_alloc_skcipher().
 *
 * In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
	struct crypto_tfm *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_alg_mod_lookup(alg_name, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = __crypto_alloc_tfm(alg, type, mask);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);

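/*
 * Minimal usage sketch for crypto_alloc_base() (editor's addition; the
 * algorithm name is only an example).  New code should prefer the typed
 * allocators such as crypto_alloc_shash() or crypto_alloc_skcipher():
 *
 *	struct crypto_tfm *tfm;
 *
 *	tfm = crypto_alloc_base("sha256", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	...
 *	crypto_free_tfm(tfm);	// wraps crypto_destroy_tfm(tfm, tfm)
 */
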
static void *crypto_alloc_tfmmem(struct crypto_alg *alg,
				 const struct crypto_type *frontend, int node,
				 gfp_t gfp)
{
	struct crypto_tfm *tfm;
	unsigned int tfmsize;
	unsigned int total;
	char *mem;

	tfmsize = frontend->tfmsize;
	total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);

	mem = kzalloc_node(total, gfp, node);
	if (mem == NULL)
		return ERR_PTR(-ENOMEM);

	tfm = (struct crypto_tfm *)(mem + tfmsize);
	tfm->__crt_alg = alg;
	tfm->node = node;
	refcount_set(&tfm->refcnt, 1);

	return mem;
}

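/*
 * Resulting memory layout (editor's annotation, derived from the
 * arithmetic above): one allocation carries the frontend object, the
 * generic crypto_tfm and the algorithm's private context:
 *
 *	mem -> [ frontend header: frontend->tfmsize bytes ]
 *	       [ struct crypto_tfm                        ]
 *	       [ context: frontend->extsize(alg) bytes    ]
 *
 * Callers recover the crypto_tfm with
 * (struct crypto_tfm *)(mem + frontend->tfmsize), as done below.
 */
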
void *crypto_create_tfm_node(struct crypto_alg *alg,
			     const struct crypto_type *frontend,
			     int node)
{
	struct crypto_tfm *tfm;
	char *mem;
	int err;

	mem = crypto_alloc_tfmmem(alg, frontend, node, GFP_KERNEL);
	if (IS_ERR(mem))
		goto out;

	tfm = (struct crypto_tfm *)(mem + frontend->tfmsize);

	err = frontend->init_tfm(tfm);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(mem);
	mem = ERR_PTR(err);
out:
	return mem;
}
EXPORT_SYMBOL_GPL(crypto_create_tfm_node);

void *crypto_clone_tfm(const struct crypto_type *frontend,
		       struct crypto_tfm *otfm)
{
	struct crypto_alg *alg = otfm->__crt_alg;
	struct crypto_tfm *tfm;
	char *mem;

	mem = ERR_PTR(-ESTALE);
	if (unlikely(!crypto_mod_get(alg)))
		goto out;

	mem = crypto_alloc_tfmmem(alg, frontend, otfm->node, GFP_ATOMIC);
	if (IS_ERR(mem)) {
		crypto_mod_put(alg);
		goto out;
	}

	tfm = (struct crypto_tfm *)(mem + frontend->tfmsize);
	tfm->crt_flags = otfm->crt_flags;
	tfm->exit = otfm->exit;

out:
	return mem;
}
EXPORT_SYMBOL_GPL(crypto_clone_tfm);

struct crypto_alg *crypto_find_alg(const char *alg_name,
				   const struct crypto_type *frontend,
				   u32 type, u32 mask)
{
	if (frontend) {
		type &= frontend->maskclear;
		mask &= frontend->maskclear;
		type |= frontend->type;
		mask |= frontend->maskset;
	}

	return crypto_alg_mod_lookup(alg_name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_find_alg);

/**
 * crypto_alloc_tfm_node - Locate algorithm and allocate transform
 * @alg_name: Name of algorithm
 * @frontend: Frontend algorithm type
 * @type: Type of algorithm
 * @mask: Mask for type comparison
 * @node: NUMA node on which users desire to put requests; if node is
 *	  NUMA_NO_NODE, users have no special requirement.
 *
 * crypto_alloc_tfm_node() will first attempt to locate an already loaded
 * algorithm.  If that fails and the kernel supports dynamically loadable
 * modules, it will then attempt to load a module of the same name or
 * alias.  If that fails it will send a query to any loaded crypto manager
 * to construct an algorithm on the fly.  A refcount is grabbed on the
 * algorithm which is then associated with the new transform.
 *
 * The returned transform is of a non-determinate type.  Most people
 * should use one of the more specific allocation functions such as
 * crypto_alloc_skcipher().
 *
 * In case of error the return value is an error pointer.
 */
void *crypto_alloc_tfm_node(const char *alg_name,
		       const struct crypto_type *frontend, u32 type, u32 mask,
		       int node)
{
	void *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_find_alg(alg_name, frontend, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = crypto_create_tfm_node(alg, frontend, node);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_tfm_node);

/**
 * crypto_destroy_tfm - Free crypto transform
 * @mem: Start of tfm slab
 * @tfm: Transform to free
 *
 * This function frees up the transform and any associated resources,
 * then drops the refcount on the associated algorithm.
 */
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
{
	struct crypto_alg *alg;

	if (IS_ERR_OR_NULL(mem))
		return;

	if (!refcount_dec_and_test(&tfm->refcnt))
		return;
	alg = tfm->__crt_alg;

	if (!tfm->exit && alg->cra_exit)
		alg->cra_exit(tfm);
	crypto_exit_ops(tfm);
	crypto_mod_put(alg);
	kfree_sensitive(mem);
}
EXPORT_SYMBOL_GPL(crypto_destroy_tfm);

int crypto_has_alg(const char *name, u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);

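/*
 * Example (editor's sketch; the algorithm name is illustrative): probe
 * whether an implementation exists before enabling a feature, without
 * holding a reference afterwards:
 *
 *	if (!crypto_has_alg("gcm(aes)", 0, 0))
 *		return -ENOENT;
 */
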
void crypto_req_done(void *data, int err)
{
	struct crypto_wait *wait = data;

	if (err == -EINPROGRESS)
		return;

	wait->err = err;
	complete(&wait->completion);
}
EXPORT_SYMBOL_GPL(crypto_req_done);

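/*
 * crypto_req_done() pairs with the crypto_wait helpers in
 * <linux/crypto.h>.  A typical synchronous wait for an async request
 * looks like this (editor's sketch; request setup elided):
 *
 *	DECLARE_CRYPTO_WAIT(wait);
 *
 *	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				   crypto_req_done, &wait);
 *	err = crypto_wait_req(crypto_ahash_digest(req), &wait);
 *
 * crypto_wait_req() turns an -EINPROGRESS or -EBUSY return into a sleep
 * on the completion and then reports wait->err, which is why the
 * intermediate -EINPROGRESS (backlog started) notification is ignored
 * above.
 */
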
MODULE_DESCRIPTION("Cryptographic core API");
MODULE_LICENSE("GPL");