// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 */
#include <linux/err.h>
#include <linux/errno.h>
#include <linux/jump_label.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched/signal.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/completion.h>
#include "internal.h"
LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);

DEFINE_STATIC_KEY_FALSE(crypto_boot_test_finished);
EXPORT_SYMBOL_GPL(crypto_boot_test_finished);

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg);
struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
	return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);
void crypto_mod_put(struct crypto_alg *alg)
{
	struct module *module = alg->cra_module;

	crypto_alg_put(alg);
	module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);
static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
					      u32 mask)
{
	struct crypto_alg *q, *alg = NULL;
	int best = -2;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		int exact, fuzzy;

		if (crypto_is_moribund(q))
			continue;

		if ((q->cra_flags ^ type) & mask)
			continue;

		if (crypto_is_larval(q) &&
		    !crypto_is_test_larval((struct crypto_larval *)q) &&
		    ((struct crypto_larval *)q)->mask != mask)
			continue;

		exact = !strcmp(q->cra_driver_name, name);
		fuzzy = !strcmp(q->cra_name, name);
		if (!exact && !(fuzzy && q->cra_priority > best))
			continue;

		if (unlikely(!crypto_mod_get(q)))
			continue;

		best = q->cra_priority;
		if (alg)
			crypto_mod_put(alg);
		alg = q;

		if (exact)
			break;
	}

	return alg;
}
static void crypto_larval_destroy(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	BUG_ON(!crypto_is_larval(alg));
	if (!IS_ERR_OR_NULL(larval->adult))
		crypto_mod_put(larval->adult);
	kfree(larval);
}
struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
{
	struct crypto_larval *larval;

	larval = kzalloc(sizeof(*larval), GFP_KERNEL);
	if (!larval)
		return ERR_PTR(-ENOMEM);

	larval->mask = mask;
	larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
	larval->alg.cra_priority = -1;
	larval->alg.cra_destroy = crypto_larval_destroy;

	strlcpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
	init_completion(&larval->completion);

	return larval;
}
EXPORT_SYMBOL_GPL(crypto_larval_alloc);
static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
					    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_larval *larval;

	larval = crypto_larval_alloc(name, type, mask);
	if (IS_ERR(larval))
		return ERR_CAST(larval);

	refcount_set(&larval->alg.cra_refcnt, 2);

	down_write(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	if (!alg) {
		alg = &larval->alg;
		list_add(&alg->cra_list, &crypto_alg_list);
	}
	up_write(&crypto_alg_sem);

	if (alg != &larval->alg) {
		kfree(larval);
		if (crypto_is_larval(alg))
			alg = crypto_larval_wait(alg);
	}

	return alg;
}
void crypto_larval_kill(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	down_write(&crypto_alg_sem);
	list_del(&alg->cra_list);
	up_write(&crypto_alg_sem);
	complete_all(&larval->completion);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_larval_kill);
void crypto_wait_for_test(struct crypto_larval *larval)
{
	int err;

	err = crypto_probing_notify(CRYPTO_MSG_ALG_REGISTER, larval->adult);
	if (WARN_ON_ONCE(err != NOTIFY_STOP))
		goto out;

	err = wait_for_completion_killable(&larval->completion);
	WARN_ON(err);
	if (!err)
		crypto_notify(CRYPTO_MSG_ALG_LOADED, larval);

out:
	crypto_larval_kill(&larval->alg);
}
EXPORT_SYMBOL_GPL(crypto_wait_for_test);
static void crypto_start_test(struct crypto_larval *larval)
{
	if (!crypto_is_test_larval(larval))
		return;

	if (larval->test_started)
		return;

	down_write(&crypto_alg_sem);
	if (larval->test_started) {
		up_write(&crypto_alg_sem);
		return;
	}

	larval->test_started = true;
	up_write(&crypto_alg_sem);

	crypto_wait_for_test(larval);
}
static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;
	long timeout;

	if (!static_branch_likely(&crypto_boot_test_finished))
		crypto_start_test(larval);

	timeout = wait_for_completion_killable_timeout(
		&larval->completion, 60 * HZ);

	alg = larval->adult;
	if (timeout < 0)
		alg = ERR_PTR(-EINTR);
	else if (!timeout)
		alg = ERR_PTR(-ETIMEDOUT);
	else if (!alg)
		alg = ERR_PTR(-ENOENT);
	else if (IS_ERR(alg))
		;
	else if (crypto_is_test_larval(larval) &&
		 !(alg->cra_flags & CRYPTO_ALG_TESTED))
		alg = ERR_PTR(-EAGAIN);
	else if (alg->cra_flags & CRYPTO_ALG_FIPS_INTERNAL)
		alg = ERR_PTR(-EAGAIN);
	else if (!crypto_mod_get(alg))
		alg = ERR_PTR(-EAGAIN);
	crypto_mod_put(&larval->alg);

	return alg;
}
static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
					    u32 mask)
{
	const u32 fips = CRYPTO_ALG_FIPS_INTERNAL;
	struct crypto_alg *alg;
	u32 test = 0;

	if (!((type | mask) & CRYPTO_ALG_TESTED))
		test |= CRYPTO_ALG_TESTED;

	down_read(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, (type | test) & ~fips,
				  (mask | test) & ~fips);
	if (alg) {
		if (((type | mask) ^ fips) & fips)
			mask |= fips;
		mask &= fips;

		if (!crypto_is_larval(alg) &&
		    ((type ^ alg->cra_flags) & mask)) {
			/* Algorithm is disallowed in FIPS mode. */
			crypto_mod_put(alg);
			alg = ERR_PTR(-ENOENT);
		}
	} else if (test) {
		alg = __crypto_alg_lookup(name, type, mask);
		if (alg && !crypto_is_larval(alg)) {
			/* Test failed */
			crypto_mod_put(alg);
			alg = ERR_PTR(-ELIBBAD);
		}
	}
	up_read(&crypto_alg_sem);

	return alg;
}
static struct crypto_alg *crypto_larval_lookup(const char *name, u32 type,
					       u32 mask)
{
	struct crypto_alg *alg;

	if (!name)
		return ERR_PTR(-ENOENT);

	type &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
	mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);

	alg = crypto_alg_lookup(name, type, mask);
	if (!alg && !(mask & CRYPTO_NOLOAD)) {
		request_module("crypto-%s", name);

		if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask &
		      CRYPTO_ALG_NEED_FALLBACK))
			request_module("crypto-%s-all", name);

		alg = crypto_alg_lookup(name, type, mask);
	}

	if (!IS_ERR_OR_NULL(alg) && crypto_is_larval(alg))
		alg = crypto_larval_wait(alg);
	else if (!alg)
		alg = crypto_larval_add(name, type, mask);

	return alg;
}
int crypto_probing_notify(unsigned long val, void *v)
{
	int ok;

	ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	if (ok == NOTIFY_DONE) {
		request_module("cryptomgr");
		ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	}

	return ok;
}
EXPORT_SYMBOL_GPL(crypto_probing_notify);
struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_alg *larval;
	int ok;

	/*
	 * If the internal flag is set for a cipher, require a caller to
	 * invoke the cipher with the internal flag to use that cipher.
	 * Also, if a caller wants to allocate a cipher that may or may
	 * not be an internal cipher, use type | CRYPTO_ALG_INTERNAL and
	 * !(mask & CRYPTO_ALG_INTERNAL).
	 */
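	/*
	 * Illustrative sketch (an assumption, not code from this file): a
	 * caller willing to accept either an internal or a regular
	 * implementation follows the rule above and passes, roughly,
	 *
	 *	alg = crypto_alg_mod_lookup("aes", CRYPTO_ALG_INTERNAL, 0);
	 *
	 * where "aes" is an arbitrary example name.  With type == 0 and
	 * mask == 0, the test below adds CRYPTO_ALG_INTERNAL to the mask,
	 * so internal-only implementations are never returned.
	 */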
	if (!((type | mask) & CRYPTO_ALG_INTERNAL))
		mask |= CRYPTO_ALG_INTERNAL;

	larval = crypto_larval_lookup(name, type, mask);
	if (IS_ERR(larval) || !crypto_is_larval(larval))
		return larval;

	ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);

	if (ok == NOTIFY_STOP)
		alg = crypto_larval_wait(larval);
	else {
		crypto_mod_put(larval);
		alg = ERR_PTR(-ENOENT);
	}
	crypto_larval_kill(larval);
	return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);
static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = tfm->__crt_alg->cra_type;

	if (type_obj)
		return type_obj->init(tfm, type, mask);
	return 0;
}
static void crypto_exit_ops(struct crypto_tfm *tfm)
{
	const struct crypto_type *type = tfm->__crt_alg->cra_type;

	if (type && tfm->exit)
		tfm->exit(tfm);
}
static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = alg->cra_type;
	unsigned int len;

	len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
	if (type_obj)
		return len + type_obj->ctxsize(alg, type, mask);

	switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
	default:
		BUG();

	case CRYPTO_ALG_TYPE_CIPHER:
		len += crypto_cipher_ctxsize(alg);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		len += crypto_compress_ctxsize(alg);
		break;
	}

	return len;
}
void crypto_shoot_alg(struct crypto_alg *alg)
{
	down_write(&crypto_alg_sem);
	alg->cra_flags |= CRYPTO_ALG_DYING;
	up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_shoot_alg);
struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
				      u32 mask)
{
	struct crypto_tfm *tfm = NULL;
	unsigned int tfm_size;
	int err = -ENOMEM;

	tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
	tfm = kzalloc(tfm_size, GFP_KERNEL);
	if (tfm == NULL)
		goto out_err;

	tfm->__crt_alg = alg;

	err = crypto_init_ops(tfm, type, mask);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(tfm);
out_err:
	tfm = ERR_PTR(err);
out:
	return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);
/*
 *	crypto_alloc_base - Locate algorithm and allocate transform
 *	@alg_name: Name of algorithm
 *	@type: Type of algorithm
 *	@mask: Mask for type comparison
 *
 *	This function should not be used by new algorithm types.
 *	Please use crypto_alloc_tfm instead.
 *
 *	crypto_alloc_base() will first attempt to locate an already loaded
 *	algorithm.  If that fails and the kernel supports dynamically loadable
 *	modules, it will then attempt to load a module of the same name or
 *	alias.  If that fails it will send a query to any loaded crypto manager
 *	to construct an algorithm on the fly.  A refcount is grabbed on the
 *	algorithm which is then associated with the new transform.
 *
 *	The returned transform is of an indeterminate type.  Most people
 *	should use one of the more specific allocation functions such as
 *	crypto_alloc_skcipher().
 *
 *	In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
	struct crypto_tfm *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_alg_mod_lookup(alg_name, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = __crypto_alloc_tfm(alg, type, mask);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);
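/*
 * Illustrative usage sketch (an assumption, not code from this file): a
 * legacy caller would allocate and release a bare transform roughly as
 * below; "cbc(aes)" is an arbitrary example name and error handling is
 * minimal.
 *
 *	struct crypto_tfm *tfm = crypto_alloc_base("cbc(aes)", 0, 0);
 *
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	...
 *	crypto_free_tfm(tfm);
 */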
void *crypto_create_tfm_node(struct crypto_alg *alg,
			const struct crypto_type *frontend,
			int node)
{
	char *mem;
	struct crypto_tfm *tfm = NULL;
	unsigned int tfmsize;
	unsigned int total;
	int err = -ENOMEM;

	tfmsize = frontend->tfmsize;
	total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);

	mem = kzalloc_node(total, GFP_KERNEL, node);
	if (mem == NULL)
		goto out_err;

	tfm = (struct crypto_tfm *)(mem + tfmsize);
	tfm->__crt_alg = alg;
	tfm->node = node;

	err = frontend->init_tfm(tfm);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(mem);
out_err:
	mem = ERR_PTR(err);
out:
	return mem;
}
EXPORT_SYMBOL_GPL(crypto_create_tfm_node);
struct crypto_alg *crypto_find_alg(const char *alg_name,
				   const struct crypto_type *frontend,
				   u32 type, u32 mask)
{
	if (frontend) {
		type &= frontend->maskclear;
		mask &= frontend->maskclear;
		type |= frontend->type;
		mask |= frontend->maskset;
	}

	return crypto_alg_mod_lookup(alg_name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_find_alg);
/*
 *	crypto_alloc_tfm_node - Locate algorithm and allocate transform
 *	@alg_name: Name of algorithm
 *	@frontend: Frontend algorithm type
 *	@type: Type of algorithm
 *	@mask: Mask for type comparison
 *	@node: NUMA node on which to allocate the transform and place
 *		requests; NUMA_NO_NODE means the caller has no preference.
 *
 *	crypto_alloc_tfm() will first attempt to locate an already loaded
 *	algorithm.  If that fails and the kernel supports dynamically loadable
 *	modules, it will then attempt to load a module of the same name or
 *	alias.  If that fails it will send a query to any loaded crypto manager
 *	to construct an algorithm on the fly.  A refcount is grabbed on the
 *	algorithm which is then associated with the new transform.
 *
 *	The returned transform is of an indeterminate type.  Most people
 *	should use one of the more specific allocation functions such as
 *	crypto_alloc_skcipher().
 *
 *	In case of error the return value is an error pointer.
 */
void *crypto_alloc_tfm_node(const char *alg_name,
		       const struct crypto_type *frontend, u32 type, u32 mask,
		       int node)
{
	void *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_find_alg(alg_name, frontend, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = crypto_create_tfm_node(alg, frontend, node);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_tfm_node);
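/*
 * Illustrative sketch (an assumption, not code from this file): the
 * type-specific allocators reach this helper through crypto_alloc_tfm(),
 * which passes NUMA_NO_NODE; e.g. crypto_alloc_skcipher() boils down to
 * roughly
 *
 *	return crypto_alloc_tfm_node(alg_name, &crypto_skcipher_type,
 *				     type, mask, NUMA_NO_NODE);
 *
 * with the skcipher frontend supplying tfmsize, extsize() and init_tfm().
 */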
/*
 *	crypto_destroy_tfm - Free crypto transform
 *	@mem: Start of tfm slab
 *	@tfm: Transform to free
 *
 *	This function frees up the transform and any associated resources,
 *	then drops the refcount on the associated algorithm.
 */
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
{
	struct crypto_alg *alg;

	if (IS_ERR_OR_NULL(mem))
		return;

	alg = tfm->__crt_alg;

	if (!tfm->exit && alg->cra_exit)
		alg->cra_exit(tfm);
	crypto_exit_ops(tfm);
	crypto_mod_put(alg);
	kfree_sensitive(mem);
}
EXPORT_SYMBOL_GPL(crypto_destroy_tfm);
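/*
 * Hedged sketch (an assumption, not code from this file): the
 * type-specific free helpers funnel into crypto_destroy_tfm(); e.g.
 * crypto_free_tfm() in <linux/crypto.h> is essentially
 *
 *	void crypto_free_tfm(struct crypto_tfm *tfm)
 *	{
 *		return crypto_destroy_tfm(tfm, tfm);
 *	}
 *
 * where @mem and @tfm coincide because a bare tfm has no frontend header
 * in front of it.
 */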
int crypto_has_alg(const char *name, u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);
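/*
 * Illustrative sketch (an assumption, not code from this file): callers
 * can probe for algorithm availability before committing to it, e.g.
 *
 *	if (!crypto_has_alg("gcm(aes)", 0, 0))
 *		return -ENOENT;
 *
 * where "gcm(aes)" is an arbitrary example name.
 */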
void crypto_req_done(struct crypto_async_request *req, int err)
{
	struct crypto_wait *wait = req->data;

	if (err == -EINPROGRESS)
		return;

	wait->err = err;
	complete(&wait->completion);
}
EXPORT_SYMBOL_GPL(crypto_req_done);
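/*
 * Illustrative usage sketch (an assumption, not code from this file):
 * callers typically pair crypto_req_done() with a struct crypto_wait and
 * crypto_wait_req() to run an asynchronous request synchronously:
 *
 *	DECLARE_CRYPTO_WAIT(wait);
 *
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
 *				      CRYPTO_TFM_REQ_MAY_SLEEP,
 *				      crypto_req_done, &wait);
 *	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 */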
MODULE_DESCRIPTION("Cryptographic core API");
MODULE_LICENSE("GPL");