// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched/signal.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/completion.h>
#include "internal.h"

LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg);

struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
        return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);

void crypto_mod_put(struct crypto_alg *alg)
{
        struct module *module = alg->cra_module;

        crypto_alg_put(alg);
        module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);

static inline int crypto_is_test_larval(struct crypto_larval *larval)
{
        return larval->alg.cra_driver_name[0];
}

static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
                                              u32 mask)
{
        struct crypto_alg *q, *alg = NULL;
        int best = -2;

        list_for_each_entry(q, &crypto_alg_list, cra_list) {
                int exact, fuzzy;

                if (crypto_is_moribund(q))
                        continue;

                if ((q->cra_flags ^ type) & mask)
                        continue;

                if (crypto_is_larval(q) &&
                    !crypto_is_test_larval((struct crypto_larval *)q) &&
                    ((struct crypto_larval *)q)->mask != mask)
                        continue;

                exact = !strcmp(q->cra_driver_name, name);
                fuzzy = !strcmp(q->cra_name, name);
                if (!exact && !(fuzzy && q->cra_priority > best))
                        continue;

                if (unlikely(!crypto_mod_get(q)))
                        continue;

                best = q->cra_priority;
                if (alg)
                        crypto_mod_put(alg);
                alg = q;

                if (exact)
                        break;
        }

        return alg;
}

static void crypto_larval_destroy(struct crypto_alg *alg)
{
        struct crypto_larval *larval = (void *)alg;

        BUG_ON(!crypto_is_larval(alg));
        if (!IS_ERR_OR_NULL(larval->adult))
                crypto_mod_put(larval->adult);
        kfree(larval);
}

struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
{
        struct crypto_larval *larval;

        larval = kzalloc(sizeof(*larval), GFP_KERNEL);
        if (!larval)
                return ERR_PTR(-ENOMEM);

        larval->mask = mask;
        larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
        larval->alg.cra_priority = -1;
        larval->alg.cra_destroy = crypto_larval_destroy;

        strlcpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
        init_completion(&larval->completion);

        return larval;
}
EXPORT_SYMBOL_GPL(crypto_larval_alloc);

static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
                                            u32 mask)
{
        struct crypto_alg *alg;
        struct crypto_larval *larval;

        larval = crypto_larval_alloc(name, type, mask);
        if (IS_ERR(larval))
                return ERR_CAST(larval);

        refcount_set(&larval->alg.cra_refcnt, 2);

        down_write(&crypto_alg_sem);
        alg = __crypto_alg_lookup(name, type, mask);
        if (!alg) {
                alg = &larval->alg;
                list_add(&alg->cra_list, &crypto_alg_list);
        }
        up_write(&crypto_alg_sem);

        if (alg != &larval->alg) {
                kfree(larval);
                if (crypto_is_larval(alg))
                        alg = crypto_larval_wait(alg);
        }

        return alg;
}

void crypto_larval_kill(struct crypto_alg *alg)
{
        struct crypto_larval *larval = (void *)alg;

        down_write(&crypto_alg_sem);
        list_del(&alg->cra_list);
        up_write(&crypto_alg_sem);
        complete_all(&larval->completion);
        crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_larval_kill);

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
        struct crypto_larval *larval = (void *)alg;
        long timeout;

        timeout = wait_for_completion_killable_timeout(
                &larval->completion, 60 * HZ);

        alg = larval->adult;
        if (timeout < 0)
                alg = ERR_PTR(-EINTR);
        else if (!timeout)
                alg = ERR_PTR(-ETIMEDOUT);
        else if (!alg)
                alg = ERR_PTR(-ENOENT);
        else if (IS_ERR(alg))
                ;
        else if (crypto_is_test_larval(larval) &&
                 !(alg->cra_flags & CRYPTO_ALG_TESTED))
                alg = ERR_PTR(-EAGAIN);
        else if (!crypto_mod_get(alg))
                alg = ERR_PTR(-EAGAIN);
        crypto_mod_put(&larval->alg);

        return alg;
}

static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
                                            u32 mask)
{
        struct crypto_alg *alg;
        u32 test = 0;

        if (!((type | mask) & CRYPTO_ALG_TESTED))
                test |= CRYPTO_ALG_TESTED;

        down_read(&crypto_alg_sem);
        alg = __crypto_alg_lookup(name, type | test, mask | test);
        if (!alg && test) {
                alg = __crypto_alg_lookup(name, type, mask);
                if (alg && !crypto_is_larval(alg)) {
                        /* Test failed */
                        crypto_mod_put(alg);
                        alg = ERR_PTR(-ELIBBAD);
                }
        }
        up_read(&crypto_alg_sem);

        return alg;
}

static struct crypto_alg *crypto_larval_lookup(const char *name, u32 type,
                                               u32 mask)
{
        struct crypto_alg *alg;

        if (!name)
                return ERR_PTR(-ENOENT);

        type &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
        mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);

        alg = crypto_alg_lookup(name, type, mask);
        if (!alg && !(mask & CRYPTO_NOLOAD)) {
                request_module("crypto-%s", name);

                if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask &
                      CRYPTO_ALG_NEED_FALLBACK))
                        request_module("crypto-%s-all", name);

                alg = crypto_alg_lookup(name, type, mask);
        }

        if (!IS_ERR_OR_NULL(alg) && crypto_is_larval(alg))
                alg = crypto_larval_wait(alg);
        else if (!alg)
                alg = crypto_larval_add(name, type, mask);

        return alg;
}

int crypto_probing_notify(unsigned long val, void *v)
{
        int ok;

        ok = blocking_notifier_call_chain(&crypto_chain, val, v);
        if (ok == NOTIFY_DONE) {
                request_module("cryptomgr");
                ok = blocking_notifier_call_chain(&crypto_chain, val, v);
        }

        return ok;
}
EXPORT_SYMBOL_GPL(crypto_probing_notify);

struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
        struct crypto_alg *alg;
        struct crypto_alg *larval;
        int ok;

        /*
         * If the internal flag is set for a cipher, require a caller to
         * invoke the cipher with the internal flag to use that cipher.
         * Also, if a caller wants to allocate a cipher that may or may
         * not be an internal cipher, use type | CRYPTO_ALG_INTERNAL and
         * !(mask & CRYPTO_ALG_INTERNAL).
         */
        if (!((type | mask) & CRYPTO_ALG_INTERNAL))
                mask |= CRYPTO_ALG_INTERNAL;

        larval = crypto_larval_lookup(name, type, mask);
        if (IS_ERR(larval) || !crypto_is_larval(larval))
                return larval;

        ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);

        if (ok == NOTIFY_STOP)
                alg = crypto_larval_wait(larval);
        else {
                crypto_mod_put(larval);
                alg = ERR_PTR(-ENOENT);
        }
        crypto_larval_kill(larval);
        return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);

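/*
 * Illustrative sketch, not part of the original file: the two calling
 * conventions for CRYPTO_ALG_INTERNAL described in the comment above.
 * The algorithm name and the function name are placeholders; a caller
 * that gets a non-error result owns a reference and must drop it with
 * crypto_mod_put().
 */
static struct crypto_alg * __maybe_unused example_internal_lookup(void)
{
        struct crypto_alg *alg;

        /* Normal caller: internal-only ciphers are masked out for us. */
        alg = crypto_alg_mod_lookup("aes", 0, 0);
        if (!IS_ERR(alg))
                return alg;

        /*
         * A caller prepared to accept internal ciphers as well passes
         * type | CRYPTO_ALG_INTERNAL with CRYPTO_ALG_INTERNAL clear in
         * the mask, so the flag is ignored during matching.
         */
        return crypto_alg_mod_lookup("aes", CRYPTO_ALG_INTERNAL, 0);
}
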
static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
        const struct crypto_type *type_obj = tfm->__crt_alg->cra_type;

        if (type_obj)
                return type_obj->init(tfm, type, mask);

        switch (crypto_tfm_alg_type(tfm)) {
        case CRYPTO_ALG_TYPE_CIPHER:
                return crypto_init_cipher_ops(tfm);

        case CRYPTO_ALG_TYPE_COMPRESS:
                return crypto_init_compress_ops(tfm);

        default:
                break;
        }

        BUG();
        return -EINVAL;
}

static void crypto_exit_ops(struct crypto_tfm *tfm)
{
        const struct crypto_type *type = tfm->__crt_alg->cra_type;

        if (type && tfm->exit)
                tfm->exit(tfm);
}

static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
        const struct crypto_type *type_obj = alg->cra_type;
        unsigned int len;

        len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
        if (type_obj)
                return len + type_obj->ctxsize(alg, type, mask);

        switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
        default:
                BUG();

        case CRYPTO_ALG_TYPE_CIPHER:
                len += crypto_cipher_ctxsize(alg);
                break;

        case CRYPTO_ALG_TYPE_COMPRESS:
                len += crypto_compress_ctxsize(alg);
                break;
        }

        return len;
}

static void crypto_shoot_alg(struct crypto_alg *alg)
{
        down_write(&crypto_alg_sem);
        alg->cra_flags |= CRYPTO_ALG_DYING;
        up_write(&crypto_alg_sem);
}

struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
                                      u32 mask)
{
        struct crypto_tfm *tfm = NULL;
        unsigned int tfm_size;
        int err = -ENOMEM;

        tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
        tfm = kzalloc(tfm_size, GFP_KERNEL);
        if (tfm == NULL)
                goto out_err;

        tfm->__crt_alg = alg;

        err = crypto_init_ops(tfm, type, mask);
        if (err)
                goto out_free_tfm;

        if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
                goto cra_init_failed;

        goto out;

cra_init_failed:
        crypto_exit_ops(tfm);
out_free_tfm:
        if (err == -EAGAIN)
                crypto_shoot_alg(alg);
        kfree(tfm);
out_err:
        tfm = ERR_PTR(err);
out:
        return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);

/*
 * crypto_alloc_base - Locate algorithm and allocate transform
 * @alg_name: Name of algorithm
 * @type: Type of algorithm
 * @mask: Mask for type comparison
 *
 * This function should not be used by new algorithm types.
 * Please use crypto_alloc_tfm instead.
 *
 * crypto_alloc_base() will first attempt to locate an already loaded
 * algorithm. If that fails and the kernel supports dynamically loadable
 * modules, it will then attempt to load a module of the same name or
 * alias. If that fails it will send a query to any loaded crypto manager
 * to construct an algorithm on the fly. A refcount is grabbed on the
 * algorithm which is then associated with the new transform.
 *
 * The returned transform is of a non-determinate type. Most people
 * should use one of the more specific allocation functions such as
 * crypto_alloc_blkcipher.
 *
 * In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
        struct crypto_tfm *tfm;
        int err;

        for (;;) {
                struct crypto_alg *alg;

                alg = crypto_alg_mod_lookup(alg_name, type, mask);
                if (IS_ERR(alg)) {
                        err = PTR_ERR(alg);
                        goto err;
                }

                tfm = __crypto_alloc_tfm(alg, type, mask);
                if (!IS_ERR(tfm))
                        return tfm;

                crypto_mod_put(alg);
                err = PTR_ERR(tfm);

err:
                if (err != -EAGAIN)
                        break;
                if (fatal_signal_pending(current)) {
                        err = -EINTR;
                        break;
                }
        }

        return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);

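/*
 * Illustrative sketch, not part of the original file: legacy usage of
 * crypto_alloc_base() to obtain a single-block cipher transform.  The
 * function name is a placeholder; new code should use a type-specific
 * allocator such as crypto_alloc_shash() or crypto_alloc_skcipher().
 */
static int __maybe_unused example_alloc_base(void)
{
        struct crypto_tfm *tfm;

        tfm = crypto_alloc_base("aes", CRYPTO_ALG_TYPE_CIPHER,
                                CRYPTO_ALG_TYPE_MASK);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);    /* e.g. -ENOENT if unavailable */

        /* ... use the transform via crypto_cipher_* wrappers ... */

        crypto_free_tfm(tfm);           /* drops the algorithm refcount */
        return 0;
}
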
void *crypto_create_tfm(struct crypto_alg *alg,
                        const struct crypto_type *frontend)
{
        char *mem;
        struct crypto_tfm *tfm = NULL;
        unsigned int tfmsize;
        unsigned int total;
        int err = -ENOMEM;

        tfmsize = frontend->tfmsize;
        total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);

        mem = kzalloc(total, GFP_KERNEL);
        if (mem == NULL)
                goto out_err;

        tfm = (struct crypto_tfm *)(mem + tfmsize);
        tfm->__crt_alg = alg;

        err = frontend->init_tfm(tfm);
        if (err)
                goto out_free_tfm;

        if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
                goto cra_init_failed;

        goto out;

cra_init_failed:
        crypto_exit_ops(tfm);
out_free_tfm:
        if (err == -EAGAIN)
                crypto_shoot_alg(alg);
        kfree(mem);
out_err:
        mem = ERR_PTR(err);
out:
        return mem;
}
EXPORT_SYMBOL_GPL(crypto_create_tfm);

struct crypto_alg *crypto_find_alg(const char *alg_name,
                                   const struct crypto_type *frontend,
                                   u32 type, u32 mask)
{
        type &= frontend->maskclear;
        mask &= frontend->maskclear;
        type |= frontend->type;
        mask |= frontend->maskset;

        return crypto_alg_mod_lookup(alg_name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_find_alg);

/*
 * crypto_alloc_tfm - Locate algorithm and allocate transform
 * @alg_name: Name of algorithm
 * @frontend: Frontend algorithm type
 * @type: Type of algorithm
 * @mask: Mask for type comparison
 *
 * crypto_alloc_tfm() will first attempt to locate an already loaded
 * algorithm. If that fails and the kernel supports dynamically loadable
 * modules, it will then attempt to load a module of the same name or
 * alias. If that fails it will send a query to any loaded crypto manager
 * to construct an algorithm on the fly. A refcount is grabbed on the
 * algorithm which is then associated with the new transform.
 *
 * The returned transform is of a non-determinate type. Most people
 * should use one of the more specific allocation functions such as
 * crypto_alloc_blkcipher.
 *
 * In case of error the return value is an error pointer.
 */
void *crypto_alloc_tfm(const char *alg_name,
                       const struct crypto_type *frontend, u32 type, u32 mask)
{
        void *tfm;
        int err;

        for (;;) {
                struct crypto_alg *alg;

                alg = crypto_find_alg(alg_name, frontend, type, mask);
                if (IS_ERR(alg)) {
                        err = PTR_ERR(alg);
                        goto err;
                }

                tfm = crypto_create_tfm(alg, frontend);
                if (!IS_ERR(tfm))
                        return tfm;

                crypto_mod_put(alg);
                err = PTR_ERR(tfm);

err:
                if (err != -EAGAIN)
                        break;
                if (fatal_signal_pending(current)) {
                        err = -EINTR;
                        break;
                }
        }

        return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_tfm);

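/*
 * For reference, a sketch of in-tree usage (not part of this file): the
 * type-specific allocators are thin wrappers around crypto_alloc_tfm().
 * The shash frontend in crypto/shash.c, for example, boils down to:
 *
 *      struct crypto_shash *crypto_alloc_shash(const char *alg_name,
 *                                              u32 type, u32 mask)
 *      {
 *              return crypto_alloc_tfm(alg_name, &crypto_shash_type,
 *                                      type, mask);
 *      }
 *
 * where crypto_shash_type is that file's static struct crypto_type,
 * supplying the tfmsize, extsize and init_tfm callbacks used above.
 */
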
/*
 * crypto_destroy_tfm - Free crypto transform
 * @mem: Start of tfm slab
 * @tfm: Transform to free
 *
 * This function frees up the transform and any associated resources,
 * then drops the refcount on the associated algorithm.
 */
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
{
        struct crypto_alg *alg;

        if (unlikely(!mem))
                return;

        alg = tfm->__crt_alg;

        if (!tfm->exit && alg->cra_exit)
                alg->cra_exit(tfm);
        crypto_exit_ops(tfm);
        crypto_mod_put(alg);
        kzfree(mem);
}
EXPORT_SYMBOL_GPL(crypto_destroy_tfm);

int crypto_has_alg(const char *name, u32 type, u32 mask)
{
        int ret = 0;
        struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

        if (!IS_ERR(alg)) {
                crypto_mod_put(alg);
                ret = 1;
        }

        return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);

void crypto_req_done(struct crypto_async_request *req, int err)
{
        struct crypto_wait *wait = req->data;

        if (err == -EINPROGRESS)
                return;

        wait->err = err;
        complete(&wait->completion);
}
EXPORT_SYMBOL_GPL(crypto_req_done);

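/*
 * Illustrative sketch, not part of the original file: how callers pair
 * crypto_req_done() with crypto_wait_req() (both from <linux/crypto.h>)
 * to run an asynchronous request synchronously.  Assumes
 * <crypto/skcipher.h>; "req" stands for a fully initialised skcipher
 * request, and the function name is a placeholder.
 */
static int __maybe_unused example_sync_encrypt(struct skcipher_request *req)
{
        DECLARE_CRYPTO_WAIT(wait);

        /* Route the completion callback to crypto_req_done(). */
        skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
                                           CRYPTO_TFM_REQ_MAY_SLEEP,
                                      crypto_req_done, &wait);

        /* Resolves -EINPROGRESS/-EBUSY and returns the final status. */
        return crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
}
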
MODULE_DESCRIPTION("Cryptographic core API");
MODULE_LICENSE("GPL");