// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched/signal.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/completion.h>
#include "internal.h"

LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg);

struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
	return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);

void crypto_mod_put(struct crypto_alg *alg)
{
	struct module *module = alg->cra_module;

	crypto_alg_put(alg);
	module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);

static inline int crypto_is_test_larval(struct crypto_larval *larval)
{
	return larval->alg.cra_driver_name[0];
}

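/*
 * Core algorithm lookup; the caller must hold crypto_alg_sem.  An exact
 * match on cra_driver_name wins outright; otherwise the highest-priority
 * algorithm whose cra_name matches is chosen.  The winner is returned
 * with a module reference held via crypto_mod_get().
 */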
static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
					      u32 mask)
{
	struct crypto_alg *q, *alg = NULL;
	int best = -2;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		int exact, fuzzy;

		if (crypto_is_moribund(q))
			continue;

		if ((q->cra_flags ^ type) & mask)
			continue;

		if (crypto_is_larval(q) &&
		    !crypto_is_test_larval((struct crypto_larval *)q) &&
		    ((struct crypto_larval *)q)->mask != mask)
			continue;

		exact = !strcmp(q->cra_driver_name, name);
		fuzzy = !strcmp(q->cra_name, name);
		if (!exact && !(fuzzy && q->cra_priority > best))
			continue;

		if (unlikely(!crypto_mod_get(q)))
			continue;

		best = q->cra_priority;
		if (alg)
			crypto_mod_put(alg);
		alg = q;

		if (exact)
			break;
	}

	return alg;
}

static void crypto_larval_destroy(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	BUG_ON(!crypto_is_larval(alg));
	if (larval->adult)
		crypto_mod_put(larval->adult);
	kfree(larval);
}

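/*
 * A larval is a temporary placeholder that stands in for an algorithm
 * which is still being loaded or constructed.  Concurrent lookups block
 * on the larval's completion in crypto_larval_wait() until the adult
 * algorithm registers, the waiter is killed, or the request times out.
 */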
struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
{
	struct crypto_larval *larval;

	larval = kzalloc(sizeof(*larval), GFP_KERNEL);
	if (!larval)
		return ERR_PTR(-ENOMEM);

	larval->mask = mask;
	larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
	larval->alg.cra_priority = -1;
	larval->alg.cra_destroy = crypto_larval_destroy;

	strlcpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
	init_completion(&larval->completion);

	return larval;
}
EXPORT_SYMBOL_GPL(crypto_larval_alloc);

static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
					    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_larval *larval;

	larval = crypto_larval_alloc(name, type, mask);
	if (IS_ERR(larval))
		return ERR_CAST(larval);

	refcount_set(&larval->alg.cra_refcnt, 2);

	down_write(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	if (!alg) {
		alg = &larval->alg;
		list_add(&alg->cra_list, &crypto_alg_list);
	}
	up_write(&crypto_alg_sem);

	if (alg != &larval->alg) {
		kfree(larval);
		if (crypto_is_larval(alg))
			alg = crypto_larval_wait(alg);
	}

	return alg;
}

void crypto_larval_kill(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	down_write(&crypto_alg_sem);
	list_del(&alg->cra_list);
	up_write(&crypto_alg_sem);
	complete_all(&larval->completion);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_larval_kill);

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;
	long timeout;

	timeout = wait_for_completion_killable_timeout(
		&larval->completion, 60 * HZ);

	alg = larval->adult;
	if (timeout < 0)
		alg = ERR_PTR(-EINTR);
	else if (!timeout)
		alg = ERR_PTR(-ETIMEDOUT);
	else if (!alg)
		alg = ERR_PTR(-ENOENT);
	else if (crypto_is_test_larval(larval) &&
		 !(alg->cra_flags & CRYPTO_ALG_TESTED))
		alg = ERR_PTR(-EAGAIN);
	else if (!crypto_mod_get(alg))
		alg = ERR_PTR(-EAGAIN);
	crypto_mod_put(&larval->alg);

	return alg;
}

static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
					    u32 mask)
{
	struct crypto_alg *alg;
	u32 test = 0;

	if (!((type | mask) & CRYPTO_ALG_TESTED))
		test |= CRYPTO_ALG_TESTED;

	down_read(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type | test, mask | test);
	if (!alg && test) {
		alg = __crypto_alg_lookup(name, type, mask);
		if (alg && !crypto_is_larval(alg)) {
			/* Test failed */
			crypto_mod_put(alg);
			alg = ERR_PTR(-ELIBBAD);
		}
	}
	up_read(&crypto_alg_sem);

	return alg;
}

static struct crypto_alg *crypto_larval_lookup(const char *name, u32 type,
					       u32 mask)
{
	struct crypto_alg *alg;

	if (!name)
		return ERR_PTR(-ENOENT);

	type &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
	mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);

	alg = crypto_alg_lookup(name, type, mask);
	if (!alg && !(mask & CRYPTO_NOLOAD)) {
		request_module("crypto-%s", name);

		if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask &
		      CRYPTO_ALG_NEED_FALLBACK))
			request_module("crypto-%s-all", name);

		alg = crypto_alg_lookup(name, type, mask);
	}

	if (!IS_ERR_OR_NULL(alg) && crypto_is_larval(alg))
		alg = crypto_larval_wait(alg);
	else if (!alg)
		alg = crypto_larval_add(name, type, mask);

	return alg;
}

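/*
 * Broadcast an algorithm request to the crypto notifier chain.  If no
 * listener handles the event, load the cryptomgr module and try once
 * more.
 */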
int crypto_probing_notify(unsigned long val, void *v)
{
	int ok;

	ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	if (ok == NOTIFY_DONE) {
		request_module("cryptomgr");
		ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	}

	return ok;
}
EXPORT_SYMBOL_GPL(crypto_probing_notify);

struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_alg *larval;
	int ok;

	/*
	 * If the internal flag is set for a cipher, require a caller to
	 * invoke the cipher with the internal flag to use that cipher.
	 * Also, if a caller wants to allocate a cipher that may or may
	 * not be an internal cipher, use type | CRYPTO_ALG_INTERNAL and
	 * !(mask & CRYPTO_ALG_INTERNAL).
	 */
	if (!((type | mask) & CRYPTO_ALG_INTERNAL))
		mask |= CRYPTO_ALG_INTERNAL;

	larval = crypto_larval_lookup(name, type, mask);
	if (IS_ERR(larval) || !crypto_is_larval(larval))
		return larval;

	ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);

	if (ok == NOTIFY_STOP)
		alg = crypto_larval_wait(larval);
	else {
		crypto_mod_put(larval);
		alg = ERR_PTR(-ENOENT);
	}
	crypto_larval_kill(larval);
	return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);

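/*
 * Usage sketch (illustrative, not part of the original file): a caller
 * that is allowed to use an internal implementation passes
 * CRYPTO_ALG_INTERNAL in @type while leaving it out of @mask, so that
 * both internal and ordinary ciphers match:
 *
 *	alg = crypto_alg_mod_lookup(name, type | CRYPTO_ALG_INTERNAL,
 *				    mask & ~CRYPTO_ALG_INTERNAL);
 *
 * Ordinary callers leave both bits clear; the code above then ORs
 * CRYPTO_ALG_INTERNAL into @mask so internal-only ciphers never match.
 */
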
static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = tfm->__crt_alg->cra_type;

	if (type_obj)
		return type_obj->init(tfm, type, mask);

	switch (crypto_tfm_alg_type(tfm)) {
	case CRYPTO_ALG_TYPE_CIPHER:
		return crypto_init_cipher_ops(tfm);

	case CRYPTO_ALG_TYPE_COMPRESS:
		return crypto_init_compress_ops(tfm);

	default:
		break;
	}

	BUG();
	return -EINVAL;
}

static void crypto_exit_ops(struct crypto_tfm *tfm)
{
	const struct crypto_type *type = tfm->__crt_alg->cra_type;

	if (type && tfm->exit)
		tfm->exit(tfm);
}

static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = alg->cra_type;
	unsigned int len;

	len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
	if (type_obj)
		return len + type_obj->ctxsize(alg, type, mask);

	switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
	default:
		BUG();

	case CRYPTO_ALG_TYPE_CIPHER:
		len += crypto_cipher_ctxsize(alg);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		len += crypto_compress_ctxsize(alg);
		break;
	}

	return len;
}

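/*
 * Mark an algorithm instance as dying.  The allocation paths below call
 * this when cra_init() fails with -EAGAIN; __crypto_alg_lookup() treats
 * a dying algorithm as moribund and skips it, so a retry can pick up
 * (or construct) a replacement.
 */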
void crypto_shoot_alg(struct crypto_alg *alg)
{
	down_write(&crypto_alg_sem);
	alg->cra_flags |= CRYPTO_ALG_DYING;
	up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_shoot_alg);

struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
				      u32 mask)
{
	struct crypto_tfm *tfm = NULL;
	unsigned int tfm_size;
	int err = -ENOMEM;

	tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
	tfm = kzalloc(tfm_size, GFP_KERNEL);
	if (tfm == NULL)
		goto out_err;

	tfm->__crt_alg = alg;

	err = crypto_init_ops(tfm, type, mask);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(tfm);
out_err:
	tfm = ERR_PTR(err);
out:
	return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);

/*
 *	crypto_alloc_base - Locate algorithm and allocate transform
 *	@alg_name: Name of algorithm
 *	@type: Type of algorithm
 *	@mask: Mask for type comparison
 *
 *	This function should not be used by new algorithm types.
 *	Please use crypto_alloc_tfm instead.
 *
 *	crypto_alloc_base() will first attempt to locate an already loaded
 *	algorithm.  If that fails and the kernel supports dynamically loadable
 *	modules, it will then attempt to load a module of the same name or
 *	alias.  If that fails it will send a query to any loaded crypto manager
 *	to construct an algorithm on the fly.  A refcount is grabbed on the
 *	algorithm which is then associated with the new transform.
 *
 *	The returned transform is of a non-determinate type.  Most people
 *	should use one of the more specific allocation functions such as
 *	crypto_alloc_skcipher().
 *
 *	In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
	struct crypto_tfm *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_alg_mod_lookup(alg_name, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = __crypto_alloc_tfm(alg, type, mask);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);

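/*
 * Usage sketch (illustrative, not part of the original file): a legacy
 * caller allocates a transform by name and must treat the return value
 * as a potential ERR_PTR.  The algorithm name and flags below are only
 * an example:
 *
 *	struct crypto_tfm *tfm;
 *
 *	tfm = crypto_alloc_base("aes", CRYPTO_ALG_TYPE_CIPHER,
 *				CRYPTO_ALG_TYPE_MASK);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	...
 *	crypto_free_tfm(tfm);
 */
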
void *crypto_create_tfm(struct crypto_alg *alg,
			const struct crypto_type *frontend)
{
	char *mem;
	struct crypto_tfm *tfm = NULL;
	unsigned int tfmsize;
	unsigned int total;
	int err = -ENOMEM;

	tfmsize = frontend->tfmsize;
	total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);

	mem = kzalloc(total, GFP_KERNEL);
	if (mem == NULL)
		goto out_err;

	tfm = (struct crypto_tfm *)(mem + tfmsize);
	tfm->__crt_alg = alg;

	err = frontend->init_tfm(tfm);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(mem);
out_err:
	mem = ERR_PTR(err);
out:
	return mem;
}
EXPORT_SYMBOL_GPL(crypto_create_tfm);

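/*
 * Look up an algorithm on behalf of a frontend type, folding the
 * frontend's fixed type and mask bits into the caller's before the
 * module-aware lookup.
 */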
struct crypto_alg *crypto_find_alg(const char *alg_name,
				   const struct crypto_type *frontend,
				   u32 type, u32 mask)
{
	if (frontend) {
		type &= frontend->maskclear;
		mask &= frontend->maskclear;
		type |= frontend->type;
		mask |= frontend->maskset;
	}

	return crypto_alg_mod_lookup(alg_name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_find_alg);

/*
 *	crypto_alloc_tfm - Locate algorithm and allocate transform
 *	@alg_name: Name of algorithm
 *	@frontend: Frontend algorithm type
 *	@type: Type of algorithm
 *	@mask: Mask for type comparison
 *
 *	crypto_alloc_tfm() will first attempt to locate an already loaded
 *	algorithm.  If that fails and the kernel supports dynamically loadable
 *	modules, it will then attempt to load a module of the same name or
 *	alias.  If that fails it will send a query to any loaded crypto manager
 *	to construct an algorithm on the fly.  A refcount is grabbed on the
 *	algorithm which is then associated with the new transform.
 *
 *	The returned transform is of a non-determinate type.  Most people
 *	should use one of the more specific allocation functions such as
 *	crypto_alloc_skcipher().
 *
 *	In case of error the return value is an error pointer.
 */
void *crypto_alloc_tfm(const char *alg_name,
		       const struct crypto_type *frontend, u32 type, u32 mask)
{
	void *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_find_alg(alg_name, frontend, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = crypto_create_tfm(alg, frontend);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_tfm);

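/*
 * Sketch: type-specific allocators are thin wrappers around
 * crypto_alloc_tfm().  crypto_alloc_shash(), for example, boils down to:
 *
 *	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
 */
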
/*
 *	crypto_destroy_tfm - Free crypto transform
 *	@mem: Start of tfm slab
 *	@tfm: Transform to free
 *
 *	This function frees up the transform and any associated resources,
 *	then drops the refcount on the associated algorithm.
 */
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
{
	struct crypto_alg *alg;

	if (unlikely(!mem))
		return;

	alg = tfm->__crt_alg;

	if (!tfm->exit && alg->cra_exit)
		alg->cra_exit(tfm);
	crypto_exit_ops(tfm);
	crypto_mod_put(alg);
	kfree_sensitive(mem);
}
EXPORT_SYMBOL_GPL(crypto_destroy_tfm);

int crypto_has_alg(const char *name, u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);

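/*
 * Usage sketch: probe for an algorithm before committing to it, e.g.
 *
 *	if (!crypto_has_alg("gcm(aes)", 0, 0))
 *		return -ENOENT;
 *
 * Note that this may trigger module loading just like an allocation.
 */
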
void crypto_req_done(struct crypto_async_request *req, int err)
{
	struct crypto_wait *wait = req->data;

	if (err == -EINPROGRESS)
		return;

	wait->err = err;
	complete(&wait->completion);
}
EXPORT_SYMBOL_GPL(crypto_req_done);

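/*
 * Usage sketch: crypto_req_done() is the completion callback used with
 * struct crypto_wait and crypto_wait_req() to drive an asynchronous
 * request synchronously:
 *
 *	DECLARE_CRYPTO_WAIT(wait);
 *
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
 *				      CRYPTO_TFM_REQ_MAY_SLEEP,
 *				      crypto_req_done, &wait);
 *	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 */
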
MODULE_DESCRIPTION("Cryptographic core API");
MODULE_LICENSE("GPL");