/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/string.h>
#include "internal.h"

LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);

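/*
 * Example (illustrative sketch, not part of the original file): a manager
 * such as cryptomgr hooks crypto_chain to learn when a lookup needs an
 * algorithm constructed on the fly.  The handler and notifier_block names
 * below are hypothetical.
 */
#if 0
static int example_notify(struct notifier_block *this, unsigned long msg,
			  void *data)
{
	if (msg == CRYPTO_MSG_ALG_REQUEST)
		/* try to instantiate the algorithm named by the larval */;
	return NOTIFY_DONE;
}

static struct notifier_block example_nb = {
	.notifier_call = example_notify,
};

/* registration: blocking_notifier_chain_register(&crypto_chain, &example_nb); */
#endif
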
struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
	return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);

void crypto_mod_put(struct crypto_alg *alg)
{
	struct module *module = alg->cra_module;

	crypto_alg_put(alg);
	module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);

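/*
 * A larval is a temporary placeholder on crypto_alg_list for an algorithm
 * that is still being loaded, constructed or tested; concurrent lookups
 * block on its completion until the finished ("adult") algorithm replaces
 * it or the wait times out.
 */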
static inline int crypto_is_test_larval(struct crypto_larval *larval)
{
	return larval->alg.cra_driver_name[0];
}

static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
					      u32 mask)
{
	struct crypto_alg *q, *alg = NULL;
	int best = -2;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		int exact, fuzzy;

		if (crypto_is_moribund(q))
			continue;

		if ((q->cra_flags ^ type) & mask)
			continue;

		if (crypto_is_larval(q) &&
		    !crypto_is_test_larval((struct crypto_larval *)q) &&
		    ((struct crypto_larval *)q)->mask != mask)
			continue;

		/* Prefer an exact driver-name match; otherwise keep the
		 * highest-priority algorithm whose cra_name matches. */
		exact = !strcmp(q->cra_driver_name, name);
		fuzzy = !strcmp(q->cra_name, name);
		if (!exact && !(fuzzy && q->cra_priority > best))
			continue;

		if (unlikely(!crypto_mod_get(q)))
			continue;

		best = q->cra_priority;
		if (alg)
			crypto_mod_put(alg);
		alg = q;

		if (exact)
			break;
	}

	return alg;
}

static void crypto_larval_destroy(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	BUG_ON(!crypto_is_larval(alg));
	if (larval->adult)
		crypto_mod_put(larval->adult);
	kfree(larval);
}

struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
{
	struct crypto_larval *larval;

	larval = kzalloc(sizeof(*larval), GFP_KERNEL);
	if (!larval)
		return ERR_PTR(-ENOMEM);

	larval->mask = mask;
	larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
	larval->alg.cra_priority = -1;
	larval->alg.cra_destroy = crypto_larval_destroy;

	strlcpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
	init_completion(&larval->completion);

	return larval;
}
EXPORT_SYMBOL_GPL(crypto_larval_alloc);

static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
					    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_larval *larval;

	larval = crypto_larval_alloc(name, type, mask);
	if (IS_ERR(larval))
		return ERR_CAST(larval);

	atomic_set(&larval->alg.cra_refcnt, 2);

	down_write(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	if (!alg) {
		alg = &larval->alg;
		list_add(&alg->cra_list, &crypto_alg_list);
	}
	up_write(&crypto_alg_sem);

	/* Somebody else registered the same name first; drop ours. */
	if (alg != &larval->alg)
		kfree(larval);

	return alg;
}

void crypto_larval_kill(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	down_write(&crypto_alg_sem);
	list_del(&alg->cra_list);
	up_write(&crypto_alg_sem);
	complete_all(&larval->completion);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_larval_kill);

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;
	long timeout;

	timeout = wait_for_completion_interruptible_timeout(
		&larval->completion, 60 * HZ);

	alg = larval->adult;
	if (timeout < 0)
		alg = ERR_PTR(-EINTR);
	else if (!timeout)
		alg = ERR_PTR(-ETIMEDOUT);
	else if (!alg)
		alg = ERR_PTR(-ENOENT);
	else if (crypto_is_test_larval(larval) &&
		 !(alg->cra_flags & CRYPTO_ALG_TESTED))
		alg = ERR_PTR(-EAGAIN);
	else if (!crypto_mod_get(alg))
		alg = ERR_PTR(-EAGAIN);
	crypto_mod_put(&larval->alg);

	return alg;
}

struct crypto_alg *crypto_alg_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	down_read(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	up_read(&crypto_alg_sem);

	return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_lookup);

struct crypto_alg *crypto_larval_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	if (!name)
		return ERR_PTR(-ENOENT);

	mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
	type &= mask;

	alg = crypto_alg_lookup(name, type, mask);
	if (!alg) {
		request_module("%s", name);

		if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask &
		      CRYPTO_ALG_NEED_FALLBACK))
			request_module("%s-all", name);

		alg = crypto_alg_lookup(name, type, mask);
	}

	if (alg)
		return crypto_is_larval(alg) ? crypto_larval_wait(alg) : alg;

	return crypto_larval_add(name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_larval_lookup);

int crypto_probing_notify(unsigned long val, void *v)
{
	int ok;

	ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	if (ok == NOTIFY_DONE) {
		request_module("cryptomgr");
		ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	}

	return ok;
}
EXPORT_SYMBOL_GPL(crypto_probing_notify);

struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_alg *larval;
	int ok;

	if (!((type | mask) & CRYPTO_ALG_TESTED)) {
		type |= CRYPTO_ALG_TESTED;
		mask |= CRYPTO_ALG_TESTED;
	}

	larval = crypto_larval_lookup(name, type, mask);
	if (IS_ERR(larval) || !crypto_is_larval(larval))
		return larval;

	ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);

	if (ok == NOTIFY_STOP)
		alg = crypto_larval_wait(larval);
	else {
		crypto_mod_put(larval);
		alg = ERR_PTR(-ENOENT);
	}
	crypto_larval_kill(larval);
	return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);

static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = tfm->__crt_alg->cra_type;

	if (type_obj)
		return type_obj->init(tfm, type, mask);

	switch (crypto_tfm_alg_type(tfm)) {
	case CRYPTO_ALG_TYPE_CIPHER:
		return crypto_init_cipher_ops(tfm);

	case CRYPTO_ALG_TYPE_COMPRESS:
		return crypto_init_compress_ops(tfm);

	default:
		break;
	}

	BUG();
	return -EINVAL;
}

static void crypto_exit_ops(struct crypto_tfm *tfm)
{
	const struct crypto_type *type = tfm->__crt_alg->cra_type;

	if (type) {
		if (tfm->exit)
			tfm->exit(tfm);
		return;
	}

	switch (crypto_tfm_alg_type(tfm)) {
	case CRYPTO_ALG_TYPE_CIPHER:
		crypto_exit_cipher_ops(tfm);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		crypto_exit_compress_ops(tfm);
		break;

	default:
		BUG();
	}
}

static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = alg->cra_type;
	unsigned int len;

	len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
	if (type_obj)
		return len + type_obj->ctxsize(alg, type, mask);

	switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
	default:
		BUG();

	case CRYPTO_ALG_TYPE_CIPHER:
		len += crypto_cipher_ctxsize(alg);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		len += crypto_compress_ctxsize(alg);
		break;
	}

	return len;
}

void crypto_shoot_alg(struct crypto_alg *alg)
{
	down_write(&crypto_alg_sem);
	alg->cra_flags |= CRYPTO_ALG_DYING;
	up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_shoot_alg);

struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
				      u32 mask)
{
	struct crypto_tfm *tfm = NULL;
	unsigned int tfm_size;
	int err = -ENOMEM;

	tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
	tfm = kzalloc(tfm_size, GFP_KERNEL);
	if (tfm == NULL)
		goto out_err;

	tfm->__crt_alg = alg;

	err = crypto_init_ops(tfm, type, mask);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(tfm);
out_err:
	tfm = ERR_PTR(err);
out:
	return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);

/*
 *	crypto_alloc_base - Locate algorithm and allocate transform
 *	@alg_name: Name of algorithm
 *	@type: Type of algorithm
 *	@mask: Mask for type comparison
 *
 *	This function should not be used by new algorithm types.
 *	Please use crypto_alloc_tfm instead.
 *
 *	crypto_alloc_base() will first attempt to locate an already loaded
 *	algorithm.  If that fails and the kernel supports dynamically loadable
 *	modules, it will then attempt to load a module of the same name or
 *	alias.  If that fails it will send a query to any loaded crypto manager
 *	to construct an algorithm on the fly.  A refcount is grabbed on the
 *	algorithm which is then associated with the new transform.
 *
 *	The returned transform is of a non-determinate type.  Most people
 *	should use one of the more specific allocation functions such as
 *	crypto_alloc_blkcipher.
 *
 *	In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
	struct crypto_tfm *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_alg_mod_lookup(alg_name, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = __crypto_alloc_tfm(alg, type, mask);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		/* Retry on -EAGAIN unless a signal is pending. */
		if (err != -EAGAIN)
			break;
		if (signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);

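/*
 * Example (illustrative, not part of the original file): a legacy caller
 * allocating a transform by name with crypto_alloc_base().  The algorithm
 * name and the CRYPTO_ALG_ASYNC mask are placeholder choices.
 */
#if 0
static int example_alloc_base(void)
{
	struct crypto_tfm *tfm;

	tfm = crypto_alloc_base("cbc(aes)", 0, CRYPTO_ALG_ASYNC);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* ... drive the transform through a type-specific wrapper ... */

	crypto_free_tfm(tfm);
	return 0;
}
#endif
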
void *crypto_create_tfm(struct crypto_alg *alg,
			const struct crypto_type *frontend)
{
	char *mem;
	struct crypto_tfm *tfm = NULL;
	unsigned int tfmsize;
	unsigned int total;
	int err = -ENOMEM;

	tfmsize = frontend->tfmsize;
	total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);

	mem = kzalloc(total, GFP_KERNEL);
	if (mem == NULL)
		goto out_err;

	tfm = (struct crypto_tfm *)(mem + tfmsize);
	tfm->__crt_alg = alg;

	err = frontend->init_tfm(tfm);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(mem);
out_err:
	mem = ERR_PTR(err);
out:
	return mem;
}
EXPORT_SYMBOL_GPL(crypto_create_tfm);

struct crypto_alg *crypto_find_alg(const char *alg_name,
				   const struct crypto_type *frontend,
				   u32 type, u32 mask)
{
	struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask) =
		crypto_alg_mod_lookup;

	if (frontend) {
		type &= frontend->maskclear;
		mask &= frontend->maskclear;
		type |= frontend->type;
		mask |= frontend->maskset;

		if (frontend->lookup)
			lookup = frontend->lookup;
	}

	return lookup(alg_name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_find_alg);

/*
 *	crypto_alloc_tfm - Locate algorithm and allocate transform
 *	@alg_name: Name of algorithm
 *	@frontend: Frontend algorithm type
 *	@type: Type of algorithm
 *	@mask: Mask for type comparison
 *
 *	crypto_alloc_tfm() will first attempt to locate an already loaded
 *	algorithm.  If that fails and the kernel supports dynamically loadable
 *	modules, it will then attempt to load a module of the same name or
 *	alias.  If that fails it will send a query to any loaded crypto manager
 *	to construct an algorithm on the fly.  A refcount is grabbed on the
 *	algorithm which is then associated with the new transform.
 *
 *	The returned transform is of a non-determinate type.  Most people
 *	should use one of the more specific allocation functions such as
 *	crypto_alloc_blkcipher.
 *
 *	In case of error the return value is an error pointer.
 */
void *crypto_alloc_tfm(const char *alg_name,
		       const struct crypto_type *frontend, u32 type, u32 mask)
{
	void *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_find_alg(alg_name, frontend, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = crypto_create_tfm(alg, frontend);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		/* Retry on -EAGAIN unless a signal is pending. */
		if (err != -EAGAIN)
			break;
		if (signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_tfm);

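/*
 * Example (sketch of usage elsewhere in the tree): type-specific allocators
 * are thin wrappers that pass their frontend descriptor to crypto_alloc_tfm(),
 * roughly as crypto_alloc_shash() does in crypto/shash.c.
 */
#if 0
struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
#endif
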
/*
 *	crypto_destroy_tfm - Free crypto transform
 *	@mem: Start of tfm slab
 *	@tfm: Transform to free
 *
 *	This function frees up the transform and any associated resources,
 *	then drops the refcount on the associated algorithm.
 */
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
{
	struct crypto_alg *alg;

	if (unlikely(!mem))
		return;

	alg = tfm->__crt_alg;

	if (!tfm->exit && alg->cra_exit)
		alg->cra_exit(tfm);
	crypto_exit_ops(tfm);
	crypto_mod_put(alg);
	kzfree(mem);
}
EXPORT_SYMBOL_GPL(crypto_destroy_tfm);

int crypto_has_alg(const char *name, u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);

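/*
 * Example (illustrative, not part of the original file): probing for an
 * algorithm before depending on it.  The name "sha256" is a placeholder.
 */
#if 0
static bool example_have_sha256(void)
{
	return crypto_has_alg("sha256", 0, 0);
}
#endif
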
MODULE_DESCRIPTION("Cryptographic core API");
MODULE_LICENSE("GPL");